Skip to content

Commit

Permalink
Add Support for Morphir SDK Aggregate and some arithmetic functions (f…
Browse files Browse the repository at this point in the history
…inos#88)

* Add support for the groupBy and aggregate from Morphir.SDK

Add support for the pattern of groupBy and Aggregate from the Morphir.SDK in Snowpark

* Support multiple variables in the lambda's call for the SDK Aggregate function

Support multiple variables in the lambda's call for the SDK aggregate function

* Support the alias function from the column in order to generate a custom column name

* Refactoring Aggregate mapping

* Fix alias generation and refactoring constants and map functions file
  • Loading branch information
sfc-gh-aramirezfuentes authored and sfc-gh-lfallasavendano committed Dec 13, 2023
1 parent 5b26c74 commit a5deb05
Show file tree
Hide file tree
Showing 4 changed files with 149 additions and 14 deletions.
96 changes: 96 additions & 0 deletions src/Morphir/Snowpark/AggregateMapping.elm
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
module Morphir.Snowpark.AggregateMapping exposing (processAggregateLambdaBody)

import Morphir.IR.Name as Name
import Morphir.IR.Type as TypeIR
import Morphir.IR.Value as ValueIR
import Morphir.Scala.AST as Scala
import Morphir.Snowpark.Constants as Constants
import Morphir.Snowpark.MappingContext exposing (ValueMappingContext, getFieldsNamesIfRecordType)
import Morphir.Snowpark.Operatorsmaps as Operatorsmaps


{-| Convert the body of an aggregation lambda into the list of Scala argument
values passed to the generated Snowpark `agg(...)` call.

Only two body shapes are currently recognised:

  - a record body — not implemented yet; emits a "To Do" placeholder literal;
  - an application chain whose inner applications end in a reference into the
    `Morphir.SDK.Aggregate` module applied to a field function
    (e.g. `sumOf .field`): the chain is uncurried and each aggregated field
    becomes an aliased column expression.

Anything else falls through to a "To Do" placeholder literal.
-}
processAggregateLambdaBody : ValueIR.Value ta (TypeIR.Type ()) -> Constants.MapValueType ta -> ValueMappingContext -> List Scala.ArgValue
processAggregateLambdaBody body mapValue ctx =
    case body of
        ValueIR.Record _ _ ->
            -- TODO: record bodies are not supported yet.
            [ Scala.ArgValue Nothing (Scala.Literal (Scala.StringLit "To Do - Processing Record")) ]

        ValueIR.Apply _ x
            ((ValueIR.Apply _ _
                (ValueIR.Apply _
                    -- Any function from Morphir.SDK.Aggregate (sumOf, averageOf, ...)
                    -- applied to a field accessor matches here; the exact function
                    -- name is resolved later from the uncurried application.
                    (ValueIR.Reference _ ( [ [ "morphir" ], [ "s", "d", "k" ] ], [ [ "aggregate" ] ], _ ))
                    (ValueIR.FieldFunction _ _)
                )
             ) as y) ->
            -- Flatten the curried application into (constructor, arguments) and
            -- turn every argument into an aliased aggregation column.
            variablesFromAggregate (ValueIR.uncurryApply x y) mapValue ctx
                |> concatFunctions

        _ ->
            [ Scala.ArgValue Nothing (Scala.Literal (Scala.StringLit "To Do - Processing Other")) ]

{-| Zip every column alias with its aggregation-function descriptor, producing
one Scala argument (`<func>(<column>).alias(<alias>)`) per aggregated field.
The result is truncated to the shorter of the two lists, as with `List.map2`.
-}
concatFunctions : Constants.VariableInformation -> List Scala.ArgValue
concatFunctions ( aliases, aggregations ) =
    List.map2
        (\aliasValue aggregation -> joinWithAlias aliasValue (processList aggregation))
        aliases
        aggregations

{-| Build the Snowpark function application for one aggregation: the function
name (e.g. "sum") applied to the single column expression.
-}
processList : ( String, Scala.Value ) -> Scala.Value
processList ( functionName, columnExpr ) =
    Constants.applySnowparkFunc functionName (List.singleton columnExpr)

{-| Extract the alias list and the (function name, column) pairs from an
uncurried aggregation application.

The first component of the pair is expected to be a constructor (the record
constructor of the aggregation result); its type yields the field names used
as column aliases. Each element of the argument list is mapped through
`getVariablesFromApply`, silently dropping arguments that do not match the
supported `Apply` shape. Any other head value produces an error placeholder.
-}
variablesFromAggregate : ( ValueIR.Value ta (TypeIR.Type ()), List (ValueIR.Value ta (TypeIR.Type ())) ) -> Constants.MapValueType ta -> ValueMappingContext -> Constants.VariableInformation
variablesFromAggregate body mapValue ctx =
    case body of
        ( ValueIR.Constructor tpe _, array ) ->
            let
                -- Aliases come from the field names of the constructed record type.
                aliasApplies =
                    aliasMap <| getFieldsFromType tpe ctx

                -- `List.filterMap f` replaces the previous
                -- `List.map f >> List.filterMap (\x -> x)` anti-idiom.
                functionColumns =
                    List.filterMap (getVariablesFromApply mapValue ctx) array
            in
            ( aliasApplies, functionColumns )

        _ ->
            ( [], [ ( "Error", Scala.Literal (Scala.StringLit "To Do - Not Support in variablesFromApply") ) ] )

{-| Pair an alias list with a variable list. Thin wrapper kept for readability
at call sites.
-}
joinAliasInfo : a -> b -> ( a, b )
joinAliasInfo aliasList variableList =
    Tuple.pair aliasList variableList

{-| Walk a (possibly curried) function type down to its final return type and,
if that type is a record known to the mapping context, return its field names.
-}
getFieldsFromType : TypeIR.Type () -> ValueMappingContext -> Maybe (List Name.Name)
getFieldsFromType tpe ctx =
    case tpe of
        TypeIR.Function _ _ resultType ->
            -- Recurse past each parameter to reach the codomain.
            getFieldsFromType resultType ctx

        other ->
            getFieldsNamesIfRecordType other ctx.typesContextInfo

{-| Turn an optional list of record field names into Scala string literals to
be used as column aliases; a missing list yields no aliases.
-}
aliasMap : Maybe (List Name.Name) -> List Scala.Value
aliasMap fields =
    fields
        |> Maybe.map (List.map convertNameToApply)
        |> Maybe.withDefault []

{-| Render a Morphir name as a camelCase Scala string literal. -}
convertNameToApply : Name.Name -> Scala.Value
convertNameToApply name =
    name
        |> Name.toCamelCase
        |> Scala.StringLit
        |> Scala.Literal


{-| Attach an alias to a column expression, generating
`<column>.alias(<alias>)` wrapped as a Scala argument value.
-}
joinWithAlias : Scala.Value -> Scala.Value -> Scala.ArgValue
joinWithAlias aliasApply columnsApply =
    Scala.ArgValue Nothing
        (Scala.Apply
            (Scala.Select columnsApply "alias")
            [ Scala.ArgValue Nothing aliasApply ]
        )

{-| Extract the (Snowpark function name, mapped column) pair from one
aggregation argument.

Matches a nested application whose inner `Apply` holds a reference (whose last
name segment, e.g. `sumOf`, is translated via `Operatorsmaps.mapOperator`)
applied to a property; the property is mapped to a Scala column expression.
Returns `Nothing` for any other shape.
-}
getVariablesFromApply : Constants.MapValueType ta -> ValueMappingContext -> ValueIR.Value ta (TypeIR.Type ()) -> Maybe (String, Scala.Value)
getVariablesFromApply mapValue ctx value =
    case value of
        -- NOTE(review): the outer Apply's own argument is discarded here —
        -- presumably it is the aggregation key/ignored parameter; confirm
        -- against the IR shape produced by Morphir.SDK.Aggregate.
        ValueIR.Apply _ _
            (ValueIR.Apply _ (ValueIR.Reference _ ( _, _, name )) property) ->
            let
                -- e.g. ["sum","of"] -> "sum"; unknown names map to a sentinel.
                func = Operatorsmaps.mapOperator name
                column = mapValue property ctx
            in
            Just (func, column)
        _ ->
            Nothing
10 changes: 9 additions & 1 deletion src/Morphir/Snowpark/Constants.elm
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
module Morphir.Snowpark.Constants exposing (..)
import Morphir.Scala.AST as Scala
import Morphir.IR.Type as TypeIR
import Morphir.IR.Value as ValueIR exposing (Pattern(..), Value(..))
import Morphir.Snowpark.MappingContext exposing (ValueMappingContext)

-- Package path of the Snowpark client library, used to build Scala references.
snowflakeNamespace : List String
snowflakeNamespace = ["com", "snowflake", "snowpark"]
Expand All @@ -13,6 +16,11 @@ applySnowparkFunc name args =
(Scala.Ref functionsNamespace name)
(args |> List.map (\v -> Scala.ArgValue Nothing v))


{-| Build a Scala type reference into the Snowpark namespace
(e.g. `com.snowflake.snowpark.Column` for "Column").

Fix: the body expression appeared twice in sequence, which is not a valid
Elm definition body — a function body is a single expression.
-}
typeRefForSnowparkType : String -> Scala.Type
typeRefForSnowparkType typeName =
    Scala.TypeRef snowflakeNamespace typeName

-- Signature shared by all value-mapping functions: translate a typed Morphir
-- IR value, under a mapping context, into a Scala expression.
type alias MapValueType ta = ValueIR.Value ta (TypeIR.Type ()) -> ValueMappingContext -> Scala.Value

-- Column aliases paired with (function name, column expression) descriptors,
-- as produced while mapping an aggregation.
type alias VariableInformation = (List Scala.Value, List (String, Scala.Value))
49 changes: 36 additions & 13 deletions src/Morphir/Snowpark/MapFunctionsMapping.elm
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,9 @@ import Morphir.IR.FQName as FQName
import Morphir.Snowpark.MappingContext exposing (isTypeRefToRecordWithSimpleTypes)
import Morphir.Snowpark.TypeRefMapping exposing (generateRecordTypeWrapperExpression)
import Morphir.Snowpark.MappingContext exposing (getFieldsNamesIfRecordType)
import Morphir.Snowpark.AggregateMapping as AggregateMapping

type alias MapValueType ta = ValueIR.Value ta (TypeIR.Type ()) -> ValueMappingContext -> Scala.Value

mapFunctionsMapping : ValueIR.Value ta (TypeIR.Type ()) -> MapValueType ta -> ValueMappingContext -> Scala.Value
mapFunctionsMapping : ValueIR.Value ta (TypeIR.Type ()) -> Constants.MapValueType ta -> ValueMappingContext -> Scala.Value
mapFunctionsMapping value mapValue ctx =

case value of
Expand All @@ -46,6 +45,31 @@ mapFunctionsMapping value mapValue ctx =
mapWithDefaultCall default maybeValue mapValue ctx
ValueIR.Apply _ (ValueIR.Apply _ (ValueIR.Reference _ ( [ [ "morphir" ], [ "s", "d", "k" ] ], [ [ "maybe" ] ], [ "map" ] )) action) maybeValue ->
mapMaybeMapCall action maybeValue mapValue ctx
ValueIR.Apply _
(ValueIR.Apply _
(ValueIR.Reference _
([["morphir"],["s","d","k"]],[["aggregate"]],["aggregate"])
)
(ValueIR.Lambda _ _
( ValueIR.Lambda _ _ lambdaBody )
)
)
(ValueIR.Apply _
(ValueIR.Apply _
(ValueIR.Reference _ ([["morphir"],["s","d","k"]],[["aggregate"]], ["group","by"]))
(ValueIR.FieldFunction _ groupByCategory )
)
dfName
) ->
let
variables = AggregateMapping.processAggregateLambdaBody lambdaBody mapValue ctx
collection = Scala.Select (mapValue dfName ctx) "groupBy"
dfGroupBy = Scala.Apply collection [ Scala.ArgValue Nothing (Scala.Literal (Scala.StringLit (Name.toCamelCase groupByCategory)))]
aggFunction = Scala.Select dfGroupBy "agg"
groupBySum = Scala.Apply aggFunction variables
in
groupBySum

ValueIR.Apply
(TypeIR.Reference () ([["morphir"],["s","d","k"]],[["basics"]], _) [])
(ValueIR.Apply
Expand Down Expand Up @@ -92,8 +116,7 @@ mapFunctionsMapping value mapValue ctx =
_ ->
Scala.Literal (Scala.StringLit "To Do")


mapForOperatorCall : Name.Name -> Value ta (Type ()) -> Value ta (Type ()) -> MapValueType ta -> ValueMappingContext -> Scala.Value
mapForOperatorCall : Name.Name -> Value ta (Type ()) -> Value ta (Type ()) -> Constants.MapValueType ta -> ValueMappingContext -> Scala.Value
mapForOperatorCall optname left right mapValue ctx =
case (optname, left, right) of
(["equal"], _ , ValueIR.Constructor _ ([ [ "morphir" ], [ "s", "d", "k" ] ], [ [ "maybe" ] ], [ "nothing" ])) ->
Expand All @@ -109,7 +132,7 @@ mapForOperatorCall optname left right mapValue ctx =
Scala.BinOp leftValue operatorname rightValue


tryToConvertUserFunctionCall : ((Value a (Type ())), List (Value a (Type ()))) -> MapValueType a -> ValueMappingContext -> Scala.Value
tryToConvertUserFunctionCall : ((Value a (Type ())), List (Value a (Type ()))) -> Constants.MapValueType a -> ValueMappingContext -> Scala.Value
tryToConvertUserFunctionCall (func, args) mapValue ctx =
case func of
ValueIR.Reference _ functionName ->
Expand Down Expand Up @@ -151,11 +174,11 @@ whenConditionElseValueCall condition thenExpr elseExpr =
[Scala.ArgValue Nothing elseExpr]


mapWithDefaultCall : Value ta (Type ()) -> Value ta (Type ()) -> (MapValueType ta) -> ValueMappingContext -> Scala.Value
mapWithDefaultCall : Value ta (Type ()) -> Value ta (Type ()) -> (Constants.MapValueType ta) -> ValueMappingContext -> Scala.Value
mapWithDefaultCall default maybeValue mapValue ctx =
Constants.applySnowparkFunc "coalesce" [mapValue maybeValue ctx, mapValue default ctx]

mapMaybeMapCall : Value ta (Type ()) -> Value ta (Type ()) -> (MapValueType ta) -> ValueMappingContext -> Scala.Value
mapMaybeMapCall : Value ta (Type ()) -> Value ta (Type ()) -> (Constants.MapValueType ta) -> ValueMappingContext -> Scala.Value
mapMaybeMapCall action maybeValue mapValue ctx =
case action of
ValueIR.Lambda _ (AsPattern _ (WildcardPattern _) lambdaParam) body ->
Expand All @@ -170,7 +193,7 @@ mapMaybeMapCall action maybeValue mapValue ctx =
Scala.Literal (Scala.StringLit "Unsupported withDefault call")


generateForListSum : Value ta (Type ()) -> ValueMappingContext -> MapValueType ta -> Scala.Value
generateForListSum : Value ta (Type ()) -> ValueMappingContext -> Constants.MapValueType ta -> Scala.Value
generateForListSum collection ctx mapValue =
case collection of
ValueIR.Apply _ (ValueIR.Apply _ (ValueIR.Reference _ ( [ [ "morphir" ], [ "s", "d", "k" ] ], [ [ "list" ] ], [ "map" ] )) _) sourceRelation ->
Expand All @@ -191,7 +214,7 @@ generateForListSum collection ctx mapValue =
_ ->
Scala.Literal (Scala.StringLit "Unsupported sum scenario")

generateForListFilter : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMappingContext -> MapValueType ta -> Scala.Value
generateForListFilter : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMappingContext -> Constants.MapValueType ta -> Scala.Value
generateForListFilter predicate sourceRelation ctx mapValue =
let
generateFilterCall functionExpr =
Expand All @@ -217,7 +240,7 @@ generateForListFilter predicate sourceRelation ctx mapValue =
Scala.Literal (Scala.StringLit "Unsupported filter scenario")


generateForListFilterMap : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMappingContext -> MapValueType ta -> Scala.Value
generateForListFilterMap : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMappingContext -> Constants.MapValueType ta -> Scala.Value
generateForListFilterMap predicate sourceRelation ctx mapValue =
if isCandidateForDataFrame (valueAttribute sourceRelation) ctx.typesContextInfo then
case predicate of
Expand Down Expand Up @@ -266,7 +289,7 @@ generateProjectionForJsonColumnIfRequired tpe ctx selectExpr =
|> Maybe.map generateJsonUpackingProjection
_ -> Nothing

generateForListMap : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMappingContext -> MapValueType ta -> Scala.Value
generateForListMap : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMappingContext -> Constants.MapValueType ta -> Scala.Value
generateForListMap projection sourceRelation ctx mapValue =
if isCandidateForDataFrame (valueAttribute sourceRelation) ctx.typesContextInfo then
case processLambdaWithRecordBody projection ctx mapValue of
Expand All @@ -277,7 +300,7 @@ generateForListMap projection sourceRelation ctx mapValue =
else
Scala.Literal (Scala.StringLit "Unsupported map scenario 2")

processLambdaWithRecordBody : Value ta (Type ()) -> ValueMappingContext -> MapValueType ta -> Maybe (List Scala.ArgValue)
processLambdaWithRecordBody : Value ta (Type ()) -> ValueMappingContext -> Constants.MapValueType ta -> Maybe (List Scala.ArgValue)
processLambdaWithRecordBody functionExpr ctx mapValue =
case functionExpr of
ValueIR.Lambda (TypeIR.Function _ _ returnType) (ValueIR.AsPattern _ _ _) (ValueIR.Record _ fields) ->
Expand Down
8 changes: 8 additions & 0 deletions src/Morphir/Snowpark/Operatorsmaps.elm
Original file line number Diff line number Diff line change
Expand Up @@ -33,5 +33,13 @@ mapOperator name =
"||"
["mod", "by"] ->
"%"
["sum", "of"] ->
"sum"
["average", "of"] ->
"avg"
["maximum", "of"] ->
"max"
["minimum", "of"] ->
"min"
_ ->
"UnsupportedOperator"

0 comments on commit a5deb05

Please sign in to comment.