Small changes LCR's Unsecured (#83)
Changes to allow the conversion of most of the subset used by Unsecured.elm.

Adds basic unpacking of JSON results.

Fixes #75
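
For a List.filterMap over a DataFrame-backed source, the generated Snowpark Scala now follows roughly this shape (a sketch, not taken from Unsecured.elm; the names source, mappedLambdaBody and the field aliases are illustrative):

    import com.snowflake.snowpark.functions.col

    // Sketch only: the mapped lambda body is projected into a JSON "result" column,
    // null results are filtered out, and the record fields are unpacked into
    // aliased columns.
    val unpacked =
      source
        .select(mappedLambdaBody.as("result"))
        .filter(col("result").is_not_null)
        .select(
          col("result")("field0").as("firstField"),
          col("result")("field1").as("secondField"))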
sfc-gh-lfallasavendano committed Jan 8, 2024
1 parent 010a323 commit a2abc15
Showing 6 changed files with 141 additions and 46 deletions.
49 changes: 41 additions & 8 deletions src/Morphir/Snowpark/MapFunctionsMapping.elm
@@ -13,11 +13,12 @@ import Morphir.IR.Name as Name
import Morphir.Snowpark.Constants as Constants
import Morphir.Snowpark.MappingContext exposing (isBasicType)
import Morphir.Snowpark.Operatorsmaps exposing (mapOperator)
import Morphir.Snowpark.ReferenceUtils exposing (scalaPathToModule)
import Morphir.Snowpark.ReferenceUtils exposing (scalaPathToModule, getCustomTypeParameterFieldAccess)
import Morphir.Visual.BoolOperatorTree exposing (functionName)
import Morphir.IR.FQName as FQName
import Morphir.Snowpark.MappingContext exposing (isTypeRefToRecordWithSimpleTypes)
import Morphir.Snowpark.TypeRefMapping exposing (generateRecordTypeWrapperExpression)
import Morphir.Snowpark.MappingContext exposing (getFieldsNamesIfRecordType)

type alias MapValueType ta = ValueIR.Value ta (TypeIR.Type ()) -> ValueMappingContext -> Scala.Value

@@ -134,7 +135,7 @@ tryToConvertUserFunctionCall (func, args) mapValue ctx =
let
argsToUse =
args
|> List.indexedMap (\i arg -> ("field" ++ (String.fromInt i), mapValue arg ctx))
|> List.indexedMap (\i arg -> (getCustomTypeParameterFieldAccess i, mapValue arg ctx))
|> List.concatMap (\(field, value) -> [Constants.applySnowparkFunc "lit" [Scala.Literal (Scala.StringLit field)], value])
tag = [ Constants.applySnowparkFunc "lit" [Scala.Literal (Scala.StringLit "__tag")],
Constants.applySnowparkFunc "lit" [ Scala.Literal (Scala.StringLit <| ( constructorName |> FQName.getLocalName |> Name.toTitleCase))]]
@@ -220,19 +221,51 @@ generateForListFilterMap : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMa
generateForListFilterMap predicate sourceRelation ctx mapValue =
if isCandidateForDataFrame (valueAttribute sourceRelation) ctx.typesContextInfo then
case predicate of
ValueIR.Lambda _ _ binExpr ->
ValueIR.Lambda tpe _ binExpr ->
let
selectCall = Scala.Apply (Scala.Select (mapValue sourceRelation ctx) "select") [Scala.ArgValue Nothing <| mapValue binExpr ctx]
resultId = Scala.Literal <| Scala.StringLit "result"
selectColumnAlias = Scala.Apply (Scala.Select selectCall "as ") [ Scala.ArgValue Nothing resultId ]
isNotNullCall = Scala.Select (Constants.applySnowparkFunc "col" [ resultId ]) "is_not_null"
selectColumnAlias =
Scala.Apply (Scala.Select (mapValue binExpr ctx) "as ") [ Scala.ArgValue Nothing resultId ]
selectCall =
Scala.Apply (Scala.Select (mapValue sourceRelation ctx) "select") [Scala.ArgValue Nothing <| selectColumnAlias]
resultId =
Scala.Literal <| Scala.StringLit "result"
isNotNullCall =
Scala.Select (Constants.applySnowparkFunc "col" [ resultId ]) "is_not_null"
filterCall =
Scala.Apply (Scala.Select selectCall "filter") [Scala.ArgValue Nothing isNotNullCall]
in
Scala.Apply (Scala.Select selectColumnAlias "filter") [Scala.ArgValue Nothing isNotNullCall]
Maybe.withDefault filterCall (generateProjectionForJsonColumnIfRequired tpe ctx filterCall)
_ ->
Scala.Literal (Scala.StringLit "Unsupported filterMap scenario")
else
Scala.Literal (Scala.StringLit "Unsupported filterMap scenario")

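-- Build the projection that unpacks the JSON "result" column produced by the
-- filterMap conversion into one aliased column per record field, e.g.
-- col("result")("field0").as("fieldName"); yields Nothing when the element
-- type is not a record with simple types.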
generateProjectionForJsonColumnIfRequired : Type () -> ValueMappingContext -> Scala.Value -> Maybe Scala.Value
generateProjectionForJsonColumnIfRequired tpe ctx selectExpr =
let
resultColumn =
Constants.applySnowparkFunc "col" [Scala.Literal (Scala.StringLit "result")]
generateFieldAccess : Int -> Scala.Value
generateFieldAccess idx =
Scala.Literal (Scala.StringLit (getCustomTypeParameterFieldAccess idx))
generateAsCall expr name =
Scala.Apply (Scala.Select expr "as")
[Scala.ArgValue Nothing (Scala.Literal (Scala.StringLit (Name.toCamelCase name )))]
resultFieldAccess idx =
Scala.Apply resultColumn [Scala.ArgValue Nothing <| generateFieldAccess idx]
generateJsonUpackingProjection : List Name.Name -> Scala.Value
generateJsonUpackingProjection names =
Scala.Apply
(Scala.Select selectExpr "select")
(names
|> List.indexedMap (\i name -> Scala.ArgValue Nothing <| generateAsCall (resultFieldAccess i) name))
in
case tpe of
TypeIR.Function _ _ (TypeIR.Reference _ _ [itemsType]) ->
(getFieldsNamesIfRecordType itemsType ctx.typesContextInfo)
|> Maybe.map generateJsonUpackingProjection
_ -> Nothing

generateForListMap : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMappingContext -> MapValueType ta -> Scala.Value
generateForListMap projection sourceRelation ctx mapValue =
if isCandidateForDataFrame (valueAttribute sourceRelation) ctx.typesContextInfo then
25 changes: 20 additions & 5 deletions src/Morphir/Snowpark/MappingContext.elm
@@ -19,7 +19,8 @@ module Morphir.Snowpark.MappingContext exposing
, isLocalFunctionName
, isTypeRefToRecordWithSimpleTypes
, isAliasedBasicType
, getLocalVariableIfDataFrameReference )
, getLocalVariableIfDataFrameReference
, getFieldsNamesIfRecordType )

{-| This module contains functions to collect information about type definitions in a distribution.
It classifies type definitions in the following kinds:
@@ -43,9 +44,9 @@ import Morphir.IR.Name exposing (Name)
import Morphir.IR.Path as Path

type TypeDefinitionClassification a =
RecordWithSimpleTypes
RecordWithSimpleTypes (List Name)
| RecordWithComplexTypes
| UnionTypeWithoutParams
| UnionTypeWithoutParams
| UnionTypeWithParams
| TypeAlias (Type a)

@@ -82,7 +83,7 @@ isLocalFunctionName name ctx =
isRecordWithSimpleTypes : FQName -> MappingContextInfo a -> Bool
isRecordWithSimpleTypes name ctx =
case Dict.get name ctx of
Just (TypeClassified RecordWithSimpleTypes) -> True
Just (TypeClassified (RecordWithSimpleTypes _)) -> True
_ -> False

isTypeRefToRecordWithSimpleTypes : Type a -> MappingContextInfo a -> Bool
@@ -260,12 +261,26 @@ isDataFrameFriendlyType tpe ctx =
|| (isAliasOfDataFrameFriendlyType tpe ctx)
|| (isMaybeOfDataFrameFriendlyType tpe ctx)


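-- Return the declared field names of a type reference classified as
-- RecordWithSimpleTypes, or Nothing for any other type.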
getFieldsNamesIfRecordType : Type a -> MappingContextInfo a -> Maybe (List Name)
getFieldsNamesIfRecordType tpe ctx =
case tpe of
Reference _ typeName _ ->
case Dict.get typeName ctx of
Just (TypeClassified (RecordWithSimpleTypes fieldNames)) ->
Just fieldNames
_ ->
Nothing
_ ->
Nothing


classifyActualType : Type a -> MappingContextInfo a -> TypeClassificationState a
classifyActualType tpe ctx =
case tpe of
Record _ members ->
if List.all (\t -> isDataFrameFriendlyType t.tpe ctx) members then
TypeClassified RecordWithSimpleTypes
TypeClassified (RecordWithSimpleTypes (members |> List.map .name))
else
TypeWithPendingClassification (Just tpe)
Reference _ _ _ ->
16 changes: 3 additions & 13 deletions src/Morphir/Snowpark/PatternMatchMapping.elm
@@ -20,6 +20,8 @@ import Morphir.IR.Type as Type
import Morphir.IR.Name as Name
import Morphir.IR.FQName as FQName
import Morphir.Snowpark.MappingContext exposing (isUnionTypeWithParams)
import Morphir.Snowpark.Utils exposing (tryAlternatives)
import Morphir.Snowpark.ReferenceUtils exposing (getCustomTypeParameterFieldAccess)

type alias PatternMatchValues ta = (Type (), Value ta (Type ()), List ( Pattern (Type ()), Value ta (Type ()) ))

@@ -107,7 +109,7 @@ addBindingReplacementsToContext ctxt bindingVariables referenceExpr =
let
newReplacements =
bindingVariables
|> List.indexedMap (\i name -> (name, generateBindingVariableExpr ("field" ++ (String.fromInt i)) referenceExpr))
|> List.indexedMap (\i name -> (name, generateBindingVariableExpr (getCustomTypeParameterFieldAccess i) referenceExpr))
|> Dict.fromList
in
{ ctxt | inlinedIds = Dict.union ctxt.inlinedIds newReplacements }
@@ -238,18 +240,6 @@ tryAlternative nextAction currentResult =
_ ->
currentResult

tryAlternatives : List (() -> Maybe a) -> Maybe a
tryAlternatives cases =
case cases of
first::rest ->
case first() of
Just _ as result ->
result
_ ->
tryAlternatives rest
[] ->
Nothing

classifyScenario : (Value ta (Type ())) -> List (Pattern (Type ()), Value ta (Type ())) -> ValueMappingContext -> PatternMatchScenario ta
classifyScenario value cases ctx =
Maybe.withDefault
7 changes: 6 additions & 1 deletion src/Morphir/Snowpark/ReferenceUtils.elm
@@ -3,7 +3,8 @@ module Morphir.Snowpark.ReferenceUtils exposing (
, isTypeReferenceToSimpleTypesRecord
, isValueReferenceToSimpleTypesRecord
, mapLiteral
, scalaReferenceToUnionTypeCase)
, scalaReferenceToUnionTypeCase
, getCustomTypeParameterFieldAccess)

import Morphir.IR.Name as Name
import Morphir.IR.Type as IrType
@@ -71,3 +72,7 @@ scalaReferenceToUnionTypeCase typeName constructorName =
containerObjectFieldName = FQName.getLocalName constructorName |> Name.toTitleCase
in
Scala.Ref (nsName ++ [containerObjectName]) containerObjectFieldName

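-- Name of the synthetic field ("field0", "field1", ...) that stores the n-th
-- constructor parameter of a custom type value.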
getCustomTypeParameterFieldAccess : Int -> String
getCustomTypeParameterFieldAccess paramIndex =
"field" ++ (String.fromInt paramIndex)
76 changes: 57 additions & 19 deletions src/Morphir/Snowpark/TypeRefMapping.elm
@@ -8,32 +8,70 @@ import Morphir.Snowpark.MappingContext exposing (MappingContextInfo, isAliasedBa
import Morphir.Snowpark.ReferenceUtils exposing (isTypeReferenceToSimpleTypesRecord)
import Morphir.Snowpark.MappingContext exposing (ValueMappingContext)
import Morphir.Snowpark.MappingContext exposing (getLocalVariableIfDataFrameReference)
import Morphir.Snowpark.Utils exposing (tryAlternatives)
import Morphir.IR.Type as Type


mapTypeReference : Type () -> MappingContextInfo () -> Scala.Type
mapTypeReference typeReference ctx =
checkDataFrameCase : Type () -> MappingContextInfo () -> Maybe Scala.Type
checkDataFrameCase typeReference ctx =
if isCandidateForDataFrame typeReference ctx then
typeRefForSnowparkType "DataFrame"

else if isBasicType typeReference || isAliasedBasicType typeReference ctx || isDataFrameFriendlyType typeReference ctx then
typeRefForSnowparkType "Column"

Just <| typeRefForSnowparkType "DataFrame"
else
let
nameInfo =
isTypeReferenceToSimpleTypesRecord typeReference ctx
Nothing

typeNameInfo =
Maybe.map
(\( typePath, simpleTypeName ) -> Just (Scala.TypeRef typePath (simpleTypeName |> Name.toTitleCase)))
nameInfo
in
typeNameInfo
|> Maybe.withDefault Nothing
|> Maybe.withDefault (Scala.TypeVar "TypeNotConverted")
checkForColumnCase : Type () -> MappingContextInfo () -> Maybe Scala.Type
checkForColumnCase typeReference ctx =
if isBasicType typeReference ||
isAliasedBasicType typeReference ctx ||
isDataFrameFriendlyType typeReference ctx ||
isMaybeWithGenericType typeReference then
Just <| typeRefForSnowparkType "Column"
else
Nothing

checkDefaultCase : Type () -> MappingContextInfo () -> Maybe Scala.Type
checkDefaultCase typeReference ctx =
let
nameInfo =
isTypeReferenceToSimpleTypesRecord typeReference ctx
typeNameInfo =
Maybe.map
(\( typePath, simpleTypeName ) -> Just (Scala.TypeRef typePath (simpleTypeName |> Name.toTitleCase)))
nameInfo
in
typeNameInfo |> Maybe.withDefault Nothing

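-- Try the DataFrame, Column, record-reference and list-of-simple-types cases
-- in order, falling back to the placeholder type "TypeNotConverted".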
mapTypeReference : Type () -> MappingContextInfo () -> Scala.Type
mapTypeReference typeReference ctx =
tryAlternatives [ (\_ -> checkDataFrameCase typeReference ctx)
, (\_ -> checkForColumnCase typeReference ctx)
, (\_ -> checkDefaultCase typeReference ctx)
, (\_ -> checkForListOfSimpleTypes typeReference ctx) ]
|> Maybe.withDefault (Scala.TypeVar "TypeNotConverted")

generateRecordTypeWrapperExpression : Type () -> ValueMappingContext -> Maybe Scala.Value
generateRecordTypeWrapperExpression typeReference ctx =
getLocalVariableIfDataFrameReference typeReference ctx
|> Maybe.map Scala.Variable

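-- True when the type is a reference to Morphir.SDK's Maybe with a single
-- type argument.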
isMaybeWithGenericType : Type () -> Bool
isMaybeWithGenericType tpe =
case tpe of
Type.Reference _ ( [ [ "morphir" ], [ "s", "d", "k" ] ], [ [ "maybe" ] ], [ "maybe" ] ) [_] ->
True
_ ->
False

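-- A list of DataFrame-friendly elements is mapped to Seq[Column].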
checkForListOfSimpleTypes : Type () -> MappingContextInfo () -> Maybe Scala.Type
checkForListOfSimpleTypes typeReference ctx =
if isListOfSimpleType typeReference ctx then
Just <| Scala.TypeApply (Scala.TypeRef [] "Seq") [typeRefForSnowparkType "Column"]
else
Nothing

isListOfSimpleType : Type () -> MappingContextInfo () -> Bool
isListOfSimpleType tpe ctx =
case tpe of
Type.Reference _ ( [ [ "morphir" ], [ "s", "d", "k" ] ], [ [ "list" ] ], [ "list" ] ) [ elementType ] ->
isDataFrameFriendlyType elementType ctx
_ ->
False
14 changes: 14 additions & 0 deletions src/Morphir/Snowpark/Utils.elm
@@ -0,0 +1,14 @@
module Morphir.Snowpark.Utils exposing (tryAlternatives)


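-- Evaluate the given thunks from left to right and return the first Just
-- result, or Nothing if every alternative fails.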
tryAlternatives : List (() -> Maybe a) -> Maybe a
tryAlternatives cases =
case cases of
first::rest ->
case first () of
Just _ as result ->
result
_ ->
tryAlternatives rest
[] ->
Nothing
