diff --git a/docs/haskell-use-cases.rst b/docs/haskell-use-cases.rst
index 8ebdd85142..da6612bf24 100644
--- a/docs/haskell-use-cases.rst
+++ b/docs/haskell-use-cases.rst
@@ -337,3 +337,26 @@ There a couple of notes regarding the coverage analysis functionality:
   ``build`` / ``test`` performance.
 
 .. _hpc: https://hackage.haskell.org/package/hpc
+
+Persistent Worker Mode (experimental)
+-------------------------------------
+
+Bazel supports the special `persistent worker mode`_ when instead of calling the compiler
+from scratch to build every target separately, it spawns a resident process for this purpose
+and sends all compilation requests to it in the client-server fashion. This worker strategy
+may improve compilation times. We implemented a worker for GHC using GHC API.
+
+.. _persistent worker mode: https://blog.bazel.build/2015/12/10/java-workers.html
+
+To activate the persistent worker mode in ``rules_haskell`` the user adds a couple of lines
+in the ``WORKSPACE`` file to load worker's dependencies: ::
+
+  load("//tools:repositories.bzl", "rules_haskell_worker_dependencies")
+  rules_haskell_worker_dependencies()
+
+Then, the user will add ``--define use_worker=True`` in the command line when calling
+``bazel build`` or ``bazel test``.
+
+It is worth noting that Bazel's worker strategy is not sandboxed by default. This may
+confuse our worker relatively easily. Therefore, it is recommended to supply
+``--worker_sandboxing`` to ``bazel build`` -- possibly, via your ``.bazelrc.local`` file.
diff --git a/haskell/BUILD.bazel b/haskell/BUILD.bazel index 266a023d3a..a7e39f759a 100644 --- a/haskell/BUILD.bazel +++ b/haskell/BUILD.bazel @@ -72,3 +72,10 @@ haskell_toolchain_libraries( name = "toolchain-libraries", visibility = ["//visibility:public"], ) + +config_setting( + name = "use_worker", + define_values = { + "use_worker": "True", + }, +) diff --git a/haskell/defs.bzl b/haskell/defs.bzl index 61c931ca11..94ae2550b2 100644 --- a/haskell/defs.bzl +++ b/haskell/defs.bzl @@ -91,6 +91,12 @@ _haskell_common_attrs = { cfg = "host", default = Label("@rules_haskell//haskell:ghc_wrapper"), ), + "worker": attr.label( + default = None, + executable = True, + cfg = "host", + doc = "Experimental. Worker binary employed by Bazel's persistent worker mode. See docs/haskell-use-cases.rst", + ), } def _mk_binary_rule(**kwargs): @@ -180,42 +186,11 @@ def _mk_binary_rule(**kwargs): **kwargs ) -haskell_test = _mk_binary_rule(test = True) -"""Build a test suite. - -Additionally, it accepts [all common bazel test rule -fields][bazel-test-attrs]. This allows you to influence things like -timeout and resource allocation for the test. - -[bazel-test-attrs]: https://docs.bazel.build/versions/master/be/common-definitions.html#common-attributes-tests -""" +_haskell_test = _mk_binary_rule(test = True) -haskell_binary = _mk_binary_rule() -"""Build an executable from Haskell source. - -Example: - ```bzl - haskell_binary( - name = "hello", - srcs = ["Main.hs", "Other.hs"], - deps = ["//lib:some_lib"] - ) - ``` +_haskell_binary = _mk_binary_rule() -Every `haskell_binary` target also defines an optional REPL target that is -not built by default, but can be built on request. The name of the REPL -target is the same as the name of binary with `"@repl"` added at the end. -For example, the target above also defines `main@repl`. 
- -You can call the REPL like this (requires Bazel 0.15 or later): - -``` -$ bazel run //:hello@repl -``` - -""" - -haskell_library = rule( +_haskell_library = rule( _haskell_library_impl, attrs = dict( _haskell_common_attrs, @@ -253,27 +228,83 @@ haskell_library = rule( ], fragments = ["cpp"], ) -"""Build a library from Haskell source. -Example: - ```bzl - haskell_library( - name = "hello-lib", - srcs = glob(["src/**/*.hs"]), - src_strip_prefix = "src", - deps = [ - "//hello-sublib:lib", - ], - reexported_modules = { - "//hello-sublib:lib": "Lib1 as HelloLib1, Lib2", - }, - ) - ``` +def _haskell_worker_wrapper(rule_type, **kwargs): + defaults = dict( + worker = select({ + "@rules_haskell//haskell:use_worker": Label("@rules_haskell//tools/worker:bin"), + "//conditions:default": None, + }), + ) + defaults.update(kwargs) + + if rule_type == 1: + _haskell_binary(**defaults) + elif rule_type == 2: + _haskell_test(**defaults) + elif rule_type == 3: + _haskell_library(**defaults) + +def haskell_binary(**kwargs): + """Build an executable from Haskell source. + + Example: + ```bzl + haskell_binary( + name = "hello", + srcs = ["Main.hs", "Other.hs"], + deps = ["//lib:some_lib"] + ) + ``` -Every `haskell_library` target also defines an optional REPL target that is -not built by default, but can be built on request. It works the same way as -for `haskell_binary`. -""" + Every `haskell_binary` target also defines an optional REPL target that is + not built by default, but can be built on request. The name of the REPL + target is the same as the name of binary with `"@repl"` added at the end. + For example, the target above also defines `main@repl`. + + You can call the REPL like this (requires Bazel 0.15 or later): + + ``` + $ bazel run //:hello@repl + ``` + + """ + _haskell_worker_wrapper(1, **kwargs) + +def haskell_test(**kwargs): + """Build a test suite. + + Additionally, it accepts [all common bazel test rule + fields][bazel-test-attrs]. 
This allows you to influence things like + timeout and resource allocation for the test. + + [bazel-test-attrs]: https://docs.bazel.build/versions/master/be/common-definitions.html#common-attributes-tests + """ + _haskell_worker_wrapper(2, **kwargs) + +def haskell_library(**kwargs): + """Build a library from Haskell source. + + Example: + ```bzl + haskell_library( + name = "hello-lib", + srcs = glob(["src/**/*.hs"]), + src_strip_prefix = "src", + deps = [ + "//hello-sublib:lib", + ], + reexported_modules = { + "//hello-sublib:lib": "Lib1 as HelloLib1, Lib2", + }, + ) + ``` + + Every `haskell_library` target also defines an optional REPL target that is + not built by default, but can be built on request. It works the same way as + for `haskell_binary`. + """ + _haskell_worker_wrapper(3, **kwargs) haskell_import = rule( _haskell_import_impl, diff --git a/haskell/nixpkgs.bzl b/haskell/nixpkgs.bzl index 7042fa6b9c..bb3a7c710c 100644 --- a/haskell/nixpkgs.bzl +++ b/haskell/nixpkgs.bzl @@ -83,7 +83,6 @@ haskell_toolchain( # hack in Nixpkgs. {locale_archive_arg} locale = {locale}, - use_worker = {use_worker}, ) """.format( toolchain_libraries = toolchain_libraries, @@ -96,7 +95,6 @@ haskell_toolchain( repl_ghci_args = repository_ctx.attr.repl_ghci_args, locale_archive_arg = "locale_archive = {},".format(repr(locale_archive)) if locale_archive else "", locale = repr(repository_ctx.attr.locale), - use_worker = repository_ctx.attr.use_worker, ), ) @@ -122,7 +120,6 @@ _ghc_nixpkgs_haskell_toolchain = repository_rule( "locale": attr.string( default = "en_US.UTF-8", ), - "use_worker": attr.bool(), }, ) @@ -176,14 +173,9 @@ def haskell_register_ghc_nixpkgs( locale = None, repositories = {}, repository = None, - nix_file_content = None, - use_worker = False): + nix_file_content = None): """Register a package from Nixpkgs as a toolchain. - Args: - use_worker: This is a part of experimental support for the persistent worker mode. - It is not intended for production usage, yet. 
- Toolchains can be used to compile Haskell code. To have this toolchain selected during [toolchain resolution][toolchain-resolution], set a host platform that @@ -244,7 +236,6 @@ def haskell_register_ghc_nixpkgs( repl_ghci_args = repl_ghci_args, locale_archive = locale_archive, locale = locale, - use_worker = use_worker, ) # toolchain definition. diff --git a/haskell/private/cabal_wrapper.sh.tpl b/haskell/private/cabal_wrapper.sh.tpl index 29929ad1ec..c4bd9b68c4 100644 --- a/haskell/private/cabal_wrapper.sh.tpl +++ b/haskell/private/cabal_wrapper.sh.tpl @@ -95,7 +95,7 @@ cd - >/dev/null # There were plans for controlling this, but they died. See: # https://github.com/haskell/cabal/pull/3982#issuecomment-254038734 library=($libdir/libHS*.a) -if [[ -n ${library+x} ]] +if [[ -n ${library+x} && -f $package_database/$name.conf ]] then mv $libdir/libHS*.a $dynlibdir sed 's,library-dirs:.*,library-dirs: ${pkgroot}/lib,' \ diff --git a/haskell/private/context.bzl b/haskell/private/context.bzl index 9d5f2b4ca3..64ca4a4ee3 100644 --- a/haskell/private/context.bzl +++ b/haskell/private/context.bzl @@ -40,6 +40,8 @@ def haskell_context(ctx, attr = None): if hasattr(ctx.executable, "_ghc_wrapper"): ghc_wrapper = ctx.executable._ghc_wrapper + worker = getattr(ctx.executable, "worker", None) + return HaskellContext( # Fields name = attr.name, @@ -47,6 +49,7 @@ def haskell_context(ctx, attr = None): toolchain = toolchain, tools = toolchain.tools, ghc_wrapper = ghc_wrapper, + worker = worker, package_ids = package_ids, src_root = src_root, package_root = ctx.label.workspace_root + ctx.label.package, diff --git a/haskell/private/ghc_wrapper.sh b/haskell/private/ghc_wrapper.sh index c73d59b351..e7e7805c8b 100755 --- a/haskell/private/ghc_wrapper.sh +++ b/haskell/private/ghc_wrapper.sh @@ -8,10 +8,9 @@ while IFS= read -r line; do compile_flags+=("$line"); done < $1 # Detect if we are in the persistent worker mode if [ "$2" == "--persistent_worker" ]; then - 
compile_flags=("${compile_flags[@]:1}") # remove ghc executable - # This is a proof-of-concept implementation, not ready for production usage: - # it assumes https://github.com/tweag/bazel-worker/ installed globally as ~/bin/worker - exec ~/bin/worker ${compile_flags[@]} --persistent_worker + # This runs our proof-of-concept implementation of a persistent worker + # wrapping GHC. Not ready for production usage. + exec ${compile_flags[@]} --persistent_worker else while IFS= read -r line; do extra_args+=("$line"); done < "$2" "${compile_flags[@]}" "${extra_args[@]}" 2>&1 \ diff --git a/haskell/protobuf.bzl b/haskell/protobuf.bzl index 916cdc7001..7c339fd26d 100644 --- a/haskell/protobuf.bzl +++ b/haskell/protobuf.bzl @@ -174,6 +174,7 @@ def _haskell_proto_aspect_impl(target, ctx): executable = struct( _ls_modules = ctx.executable._ls_modules, _ghc_wrapper = ctx.executable._ghc_wrapper, + worker = None, ), # Necessary for CC interop (see cc.bzl). features = ctx.rule.attr.features, diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index 2ad0e1cf10..8b4ba4a9b7 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -23,7 +23,20 @@ def _run_ghc(hs, cc, inputs, outputs, mnemonic, arguments, params_file = None, e env = hs.env args = hs.actions.args() - args.add(hs.tools.ghc) + extra_inputs = [] + + # Detect persistent worker support + flagsfile_prefix = "" + execution_requirements = {} + tools = [] + if hs.worker != None: + flagsfile_prefix = "@" + execution_requirements = {"supports-workers": "1"} + args.add(hs.worker.path) + tools = [hs.worker] + else: + args.add(hs.tools.ghc) + extra_inputs += [hs.tools.ghc] # Do not use Bazel's CC toolchain on Windows, as it leads to linker and librarty compatibility issues. 
# XXX: We should also tether Bazel's CC toolchain to GHC's, so that we can properly mix Bazel-compiled @@ -57,8 +70,7 @@ def _run_ghc(hs, cc, inputs, outputs, mnemonic, arguments, params_file = None, e hs.actions.write(compile_flags_file, args) hs.actions.write(extra_args_file, arguments) - extra_inputs = [ - hs.tools.ghc, + extra_inputs += [ compile_flags_file, extra_args_file, ] + cc.files @@ -73,15 +85,9 @@ def _run_ghc(hs, cc, inputs, outputs, mnemonic, arguments, params_file = None, e else: inputs += extra_inputs - # Detect persistent worker support - flagsfile_prefix = "" - execution_requirements = {} - if hs.toolchain.use_worker: - flagsfile_prefix = "@" - execution_requirements = {"supports-workers": "1"} - hs.actions.run( inputs = inputs, + tools = tools, input_manifests = input_manifests, outputs = outputs, executable = hs.ghc_wrapper, diff --git a/tools/repositories.bzl b/tools/repositories.bzl new file mode 100644 index 0000000000..3aa5862aff --- /dev/null +++ b/tools/repositories.bzl @@ -0,0 +1,28 @@ +"""Workspace rules (tools/repositories)""" + +load("@rules_haskell//haskell:cabal.bzl", "stack_snapshot") + +def rules_haskell_worker_dependencies(): + """Provide all repositories that are necessary for `rules_haskell`'s tools to + function. 
+ """ + excludes = native.existing_rules().keys() + + if "rules_haskell_worker_dependencies" not in excludes: + stack_snapshot( + name = "rules_haskell_worker_dependencies", + packages = [ + "base", + "bytestring", + "filepath", + "ghc", + "ghc-paths", + "microlens", + "process", + "proto-lens", + "proto-lens-runtime", + "text", + "vector", + ], + snapshot = "lts-14.1", + ) diff --git a/tools/worker/BUILD.bazel b/tools/worker/BUILD.bazel new file mode 100644 index 0000000000..9f9c7392af --- /dev/null +++ b/tools/worker/BUILD.bazel @@ -0,0 +1,20 @@ +load("@rules_haskell//haskell:cabal.bzl", "haskell_cabal_binary") + +haskell_cabal_binary( + name = "bin", + srcs = glob(["**"]), + visibility = ["//visibility:public"], + deps = [ + "@rules_haskell_worker_dependencies//:base", + "@rules_haskell_worker_dependencies//:bytestring", + "@rules_haskell_worker_dependencies//:filepath", + "@rules_haskell_worker_dependencies//:ghc", + "@rules_haskell_worker_dependencies//:ghc-paths", + "@rules_haskell_worker_dependencies//:microlens", + "@rules_haskell_worker_dependencies//:process", + "@rules_haskell_worker_dependencies//:proto-lens", + "@rules_haskell_worker_dependencies//:proto-lens-runtime", + "@rules_haskell_worker_dependencies//:text", + "@rules_haskell_worker_dependencies//:vector", + ], +) diff --git a/tools/worker/Compile.hs b/tools/worker/Compile.hs new file mode 100644 index 0000000000..29c17b1268 --- /dev/null +++ b/tools/worker/Compile.hs @@ -0,0 +1,98 @@ +{-# LANGUAGE NondecreasingIndentation #-} +{-# LANGUAGE CPP #-} + +module Compile (compile) where + +import Control.Monad +import Control.Monad.IO.Class (liftIO) +import Data.List +import System.FilePath +import System.Exit + +import GHC +import GHC.Paths ( libdir ) +import DynFlags ( defaultFatalMessager, defaultFlushOut, Option(..) 
)
+import DriverPhases
+import DriverPipeline ( compileFile, oneShot )
+import Util
+
+compile :: [String] -> IO ()
+compile flags =
+    defaultErrorHandler defaultFatalMessager defaultFlushOut $ do
+    runGhc (Just libdir) $ do
+
+    -- Parse flags
+    dflags <- getSessionDynFlags
+    (dflags2, fileish_args, _warns) <-
+        parseDynamicFlags dflags (map noLoc flags)
+
+    -- Normalize paths
+    let
+      normalise_hyp fp
+        | strt_dot_sl && "-" `isPrefixOf` nfp = cur_dir ++ nfp
+        | otherwise = nfp
+        where
+# if defined(mingw32_HOST_OS)
+          strt_dot_sl = "./" `isPrefixOf` fp || ".\\" `isPrefixOf` fp
+# else
+          strt_dot_sl = "./" `isPrefixOf` fp
+# endif
+          cur_dir = '.' : [pathSeparator]
+          nfp = normalise fp
+      normal_fileish_paths = map (normalise_hyp . unLoc) fileish_args
+      (srcs, objs) = partition_args normal_fileish_paths [] []
+
+      -- Update flags with normalized
+      dflags3 = dflags2 { ldInputs = map (FileOption "") objs
+                                     ++ ldInputs dflags2 }
+
+    _ <- setSessionDynFlags dflags3
+
+    doMake srcs
+
+doMake :: [(String,Maybe Phase)] -> Ghc ()
+doMake srcs = do
+    let (hs_srcs, non_hs_srcs) = partition isHaskellishTarget srcs
+
+    hsc_env <- GHC.getSession
+
+    -- if we have no haskell sources from which to do a dependency
+    -- analysis, then just do one-shot compilation and/or linking.
+    -- This means that "ghc Foo.o Bar.o -o baz" links the program as
+    -- we expect.
+ if (null hs_srcs) + then liftIO (oneShot hsc_env StopLn srcs) + else do + + o_files <- mapM (\x -> liftIO $ compileFile hsc_env StopLn x) + non_hs_srcs + dflags <- GHC.getSessionDynFlags + let dflags' = dflags { ldInputs = map (FileOption "") o_files + ++ ldInputs dflags } + _ <- GHC.setSessionDynFlags dflags' + + targets <- mapM (uncurry GHC.guessTarget) hs_srcs + GHC.setTargets targets + ok_flag <- GHC.load LoadAllTargets + + when (failed ok_flag) (liftIO $ exitWith (ExitFailure 1)) + +partition_args :: [String] -> [(String, Maybe Phase)] -> [String] + -> ([(String, Maybe Phase)], [String]) +partition_args [] srcs objs = (reverse srcs, reverse objs) +partition_args ("-x":suff:args) srcs objs + | "none" <- suff = partition_args args srcs objs + | StopLn <- phase = partition_args args srcs (slurp ++ objs) + | otherwise = partition_args rest (these_srcs ++ srcs) objs + where phase = startPhase suff + (slurp,rest) = break (== "-x") args + these_srcs = zip slurp (repeat (Just phase)) +partition_args (arg:args) srcs objs + | looks_like_an_input arg = partition_args args ((arg,Nothing):srcs) objs + | otherwise = partition_args args srcs (arg:objs) + +looks_like_an_input :: String -> Bool +looks_like_an_input m = isSourceFilename m + || looksLikeModuleName m + || "-" `isPrefixOf` m + || not (hasExtension m) diff --git a/tools/worker/Main.hs b/tools/worker/Main.hs new file mode 100644 index 0000000000..fc85311f69 --- /dev/null +++ b/tools/worker/Main.hs @@ -0,0 +1,31 @@ +module Main where + +import Server (server) + +import System.Environment (getArgs) +import System.Exit (exitFailure) +import System.IO + +--import GHC.IO.Handle (hDuplicate, hDuplicateTo) + +pwFlag :: String +pwFlag = "--persistent_worker" + +main :: IO () +main = do + + -- Redirecting stdout to stderr trick is, albeit convenient, fragile under + -- heavy parallelism https://gitlab.haskell.org/ghc/ghc/issues/16819 + -- it fails, e.g., when Bazel spawns multiple workers while running + -- the test suite; 
it's left here because may be useful for debugging + -- stdout_dup <- hDuplicate stdout + -- hDuplicateTo stderr stdout + let stdout_dup = stdout + + args <- getArgs + hPutStrLn stderr $ "Args taken: " ++ show args + if pwFlag `elem` args + then server stdin stdout_dup $ filter (/= pwFlag) args + else + print "Worker should be called with --persistent_worker" + >> exitFailure diff --git a/tools/worker/Proto/Worker.hs b/tools/worker/Proto/Worker.hs new file mode 100644 index 0000000000..e5b1900225 --- /dev/null +++ b/tools/worker/Proto/Worker.hs @@ -0,0 +1,516 @@ +{- This file was auto-generated from worker.proto by the proto-lens-protoc program. -} +{-# LANGUAGE ScopedTypeVariables, DataKinds, TypeFamilies, + UndecidableInstances, GeneralizedNewtypeDeriving, + MultiParamTypeClasses, FlexibleContexts, FlexibleInstances, + PatternSynonyms, MagicHash, NoImplicitPrelude, DataKinds, + BangPatterns, TypeApplications #-} +{-# OPTIONS_GHC -fno-warn-unused-imports#-} +{-# OPTIONS_GHC -fno-warn-duplicate-exports#-} +module Proto.Worker (Input(), WorkRequest(), WorkResponse()) where +import qualified Data.ProtoLens.Runtime.Control.DeepSeq + as Control.DeepSeq +import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Prism + as Data.ProtoLens.Prism +import qualified Data.ProtoLens.Runtime.Prelude as Prelude +import qualified Data.ProtoLens.Runtime.Data.Int as Data.Int +import qualified Data.ProtoLens.Runtime.Data.Monoid as Data.Monoid +import qualified Data.ProtoLens.Runtime.Data.Word as Data.Word +import qualified Data.ProtoLens.Runtime.Data.ProtoLens + as Data.ProtoLens +import qualified + Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Bytes + as Data.ProtoLens.Encoding.Bytes +import qualified + Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Growing + as Data.ProtoLens.Encoding.Growing +import qualified + Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Parser.Unsafe + as Data.ProtoLens.Encoding.Parser.Unsafe +import qualified + 
Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Wire + as Data.ProtoLens.Encoding.Wire +import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Field + as Data.ProtoLens.Field +import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Message.Enum + as Data.ProtoLens.Message.Enum +import qualified + Data.ProtoLens.Runtime.Data.ProtoLens.Service.Types + as Data.ProtoLens.Service.Types +import qualified Data.ProtoLens.Runtime.Lens.Family2 + as Lens.Family2 +import qualified Data.ProtoLens.Runtime.Lens.Family2.Unchecked + as Lens.Family2.Unchecked +import qualified Data.ProtoLens.Runtime.Data.Text as Data.Text +import qualified Data.ProtoLens.Runtime.Data.Map as Data.Map +import qualified Data.ProtoLens.Runtime.Data.ByteString + as Data.ByteString +import qualified Data.ProtoLens.Runtime.Data.ByteString.Char8 + as Data.ByteString.Char8 +import qualified Data.ProtoLens.Runtime.Data.Text.Encoding + as Data.Text.Encoding +import qualified Data.ProtoLens.Runtime.Data.Vector as Data.Vector +import qualified Data.ProtoLens.Runtime.Data.Vector.Generic + as Data.Vector.Generic +import qualified Data.ProtoLens.Runtime.Data.Vector.Unboxed + as Data.Vector.Unboxed +import qualified Data.ProtoLens.Runtime.Text.Read as Text.Read + +{- | Fields : + + * 'Proto.Worker_Fields.path' @:: Lens' Input Data.Text.Text@ + * 'Proto.Worker_Fields.digest' @:: Lens' Input Data.ByteString.ByteString@ + -} +data Input = Input{_Input'path :: !Data.Text.Text, + _Input'digest :: !Data.ByteString.ByteString, + _Input'_unknownFields :: !Data.ProtoLens.FieldSet} + deriving (Prelude.Eq, Prelude.Ord) +instance Prelude.Show Input where + showsPrec _ __x __s + = Prelude.showChar '{' + (Prelude.showString (Data.ProtoLens.showMessageShort __x) + (Prelude.showChar '}' __s)) +instance Data.ProtoLens.Field.HasField Input "path" + (Data.Text.Text) + where + fieldOf _ + = (Lens.Family2.Unchecked.lens _Input'path + (\ x__ y__ -> x__{_Input'path = y__})) + Prelude.. 
Prelude.id +instance Data.ProtoLens.Field.HasField Input "digest" + (Data.ByteString.ByteString) + where + fieldOf _ + = (Lens.Family2.Unchecked.lens _Input'digest + (\ x__ y__ -> x__{_Input'digest = y__})) + Prelude.. Prelude.id +instance Data.ProtoLens.Message Input where + messageName _ = Data.Text.pack "blaze.worker.Input" + fieldsByTag + = let path__field_descriptor + = Data.ProtoLens.FieldDescriptor "path" + (Data.ProtoLens.ScalarField Data.ProtoLens.StringField :: + Data.ProtoLens.FieldTypeDescriptor Data.Text.Text) + (Data.ProtoLens.PlainField Data.ProtoLens.Optional + (Data.ProtoLens.Field.field @"path")) + :: Data.ProtoLens.FieldDescriptor Input + digest__field_descriptor + = Data.ProtoLens.FieldDescriptor "digest" + (Data.ProtoLens.ScalarField Data.ProtoLens.BytesField :: + Data.ProtoLens.FieldTypeDescriptor Data.ByteString.ByteString) + (Data.ProtoLens.PlainField Data.ProtoLens.Optional + (Data.ProtoLens.Field.field @"digest")) + :: Data.ProtoLens.FieldDescriptor Input + in + Data.Map.fromList + [(Data.ProtoLens.Tag 1, path__field_descriptor), + (Data.ProtoLens.Tag 2, digest__field_descriptor)] + unknownFields + = Lens.Family2.Unchecked.lens _Input'_unknownFields + (\ x__ y__ -> x__{_Input'_unknownFields = y__}) + defMessage + = Input{_Input'path = Data.ProtoLens.fieldDefault, + _Input'digest = Data.ProtoLens.fieldDefault, + _Input'_unknownFields = ([])} + parseMessage + = let loop :: Input -> Data.ProtoLens.Encoding.Bytes.Parser Input + loop x + = do end <- Data.ProtoLens.Encoding.Bytes.atEnd + if end then + do let missing = [] in + if Prelude.null missing then Prelude.return () else + Prelude.fail + (("Missing required fields: ") Prelude.++ + Prelude.show (missing :: ([Prelude.String]))) + Prelude.return + (Lens.Family2.over Data.ProtoLens.unknownFields + (\ !t -> Prelude.reverse t) + x) + else + do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt + case tag of + 10 -> do y <- (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + 
Data.ProtoLens.Encoding.Bytes.getBytes + (Prelude.fromIntegral len) + Data.ProtoLens.Encoding.Bytes.runEither + (case Data.Text.Encoding.decodeUtf8' value of + Prelude.Left err -> Prelude.Left + (Prelude.show err) + Prelude.Right r -> Prelude.Right r)) + Data.ProtoLens.Encoding.Bytes. "path" + loop + (Lens.Family2.set (Data.ProtoLens.Field.field @"path") y + x) + 18 -> do y <- (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getBytes + (Prelude.fromIntegral len)) + Data.ProtoLens.Encoding.Bytes. "digest" + loop + (Lens.Family2.set (Data.ProtoLens.Field.field @"digest") + y + x) + wire -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire + wire + loop + (Lens.Family2.over Data.ProtoLens.unknownFields + (\ !t -> (:) y t) + x) + in + (do loop Data.ProtoLens.defMessage) + Data.ProtoLens.Encoding.Bytes. "Input" + buildMessage + = (\ _x -> + (let _v = Lens.Family2.view (Data.ProtoLens.Field.field @"path") _x + in + if (_v) Prelude.== Data.ProtoLens.fieldDefault then + Data.Monoid.mempty else + (Data.ProtoLens.Encoding.Bytes.putVarInt 10) Data.Monoid.<> + (((\ bs -> + (Data.ProtoLens.Encoding.Bytes.putVarInt + (Prelude.fromIntegral (Data.ByteString.length bs))) + Data.Monoid.<> Data.ProtoLens.Encoding.Bytes.putBytes bs)) + Prelude.. 
Data.Text.Encoding.encodeUtf8) + _v) + Data.Monoid.<> + (let _v + = Lens.Family2.view (Data.ProtoLens.Field.field @"digest") _x + in + if (_v) Prelude.== Data.ProtoLens.fieldDefault then + Data.Monoid.mempty else + (Data.ProtoLens.Encoding.Bytes.putVarInt 18) Data.Monoid.<> + (\ bs -> + (Data.ProtoLens.Encoding.Bytes.putVarInt + (Prelude.fromIntegral (Data.ByteString.length bs))) + Data.Monoid.<> Data.ProtoLens.Encoding.Bytes.putBytes bs) + _v) + Data.Monoid.<> + Data.ProtoLens.Encoding.Wire.buildFieldSet + (Lens.Family2.view Data.ProtoLens.unknownFields _x)) +instance Control.DeepSeq.NFData Input where + rnf + = (\ x__ -> + Control.DeepSeq.deepseq (_Input'_unknownFields x__) + (Control.DeepSeq.deepseq (_Input'path x__) + (Control.DeepSeq.deepseq (_Input'digest x__) (())))) +{- | Fields : + + * 'Proto.Worker_Fields.arguments' @:: Lens' WorkRequest [Data.Text.Text]@ + * 'Proto.Worker_Fields.vec'arguments' @:: Lens' WorkRequest (Data.Vector.Vector Data.Text.Text)@ + * 'Proto.Worker_Fields.inputs' @:: Lens' WorkRequest [Input]@ + * 'Proto.Worker_Fields.vec'inputs' @:: Lens' WorkRequest (Data.Vector.Vector Input)@ + -} +data WorkRequest = WorkRequest{_WorkRequest'arguments :: + !(Data.Vector.Vector Data.Text.Text), + _WorkRequest'inputs :: !(Data.Vector.Vector Input), + _WorkRequest'_unknownFields :: !Data.ProtoLens.FieldSet} + deriving (Prelude.Eq, Prelude.Ord) +instance Prelude.Show WorkRequest where + showsPrec _ __x __s + = Prelude.showChar '{' + (Prelude.showString (Data.ProtoLens.showMessageShort __x) + (Prelude.showChar '}' __s)) +instance Data.ProtoLens.Field.HasField WorkRequest "arguments" + ([Data.Text.Text]) + where + fieldOf _ + = (Lens.Family2.Unchecked.lens _WorkRequest'arguments + (\ x__ y__ -> x__{_WorkRequest'arguments = y__})) + Prelude.. 
+ Lens.Family2.Unchecked.lens Data.Vector.Generic.toList + (\ _ y__ -> Data.Vector.Generic.fromList y__) +instance Data.ProtoLens.Field.HasField WorkRequest "vec'arguments" + (Data.Vector.Vector Data.Text.Text) + where + fieldOf _ + = (Lens.Family2.Unchecked.lens _WorkRequest'arguments + (\ x__ y__ -> x__{_WorkRequest'arguments = y__})) + Prelude.. Prelude.id +instance Data.ProtoLens.Field.HasField WorkRequest "inputs" + ([Input]) + where + fieldOf _ + = (Lens.Family2.Unchecked.lens _WorkRequest'inputs + (\ x__ y__ -> x__{_WorkRequest'inputs = y__})) + Prelude.. + Lens.Family2.Unchecked.lens Data.Vector.Generic.toList + (\ _ y__ -> Data.Vector.Generic.fromList y__) +instance Data.ProtoLens.Field.HasField WorkRequest "vec'inputs" + (Data.Vector.Vector Input) + where + fieldOf _ + = (Lens.Family2.Unchecked.lens _WorkRequest'inputs + (\ x__ y__ -> x__{_WorkRequest'inputs = y__})) + Prelude.. Prelude.id +instance Data.ProtoLens.Message WorkRequest where + messageName _ = Data.Text.pack "blaze.worker.WorkRequest" + fieldsByTag + = let arguments__field_descriptor + = Data.ProtoLens.FieldDescriptor "arguments" + (Data.ProtoLens.ScalarField Data.ProtoLens.StringField :: + Data.ProtoLens.FieldTypeDescriptor Data.Text.Text) + (Data.ProtoLens.RepeatedField Data.ProtoLens.Unpacked + (Data.ProtoLens.Field.field @"arguments")) + :: Data.ProtoLens.FieldDescriptor WorkRequest + inputs__field_descriptor + = Data.ProtoLens.FieldDescriptor "inputs" + (Data.ProtoLens.MessageField Data.ProtoLens.MessageType :: + Data.ProtoLens.FieldTypeDescriptor Input) + (Data.ProtoLens.RepeatedField Data.ProtoLens.Unpacked + (Data.ProtoLens.Field.field @"inputs")) + :: Data.ProtoLens.FieldDescriptor WorkRequest + in + Data.Map.fromList + [(Data.ProtoLens.Tag 1, arguments__field_descriptor), + (Data.ProtoLens.Tag 2, inputs__field_descriptor)] + unknownFields + = Lens.Family2.Unchecked.lens _WorkRequest'_unknownFields + (\ x__ y__ -> x__{_WorkRequest'_unknownFields = y__}) + defMessage + = 
WorkRequest{_WorkRequest'arguments = Data.Vector.Generic.empty, + _WorkRequest'inputs = Data.Vector.Generic.empty, + _WorkRequest'_unknownFields = ([])} + parseMessage + = let loop :: + WorkRequest -> + Data.ProtoLens.Encoding.Growing.Growing Data.Vector.Vector + Data.ProtoLens.Encoding.Growing.RealWorld + Data.Text.Text + -> + Data.ProtoLens.Encoding.Growing.Growing Data.Vector.Vector + Data.ProtoLens.Encoding.Growing.RealWorld + Input + -> Data.ProtoLens.Encoding.Bytes.Parser WorkRequest + loop x mutable'arguments mutable'inputs + = do end <- Data.ProtoLens.Encoding.Bytes.atEnd + if end then + do frozen'arguments <- Data.ProtoLens.Encoding.Parser.Unsafe.unsafeLiftIO + (Data.ProtoLens.Encoding.Growing.unsafeFreeze + mutable'arguments) + frozen'inputs <- Data.ProtoLens.Encoding.Parser.Unsafe.unsafeLiftIO + (Data.ProtoLens.Encoding.Growing.unsafeFreeze + mutable'inputs) + let missing = [] in + if Prelude.null missing then Prelude.return () else + Prelude.fail + (("Missing required fields: ") Prelude.++ + Prelude.show (missing :: ([Prelude.String]))) + Prelude.return + (Lens.Family2.over Data.ProtoLens.unknownFields + (\ !t -> Prelude.reverse t) + (Lens.Family2.set (Data.ProtoLens.Field.field @"vec'arguments") + frozen'arguments + (Lens.Family2.set (Data.ProtoLens.Field.field @"vec'inputs") + frozen'inputs + x))) + else + do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt + case tag of + 10 -> do !y <- (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getBytes + (Prelude.fromIntegral len) + Data.ProtoLens.Encoding.Bytes.runEither + (case Data.Text.Encoding.decodeUtf8' value of + Prelude.Left err -> Prelude.Left + (Prelude.show err) + Prelude.Right r -> Prelude.Right r)) + Data.ProtoLens.Encoding.Bytes. 
"arguments" + v <- Data.ProtoLens.Encoding.Parser.Unsafe.unsafeLiftIO + (Data.ProtoLens.Encoding.Growing.append + mutable'arguments + y) + loop x v mutable'inputs + 18 -> do !y <- (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.isolate + (Prelude.fromIntegral len) + Data.ProtoLens.parseMessage) + Data.ProtoLens.Encoding.Bytes. "inputs" + v <- Data.ProtoLens.Encoding.Parser.Unsafe.unsafeLiftIO + (Data.ProtoLens.Encoding.Growing.append + mutable'inputs + y) + loop x mutable'arguments v + wire -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire + wire + loop + (Lens.Family2.over Data.ProtoLens.unknownFields + (\ !t -> (:) y t) + x) + mutable'arguments + mutable'inputs + in + (do mutable'arguments <- Data.ProtoLens.Encoding.Parser.Unsafe.unsafeLiftIO + Data.ProtoLens.Encoding.Growing.new + mutable'inputs <- Data.ProtoLens.Encoding.Parser.Unsafe.unsafeLiftIO + Data.ProtoLens.Encoding.Growing.new + loop Data.ProtoLens.defMessage mutable'arguments mutable'inputs) + Data.ProtoLens.Encoding.Bytes. "WorkRequest" + buildMessage + = (\ _x -> + (Data.ProtoLens.Encoding.Bytes.foldMapBuilder + (\ _v -> + (Data.ProtoLens.Encoding.Bytes.putVarInt 10) Data.Monoid.<> + (((\ bs -> + (Data.ProtoLens.Encoding.Bytes.putVarInt + (Prelude.fromIntegral (Data.ByteString.length bs))) + Data.Monoid.<> Data.ProtoLens.Encoding.Bytes.putBytes bs)) + Prelude.. Data.Text.Encoding.encodeUtf8) + _v) + (Lens.Family2.view (Data.ProtoLens.Field.field @"vec'arguments") + _x)) + Data.Monoid.<> + (Data.ProtoLens.Encoding.Bytes.foldMapBuilder + (\ _v -> + (Data.ProtoLens.Encoding.Bytes.putVarInt 18) Data.Monoid.<> + (((\ bs -> + (Data.ProtoLens.Encoding.Bytes.putVarInt + (Prelude.fromIntegral (Data.ByteString.length bs))) + Data.Monoid.<> Data.ProtoLens.Encoding.Bytes.putBytes bs)) + Prelude.. 
Data.ProtoLens.encodeMessage) + _v) + (Lens.Family2.view (Data.ProtoLens.Field.field @"vec'inputs") _x)) + Data.Monoid.<> + Data.ProtoLens.Encoding.Wire.buildFieldSet + (Lens.Family2.view Data.ProtoLens.unknownFields _x)) +instance Control.DeepSeq.NFData WorkRequest where + rnf + = (\ x__ -> + Control.DeepSeq.deepseq (_WorkRequest'_unknownFields x__) + (Control.DeepSeq.deepseq (_WorkRequest'arguments x__) + (Control.DeepSeq.deepseq (_WorkRequest'inputs x__) (())))) +{- | Fields : + + * 'Proto.Worker_Fields.exitCode' @:: Lens' WorkResponse Data.Int.Int32@ + * 'Proto.Worker_Fields.output' @:: Lens' WorkResponse Data.Text.Text@ + -} +data WorkResponse = WorkResponse{_WorkResponse'exitCode :: + !Data.Int.Int32, + _WorkResponse'output :: !Data.Text.Text, + _WorkResponse'_unknownFields :: !Data.ProtoLens.FieldSet} + deriving (Prelude.Eq, Prelude.Ord) +instance Prelude.Show WorkResponse where + showsPrec _ __x __s + = Prelude.showChar '{' + (Prelude.showString (Data.ProtoLens.showMessageShort __x) + (Prelude.showChar '}' __s)) +instance Data.ProtoLens.Field.HasField WorkResponse "exitCode" + (Data.Int.Int32) + where + fieldOf _ + = (Lens.Family2.Unchecked.lens _WorkResponse'exitCode + (\ x__ y__ -> x__{_WorkResponse'exitCode = y__})) + Prelude.. Prelude.id +instance Data.ProtoLens.Field.HasField WorkResponse "output" + (Data.Text.Text) + where + fieldOf _ + = (Lens.Family2.Unchecked.lens _WorkResponse'output + (\ x__ y__ -> x__{_WorkResponse'output = y__})) + Prelude.. 
Prelude.id +instance Data.ProtoLens.Message WorkResponse where + messageName _ = Data.Text.pack "blaze.worker.WorkResponse" + fieldsByTag + = let exitCode__field_descriptor + = Data.ProtoLens.FieldDescriptor "exit_code" + (Data.ProtoLens.ScalarField Data.ProtoLens.Int32Field :: + Data.ProtoLens.FieldTypeDescriptor Data.Int.Int32) + (Data.ProtoLens.PlainField Data.ProtoLens.Optional + (Data.ProtoLens.Field.field @"exitCode")) + :: Data.ProtoLens.FieldDescriptor WorkResponse + output__field_descriptor + = Data.ProtoLens.FieldDescriptor "output" + (Data.ProtoLens.ScalarField Data.ProtoLens.StringField :: + Data.ProtoLens.FieldTypeDescriptor Data.Text.Text) + (Data.ProtoLens.PlainField Data.ProtoLens.Optional + (Data.ProtoLens.Field.field @"output")) + :: Data.ProtoLens.FieldDescriptor WorkResponse + in + Data.Map.fromList + [(Data.ProtoLens.Tag 1, exitCode__field_descriptor), + (Data.ProtoLens.Tag 2, output__field_descriptor)] + unknownFields + = Lens.Family2.Unchecked.lens _WorkResponse'_unknownFields + (\ x__ y__ -> x__{_WorkResponse'_unknownFields = y__}) + defMessage + = WorkResponse{_WorkResponse'exitCode = + Data.ProtoLens.fieldDefault, + _WorkResponse'output = Data.ProtoLens.fieldDefault, + _WorkResponse'_unknownFields = ([])} + parseMessage + = let loop :: + WorkResponse -> Data.ProtoLens.Encoding.Bytes.Parser WorkResponse + loop x + = do end <- Data.ProtoLens.Encoding.Bytes.atEnd + if end then + do let missing = [] in + if Prelude.null missing then Prelude.return () else + Prelude.fail + (("Missing required fields: ") Prelude.++ + Prelude.show (missing :: ([Prelude.String]))) + Prelude.return + (Lens.Family2.over Data.ProtoLens.unknownFields + (\ !t -> Prelude.reverse t) + x) + else + do tag <- Data.ProtoLens.Encoding.Bytes.getVarInt + case tag of + 8 -> do y <- (Prelude.fmap Prelude.fromIntegral + Data.ProtoLens.Encoding.Bytes.getVarInt) + Data.ProtoLens.Encoding.Bytes. 
"exit_code" + loop + (Lens.Family2.set (Data.ProtoLens.Field.field @"exitCode") + y + x) + 18 -> do y <- (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getBytes + (Prelude.fromIntegral len) + Data.ProtoLens.Encoding.Bytes.runEither + (case Data.Text.Encoding.decodeUtf8' value of + Prelude.Left err -> Prelude.Left + (Prelude.show err) + Prelude.Right r -> Prelude.Right r)) + Data.ProtoLens.Encoding.Bytes. "output" + loop + (Lens.Family2.set (Data.ProtoLens.Field.field @"output") + y + x) + wire -> do !y <- Data.ProtoLens.Encoding.Wire.parseTaggedValueFromWire + wire + loop + (Lens.Family2.over Data.ProtoLens.unknownFields + (\ !t -> (:) y t) + x) + in + (do loop Data.ProtoLens.defMessage) + Data.ProtoLens.Encoding.Bytes. "WorkResponse" + buildMessage + = (\ _x -> + (let _v + = Lens.Family2.view (Data.ProtoLens.Field.field @"exitCode") _x + in + if (_v) Prelude.== Data.ProtoLens.fieldDefault then + Data.Monoid.mempty else + (Data.ProtoLens.Encoding.Bytes.putVarInt 8) Data.Monoid.<> + ((Data.ProtoLens.Encoding.Bytes.putVarInt) Prelude.. + Prelude.fromIntegral) + _v) + Data.Monoid.<> + (let _v + = Lens.Family2.view (Data.ProtoLens.Field.field @"output") _x + in + if (_v) Prelude.== Data.ProtoLens.fieldDefault then + Data.Monoid.mempty else + (Data.ProtoLens.Encoding.Bytes.putVarInt 18) Data.Monoid.<> + (((\ bs -> + (Data.ProtoLens.Encoding.Bytes.putVarInt + (Prelude.fromIntegral (Data.ByteString.length bs))) + Data.Monoid.<> Data.ProtoLens.Encoding.Bytes.putBytes bs)) + Prelude.. 
Data.Text.Encoding.encodeUtf8) + _v) + Data.Monoid.<> + Data.ProtoLens.Encoding.Wire.buildFieldSet + (Lens.Family2.view Data.ProtoLens.unknownFields _x)) +instance Control.DeepSeq.NFData WorkResponse where + rnf + = (\ x__ -> + Control.DeepSeq.deepseq (_WorkResponse'_unknownFields x__) + (Control.DeepSeq.deepseq (_WorkResponse'exitCode x__) + (Control.DeepSeq.deepseq (_WorkResponse'output x__) (())))) \ No newline at end of file diff --git a/tools/worker/Proto/Worker_Fields.hs b/tools/worker/Proto/Worker_Fields.hs new file mode 100644 index 0000000000..5c15947c78 --- /dev/null +++ b/tools/worker/Proto/Worker_Fields.hs @@ -0,0 +1,97 @@ +{- This file was auto-generated from worker.proto by the proto-lens-protoc program. -} +{-# LANGUAGE ScopedTypeVariables, DataKinds, TypeFamilies, + UndecidableInstances, GeneralizedNewtypeDeriving, + MultiParamTypeClasses, FlexibleContexts, FlexibleInstances, + PatternSynonyms, MagicHash, NoImplicitPrelude, DataKinds, + BangPatterns, TypeApplications #-} +{-# OPTIONS_GHC -fno-warn-unused-imports#-} +{-# OPTIONS_GHC -fno-warn-duplicate-exports#-} +module Proto.Worker_Fields where +import qualified Data.ProtoLens.Runtime.Prelude as Prelude +import qualified Data.ProtoLens.Runtime.Data.Int as Data.Int +import qualified Data.ProtoLens.Runtime.Data.Monoid as Data.Monoid +import qualified Data.ProtoLens.Runtime.Data.Word as Data.Word +import qualified Data.ProtoLens.Runtime.Data.ProtoLens + as Data.ProtoLens +import qualified + Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Bytes + as Data.ProtoLens.Encoding.Bytes +import qualified + Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Growing + as Data.ProtoLens.Encoding.Growing +import qualified + Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Parser.Unsafe + as Data.ProtoLens.Encoding.Parser.Unsafe +import qualified + Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Wire + as Data.ProtoLens.Encoding.Wire +import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Field + as Data.ProtoLens.Field 
+import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Message.Enum + as Data.ProtoLens.Message.Enum +import qualified + Data.ProtoLens.Runtime.Data.ProtoLens.Service.Types + as Data.ProtoLens.Service.Types +import qualified Data.ProtoLens.Runtime.Lens.Family2 + as Lens.Family2 +import qualified Data.ProtoLens.Runtime.Lens.Family2.Unchecked + as Lens.Family2.Unchecked +import qualified Data.ProtoLens.Runtime.Data.Text as Data.Text +import qualified Data.ProtoLens.Runtime.Data.Map as Data.Map +import qualified Data.ProtoLens.Runtime.Data.ByteString + as Data.ByteString +import qualified Data.ProtoLens.Runtime.Data.ByteString.Char8 + as Data.ByteString.Char8 +import qualified Data.ProtoLens.Runtime.Data.Text.Encoding + as Data.Text.Encoding +import qualified Data.ProtoLens.Runtime.Data.Vector as Data.Vector +import qualified Data.ProtoLens.Runtime.Data.Vector.Generic + as Data.Vector.Generic +import qualified Data.ProtoLens.Runtime.Data.Vector.Unboxed + as Data.Vector.Unboxed +import qualified Data.ProtoLens.Runtime.Text.Read as Text.Read + +arguments :: + forall f s a . + (Prelude.Functor f, + Data.ProtoLens.Field.HasField s "arguments" a) => + Lens.Family2.LensLike' f s a +arguments = Data.ProtoLens.Field.field @"arguments" +digest :: + forall f s a . + (Prelude.Functor f, Data.ProtoLens.Field.HasField s "digest" a) => + Lens.Family2.LensLike' f s a +digest = Data.ProtoLens.Field.field @"digest" +exitCode :: + forall f s a . + (Prelude.Functor f, + Data.ProtoLens.Field.HasField s "exitCode" a) => + Lens.Family2.LensLike' f s a +exitCode = Data.ProtoLens.Field.field @"exitCode" +inputs :: + forall f s a . + (Prelude.Functor f, Data.ProtoLens.Field.HasField s "inputs" a) => + Lens.Family2.LensLike' f s a +inputs = Data.ProtoLens.Field.field @"inputs" +output :: + forall f s a . + (Prelude.Functor f, Data.ProtoLens.Field.HasField s "output" a) => + Lens.Family2.LensLike' f s a +output = Data.ProtoLens.Field.field @"output" +path :: + forall f s a . 
+  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "path" a) =>
+  Lens.Family2.LensLike' f s a
+path = Data.ProtoLens.Field.field @"path"
+vec'arguments ::
+  forall f s a .
+  (Prelude.Functor f,
+   Data.ProtoLens.Field.HasField s "vec'arguments" a) =>
+  Lens.Family2.LensLike' f s a
+vec'arguments = Data.ProtoLens.Field.field @"vec'arguments"
+vec'inputs ::
+  forall f s a .
+  (Prelude.Functor f,
+   Data.ProtoLens.Field.HasField s "vec'inputs" a) =>
+  Lens.Family2.LensLike' f s a
+vec'inputs = Data.ProtoLens.Field.field @"vec'inputs"
\ No newline at end of file
diff --git a/tools/worker/Server.hs b/tools/worker/Server.hs
new file mode 100644
index 0000000000..2d318d4030
--- /dev/null
+++ b/tools/worker/Server.hs
@@ -0,0 +1,74 @@
+{-# LANGUAGE OverloadedStrings, NondecreasingIndentation #-}
+
+module Server (server) where
+
+{- ProtoBuf -}
+import qualified Proto.Worker as W
+import qualified Proto.Worker_Fields as W
+import Data.ProtoLens (defMessage, pprintMessage)
+import Data.ProtoLens.Encoding
+  ( buildMessageDelimited
+  , decodeMessageDelimitedH
+  )
+import Data.ProtoLens.Encoding.Bytes ( runBuilder )
+import Lens.Micro
+
+import Control.Monad
+import qualified Data.ByteString as S
+import qualified Data.Text as T
+import System.IO
+
+import Compile (compile)
+
+-- | Run the persistent-worker loop: read length-delimited WorkRequests
+-- from hIn, compile each one, and write a WorkResponse to hOut.
+server :: Handle -> Handle -> [String] -> IO ()
+server hIn hOut extra_args = do
+  hSetBuffering stderr NoBuffering
+  hSetBuffering hIn NoBuffering
+  hSetBuffering hOut NoBuffering
+  hSetBinaryMode hIn True
+  hSetBinaryMode hOut True
+  hPutStrLn stderr "Server Starts"
+  _ <- forever loop
+  -- NOTE(review): unreachable -- 'forever' never returns normally.
+  hPutStrLn stderr "Server: Bye"
+  where
+    loop = do
+      let logH = stderr
+
+      hPutStrLn logH $ "Server: Waiting for a message..."
+      msg <- decodeMessageDelimitedH hIn
+
+      hPutStrLn logH "Server: got a message, decoding..."
+      req <- either fail return msg :: IO W.WorkRequest
+      hPutStrLn logH $ "Server: msg received:\n" ++ (show . pprintMessage $ req)
+
+      -- Processing a request
+      resp <- processRequest req extra_args
+
+      let msgresp = runBuilder . buildMessageDelimited $ resp
+      S.hPut hOut msgresp
+      hPutStrLn logH $ "Server sent response..."
+
+-- | Compile the request's arguments (plus extra_args); declared inputs are unused here.
+processRequest :: W.WorkRequest -> [String] -> IO W.WorkResponse
+processRequest req extra_args = do
+  let (args, _inputs) = destructRequest req
+
+  compile (args ++ extra_args)
+
+  -- TODO(review): exit code is always 0; confirm 'compile' signals failure by throwing.
+  return sampleResponse
+
+-- | Split a request into (compiler flags, input file paths).
+destructRequest :: W.WorkRequest -> ([String], [String])
+destructRequest req = (flags, inputs)
+  where
+    flags = map T.unpack $ req ^. W.arguments
+    inputs = map (T.unpack . (^. W.path)) $ req ^. W.inputs
+
+sampleResponse :: W.WorkResponse
+sampleResponse = defMessage
+  & W.exitCode .~ 0
+  & W.output .~ "All good.\n"
diff --git a/tools/worker/Setup.hs b/tools/worker/Setup.hs
new file mode 100644
index 0000000000..9a994af677
--- /dev/null
+++ b/tools/worker/Setup.hs
@@ -0,0 +1,2 @@
+import Distribution.Simple
+main = defaultMain
diff --git a/tools/worker/worker.cabal b/tools/worker/worker.cabal
new file mode 100644
index 0000000000..4c5eb145f8
--- /dev/null
+++ b/tools/worker/worker.cabal
@@ -0,0 +1,23 @@
+cabal-version: 2.4
+
+name:          bin
+version:       0.1.0.0
+license:       BSD-3-Clause
+author:        Artem Pelenitsyn
+maintainer:    a.pelenitsyn@gmail.com
+
+executable bin
+  main-is: Main.hs
+  other-modules: Server, Compile, Proto.Worker, Proto.Worker_Fields
+  ghc-options: -threaded "-with-rtsopts=-N2" -Wall
+  build-depends: base ^>=4.12.0.0,
+                 bytestring,
+                 filepath,
+                 ghc,
+                 ghc-paths,
+                 microlens,
+                 process,
+                 proto-lens >= 0.5.1.0,
+                 proto-lens-runtime,
+                 text
+  default-language: Haskell2010