"""
txt &= "Chalk version | " & getChalkExeVersion() & " |
"
txt &= "Commit ID | " & getChalkCommitID() & " |
"
diff --git a/src/configs/README.md b/src/configs/README.md
index bd032d19..9e87112e 100644
--- a/src/configs/README.md
+++ b/src/configs/README.md
@@ -2,13 +2,12 @@ This directory contains con4m code that Chalk uses for various purposes:
| File | Purpose |
| ----------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| chalk.c42spec | This is a specification for what makes a valid chalk file. It is used to validate other files in this directory, and user configuration files, when provided. It contains definitions for the sections and fields allowed, and does extensive input validation. |
-| baseconfig.c4m | This is where the default chalk metadata keys are set up, along with other defaults. |
+| chalk.c42spec | This is a specification for what makes a valid chalk configuration. It is used to validate other files in this directory, and user configuration files, when provided. It contains definitions for the sections and fields allowed, and does extensive input validation. |
+| base*.c4m | This is where the default chalk metadata keys are set up, along with other defaults. |
| getopts.c4m | This specifies what is allowed at the command line, validates inputs and provides documentation for all the options. It's loaded together with baseconfig.c4m (as if #include'd in C). |
| ioconfig.c4m | Sets up defaults for output and reporting. Run after the previous two, so that it can be influenced by command-line arguments. |
-| signconfig.c4m | Sets up running external signing tools (currently only GPG). Whether this runs unless --no-load-sign-tools is passed at the command line. It too is run together with ioconfig.c4m. |
| sbomconfig.c4m | Sets up external sbom collection tools, if --load-sbom-tools is passed. Also run with ioconfig.c4m |
| sastconfig.c4m | Sets up external static analysis collection tools, if --load-sast-tools is passed, in which case it runs with ioconfig.c4m |
-| defaultconfig.c4m | This is a 'default' user config file that runs, if no other user configuration file is embedded in the binary. It runs after the above, but before any on-filesystem config, if provided. |
+| defaultconfig.c4m | This is a 'default' user config file that runs, if no other user configuration file is embedded in the binary. It runs after the above, but before any on-filesystem config, if provided. It doesn't actually do anything! |
| dockercmd.c4m | This config file is used in parsing the _docker_ command line, or our container chalking and wrapping. It accepts a superset of valid docker command lines. |
| entrypoint.c4m | This isn't a valid con4m file; it's a template for a valid con4m file. When wrapping docker entrypoints, this will be used to generate the configuration file we inject into the chalk binary to properly handle entry point execution. |
diff --git a/src/configs/base_chalk_templates.c4m b/src/configs/base_chalk_templates.c4m
index b4fce064..f9f50079 100644
--- a/src/configs/base_chalk_templates.c4m
+++ b/src/configs/base_chalk_templates.c4m
@@ -33,6 +33,7 @@ or vice versa.
key.DATETIME_WHEN_CHALKED.use = true
key.EARLIEST_VERSION.use = true
key.HOSTINFO_WHEN_CHALKED.use = true
+ key.PUBLIC_IPV4_ADDR_WHEN_CHALKED.use = true
key.NODENAME_WHEN_CHALKED.use = true
key.INJECTOR_CHALK_ID.use = true
key.INJECTOR_PUBLIC_KEY.use = true
@@ -109,6 +110,7 @@ mark_template mark_large {
key.DATETIME_WHEN_CHALKED.use = true
key.EARLIEST_VERSION.use = true
key.HOSTINFO_WHEN_CHALKED.use = true
+ key.PUBLIC_IPV4_ADDR_WHEN_CHALKED.use = true
key.NODENAME_WHEN_CHALKED.use = true
key.INJECTOR_CHALK_ID.use = true
key.INJECTOR_VERSION.use = true
diff --git a/src/configs/base_init.c4m b/src/configs/base_init.c4m
index 693a2265..2378b732 100644
--- a/src/configs/base_init.c4m
+++ b/src/configs/base_init.c4m
@@ -25,7 +25,6 @@ docker { }
load { }
-
cloud_provider {
cloud_instance_hw_identifiers {
}
diff --git a/src/configs/base_keyspecs.c4m b/src/configs/base_keyspecs.c4m
index fb787576..b0d8a98c 100644
--- a/src/configs/base_keyspecs.c4m
+++ b/src/configs/base_keyspecs.c4m
@@ -51,7 +51,7 @@
## the more basic per-op stuff such as "_CHALKS" and "ACTION_ID" I did not.
## CHALK SCHEMA
-chalk_version := "0.1.2"
+chalk_version := "0.1.3"
ascii_magic := "dadfedabbadabbed"
# Field starting with an underscore (_) are "system" metadata fields, that
@@ -237,6 +237,19 @@ the `uname()` system call.
"""
}
+keyspec PUBLIC_IPV4_ADDR_WHEN_CHALKED {
+ kind: ChalkTimeHost
+ type: string
+ standard: true
+ since: "0.1.3"
+ shortdoc: "IPv4 address at Chalk time"
+ doc: """
+This returns the IPv4 address on the local machine used to route
+external traffic. It's determined by setting up a UDP connection to
+Cloudflare's public DNS service, but does not involve sending any data.
+"""
+}
+
keyspec NODENAME_WHEN_CHALKED {
kind: ChalkTimeHost
type: string
@@ -3591,6 +3604,24 @@ the `uname()` system call.
"""
}
+keyspec _OP_PUBLIC_IPV4_ADDR {
+ kind: RunTimeHost
+ type: string
+ standard: true
+ since: "0.1.3"
+ shortdoc: "IPv4 address"
+ doc: """
+This returns the IPv4 address on the local machine used to route
+external traffic. It's determined by setting up a UDP connection to
+Cloudflare's public DNS service, but does not involve sending any
+data.
+
+There are other keys for reported IPs via other systems, including
+cloud provider APIs, docker, procfs, etc.
+"""
+}
+
+
keyspec _OP_NODENAME {
kind: RunTimeHost
type: string
diff --git a/src/configs/base_plugins.c4m b/src/configs/base_plugins.c4m
index 5e469cc6..e79eb527 100644
--- a/src/configs/base_plugins.c4m
+++ b/src/configs/base_plugins.c4m
@@ -15,8 +15,8 @@ plugin system {
"INJECTOR_COMMIT_ID", "DATE_CHALKED",
"TZ_OFFSET_WHEN_CHALKED", "DATETIME_WHEN_CHALKED",
"INJECTOR_ENV", "HOSTINFO_WHEN_CHALKED",
- "NODENAME_WHEN_CHALKED", "PLATFORM_WHEN_CHALKED",
- "INJECTOR_PUBLIC_KEY"]
+ "PUBLIC_IPV4_ADDR_WHEN_CHALKED", "INJECTOR_PUBLIC_KEY",
+ "NODENAME_WHEN_CHALKED", "PLATFORM_WHEN_CHALKED"]
artifact_keys: ["MAGIC", "OLD_CHALK_METADATA_HASH", "OLD_CHALK_METADATA_ID",
"PRE_CHALK_HASH", "TIMESTAMP_WHEN_CHALKED"]
@@ -27,10 +27,10 @@ plugin system {
"_INVALID_SIGNATURE"]
post_run_keys: ["_UNMARKED", "_OP_ERRORS", "_OPERATION", "_OP_SEARCH_PATH",
- "_OP_HOSTINFO", "_OP_NODENAME", "_OP_PLATFORM",
- "_OP_CHALKER_COMMIT_ID", "_OP_CHALKER_VERSION",
- "_OP_CHALK_COUNT", "_OP_CMD_FLAGS", "_OP_EXE_NAME",
- "_OP_EXE_PATH", "_OP_ARGV", "_OP_HOSTNAME",
+ "_OP_HOSTINFO", "_OP_PUBLIC_IPV4_ADDR", "_OP_NODENAME",
+ "_OP_PLATFORM", "_OP_CHALKER_COMMIT_ID",
+ "_OP_CHALKER_VERSION", "_OP_CHALK_COUNT", "_OP_CMD_FLAGS",
+ "_OP_EXE_NAME", "_OP_EXE_PATH", "_OP_ARGV", "_OP_HOSTNAME",
"_OP_HOST_REPORT_KEYS", "_OP_UNMARKED_COUNT", "_TIMESTAMP",
"_DATE", "_TIME", "_TZ_OFFSET", "_DATETIME", "_ENV"]
diff --git a/src/configs/base_report_templates.c4m b/src/configs/base_report_templates.c4m
index a89debfa..ca270720 100644
--- a/src/configs/base_report_templates.c4m
+++ b/src/configs/base_report_templates.c4m
@@ -29,6 +29,7 @@ report and subtract from it.
key.DATETIME_WHEN_CHALKED.use = true
key.EARLIEST_VERSION.use = true
key.HOSTINFO_WHEN_CHALKED.use = true
+ key.PUBLIC_IPV4_ADDR_WHEN_CHALKED.use = true
key.NODENAME_WHEN_CHALKED.use = true
key.INJECTOR_CHALK_ID.use = true
key.INJECTOR_PUBLIC_KEY.use = true
@@ -303,6 +304,7 @@ report and subtract from it.
key._OP_CHALKER_VERSION.use = true
key._OP_PLATFORM.use = true
key._OP_HOSTNAME.use = true
+ key._OP_PUBLIC_IPV4_ADDR.use = true
key._OP_HOSTINFO.use = true
key._OP_NODENAME.use = true
key._OP_CLOUD_METADATA.use = true
@@ -407,6 +409,7 @@ doc: """
key.DATETIME_WHEN_CHALKED.use = true
key.EARLIEST_VERSION.use = true
key.HOSTINFO_WHEN_CHALKED.use = true
+ key.PUBLIC_IPV4_ADDR_WHEN_CHALKED.use = true
key.NODENAME_WHEN_CHALKED.use = true
key.INJECTOR_CHALK_ID.use = true
key.INJECTOR_VERSION.use = true
@@ -442,6 +445,7 @@ doc: """
key._OP_CHALKER_VERSION.use = true
key._OP_PLATFORM.use = true
key._OP_HOSTNAME.use = true
+ key._OP_PUBLIC_IPV4_ADDR.use = true
key._OP_HOSTINFO.use = true
key._OP_NODENAME.use = true
key._OP_CLOUD_METADATA.use = true
@@ -533,6 +537,7 @@ doc: """
key.DATETIME_WHEN_CHALKED.use = true
key.EARLIEST_VERSION.use = true
key.HOSTINFO_WHEN_CHALKED.use = true
+ key.PUBLIC_IPV4_ADDR_WHEN_CHALKED.use = true
key.NODENAME_WHEN_CHALKED.use = true
key.INJECTOR_CHALK_ID.use = true
key.INJECTOR_PUBLIC_KEY.use = true
@@ -828,6 +833,7 @@ container.
key.DATETIME_WHEN_CHALKED.use = true
key.EARLIEST_VERSION.use = true
key.HOSTINFO_WHEN_CHALKED.use = true
+ key.PUBLIC_IPV4_ADDR_WHEN_CHALKED.use = true
key.NODENAME_WHEN_CHALKED.use = true
key.INJECTOR_CHALK_ID.use = true
key.INJECTOR_VERSION.use = true
@@ -865,6 +871,7 @@ container.
key._OP_CHALKER_VERSION.use = true
key._OP_PLATFORM.use = true
key._OP_HOSTNAME.use = true
+ key._OP_PUBLIC_IPV4_ADDR.use = true
key._OP_HOSTINFO.use = true
key._OP_NODENAME.use = true
key._OP_CLOUD_METADATA.use = true
@@ -1232,6 +1239,7 @@ and keep the run-time key.
key.DATETIME_WHEN_CHALKED.use = false
key.EARLIEST_VERSION.use = false
key.HOSTINFO_WHEN_CHALKED.use = false
+ key.PUBLIC_IPV4_ADDR_WHEN_CHALKED.use = false
key.NODENAME_WHEN_CHALKED.use = false
key.INJECTOR_CHALK_ID.use = true
key.INJECTOR_VERSION.use = true
@@ -1267,6 +1275,7 @@ and keep the run-time key.
key._OP_CHALKER_VERSION.use = true
key._OP_PLATFORM.use = true
key._OP_HOSTNAME.use = true
+ key._OP_PUBLIC_IPV4_ADDR.use = true
key._OP_HOSTINFO.use = true
key._OP_NODENAME.use = true
key._OP_CLOUD_METADATA.use = true
diff --git a/src/configs/chalk.c42spec b/src/configs/chalk.c42spec
index f1f5f9ae..4e12e552 100644
--- a/src/configs/chalk.c42spec
+++ b/src/configs/chalk.c42spec
@@ -13,7 +13,7 @@ default_key_priority := 4611686018427387904 # 2^62.
# These are the valid command-line commands.
valid_chalk_cmds := ["help", "insert", "extract", "delete", "config",
"load", "dump", "docker", "version", "env", "exec",
- "setup", "login", "logout"]
+ "setup", "login", "logout", "docgen"]
all_cmds_that_insert := ["insert", "build", "load", "setup", "login", "logout"]
@@ -1423,7 +1423,9 @@ singleton load {
gen_setters: false
user_def_ok: false
doc: """
-Options that control how the `chalk load` command works.
+Options that control how the `chalk load` command works. Note that
+these values are taken from the starting configuration, not any
+configuration being loaded.
"""
field replace_conf {
@@ -1447,7 +1449,6 @@ Otherwise, the passed configuration is treated like a component:
This flag is ignored when running `chalk load default`, which will
_always_ reset the embedded configuration to the default.
-
"""
}
@@ -1462,20 +1463,51 @@ Suppress validation of configuration files on loading. Please don't do this!
field validation_warning {
type: bool
- default: true
- shortdoc: "Show 'chalk load' validation warning"
+ default: false
+ shortdoc: "Show 'chalk load' validation warning"
doc: """
Show the (admittedly verbose) warning you get when running 'chalk load'.
+This is off by default, under the assumption that most people are going
+to use the component system exclusively, and everyone else can read the
+docs :)
+"""
+ }
+
+ field params_via_stdin {
+ type: bool
+ default: false
+ doc: """
+When this is on, loads will not use the interactive interface for
+configuring parameters. Instead, chalk will read parameters from
+stdin.
+
+Parameters should be in the format Chalk uses internally. You can
+get the parameters via `chalk dump --params` or by setting the
+configuration parameter `dump.params`
"""
}
+
+ field update_arch_binaries {
+ type: bool
+ default: true
+ doc: """
+When this is true, if you run a `chalk load` on this binary, it will
+try to (via docker) load the exact same configuration into any
+cross-architecture binaries listed in docker.arch_binary_locations.
+
+Note that, if you source config components from a local directory, you
+currently will need to update them manually, as those directories will
+not be mapped into the container.
+"""
+ }
}
singleton exec {
-gen_fieldname: "execConfig"
-gen_typename: "ExecConfig"
-gen_setters: false
-user_def_ok: false
-doc: """
+ gen_fieldname: "execConfig"
+ gen_typename: "ExecConfig"
+ gen_setters: false
+ user_def_ok: false
+ doc: """
When the `chalk docker` command wraps a container, it inserts a
version of itself into the container, to be able to do data collection
in the runtime environment. Although we do this by replacing the
diff --git a/src/configs/getopts.c4m b/src/configs/getopts.c4m
index a3361a3c..f7a00a51 100644
--- a/src/configs/getopts.c4m
+++ b/src/configs/getopts.c4m
@@ -9,7 +9,7 @@
default_recursive_doc := """
-"Determines whether a file scan will recursively walk paths to find artifacts.
+Determines whether a file scan will recursively walk paths to find artifacts.
"""
getopts {
@@ -132,31 +132,40 @@ This is similar to the `chalk config` command, except it shows state information
flag_yn run_sbom_tools {
field_to_set: "run_sbom_tools"
doc: """
-For insertion operations, this flag forces running any configured tools for SBOM collection. It does not guarantee reporting or chalking; that is up to the reporting configuration.
+For insertion operations, this flag forces running any configured
+tools for SBOM collection. It does not guarantee reporting or
+chalking; that is up to the reporting configuration.
In the default chalk configuration, these tools do not run at all.
-This flag is defined for all chalk commands, but currently is ignored for any command except "insert" or "docker".
+This flag is defined for all chalk commands, but currently is ignored
+for any command except "insert" or "docker".
"""
}
flag_yn run_sast_tools {
field_to_set: "run_sast_tools"
doc: """
-For insertion operations, this flag forces running any configured tools for performing static analysis. It does not guarantee reporting or chalking; that is up to the reporting configuration.
+For insertion operations, this flag forces running any configured
+tools for performing static analysis. It does not guarantee reporting
+or chalking; that is up to the reporting configuration.
In the default chalk configuration, these tools do not run at all.
-This flag is defined for all chalk commands, but currently is ignored for any command except "insert" or "docker".
+This flag is defined for all chalk commands, but currently is ignored
+for any command except "insert" or "docker".
"""
}
flag_yn use_report_cache {
field_to_set: "use_report_cache"
doc: """
-Enables or disables the reporting cache. The reporting cache is a ring buffer stored locally, that contains reporting information that could not be delivered to its configured sources, due to some outage.
+Enables or disables the reporting cache. The reporting cache is a
+ring buffer stored locally, that contains reporting information that
+could not be delivered to its configured sources, due to some outage.
-When using the report cache, any time chalk does run reports, it will try to flush as much of the cache as it can.
+When using the report cache, any time chalk does run reports, it will
+try to flush as much of the cache as it can.
"""
}
@@ -166,9 +175,12 @@ When using the report cache, any time chalk does run reports, it will try to flu
field_to_set: "virtual_chalk"
doc: """
-When chalking, do NOT modify artifacts, overriding anything defined in the config file. This is completely ignored for operations that do not normally modify artifacts.
+When chalking, do NOT modify artifacts, overriding anything defined in
+the config file. This is completely ignored for operations that do not
+normally modify artifacts.
-Specifically, this flag only works with `chalk insert`, `chalk docker build`, and `chalk delete`.
+Specifically, this flag only works with `chalk insert`, `chalk docker
+build`, and `chalk delete`.
By default, this will write to "./virtual-chalk.json".
"""
@@ -178,9 +190,12 @@ By default, this will write to "./virtual-chalk.json".
field_to_set: "chalk_debug"
doc: """
-Shows nim stack traces where appropriate, generally where exceptions were caught.
+Shows nim stack traces where appropriate, generally where exceptions
+were caught.
-Additionally, if temporary files might be useful to inspect, this causes them to not get deleted. Specifically, docker temporary files (most notably any docker file modifications) get left behind.
+Additionally, if temporary files might be useful to inspect, this
+causes them to not get deleted. Specifically, docker temporary files
+(most notably any docker file modifications) get left behind.
"""
}
@@ -190,11 +205,13 @@ Additionally, if temporary files might be useful to inspect, this causes them to
no_aliases: []
doc: """
-Skip publishing the command report (i.e., the PRIMARY report). NO output sinks will get it.
+Skip publishing the command report (i.e., the PRIMARY report). NO
+output sinks will get it.
_For most commands, this defeats the purpose of Chalk, so use it sparingly._
-Note that this doesn't turn off any custom reports; you have to disable those seprately.
+Note that this doesn't turn off any custom reports; you have to
+disable those separately.
"""
}
@@ -211,10 +228,13 @@ Whether to skip the summary report to the terminal.
add_choice_flags: true
field_to_set: "symlink_behavior"
doc: """
-Chalk never follows directory links. When running non-chalking operations, chalk will read the file on the other end of the link, and report using the file name of the link.
-
-For insertion operations, Chalk will, out of the box, warn on symbolic links, without processing them.
-
+Chalk never follows directory links. When running non-chalking
+operations, chalk will read the file on the other end of the link, and
+report using the file name of the link.
+
+For insertion operations, Chalk will, out of the box, warn on symbolic
+links, without processing them.
+
This variable controls what happens in those cases:
- skip will not process files that are linked.
@@ -232,7 +252,7 @@ containers will do a chalk report when they launch. Note that the
'docker' command passes through ALL flags, so this flag needs to
technically be part of the 'global' flags, even though nothing else
uses it.
-
+
If, when wrapping, your chalk binary is using an external
configuration file, that file will NOT get used inside the
container. The wrapped binary currently only uses the embedded
@@ -256,18 +276,25 @@ At the moment, this is only honored for the `chalk help` command.
args: (0, high())
shortdoc: "Add chalk marks to artifacts"
doc: """
-Add chalk marks to artifacts found on the file system. See the `docker` command for adding marks to docker containers.
+Add chalk marks to artifacts found on the file system. See the
+`docker` command for adding marks to docker containers.
-On chalking, what gets put into the chalk mark will be determined by the active chalk mark template after any user config file has loaded. Each command's output configuration can be specified using the 'outconf' section in the configuration file.
-
-For instance, if you create a new mark template named 'my_chalk_mark', you can activate it for both regular and docker insertions with the following in your configuration file:
+On chalking, what gets put into the chalk mark will be determined by
+the active chalk mark template after any user config file has loaded.
+Each command's output configuration can be specified using the
+'outconf' section in the configuration file.
+
+For instance, if you create a new mark template named 'my_chalk_mark',
+you can activate it for both regular and docker insertions with the
+following in your configuration file:
```
outconf.insert.chalk = "myconf"
outconf.docker.chalk = "myconf"
```
-For information on mark templates on the command line, see: `chalk help templates`
+For information on mark templates on the command line, see: `chalk
+help templates`
"""
callback: func set_artifact_search_path(list[string])
@@ -443,29 +470,71 @@ The named reporting template must already exist in your configuration.
aliases: []
shortdoc: "Show configuration variables and settings"
doc: """
-Shows the results of evaluating the configuration, without actually doing any work with artifacts.
-
-Even though they are related, there is a significant difference between the 'config' command and the --show-config flag. They both dump the configuration after evaluating any config file, but they may easily produce different results.
-
-That's because chalk uses 'con4m' for configuration, which, while typically just looking like a regular config file, can have arbitrary code added, with conditionals, and so-on. The default configuration does, for instance, configure different output handlers, depending on the command given.
-
-Running the 'defaults' command will therefore give you the information about the evaluation just when that command ran. Whereas, '--show-config extract' will dump the config as it resolves when you run the 'extract' command, which could be very similar, or very different.
-
-Importantly though, running '--show-config extract' still runs the 'extract' command.
-
-This command does not show the contents of the config file(s) used, just key results from executing those config files. And, generally there will be at least two 'stacked' configuration files. See 'help config' for more information on the configuration file and con4m.
+Shows the results of evaluating the configuration, without actually
+doing any work with artifacts.
+
+Even though they are related, there is a significant difference
+between the 'config' command and the --show-config flag. They both
+dump the configuration after evaluating any config file, but they may
+easily produce different results.
+
+That's because chalk uses 'con4m' for configuration, which, while
+typically just looking like a regular config file, can have arbitrary
+code added, with conditionals, and so-on. The default configuration
+does, for instance, configure different output handlers, depending on
+the command given.
+
+Running the 'defaults' command will therefore give you the information
+about the evaluation just when that command ran. Whereas,
+'--show-config extract' will dump the config as it resolves when you
+run the 'extract' command, which could be very similar, or very
+different.
+
+Importantly though, running '--show-config extract' still runs the
+'extract' command.
+
+This command does not show the contents of the config file(s) used,
+just key results from executing those config files. And, generally
+there will be at least two 'stacked' configuration files. See 'help
+config' for more information on the configuration file and con4m.
"""
}
command dump {
args: (0, 1)
- shortdoc: "Print the embedded configuration file"
- doc: """
-Reads the embedded configuration file, and outputs it, based on your output configuration (see 'help output'). In the default configuration, if no argument is given, the config file is written to stdout; and if an argument is provided, it will try write the configuration to the file specified by the argument.
-
-This behavior can be overridden by the configuration file, where you can specify different output configurations. See 'help config' for an overview of the configuration file format, and 'help output' for an overview of the output system.
+ arg_sub_mutex: false
+ shortdoc: "Print the embedded configuration file"
+ doc: """
+Reads the embedded configuration file, and outputs it, based on your
+output configuration (see 'help output'). In the default
+configuration, if no argument is given, the config file is written to
+stdout; and if an argument is provided, it will try to write the
+configuration to the file specified by the argument.
+
+This behavior can be overridden by the configuration file, where you
+can specify different output configurations. See 'help config' for an
+overview of the configuration file format, and 'help output' for an
+overview of the output system.
+"""
+ command params {
+ shortdoc: "Output saved component parameters as JSON"
+ doc: """
+This does not output the saved configuration. Instead, it dumps the
+JSON for any saved parameters. That JSON can then be loaded into
+another binary via `chalk load --params`, which takes parameters over
+stdin, while setting a configuration. This is used in transferring
+configurations to cross-architecture binaries.
+"""
+ }
-"""
+ command cache {
+ shortdoc: "Output source for cached components"
+ doc: """
+This does a more complete dump of source code; not just the base
+configuration, but also any cached components that have been loaded.
+"""
+
+ }
}
command load {
@@ -490,6 +559,29 @@ provided argument. When off, it's used only as a component that's added
to the config.
"""
}
+
+ flag_yn update_arch_binaries {
+ field_to_set: "load.update_arch_binaries"
+ doc: """
+When this is true, if you run a `chalk load` on this binary, it will
+try to (via docker) load the exact same configuration into any
+cross-architecture binaries listed in docker.arch_binary_locations.
+
+Note that, if you source config components from a local directory, you
+currently will need to update them manually, as those directories will
+not be mapped into the container.
+"""
+ }
+
+ flag_yn params {
+ field_to_set: "load.params_via_stdin"
+ doc: """
+When provided, parameters will be taken from stdin, as a json
+dictionary. Keys are the parameter name as specified in the
+'parameter' block. If parameters that are needed aren't supplied, then
+defaults will be accepted.
+"""
+ }
flag_yn validation {
field_to_set: "load.validate_configs_on_load"
@@ -501,7 +593,8 @@ When on, validate config files before loading them, by doing a trial run.
flag_yn validation_warning {
field_to_set: "load.validation_warning"
doc: """
-This verbose flag controls whether or not you get the verbose warning. It's much better turning this off in your embedded configuration :)
+This verbose flag controls whether or not you get the verbose warning.
+It's much better turning this off in your embedded configuration :)
"""
}
}
@@ -658,7 +751,12 @@ command login {
Starts the API login process and will provide a URL to follow in a browser to complete authentication.
"""
}
-
+command docgen {
+ shortdoc: "Generate technical documentation"
+ doc: """
+Internal function to generate technical documentation in markdown format.
+"""
+ }
}
diff --git a/src/configs/ioconfig.c4m b/src/configs/ioconfig.c4m
index 9dbdb8f9..b4d15453 100644
--- a/src/configs/ioconfig.c4m
+++ b/src/configs/ioconfig.c4m
@@ -38,11 +38,6 @@ if skip_summary_report {
}
cmd := command_name()
-exceptions := ["defaults", "dump", "load", "profile", "version"]
-
-if exceptions.contains(cmd) {
- subscribe("report", "json_console_out")
-}
subscribe("report", "default_out")
subscribe("audit", "default_out")
diff --git a/src/confload.nim b/src/confload.nim
index df60fd21..d26e269f 100644
--- a/src/confload.nim
+++ b/src/confload.nim
@@ -19,8 +19,6 @@
import config, selfextract, con4mfuncs, plugin_load
import macros except error
-const chalkDefaultconfigStore = "https://chalkdust.io/"
-
# Since these are system keys, we are the only one able to write them,
# and it's easier to do it directly here than in the system plugin.
proc stashFlags(winner: ArgResult) =
@@ -34,7 +32,7 @@ proc stashFlags(winner: ArgResult) =
hostInfo["_OP_CMD_FLAGS"] = pack(flagStrs)
-proc installComponentParams(params: seq[Box]) =
+proc installComponentParams*(params: seq[Box]) =
let runtime = getChalkRuntime()
for item in params:
@@ -119,8 +117,8 @@ proc loadLocalStructs*(state: ConfigState) =
setCon4mVerbosity(c4errLevel)
proc handleCon4mErrors(err, tb: string): bool =
- if chalkConfig == nil or chalkConfig.chalkDebug:
- error(err & "\n" & tb)
+  if tb != "" and (chalkConfig == nil or chalkConfig.chalkDebug):
+ echo formatCompilerError(err, nil, tb, default(InstInfo))
else:
error(err)
return true
@@ -157,8 +155,6 @@ proc loadAllConfigs*() =
res: ArgResult # Used across macros above.
resFound: bool
- setDefaultStoreUrl(chalkDefaultConfigStore)
-
let
toStream = newStringStream
stack = newConfigStack()
diff --git a/src/docs/core-hashing.md b/src/docs/core-hashing.md
index 5f420fe3..ed027699 100644
--- a/src/docs/core-hashing.md
+++ b/src/docs/core-hashing.md
@@ -82,7 +82,7 @@ basically anywhere in the binary, but much like the Unix `strip`
command, for the sake of simplicity and correctness, we move the
section table to the back of the binary. Moreover, it would take
significant additional work and require some storage to make this
-operation invertable.
+operation invertible.
As a result, the Chalk Hash (the `HASH` metadata key), is not defined
based on the file system hash. Instead, it is a _normalized_ hash,
@@ -104,8 +104,8 @@ artifacts must be semantically identical.
### More on The Chalk ID
-Once an artifact has been normalized, and the normalizated data stream
-has been hashed using SHA-256, we programiatically take 100 bits of
+Once an artifact has been normalized, and the normalized data stream
+has been hashed using SHA-256, we programmatically take 100 bits of
the raw hash output, base-32 encode those bits, and then add some
hyphens for clarity, to get the `CHALK_ID`.
diff --git a/src/docs/core-release-notes.md b/src/docs/core-release-notes.md
index dd325f07..901a51a3 100644
--- a/src/docs/core-release-notes.md
+++ b/src/docs/core-release-notes.md
@@ -1,4 +1,48 @@
-# Release Notes for Chalk version 0.1.3
+# Release Notes for Chalk version 0.1.3 (Oct 19, 2023)
+
+## New Features
+
+- Added a module so that most users can easily install complex
+ configurations without editing any configuration information
+ whatsoever. Modules can be loaded from https URLs or from the local
+ file system. Our recipes will host modules on chalkdust.io.
+
+ Modules can have parameters that you provide when installing them,
+ and can have arbitrary defaults (for instance, any module importing
+ the module for connecting to our demo web server defaults to your
+ current IP address).
+
+ We do extensive conflict checking to ensure that modules that are
+ incompatible will not run (and generally won't even load).
+
+ We will eventually do an in-app UI to browse and install modules.
+ [47](https://github.com/crashappsec/chalk/pull/47)
+ [67](https://github.com/crashappsec/chalk/pull/67)
+
+- Added initial metadata collection for GCP and Azure, along with a
+ metadata key to provide the current cloud provider, and a key that
+ distinguishes the cloud provider's environments. Currently, this
+ only does AWS (eks, ecs, ec2).
+ [59](https://github.com/crashappsec/chalk/pull/59)
+ [65](https://github.com/crashappsec/chalk/pull/65)
+
+- Added OIDC token refreshing, along with `chalk login` and
+ `chalk logout` commands to log out of auth for the secret manager.
+ [51](https://github.com/crashappsec/chalk/pull/51)
+ [55](https://github.com/crashappsec/chalk/pull/55)
+ [60](https://github.com/crashappsec/chalk/pull/60)
+
+- The initial rendering engine work was completed. This means
+ `chalk help`, `chalk help metadata` are fully functional. This engine is
+ effectively most of the way to a web browser, and will enable us to
+ offload a lot of the documentation, and do a little storefront (once
+ we integrate in notcurses).
+ [58](https://github.com/crashappsec/chalk/pull/58)
+
+- If you're doing multi-arch binary support, Chalk can now pass your
+ native binary's configuration to other arches, though it does
+ currently re-install modules, so the original module locations need
+ to be available.
## Fixes
@@ -16,17 +60,24 @@
[39](https://github.com/crashappsec/chalk/pull/39)
- Sometimes Docker build would not wrap entrypoint.
[45](https://github.com/crashappsec/chalk/pull/45)
+- Cosign now only gets installed if needed.
+ [49](https://github.com/crashappsec/chalk/pull/49)
+- Docker `ENTRYPOINT`/`COMMAND` wrapping now preserves all named
+ arguments from original `ENTRYPOINT`/`COMMAND`.
+ (e.g. `ENTRYPOINT ["ls", "-la"]`)
+ [70](https://github.com/crashappsec/chalk/issues/70)
## Known Issues
-### Containers
+- There are still embedded docs that need to be fixed now that the
+ entire rendering engine is working well enough.
-- When a `Dockerfile` does not use `USER` directive but base image
- uses it to change default image user, chalk cannot wrap the
- image as it on legacy Docker builder (not buildx) as it will
- fail to `chmod` permissions of chalk during the build.
+- When a `Dockerfile` does not use the `USER` directive but the base
+  image uses it to change the default image user, chalk cannot wrap the
+  image on the legacy Docker builder (not buildx), as it will fail to
+  `chmod` permissions of chalk during the build.
-# Release Notes for Chalk version 0.1.2
+# Release Notes for Chalk version 0.1.2 (Sept 26, 2023)
This is the first open source release of Chalk. For those who
participated in the public preview, there have been massive changes
diff --git a/src/docs/core-secret-manager-api.md b/src/docs/core-secret-manager-api.md
index 27db4459..6222a664 100644
--- a/src/docs/core-secret-manager-api.md
+++ b/src/docs/core-secret-manager-api.md
@@ -6,7 +6,7 @@ signing and attestation operations.
All secrets and keying material are locally generated on the
system running chalk, with the secret itself being encrypted
-locally priot to being sent to the API.
+locally prior to being sent to the API.
This document provides an overview of the Secret Manager API, how
data is stored securely, and how chalk interacts with the API as a
@@ -125,7 +125,7 @@ open source.
The encryption scheme makes use of a PRP using the Luby-Rackoff
construction. The easiest thing for us to do is to break the input
into two 'halves',one being 128 bits (the width of AES, which we
-will call the 'lefthalf'), and the other the rest of the remaining
+will call the 'left half'), and the other the rest of the remaining
width of the input (the 'right half').
The nonce is random.
@@ -145,13 +145,13 @@ generate a key stream, that we XOR into the right half.
The other PRF is HMAC-3. We take the round key, HMAC the right
side, truncate the result to 128 bits, then XOR into the left half.
-The PRFs are used in a feistel cipher, so we alternate PRFs through
-our four feistel rounds.
+The PRFs are used in a Feistel cipher, so we alternate PRFs through
+our four Feistel rounds.
While three-round Luby-Rackoff is secure against some use cases, we
go through the full four rounds.
-PRPs are reversable, and with feistel contstructions, it's by
+PRPs are reversible, and with Feistel constructions, this is done by
running the rounds backward.
Once constructed it is this encrypted value that is sent to the
diff --git a/src/docs/guide-config-overview.md b/src/docs/guide-config-overview.md
index f5c4c31a..7fac4d71 100644
--- a/src/docs/guide-config-overview.md
+++ b/src/docs/guide-config-overview.md
@@ -32,7 +32,7 @@ The exact metadata that will be getting included in a report are defined in
_templates_ which are simply collections of metadata keys (with optional conditions
on when said metadata should be getting emitted). The same template
can be re-used across many reports, however each of the different reports
-making use of the template could have different trigger/generation condidtions
+making use of the template could have different trigger/generation conditions
and different destinations.
Here is an excerpt from the template used by default for any metadata extracted
@@ -235,7 +235,7 @@ custom_report chalk_s3_logger {
```
-Notice that we have also suppreassed local terminal output for the above report.
+Notice that we have also suppressed local terminal output for the above report.
### Updating the used templates
diff --git a/src/docs/guide-getting-started.md b/src/docs/guide-getting-started.md
index 70c06942..32b8d03b 100644
--- a/src/docs/guide-getting-started.md
+++ b/src/docs/guide-getting-started.md
@@ -12,9 +12,9 @@ CI/CD pipeline. In many cases, it can be completely transparent to
the user.
Any configuration should be done up-front by whoever needs the data
-from chalk. While chalk is designed to be deeply customisable, we also
+from chalk. While chalk is designed to be deeply customizable, we also
worked hard to make out-of-the-box configurations useful, and to make
-it very easy to configure common usecases.
+it very easy to configure common use cases.
First, let's do some basics to get up and running, both with
chalking artifacts, and reporting on them in production.
@@ -239,7 +239,7 @@ to note for now:
1. We've captured basic information about the build environment,
including our repo, branch and commit ID. If you pull a repo remotely
- from Github or Gitlab, the "ORIGIN_URI" key will give the URL where
+ from GitHub or GitLab, the "ORIGIN_URI" key will give the URL where
the repository is hosted, instead of `local`.
2. In addition to the report, we inserted a JSON blob into our
@@ -583,7 +583,7 @@ without specifying a file name (which will just print to stdout):
chalk dump
```
-Youu should see:
+You should see:
```bash
# The default config is empty. Please see chalk documentation for examples.
@@ -751,7 +751,7 @@ code you're running.
Chalk really only monitors a subset of docker commands, but when
wrapping docker, it will pass through all docker commands even if it
-doesn't do any of its own processing on them. If chalk encoounters an
+doesn't do any of its own processing on them. If chalk encounters an
error while attempting to wrap docker, it will then execute the
underlying docker command without chalk so that this doesn't break any
pre-existing pipelines.
@@ -820,7 +820,7 @@ curl http://127.0.0.1:8585/execs
# for pretty json output if you have jq installed, run `curl http://127.0.0.1:8585/execs | jq`
```
-![serverout](./img/execout.png){ loading=lazy }
+![exec output](./img/execout.png){ loading=lazy }
You can see that, in addition to artifact information, there is also
information about the operating environment, including the container
diff --git a/src/docs/guide-heartbeat.md b/src/docs/guide-heartbeat.md
index 913f5031..4e8dd248 100644
--- a/src/docs/guide-heartbeat.md
+++ b/src/docs/guide-heartbeat.md
@@ -8,7 +8,7 @@
This document is a guide on how to configure chalk so that a chalked binary or docker container emits a snapshot of network connections at set intervals.
-### Prerequisities
+### Prerequisites
- chalk binary
- (optional) dockerfile for a docker image with compatible architecture
diff --git a/src/docs/guide-user-guide.md b/src/docs/guide-user-guide.md
index 725bfa3f..4ccbba9b 100644
--- a/src/docs/guide-user-guide.md
+++ b/src/docs/guide-user-guide.md
@@ -46,7 +46,7 @@ wizard, which we expect will meet most configuration needs.
We will be making source code available at the time of our public
launch. Instructions on how to build directly and building via docker
-file are availabe in the [Chalk Getting Started
+file are available in the [Chalk Getting Started
Guide](./guide-getting-started.md), as well as instructions on how to
download pre-built chalk binaries.
@@ -249,7 +249,7 @@ will, by default:
3. Generate a chalk report with metadata on the build operation.
Chalk also reports a bit of metadata when pushing images to help
-provide full tracability.
+provide full traceability.
Chalk can also be configured to add build-time attestation when possible.
@@ -367,7 +367,7 @@ Metadata is at the core of Chalk, which categorizes data into four types:
1. **Chalk-time artifact metadata**, which is data specific to a
software artifact, collected when inserting chalk marks. This data can
- be put into a chalk mark, and it can also be seprately reported
+ be put into a chalk mark, and it can also be separately reported
without putting it in the chalk mark.
2. **Chalk-time host metadata**, which is data about the environment
@@ -441,7 +441,7 @@ interoperability across implementations.
For instance, it is easy to write a compliant chalk library that
allows programs to store their implementations inside their
-executable, and retrieve them, while still interoperating with other
+executable, and retrieve them, while still interoperating with other
programs that collect a wider range of metadata.
We certainly intend to allow other people to implement compatible
@@ -531,7 +531,7 @@ modify chalk marks.
Starting with Chalk 0.1.1, Chalk mark injectors that find an existing
chalk mark in an artifact will, if replacing the chalk mark, keep `$`
-keys they do not recognize, unless specificly configured to remove
+keys they do not recognize, unless specifically configured to remove
them, while also considering them part of the previous chalk mark.
With Chalk 0.1.0, the `$CHALK_CONFIG` key is the only allowable key,
@@ -593,7 +593,7 @@ keys will always be directly taken from the chalk mark. No keys
without the leading underscore can be reported for non-insertion
operations unless they are found in a chalk mark.
-We do recommend, at chalk insertion time, to to be thoughtful about
+We do recommend, at chalk insertion time, to be thoughtful about
what metadata will be added to the chalk mark itself.
There are two key reasons for this:
@@ -609,7 +609,7 @@ There are two key reasons for this:
in practice, some metadata objects may be quite large, such as
generated SBOMs or static analysis reports.
-The first concern is, by far, the most sigificant. Even in cases where
+The first concern is, by far, the most significant. Even in cases where
software never intentionally leaves an organization, there can be
risks. For instance, if the chalk mark contains code ownership or
other contact information, while it does make life easier for
@@ -669,7 +669,7 @@ In the first case, the mark does NOT need to be at the end of the
file, due to the support for placeholders.
A valid placeholder consists of the JSON object `{ "MAGIC" :
-"dadfedabbadabbed" }`. The presense of spaces and the number of spaces
+"dadfedabbadabbed" }`. The presence of spaces and the number of spaces
is all flexible, but no newlines are allowed.
The intent here is to allow developers to specify where they want
@@ -694,8 +694,8 @@ solution. Currently, we're considering two approaches:
1. File-based artifacts will need to be scanned in their entirety
before marking, and if a mark is found, the spot is reused. This would
- make things easier on implementators, but could impact performance for
- some larger artifiacts.
+ make things easier on implementors, but could impact performance for
+ some larger artifacts.
2. We may require marking the locations that older versions would have
selected with a mark that invalidates the location, and points to the
@@ -763,7 +763,7 @@ well-defined image format is not allowed.
### Replacing existing marks
When a Chalk mark already exists in a document, it's up to the context
-of the insertion whether the the existing chalk mark should be
+of the insertion whether the existing chalk mark should be
removed. In most cases, an existing chalk mark should be preserved. For
instance, when chalking during deployment, any previous chalk mark
from the build process should be preserved.
@@ -788,7 +788,7 @@ strongly discourage using those keys without reporting.
Extractors generally do not need to care about file structure for
non-image formats. It should be sufficient for them to scan the bytes
-of such artifacts, looking for the existance of Chalk `MAGIC` key.
+of such artifacts, looking for the existence of the Chalk `MAGIC` key.
However, for image-based formats, the extractor needs to be aware
enough of the marking requirements for that format to be able to
@@ -882,7 +882,7 @@ For more information, see the following:
fields. Documentation for keys will also include the conditions
where the reference implementation can find them.
- [The Config Overview Guide](./guide-config-overview.md) covers how
- to to configure WHERE reports get sent.
+ to configure WHERE reports get sent.
Note that compliant insertion implementations do not require compliant
reporting implementations. But compliant chalk tools for other
@@ -890,7 +890,7 @@ operations MUST produce fully conformant JSON.
However, there are no requirements on how that JSON gets distributed
or managed, other than that compliant implementations must provide a
-straightforward way to make the JSON avilable to users if desired.
+straightforward way to make the JSON available to users if desired.
A report not in the proper format, or with key/values pairs that are
not compliant, is not a Chalk report.
@@ -926,7 +926,7 @@ The normalization algorithm is as follows:
`TZ_OFFSET`, `DATETIME`.
3. The following key/value pair is encoded LAST, (whenever present):
`ERR_INFO`.
-4. The remaining keys are encoded in lexigraphical order.
+4. The remaining keys are encoded in lexicographical order.
5. The encoding starts with the number of keys in the normalization,
as a 32-bit little endian integer.
6. Each key/value pair is encoded in order by encoding the key, and
@@ -974,7 +974,7 @@ validation discussed below built on top of the `METADATA_ID`.
We currently omit `EMBEDDED_CHALK`, instead allowing them to be
independently validated, if desired. While this does mean the
`EMBEDDED_CHALK` key can be excised without detection at validation
-time, we expect that either the relevent sub-artifacts will have
+time, we expect that either the relevant sub-artifacts will have
embedded chalk marks themselves, or the server will have record of the
insertion.
@@ -1000,12 +1000,12 @@ well, as long as there is a `HASH` field).
In containers, where we do not have an easy, reliable hash, metadata
normalization and validation works the same way. But we strongly
-recommend automatic digitial signatures to ensure that you can detect
+recommend automatic digital signatures to ensure that you can detect
changes to the container.
Digital signing can be used both with containers and with other
artifacts. With containers, we use Sigstore with their In-Toto
-attestations that we appply on `docker push`. The mark is replicated
+attestations that we apply on `docker push`. The mark is replicated
in full inside the attestation.
For other artifacts, the signature is stored in the Chalk mark, but is
@@ -1044,7 +1044,7 @@ docker.label_prefix: "com.example."
```
In the configuration file, we can also set up environment variables
-for reporting, such as by defining new environment variablaes and
+for reporting, such as by defining new environment variables and
using simple if / else logic to set a default if the environment
variable is not set on the host. For example:
@@ -1135,7 +1135,7 @@ reporting on any of those keys.
| Artifact | Any software artifact handled by Chalk, which can recursively include other artifacts. For instance, a Zip file is an artifact type that can currently be chalked, which can contain ELF executables that can also be chalked. |
| Chalk Mark | JSON containing metadata about a software artifact, generally inserted directly into the artifact in a way that doesn’t affect execution. Often, a chalk mark will be minimal, containing only small bits of identifying information that can be used to correlate the artifact with other metadata collected. |
| Unchalked | A software artifact that does not have a chalk mark embedded in it. |
-| Metadata Key | Each piece of metadata Chalk is able to collect (metadata being data about an artifact or a host on which an artifact has been found) is associated with a metadata key. Chalk reports all metadata in JSon key/value pairs, and you specify what gets added to a chalk mark and what gets reported on by listing the metadata keys you’re interested in via the report template and mark emplate. |
+| Metadata Key | Each piece of metadata Chalk is able to collect (metadata being data about an artifact or a host on which an artifact has been found) is associated with a metadata key. Chalk reports all metadata in JSON key/value pairs, and you specify what gets added to a chalk mark and what gets reported on by listing the metadata keys you’re interested in via the report template and mark template. |
| Chalking | The act of adding metadata to a software artifact. Aka, “insertion”. |
| Extraction | The act of reading metadata from artifacts and reporting on them. |
| Report | Every time Chalk runs, it will want to report on its activity. That can include information about artifacts, and also about the host. Reports are “published” to output “sinks”. By default, you’ll get reports output to the console, and written to a local log file, but can easily set up HTTPS post or writing to object storage either by supplying environment variables, or by editing the Chalk configuration. |
@@ -1143,6 +1143,6 @@ reporting on any of those keys.
| Mark Template | Like report templates, you have complete flexibility over what goes into chalk marks. A mark template is a specification of metadata keys that you want to go into the chalk mark. |
| Sinks | Output types handled by Chalk. Currently, chalk supports JSON log files, rotating (self-truncating) JSON log files, s3 objects, http/https post, and stdin/stdout. |
| Chalk ID | A value unique to an unchalked artifact. Usually, it is derived from the SHA-256 hash of the unchalked artifact, except when that hash is not available at chalking time, in which case, it’s random. Chalk IDs are 100 bits, and human readable (Base32). |
-| Metadata ID | A value unique to a chalked artifact. It is always derived from a normalized hash of all other metadata (except for any metadata keys involved in signing the Metadata ID). Metdata IDs are also 100 bits, and Base32 encoded. |
+| Metadata ID | A value unique to a chalked artifact. It is always derived from a normalized hash of all other metadata (except for any metadata keys involved in signing the Metadata ID). Metadata IDs are also 100 bits, and Base32 encoded. |
| Chalkable keys | Metadata keys that can be added to chalk marks. When reported for an artifact (e.g., during extraction in production), they will always indicate chalk-time metadata. |
| Non-chalkable keys | Metadata keys that will NOT be added to chalk marks. They will always be reported for the current operation, and start with a `_`. There are plenty of metadata keys that have chalkable and non-chalkable versions. |
diff --git a/src/docs/howto-app-inventory.md b/src/docs/howto-app-inventory.md
index 00fff409..1b487a20 100644
--- a/src/docs/howto-app-inventory.md
+++ b/src/docs/howto-app-inventory.md
@@ -15,16 +15,13 @@ figure out where the code lives and who owns it.
Similarly, developers often would like to know what versions of their
code are deployed where, especially when a bug report comes in.
-This how-to uses Chalk™ to automate this in five steps:
+This how-to uses Chalk™ to automate this easily:
-1. Load our `app-inventory` configuration
-2. Set up the Inventory web service
-3. Configure where Chalk reports get sent
+1. Start a web service to collect data (via docker)
+2. Load our `app-inventory` configuration
+3. (Optional) Start up a service to let us browse collected data.
-4. Automate calling docker via `chalk` in your build environment.
-5. Use it
-
-## Steps
+Each of the steps involves running only a single command.
### Before you start
@@ -32,186 +29,108 @@ The easiest way to get Chalk is to download a pre-built binary from
our [release page](https://crashoverride.com/releases). It's a
self-contained binary with no dependencies to install.
-### Step 1: Load our `app-inventory` configuration
-
-Chalk is designed so that you can easily pre-configure it for the
-behavior you want, so that you can generally just run a single binary
-with no arguments, to help avoid using it wrong.
-
-We're going to download and install a chalk configuration that does
-the following:
-
-1. Sets up Chalk to be able to seamlessly wrap invocations of Docker
- via a global alias.
-
-2. Configures Chalk to report not only build-time information, but
- runtime information when containers built with this recipe are run.
-
-3. Has everything report back to a container we'll deploy in the next step.
+Additionally, we'll install the reporting web service by running two
+docker containers: one for collecting logs, and the other to give us a
+web frontend to browse them.
-The container we'll deploy is a simple Python-based HTTP server
-integrated with SQLite. You'll be able to browse and search all the
-info you collect with SQL, or by adding any frontend you desire.
+### Step 1: Set up the Inventory web service
-Or, you can easily use any HTTP / HTTPS endpoint you like.
+We've put together a simple Python-based API Server that will accept
+reports from the chalk binary we're configuring, and stick things
+in an SQLite database.
-The base configuration for this recipe though, will assume the
-reporting container is always running on 'localhost:8585'.
+The SQLite database will live in `~/.local/c0/chalkdb.sqlite`.
-We can fix that after we get things up and running. For now, let's
-just install the base.
-
-Assuming that you've downloaded Chalk, and it's in your current
-directory, you would simply run:
+To start up the API server, which will create our database, run:
```bash
-./chalk load https://chalkdust.io/app-inventory.c4m
+docker run --rm -d -w /db -v $HOME/.local/c0/:/db -p 8585:8585 --restart unless-stopped ghcr.io/crashappsec/chalk-test-server
```
-This downloads our config, tests it, and loads it into the binary.
+This will set up an API server on port 8585 on your machine,
+accessible from any interface. Note, it will run in the background.
-Note that Chalk reconfigures itself by editing its binary. So it's
-best when configuring to have write access to the binary. If you do
-not, then copy the binary and run it from someplace you do.
+### Step 2: Load our `app-inventory` configuration
-### Step 2: Set up the Inventory web service
+Chalk can load remote modules to reconfigure functionality. Even if
+you've already configured Chalk, you can simply run:
-We are going to set up two containers:
+```
+./chalk load https://chalkdust.io/app_inventory.c4m
+```
-1. A simple Python-based API Server that will accept reports from the
- chalk binary we're configuring, and stick things in the SQLite
- database.
+You will be prompted to enter the IP address for the server we set up
+in the previous step. The default will be your personal IP
+address. For instance, I get:
-2. A container running an SQLite Web interface to give us a reasonable
- GUI on top of it.
+![Output 1](../img/appinv-ss1.png)
-Both of these images will need to share a single SQLite database. The
-API server we'll want to configure to listen for connections on
-external interfaces.
+Generally, the default should work just fine.
-Then, in the next step, we're going to want to re-configure our Chalk
-binary to use the public IP address of the container.
+After accepting the binary, it'll prompt you one more time to finish
+the setup. The resulting binary will be fully configured, and can be
+taken to other machines, as long as your server container stays up.
-Let's put our SQLite database in `~/.local/c0/chalkdb.sqlite`.
+There's nothing else you need to do to keep this new configuration--
+Chalk rewrites data fields in its own binary when saving the
+configuration changes.
-First, let's start up the API server, which will create our database
-for us:
+### Step 3: Browse some data!
-```bash
-docker run \
- --rm \
- -d \
- -w /db \
- -v $HOME/.local/c0/:/db \
- -p 8585:8585 \
- ghcr.io/crashappsec/chalk-test-server
-```
+Now, we should build and deploy some containers using Chalk, so you
+can see real data in the database.
-This will set up an API server on port 8585 on your machine,
-accessible from any interface. Note, it will run in the background.
-You can verify the healthcheck of the server by running
+As a really simple example, let's build a container that prints load
+averages once a minute to stdout.
+First, we'll write a script for this:
```bash
-curl http://localhost:8585/health
+cat > example.sh < Dockerfile < 💀 We do _not_ recommend /etc/profile.d because some (non-login)
-> shells will not use this.
-
-Once you add this, you can log out and log back in to make the alias
-take effect, our simply `source` the file:
+If you're not an SQLite expert, we can run a web service that points
+to the same database, that makes it a bit easier to browse.
+Let's set it up on port 8080:
```bash
-source /etc/bash.bashrc
+docker run -d -p 3000:3000 -p 3001:3001 -v $HOME/.local/c0/chalkdb.sqlite:/chalkdb.sqlite lscr.io/linuxserver/sqlitebrowser:latest
```
-Now, whenever a new bash shell gets created that starts a `docker`
-process, they'll be automatically configured to call `chalk`
-instead. The way we've configured `chalk`, when it doesn't see any of
-its own commands, it knows to use the Chalk `docker` command.
-
-That command always runs the Docker command intended by the user, but
-in our case:
+The database GUI will be available on port 3000. But, our database
+will be empty until we start using Chalk, so definitely use chalk to
+build and deploy some workloads.
-1. Collects information about the build environment; and
-2. Slightly adjusts the Docker input so that Chalk will also start up
- with containers, and report to your Inventory web service.
-
-### Step 5: Use it
-
-Build and deploy some workloads. Once you do, from the machine you
-deployed the containers, browse SQLite database at
+Now, you can browse your SQLite database at
[http://localhost:8080](http://localhost:8080).
The database will be capturing both the repositories you're using to
@@ -228,38 +147,37 @@ information AND the containers you deploy.
> wrapping `docker push`, but you'll have to go through extra work to
> link them together; the CHALK_ID will work.
-In addition to manually browsing the SQLite database, you can query
-some of the data via the API.
-
-To list all built docker images you can list all chalk marks:
-
-```bash
-curl "http://localhost:8585/chalks" -s | jq
-```
-
-To see all `exec` chalk reports from running containers:
-
-```bash
-curl "http://localhost:8585/reports?operation=exec" -s | jq
-```
-
-To see all available endpoints you can see the Swagger docs of the API
-at [http://localhost:8585/docs](http://localhost:8585/docs)
+If you like Chalk, you can easily deploy across your docker builds and
+deploys by adding a global alias. See the [howto for docker deployment](./howto-deploy-chalk-globally-using-docker.md)
## Warning
-This how-to was written for local demonstration purposes only.There is no security for this how-to. You should always have authn, authz and uses SSL as an absolute minimum.
+This how-to was written for local demonstration purposes only. There is
+no security for this how-to. You should always have authn, authz, and
+use SSL as an absolute minimum.
## Our cloud platform
-While creating a basic app inventory with Chalk is easy, our cloud platform makes it even easier. It is designed for enterprise deployments, and provides additional functionality including prebuilt configurations to solve common tasks, prebuilt integrations to enrich your data, a built-in query editor, an API and more.
+While creating a basic app inventory with Chalk is easy, our cloud
+platform makes it even easier. It is designed for enterprise
+deployments, and provides additional functionality including prebuilt
+configurations to solve common tasks, prebuilt integrations to enrich
+your data, a built-in query editor, an API and more.
-There are both free and paid plans. You can [join the waiting list](https://crashoverride.com/join-the-waiting-list) for early access.
+There are both free and paid plans. You can [join the waiting
+list](https://crashoverride.com/join-the-waiting-list) for early
+access.
### Background Information
-Traditionally IT departments maintained list of their hardware and software assets in a CMDB or [configuration management data base](https://en.wikipedia.org/wiki/Configuration_management_database). These systems were not designed for modern cloud based software and the complexity of code that they are made from.
+Traditionally, IT departments maintained lists of their hardware and
+software assets in a CMDB or [configuration management data
+base](https://en.wikipedia.org/wiki/Configuration_management_database). These
+systems were not designed for modern cloud based software and the
+complexity of code that they are made from.
-Spotify created a project called [Backstage](https://backstage.io) to centralise developer documentation. Many companies now use it as a source of truth for their development teams.
+Spotify created a project called [Backstage](https://backstage.io) to
+centralise developer documentation. Many companies now use it as a
+source of truth for their development teams.
-Many companies create application inventories using spreadsheets.
+Many companies create application inventories using spreadsheets.
\ No newline at end of file
diff --git a/src/docs/howto-compliance.md b/src/docs/howto-compliance.md
index 2822a5c4..19fc2d85 100644
--- a/src/docs/howto-compliance.md
+++ b/src/docs/howto-compliance.md
@@ -20,9 +20,11 @@ This information is complex and tedious to generate, and manage.
This how-to uses Chalk™ to automate this in two steps:
-1. Configure chalk to generate SBOMs, collect code provenance data, and digitally sign it
+1. Load our basic compliance configuration.
-2. Configure chalk to automatically generate compliance reports
+2. Turn on signing.
+
+3. Build software using Docker.
As a big bonus, with no extra effort, you can be [SLSA](https://slsa.dev) [level 2](https://slsa.dev/spec/v1.0/levels) compliant, before people start officially requiring SLSA [level 1](https://slsa.dev/spec/v1.0/levels)
compliance.
@@ -41,40 +43,28 @@ Chalk is designed so that you can easily pre-configure it for the
behavior you want, and so that you can generally just run a single binary
with no arguments, to help avoid using it wrong.
-Therefore, for this how-to, you should configure your binary with
-either our `compliance-docker` configuration, or our
-`compliance-other` configuration, depending on whether you're using
-docker or not.
-
-Assuming you've downloaded chalk into your working directory, in the
-docker case, you would run:
-
-```
-./chalk load https://chalkdust.io/compliance-docker.c4m
-```
-
-Otherwise, run:
+Assuming you've downloaded chalk into your working directory, you just
+need to run:
```
-./chalk load https://chalkdust.io/compliance-other.c4m
+./chalk load https://chalkdust.io/compliance_docker.c4m
```
-The profile we've loaded changes only three things from the default
+The profile we've loaded changes two key things from the default
behavior:
1. It enables the collection of SBOMS (off by default because on large
projects this can add a small delay to the build)
-2. Specifies that any SBOM produced should be added to the chalk mark.
-
-3. It configures the default action for the binary, when no specific
- command is applied (this is the only difference between the two
- configurations).
+2. Specifies that any SBOM produced should be added to built
+artifacts.
By default, chalk is already collecting provenance information by
examining your project's build environment, including the .git
directory and any common CI/CD environment variables.
+### Step 2: Turn on signing
+
To setup digital signing we have built yet another easy button.
Simply run:
@@ -95,48 +85,23 @@ At this point, your chalk binary will have re-written itself to
contain most of what it needs to sign, except for a `secret` that it
requests dynamically from our secret service.
-### Step 2: Configure chalk to automatically generate compliance reports
+### Step 3: Build software
-Now that the binary is configured, you probably will want to move
-the `chalk` binary to a system directory that's in your `PATH`.
+Now that the binary is configured, you may want to move the
+`chalk` binary to a system directory that's in your `PATH`. If you're
+running Docker, we recommend adding a global alias, so that Chalk
+always runs. See the [howto for docker deployment](./howto-deploy-chalk-globally-using-docker.md).
How you run chalk depends on whether you're building via `docker` or not:
- _With docker_: You "wrap" your `docker` commands by putting the
- word `chalk` in front of them.
-
-- _Without docker_ : You simply invoke `chalk` in your build pipeline.
-
-As configured, anyone with access to the artifact can use chalk to not
-only see the chalk mark, but to validate the signature.
+ word `chalk` in front of them. That's it.
-Any build of chalk can extract the chalk mark, and verify
-everything. While we configured our binary to add marks by default,
-the `extract` command will pull them out and verify them.
+- _Without docker_ : You simply invoke `chalk insert` in your build
+ pipeline. It defaults to inserting marks into artifacts in your
+ current working directory.
-By default, it will look on the file system in the same way that
-insertion did when we weren't using Docker. So:
-
-```
-chalk extract
-```
-
-Chalk extract will report on anything it finds under your current working directory,
-And so will extract the mark from any artifacts you chalked that weren't
-containerized. But if you built the example container, we can extract
-from the example container easily by just by providing a reference to it:
-
-```
-chalk extract ghcr.io/viega/wordsmith:latest
-```
-
-Either will pull all the metadata chalk saved during the first
-operation, and log it, showing only a short summary report to your
-console. If the signature validation fails, then you'll get an obvious
-error! If anyone tampers with a mark, or changes a file after the
-chalking, it's easily detected.
-
-#### Step 2a: Docker
+#### Step 3a: Docker
Your `build` operations will add a file (the _"chalk mark"_) into the
container with provenance info and other metadata, and any SBOM
@@ -168,7 +133,7 @@ use features that `chalk` doesn't understand), the program will
_always_ makes sure the original `docker` command gets run if it
doesn't successfully exit when wrapped.
-#### Step 2b: When Not Using Docker
+#### Step 3b: When Not Using Docker
When you invoke chalk as configured, it searches your working
directory for artifacts, collects environmental data, and then injects
@@ -191,11 +156,39 @@ file. JAR files (and other artifacts based on the ZIP format) are
handled similarly to container images, and there are marking
approaches for a few other formats, with more to come.
+### What to tell other people
+
+Any `chalk` executable can extract the chalk mark, and verify
+everything. While we configured our binary to add marks by default, the
+`extract` command will pull them out and verify them.
+
+Chalk extract will report on anything it finds under your current
+working directory, and so will extract the mark from any artifacts you
+chalked that weren't containerized. But if you built the example
+container, we can extract from the example container easily (or any
+other container you have a local copy of) just by providing a
+reference to it:
+
+```
+chalk extract ghcr.io/viega/wordsmith:latest
+```
+
+Either will pull all the metadata chalk saved during the first
+operation, and log it, showing only a short summary report to your
+console. If the signature validation fails, then you'll get an obvious
+error! If anyone tampers with a mark, or changes a file after the
+chalking, it is clear in the output.
+
## Our cloud platform
-While creating compliance reports with chalk is easy, our cloud platform makes it even easier. It is designed for enterprise deployments, and provides additional functionality including prebuilt configurations to solve common tasks, prebuilt integrations to enrich your data, a built-in query editor, an API and more.
+While creating compliance reports with chalk is easy, our cloud
+platform makes it even easier. Not only can you collect software
+compliance information automatically, you can easily share it with
+anyone who needs it.
-There are both free and paid plans. You can [join the waiting list](https://crashoverride.com/join-the-waiting-list) for early access.
+There are both free and paid plans. You can [join the waiting
+list](https://crashoverride.com/join-the-waiting-list) for early
+access.
### Background information
@@ -306,7 +299,8 @@ provide out-of-band.
That's why, when you ran `chalk setup`, Chalk output to disk two files:
1. _chalk.pub_ The public key you can give people out of band.
- 2 _chalk.pri_ The ENCRYPTED private key (just in case you want to load
+
+2. _chalk.pri_ The ENCRYPTED private key (just in case you want to load
the same key into a future chalk binary).
When you run `chalk setup`, we generate a keypair for you, and encrypt
@@ -409,4 +403,4 @@ Log4J is a popular logging library for the Java programming language. In late 20
The SolarWinds attack used an IT monitoring system, Orion, which which had over 30,000 organizations including Cisco, Deloitte, Intel, Microsoft, FireEye, and US government departments, including the Department of Homeland Security. The attackers created a backdoor that was delivered via a software update.
-[The Untold Story of the Boldest Supply-Chain Hack Ever](https://www.wired.com/story/the-untold-story-of-solarwinds-the-boldest-supply-chain-hack-ever/) - Wired Magazine
+[The Untold Story of the Boldest Supply-Chain Hack Ever](https://www.wired.com/story/the-untold-story-of-solarwinds-the-boldest-supply-chain-hack-ever/) - Wired Magazine
\ No newline at end of file
diff --git a/src/docs/howto-deploy-chalk-globally-using-docker.md b/src/docs/howto-deploy-chalk-globally-using-docker.md
new file mode 100644
index 00000000..f99f3539
--- /dev/null
+++ b/src/docs/howto-deploy-chalk-globally-using-docker.md
@@ -0,0 +1,92 @@
+# Deploy Chalk globally via Docker
+
+### Automatically get visibility for every Docker build
+
+## Summary
+
+One of the biggest challenges in automatically tying together
+information you have about production with information you have about
+source code is deploying at scale.
+
+Nobody wants to deploy one repository at a time, and if you do ask
+people to add things to their pipelines, it will probably be forgotten
+or misused.
+
+With Chalk™, when your teams build via Docker, you can easily set up
+Chalk on your build systems to automatically operate on every docker
+build. All you need to do is:
+
+1. Install a configured Chalk binary.
+2. Set up a global alias for docker, having it call Chalk.
+
+That's it. Chalk figures the rest out.
+
+## Steps
+
+### Step 1: Install a configured binary.
+
+The easiest way to get Chalk is to download a pre-built binary from
+our [release page](https://crashoverride.com/releases). It's a
+self-contained binary with no dependencies to install.
+
+Configuring Chalk is also easy. For the sake of example, we will use
+our [compliance configuration](./howto-compliance.md).
+
+If Chalk is in your current directory, run:
+
+```
+./chalk load https://chalkdust.io/compliance_docker.c4m
+```
+
+When you install Chalk on your build systems, we recommend putting it
+in the same directory where your docker executable is, though anywhere
+in the default PATH is fine.
+
+### Step 2: Add a global alias
+
+You _could_ now deploy chalk and ask everyone to run it by invoking
+`chalk` before their docker commands. But that's easy to forget. It's
+really better to automatically call `chalk` when invoking Docker.
+
+You can do this easily with a global alias. Your build systems will
+have a global file for bash configuration, which, these days, is
+almost always `/etc/bash.bashrc` (but if it's not there, then it
+should be at `/etc/bashrc`).
+
+This file runs when any bash shell starts. All you need to add to this
+file is:
+
+```bash
+alias docker=chalk
+```
+
+> 💀 Some people add global aliases to /etc/profile.d, but we do _not_ recommend this, because some (non-login) shells will not use this.
+
+Once you add this, you can log out and log back in to make the alias
+take effect, or simply `source` the file:
+
+```bash
+source /etc/bash.bashrc
+```
+
+Now, whenever a new bash shell gets created that starts a `docker`
+process, it'll be automatically configured to call `chalk`
+instead. The way we've configured `chalk`, when it doesn't see any of
+its own commands, it knows to use the Chalk `docker` command.
+
+We always run the Docker command intended by the user, but we also
+collect and report on environmental info.
+
+You can also ask Chalk to add automatic data reporting on startup to
+built containers if you like, as described in [our how-to on building
+an application inventory](./howto-app-inventory.md)
+
+## Our cloud platform
+
+We have tried to make doing everything with Chalk as easy as possible, but our cloud
+platform makes it even easier. It is designed for enterprise
+deployments, and provides additional functionality including prebuilt
+configurations to solve common tasks, prebuilt integrations to enrich
+your data, a built-in query editor, an API and a lot more.
+
+There are both free and paid plans. You can [join the waiting list](https://crashoverride.com/join-the-waiting-list) for early access.
\ No newline at end of file
diff --git a/src/docs/howto-net-services.md b/src/docs/howto-net-services.md
index 90b705d4..d7681dbd 100644
--- a/src/docs/howto-net-services.md
+++ b/src/docs/howto-net-services.md
@@ -139,7 +139,7 @@ configured reporting.
You should see some additional JSON output from `chalk` after the
build finishes, identifying the metadata information for the newly
-chalked contianer:
+chalked container:
```json
[
@@ -187,7 +187,7 @@ chalked contianer:
If you built your container with the commands above, you should now be able to now run it with:
`docker run --rm -it mychalkedcontainer`
-Also, if you kept the the `output_to_screen` sink to be `enabled:
+Also, if you kept the `output_to_screen` sink to be `enabled:
true`, and set the heartbeat window to 10 seconds, then after 10
seconds you should see output similar to the following:
diff --git a/src/docs/img/appinv-ss1.png b/src/docs/img/appinv-ss1.png
new file mode 100644
index 00000000..cae15913
Binary files /dev/null and b/src/docs/img/appinv-ss1.png differ
diff --git a/src/plugin_api.nim b/src/plugin_api.nim
index a9fe1548..6798593f 100644
--- a/src/plugin_api.nim
+++ b/src/plugin_api.nim
@@ -392,30 +392,6 @@ proc mustIgnore(path: string, regexes: seq[Regex]): bool {.inline.} =
trace("We will NOT report additional path skips.")
return true
-template symlinkCheck(path: string) =
- if skipLinks:
- warn("Skipping symbolic link: " & path & """\n
-Use --clobber to follow and clobber the linked-to file when inserting,
-or --copy to copy the file and replace the symlink.""")
- continue
- elif useDstName:
- var
- newPath = path
- i = 40
- while i != 0:
- newPath = readlink(newPath)
-
- if getFileInfo(newPath, followSymlink = false).kind != pcLinkToFile:
- break
- i -= 0
-
- if i != 0:
- let opt = self.scanLocation(newPath)
- if opt.isSome():
- let chalk = opt.get()
- result.add(chalk)
- chalk.chalkCloseStream()
-
proc scanArtifactLocations*(self: Plugin, state: ArtifactIterationInfo):
seq[ChalkObj] =
# This will call scan() with a file stream, and you pass back a
diff --git a/src/plugins/elf.nim b/src/plugins/elf.nim
index d60114a9..33002bd5 100644
--- a/src/plugins/elf.nim
+++ b/src/plugins/elf.nim
@@ -6,6 +6,10 @@
##
import ../config, algorithm
+# We've got a lot of ELF-specific defines we're not using but we
+# want to keep around, so silence any warnings.
+{.hint[XDeclaredButNotUsed]: off.}
+
const
NOT8 = uint64(0) - 9
NULLBYTE = '\x00'
diff --git a/src/plugins/procfs.nim b/src/plugins/procfs.nim
index 649e1286..e6ff576d 100644
--- a/src/plugins/procfs.nim
+++ b/src/plugins/procfs.nim
@@ -7,6 +7,12 @@
## Pull metadata from the proc file system on Linux.
+# There are a few unused symbols in here that we intend to use later.
+# Plus, on MacOS, we'll get unused symbol errors too. So just turn
+# them off for this file.
+
+{. hint[XDeclaredButNotUsed]: off.}
+
when hostOs != "linux":
{.warning[UnusedImport]: off.}
@@ -496,9 +502,8 @@ template getTCPSockInfo(): ProcTable =
template getUDPSockInfo(): ProcTable =
getSockInfo(getRawUDPSockInfo().get(""), udpStatusMap)
-# Can be used for UDP or TCP
-# but currently isn't being used yet.
-proc getProcSockInfo(allSockInfo: ProcTable, myFdInfo: ProcFdSet): ProcTable =
+proc getProcSockInfo(allSockInfo: ProcTable, myFdInfo: ProcFdSet):
+ ProcTable =
var allInodes: seq[string]
for k, v in myFdInfo:
diff --git a/src/plugins/system.nim b/src/plugins/system.nim
index 00d97311..236dd6d9 100644
--- a/src/plugins/system.nim
+++ b/src/plugins/system.nim
@@ -274,6 +274,7 @@ proc sysGetChalkTimeHostInfo*(self: Plugin): ChalkDict {.cdecl.} =
result.setIfNeeded("TZ_OFFSET_WHEN_CHALKED", pack(getOffset()))
result.setIfNeeded("DATETIME_WHEN_CHALKED", pack(getDateTime()))
result.setIfNeeded("PLATFORM_WHEN_CHALKED", getChalkPlatform())
+ result.setIfNeeded("PUBLIC_IPV4_ADDR_WHEN_CHALKED", pack(getMyIpV4Addr()))
when defined(posix):
result.setIfNeeded("HOSTINFO_WHEN_CHALKED", uinfo.version)
diff --git a/src/selfextract.nim b/src/selfextract.nim
index 1658bd54..41622522 100644
--- a/src/selfextract.nim
+++ b/src/selfextract.nim
@@ -7,7 +7,8 @@
## Code specific to reading and writing Chalk's own chalk mark.
-import config, plugin_api, posix, collect, con4mfuncs, chalkjson, util
+import config, plugin_api, posix, collect, con4mfuncs, chalkjson, util,
+ docker_base
proc handleSelfChalkWarnings*() =
if not canSelfInject:
@@ -22,7 +23,6 @@ template cantLoad*(s: string) =
error(s)
quit(1)
-
proc getSelfExtraction*(): Option[ChalkObj] =
# If we call twice and we're on a platform where we don't
# have a codec for this type of executable, avoid dupe errors.
@@ -168,7 +168,8 @@ proc writeSelfConfig*(selfChalk: ChalkObj): bool
selfChalk.makeNewValuesAvailable()
return true
-proc testConfigFile*(uri: string, newCon4m: string) =
+proc testConfigFile*(uri: string, newCon4m: string, params: seq[Box]):
+ ConfigState =
info(uri & ": Validating configuration.")
if chalkConfig.loadConfig.getValidationWarning():
warn("Note: validation involves creating a new configuration context" &
@@ -197,12 +198,27 @@ proc testConfigFile*(uri: string, newCon4m: string) =
# Test Run will cause (un)subscribe() to ignore subscriptions, and
# will suppress log messages, etc.
stack.run()
+ for item in params:
+ let
+ row = unpack[seq[Box]](item)
+ attr = unpack[bool](row[0])
+ url = unpack[string](row[1])
+ sym = unpack[string](row[2])
+ c4mType = toCon4mType(unpack[string](row[3]))
+ value = row[4]
+ if attr:
+ stack.configState.setAttributeParamValue(url, sym, value, c4mType)
+ else:
+ stack.configState.setVariableParamValue(url, sym, value, c4mType)
+
startTestRun()
- stack.addConfLoad(uri, toStream(newCon4m)).run()
+ stack.addConfLoad(uri, toStream(newCon4m))
+ stack.run()
endTestRun()
if stack.errored:
quit(1)
info(uri & ": Configuration successfully validated.")
+ return stack.configState
except:
dumpExOnDebug()
cantLoad(getCurrentExceptionMsg() & "\n")
@@ -219,6 +235,49 @@ const nocache = ["configs/ioconfig.c4m", "configs/sastconfig.c4m",
"configs/sbomconfig.c4m", "configs/attestation.c4m",
"configs/getopts.c4m"]
+proc updateArchBinaries*(newConfig: string, newParams: seq[Box],
+ bins: TableRef[string, string] = nil) =
+ var binInfo: TableRef[string, string]
+
+ if bins != nil:
+ binInfo = bins
+ elif not chalkConfig.loadConfig.getUpdateArchBinaries():
+ return
+ else:
+ binInfo = chalkConfig.dockerConfig.getArchBinaryLocations().getOrElse(nil)
+
+ if binInfo == nil or len(binInfo) == 0:
+ trace("No multi-arch binaries to load.")
+ return
+ for arch, unresolvedLocation in binInfo:
+ let location = unresolvedLocation.resolvePath()
+
+ info("Attempting to update config for architecture: " & arch & " (" &
+ location & ")")
+ let
+ (dir, fname) = location.splitPath()
+ confLoc = dir.joinPath("config.c4m")
+
+ if not tryToWriteFile(confLoc, newConfig):
+ warn("Could not write config to: " & confLoc)
+ continue
+
+ let
+ chalkMnt = location & ":/chalk"
+ confMnt = confLoc & ":/config.c4m"
+ ctrCmd = "chmod +x /chalk && /chalk load --replace /config.c4m"
+ arch = "linux/" & arch
+ args = @[ "run", "--rm", "--platform", arch, "-v", chalkMnt, "-v",
+ confMnt, "alpine", "sh", "-c", ctrCmd]
+ output = runDockerGetEverything(args, stdin = boxToJson(pack(newParams)))
+
+ if output.getExit() != 0:
+    warn("Docker command to update config for architecture " & arch &
+ " failed: " & output.getStderr())
+ continue
+ else:
+ info("Successfully updated config for architecture " & arch)
+
proc handleConfigLoad*(inpath: string) =
assert selfChalk != nil
@@ -238,10 +297,12 @@ proc handleConfigLoad*(inpath: string) =
alreadyCached = haveComponentFromUrl(runtime, path).isSome()
(uri, module, _) = path.fullUrlToParts()
curConfOpt = selfChalkGetKey("$CHALK_CONFIG")
+ validate = chalkConfig.loadConfig.getValidateConfigsOnLoad()
var
component: ComponentInfo
replace: bool
+ testState: ConfigState
try:
component = runtime.loadComponentFromUrl(path)
@@ -269,33 +330,44 @@ proc handleConfigLoad*(inpath: string) =
else:
newEmbedded = component.source
- if len(toConfigure) == 0:
+ if replace:
info("Attempting to replace base configuration from: " & path)
else:
- info("Attempting to load configuration module from: " & path)
- runtime.basicConfigureParameters(component, toConfigure)
-
- if replace or alreadyCached == false:
- # If we just reconfigured a component, then we don't bother testing.
- if chalkConfig.loadConfig.getValidateConfigsOnLoad():
- testConfigFile(path, newEmbedded)
+ info("Attempting to load module from: " & path)
+
+ if chalkConfig.loadConfig.getParamsViaStdin():
+ try:
+ let
+ chalkJsonTree = newStringStream(stdin.readLine()).chalkParseJson()
+ runtime = getChalkRuntime()
+
+ if chalkJsonTree.kind != CJArray:
+ raise newException(IOError, "")
+ for row in chalkJsonTree.items:
+ if row.kind != CJArray or row.items.len() != 5:
+ raise newException(IOError, "")
+ let
+ attr = row.items[0].boolval
+ url = row.items[1].strval
+ sym = row.items[2].strval
+ c4mType = row.items[3].strval.toCon4mType()
+ value = row.items[4].jsonNodeToBox()
+ if attr:
+ runtime.setAttributeParamValue(url, sym, value, c4mType)
+ else:
+ runtime.setVariableParamValue(url, sym, value, c4mType)
+ except:
+ error("Invalid json parameters via stdin: " & getCurrentExceptionMsg())
+ dumpExOnDebug()
+ quit(1)
+ elif validate:
+ let prompt = "Press [enter] to check your configuration for conflicts."
+ runtime.basicConfigureParameters(component, toConfigure, prompt)
else:
- warn("Skipping configuration validation. This could break chalk.")
-
- selfChalkSetKey("$CHALK_CONFIG", pack(newEmbedded))
-
- # Now, load the code cache.
- var cachedCode = OrderedTableRef[string, string]()
-
- for _, onecomp in runtime.components:
- if onecomp.url in nocache:
- continue
- if replace and onecomp == component:
- continue
- if onecomp.source != "":
- cachedCode[onecomp.url] = onecomp.source
+ runtime.basicConfigureParameters(component, toConfigure)
- # Load any saved parameters.
+ # Load any saved parameters; we will pass them off to any
+ # testing, plus we will need to save them!
var
allComponents = runtime.programRoot.getUsedComponents()
params: seq[Box]
@@ -309,10 +381,38 @@ proc handleConfigLoad*(inpath: string) =
params.add(paramsToBox(false, component.url, v.name, v.defaultType,
v.value.get()))
-
for _, v in component.attrParams:
params.add(paramsToBox(true, component.url, v.name, v.defaultType,
v.value.get()))
+ if validate:
+ testState = testConfigFile(path, newEmbedded, params)
+ assert testState != nil
+ else:
+ warn("Skipping configuration validation. This could break chalk.")
+
+ selfChalkSetKey("$CHALK_CONFIG", pack(newEmbedded))
+
+ # Now, load the code cache.
+ var cachedCode = OrderedTableRef[string, string]()
+
+ for _, onecomp in runtime.components:
+ if onecomp.url in nocache:
+ continue
+ if replace and onecomp == component:
+ continue
+ if onecomp.source != "":
+ cachedCode[onecomp.url] = onecomp.source
+
+
selfChalkSetKey("$CHALK_COMPONENT_CACHE", pack(cachedCode))
selfChalkSetKey("$CHALK_SAVED_COMPONENT_PARAMETERS", pack(params))
+
+ if testState != nil:
+ let archOpt: Option[TableRef[string, string]] =
+ getOpt[TableRef[string, string]](testState, "docker.arch_binary_locations")
+ if archOpt.isSome():
+ updateArchBinaries(newEmbedded, params, archOpt.get())
+ return
+
+ updateArchBinaries(newEmbedded, params)
diff --git a/src/util.nim b/src/util.nim
index 8874e2fb..0eef580b 100644
--- a/src/util.nim
+++ b/src/util.nim
@@ -202,7 +202,8 @@ elif hostOs == "macosx":
else:
template makeCompletionAutoSource() = discard
-const currentAutocompleteVersion = (0, 1, 1)
+const currentAutocompleteVersion = (0, 1, 3)
+
proc validateMetadata*(obj: ChalkObj): ValidateResult {.importc.}
proc autocompleteFileCheck*() =
@@ -277,6 +278,8 @@ proc autocompleteFileCheck*() =
template otherSetupTasks*() =
setupManagedTemp()
autocompleteFileCheck()
+ if isatty(1) == 0:
+ setShowColor(false)
var exitCode = 0