feat: Add Cloudflare Environment Variable and Secret support (#41)
* Add Cloudflare Environment Variable and Secret support

* Regenerate package-lock.json after merge

Co-authored-by: Nolan Woods <nolan_w@sfu.ca>
innovate-invent and Nolan Woods authored Mar 25, 2020
1 parent 997d5d8 commit 2091bb5
Showing 9 changed files with 2,094 additions and 1,992 deletions.
30 changes: 30 additions & 0 deletions README.md
@@ -33,6 +33,7 @@ $ nodemon --watch /path/to/worker.js --signal SIGHUP --exec 'cloudflare-worker-l
* crypto.subtle
* Cloudflare key value store if you pass in the KV_NAMESPACE environment variable
* Cloudflare [event.passThroughOnException()](https://workers.cloudflare.com/docs/reference/workers-concepts/fetch-event-lifecycle/#passthroughonexception) for runtime exception handling
* Cloudflare Environment Variables and Secrets loaded from a wrangler.toml
* ... this list should probably have more things

## Contributors
@@ -65,3 +66,32 @@ Optionally, these variables are available as well:
MINIO_PORT, MINIO_USE_SSL, MINIO_REGION, MINIO_TRANSPORT, MINIO_SESSIONTOKEN, and MINIO_PARTSIZE

See [the Minio documentation](https://docs.min.io/docs/javascript-client-api-reference.html) for details on the various parameters.

## Cloudflare Environment Variables and Secrets

Support for Cloudflare Environment Variables and Secrets is provided via a wrangler.toml file.
See the [wrangler documentation](https://developers.cloudflare.com/workers/tooling/wrangler/configuration)
for more information on the file schema.

To load the wrangler.toml, specify it on the command line:
```shell
$ cloudflare-worker-local /path/to/worker.js localhost:3000 4000 /path/to/wrangler.toml
```

Optionally, a specific environment defined within the wrangler.toml can be selected:
```shell
$ cloudflare-worker-local /path/to/worker.js localhost:3000 4000 /path/to/wrangler.toml production
```

Secrets are specified under the 'secrets' root key in the document. See the [wrangler.toml](./examples/wrangler.toml)
for an example of the supported structures.

Two features are provided when loading the wrangler.toml:
* All string values in vars and secrets can contain ${} placeholders.
A placeholder path is resolved using lodash.get against the root of the config document.
A placeholder cannot refer to a value defined later in the document that itself contains placeholders.
* Any var or secret that is not a string is automatically JSON-encoded.
This lets you inject complex data into a script and recover it by JSON-decoding the variable value (see the sketch after this list).
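
For illustration, here is a minimal sketch (assuming the repository root as the working directory) of loading the example [wrangler.toml](./examples/wrangler.toml) with the helpers from lib/wrangler.js:
```javascript
// A minimal sketch, run against the examples/wrangler.toml added in this commit.
const wrangler = require("./lib/wrangler");

// Resolve ${} placeholders and select the base (or a named) environment.
const config = wrangler.loadConfig("./examples/wrangler.toml");
// JSON-encode any non-string vars and secrets in place.
wrangler.toJSON(config);

// These are the values a worker script sees as global bindings:
console.log(config.vars.variable1);   // "somevalue"
console.log(config.secrets.mysecret); // "not4u"
console.log(config.secrets.foo);      // '{"bar":"shhh"}' -- JSON.parse it inside the worker
```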

Additionally, any 'kv-namespaces' in the wrangler.toml will be appended to the list of namespaces
provided by KV_NAMESPACES.
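
A worker started with the example wrangler.toml can then read the `wranglerKV` binding declared there just like any store named in KV_NAMESPACES; a minimal sketch of such a worker script (mirroring the server spec below):
```javascript
// Sketch of a worker script; the wranglerKV binding comes from the
// kv-namespaces entry in examples/wrangler.toml.
addEventListener("fetch", (event) => {
  event.respondWith(
    wranglerKV.get("key").then((value) => new Response(value))
  );
});
```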
24 changes: 24 additions & 0 deletions app/__tests__/server_spec.js
@@ -1,6 +1,7 @@
const { createApp } = require("../server");
const supertest = require("supertest");
const { MinioKVStore, Minio } = require("../minio-kv-store");
const wrangler = require("../../lib/wrangler");

describe("server", () => {
it("returns the response from the worker", async () => {
@@ -68,6 +69,29 @@ describe("server", () => {
.expect(200, "value");
});

it("can load CloudFlare 'environment variables' and 'secrets' from wrangler.toml", async () => {
let kvStores = ["MYSTORE"]; // Pre-existing store, to verify wrangler namespaces do not clobber it
// Import config from provided wrangler.toml
const config = wrangler.loadConfig(__dirname + "/../../examples/wrangler.toml");
wrangler.toJSON(config);
const env = {...config.vars, ...config.secrets};
if (Array.isArray(config['kv-namespaces'])) kvStores = kvStores.concat(config['kv-namespaces'].map(n=>n.binding));
const app = createApp(
'addEventListener("fetch", (e) => e.respondWith(Promise.all([MYSTORE.get("key"), wranglerKV.get("key")]).then(([v, x]) => new Response(JSON.stringify({MYSTORE: v, wranglerKV: x, variable1, foo})))))',
{
kvStores,
env
}
);

await app.stores.MYSTORE.put("key", "value");
await app.stores.wranglerKV.put("key", "value");

await supertest(app)
.get("/some-route")
.expect(200, '{"MYSTORE":"value","wranglerKV":"value","variable1":"somevalue","foo":"{\\"bar\\":\\"shhh\\"}"}');
});

it("allows big post request", async () => {
let body = "x"
for (let i = 0; i < 20; i++) {
11 changes: 11 additions & 0 deletions app/__tests__/worker_spec.js
@@ -224,6 +224,17 @@ describe("Workers", () => {
expect(await kvStoreFactory.getClient("MYSTORE").get("foo")).toBe(undefined);
});

test("It can access CloudFlare 'environment variables' and 'secrets' ", async () => {
const worker = new Worker(
"foo.com",
`addEventListener('test', () => ({ variable1, foo }))`,
{env: {variable1: "somevalue", foo: '{"bar": "shhh"}'}}
);
const { variable1, foo } = worker.triggerEvent("test");
expect(variable1).toBe("somevalue");
expect(foo).toBe('{"bar": "shhh"}');
});

test("It fetches directly from origin is passThroughOnException() is called", async () => {
const worker = new Worker(
upstreamHost,
8 changes: 4 additions & 4 deletions app/worker.js
@@ -65,17 +65,17 @@ class FetchEvent {

class Worker {
constructor(origin, workerContents, opts = {}) {
const { upstreamHost, kvStores = {} } = opts;
const { upstreamHost, kvStores = {}, env = {} } = opts;
this.listeners = {
fetch: e => e.respondWith(this.fetchUpstream(e.request))
};
this.upstreamHost = upstreamHost;
this.origin = origin;

this.evaluateWorkerContents(workerContents, kvStores);
this.evaluateWorkerContents(workerContents, kvStores, env);
}

evaluateWorkerContents(workerContents, kvStores) {
evaluateWorkerContents(workerContents, kvStores, env) {
const context = {
// From fetch
Request,
@@ -108,7 +108,7 @@ class Worker {
const script = new Script(workerContents);
script.runInContext(
createContext(
Object.assign(context, kvStores, {
Object.assign(context, kvStores, env, {
fetch: this.fetchUpstream.bind(this),
addEventListener: this.addEventListener.bind(this),
triggerEvent: this.triggerEvent.bind(this),
19 changes: 19 additions & 0 deletions examples/wrangler.toml
@@ -0,0 +1,19 @@
name = "example"
type = "javascript"
workers_dev = false
route = "example.com/*"
account_id = "account id"
zone_id = "zone id"

kv-namespaces = [
{ binding = "wranglerKV", id = "06779da6940b431db6e566b4846d64db" }
]

[vars]
variable1 = "somevalue"

[secrets]
mysecret = "not4u"

[secrets.foo]
bar = 'shhh'
89 changes: 89 additions & 0 deletions lib/wrangler.js
@@ -0,0 +1,89 @@
"use strict";
exports.__esModule = true;
var fs = require("fs");
var TOML = require("@iarna/toml");
var lodash_get_1 = require("lodash.get");
var lodash_merge_1 = require("lodash.merge");
var placeholder = /(?<!\\)\$\{([^\}]+)\}/g; // negative lookbehind is supported by V8, and therefore by Cloudflare Workers and Node.js
//TODO remove '\' from escaped '\${}'
/**
* Load the CloudFlare wrangler.toml file
* This replaces all ${} placeholders in vars and secrets string values.
* A placeholder path is resolved using lodash.get and has the context of the root of the config document.
* A placeholder can not refer to a value defined later in the document that also has placeholders.
* @param path filesystem path to wrangler.toml file
* @param env env to load from wrangler.toml file
*/
function loadConfig(path, env) {
var data = fs.readFileSync(path, { encoding: 'utf8' });
var config = TOML.parse(data);
// Load env
if (env && config.env && config.env[env]) {
var env_config = config.env[env];
delete config['env'];
lodash_merge_1["default"](config, env_config);
}
else {
// Use base env
delete config['env'];
}
// Populate all ${} placeholders in vars
if (config.vars) {
for (var _i = 0, _a = Object.entries(config.vars); _i < _a.length; _i++) {
var _b = _a[_i], k = _b[0], v = _b[1];
if (typeof v === 'string')
config.vars[k] = v.replace(placeholder, function (match, path) { return lodash_get_1["default"](config, path, match); });
}
}
// Populate all ${} placeholders in secrets
if (config.secrets && config.vars && typeof config.vars['IDP'] === 'string') {
var IDP_1 = config.vars['IDP'].split(',').map(function (v) { return v.trim(); }); // TODO should a full blown CSV parser be pulled in?
for (var _c = 0, _d = Object.entries(config.secrets).filter(function (_a) {
var k = _a[0], secret = _a[1];
return IDP_1.includes(k);
}); _c < _d.length; _c++) {
var _e = _d[_c], k = _e[0], secret = _e[1];
switch (typeof secret) {
case 'string':
config.secrets[k] = secret.replace(placeholder, function (match, path) { return lodash_get_1["default"](config, path, match); });
break;
case 'object':
if (Array.isArray(secret)) {
config.secrets[k] = secret.map(function (v) {
return typeof v === 'string' ?
v.replace(placeholder, function (match, path) { return lodash_get_1["default"](config, path, match); })
: v;
});
}
else {
for (var _f = 0, _g = Object.entries(secret); _f < _g.length; _f++) {
var _h = _g[_f], k_1 = _h[0], v = _h[1];
if (typeof v === 'string')
secret[k_1] = v.replace(placeholder, function (match, path) { return lodash_get_1["default"](config, path, match); });
}
}
}
}
}
return config;
}
exports.loadConfig = loadConfig;
/**
* Replace, in place, all non string values of vars and secrets with their JSON encoding
* @param config config root loaded via loadConfig()
*/
function toJSON(config) {
if (config.vars)
for (var _i = 0, _a = Object.entries(config.vars); _i < _a.length; _i++) {
var _b = _a[_i], k = _b[0], v = _b[1];
if (typeof v !== 'string')
config.vars[k] = JSON.stringify(v);
}
if (config.secrets)
for (var _c = 0, _d = Object.entries(config.secrets); _c < _d.length; _c++) {
var _e = _d[_c], k = _e[0], v = _e[1];
if (typeof v !== 'string')
config.secrets[k] = JSON.stringify(v);
}
}
exports.toJSON = toJSON;