chore: parameterise s3 build cache setup #465

Merged · 6 commits · Sep 11, 2024
5 changes: 5 additions & 0 deletions .github/actions/run-interop-hole-punch-test/action.yml
@@ -86,6 +86,11 @@ runs:
 
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash

5 changes: 5 additions & 0 deletions .github/actions/run-interop-ping-test/action.yml
@@ -78,6 +78,11 @@ runs:
 
     - name: Load cache and build
      working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash

5 changes: 5 additions & 0 deletions .github/actions/run-transport-interop-test/action.yml
@@ -78,6 +78,11 @@ runs:
 
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash

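All three composite actions gain the same env block, wired from their action inputs. Only AWS_BUCKET is read by the cache script itself; AWS_REGION and the two credential variables are picked up automatically by the aws CLI that the script shells out to. A minimal TypeScript sketch of that division of labour (the requireBucket helper is hypothetical, added here for illustration):

import * as child_process from 'child_process';

// Only AWS_BUCKET is inspected by the script; AWS_REGION, AWS_ACCESS_KEY_ID and
// AWS_SECRET_ACCESS_KEY are consumed from the environment by the aws CLI itself.
function requireBucket(): string {
    const bucket = process.env.AWS_BUCKET;
    if (!bucket) {
        throw new Error("AWS_BUCKET not set");
    }
    return bucket;
}

// Example: inspect what is currently cached.
console.log(child_process.execSync(`aws s3 ls s3://${requireBucket()}/imageCache/`).toString());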
6 changes: 3 additions & 3 deletions .github/workflows/hole-punch-interop.yml
@@ -28,7 +28,7 @@ jobs:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/run-interop-hole-punch-test
         with:
-          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
+          s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
           worker-count: 16
8 changes: 4 additions & 4 deletions .github/workflows/transport-interop.yml
@@ -18,12 +18,12 @@ jobs:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/run-transport-interop-test
         with:
-          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
+          s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
           worker-count: 16
   build-without-secrets:
-    runs-on: ubuntu-latest
+    runs-on: ['self-hosted', 'linux', 'x64', '4xlarge'] # https://github.com/pl-strflt/tf-aws-gh-runner/blob/main/runners.tf
     steps:
       - uses: actions/checkout@v3
       # Purposely not using secrets to replicate how forks will behave.
36 changes: 20 additions & 16 deletions hole-punch-interop/helpers/cache.ts
@@ -1,8 +1,9 @@
-const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
+const AWS_BUCKET = process.env.AWS_BUCKET;
 const scriptDir = __dirname;
 
 import * as crypto from 'crypto';
 import * as fs from 'fs';
+import * as os from 'os';
 import * as path from 'path';
 import * as child_process from 'child_process';
 import ignore, { Ignore } from 'ignore'
@@ -76,10 +77,14 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
     if (mode == Mode.PushCache) {
         console.log("Pushing cache")
         try {
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
-            if (res.ok) {
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
+            }
+            try {
+                child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
                 console.log("Cache already exists")
-            } else {
+            } catch (e) {
+                console.log("Cache doesn't exist", e)
                 // Read image id from image.json
                 const imageID = JSON.parse(fs.readFileSync(path.join(dir, 'image.json')).toString()).imageID;
                 console.log(`Pushing cache for ${dir}: ${imageID}`)
@@ -96,18 +101,17 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
         console.log("Loading cache")
         let cacheHit = false
         try {
-            // Check if the cache exists
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
-            if (res.ok) {
-                const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
-                const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
-                if (loadedImageId) {
-                    console.log(`Cache hit for ${loadedImageId}`);
-                    fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
-                    cacheHit = true
-                }
-            } else {
-                console.log("Cache not found")
-            }
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
+            }
+            const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
+            const archivePath = path.join(cachePath, 'archive.tar.gz')
+            const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
+            const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
+            if (loadedImageId) {
+                console.log(`Cache hit for ${loadedImageId}`);
+                fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
+                cacheHit = true
+            }
         } catch (e) {
             console.log("Cache not found:", e)
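Taken together, the new load path swaps the unauthenticated fetch/curl pair for a single aws s3 cp, which reads its credentials from the environment, then loads the downloaded archive into Docker from disk. A TypeScript sketch of the resulting flow, with cacheKey, arch, and dir standing in for the surrounding script state (loadCache is a hypothetical name; the real script runs this inline inside a try/catch):

import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as child_process from 'child_process';

function loadCache(cacheKey: string, arch: string, dir: string): boolean {
    if (!process.env.AWS_BUCKET) {
        throw new Error("AWS_BUCKET not set");
    }
    // Download the archive into a fresh temp dir, then load it into Docker.
    const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'));
    const archivePath = path.join(cachePath, 'archive.tar.gz');
    const out = child_process.execSync(
        `aws s3 cp s3://${process.env.AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`
    ).toString();
    // docker image load prints "Loaded image: <tag>" or "Loaded image ID: <sha256>".
    const match = out.match(/Loaded image( ID)?: (.*)/);
    if (!match || !match[2]) {
        return false;
    }
    fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: match[2]}) + "\n");
    return true;
}

Unlike the inline version, the sketch checks the regex match before indexing into it; in the script the bare match(...)[2] is guarded by the enclosing try/catch.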
36 changes: 20 additions & 16 deletions transport-interop/helpers/cache.ts
@@ -1,8 +1,9 @@
-const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
+const AWS_BUCKET = process.env.AWS_BUCKET;
 const scriptDir = __dirname;
 
 import * as crypto from 'crypto';
 import * as fs from 'fs';
+import * as os from 'os';
 import * as path from 'path';
 import * as child_process from 'child_process';
 import ignore, { Ignore } from 'ignore'
@@ -65,10 +66,14 @@ switch (modeStr) {
 if (mode == Mode.PushCache) {
     console.log("Pushing cache")
     try {
-        const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, { method: "HEAD" })
-        if (res.ok) {
+        if (!AWS_BUCKET) {
+            throw new Error("AWS_BUCKET not set")
+        }
+        try {
+            child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
             console.log("Cache already exists")
-        } else {
+        } catch (e) {
+            console.log("Cache doesn't exist", e)
             // Read image id from image.json
             const imageID = JSON.parse(fs.readFileSync(path.join(implFolder, 'image.json')).toString()).imageID;
             console.log(`Pushing cache for ${impl}: ${imageID}`)
@@ -85,18 +90,17 @@ switch (modeStr) {
     console.log("Loading cache")
     let cacheHit = false
    try {
-        // Check if the cache exists
-        const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, { method: "HEAD" })
-        if (res.ok) {
-            const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
-            const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
-            if (loadedImageId) {
-                console.log(`Cache hit for ${loadedImageId}`);
-                fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n");
-                cacheHit = true
-            }
-        } else {
-            console.log("Cache not found")
-        }
+        if (!AWS_BUCKET) {
+            throw new Error("AWS_BUCKET not set")
+        }
+        const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
+        const archivePath = path.join(cachePath, 'archive.tar.gz')
+        const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
+        const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
+        if (loadedImageId) {
+            console.log(`Cache hit for ${loadedImageId}`);
+            fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n");
+            cacheHit = true
+        }
     } catch (e) {
         console.log("Cache not found:", e)
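The push side applies the same pattern in reverse: aws s3 ls on the exact object key doubles as an existence check, since it exits non-zero when the key is absent and execSync turns that into a throw. A sketch of the check (cacheExists is a hypothetical helper name):

import * as child_process from 'child_process';

// True when the cache archive already exists in the bucket; aws s3 ls
// exits non-zero for a missing key, which execSync surfaces as a throw.
function cacheExists(bucket: string, cacheKey: string, arch: string): boolean {
    try {
        child_process.execSync(`aws s3 ls s3://${bucket}/imageCache/${cacheKey}-${arch}.tar.gz`);
        return true;
    } catch {
        return false;
    }
}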