Merge remote-tracking branch 'upstream/master'
* upstream/master:
  chore: update the link to the interop dashboard [skip ci]
  chore: parameterise s3 build cache setup (libp2p#465)
  chore: Create funding.json
AlejandroCabeza committed Sep 12, 2024
2 parents 40fde07 + c164b24 commit e7d9b15
Showing 8 changed files with 67 additions and 39 deletions.
5 changes: 5 additions & 0 deletions .github/actions/run-interop-hole-punch-test/action.yml
@@ -86,6 +86,11 @@ runs:
 
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
 
5 changes: 5 additions & 0 deletions .github/actions/run-interop-ping-test/action.yml
@@ -78,6 +78,11 @@ runs:
 
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
 
5 changes: 5 additions & 0 deletions .github/actions/run-transport-interop-test/action.yml
@@ -78,6 +78,11 @@ runs:
 
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
 
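Note: of the four variables exported above, only AWS_BUCKET is read directly by the cache helper; AWS_REGION, AWS_ACCESS_KEY_ID, and AWS_SECRET_ACCESS_KEY are the standard credential variables picked up implicitly by the aws CLI the script shells out to. A minimal sketch of the consuming side, mirroring the cache.ts change further down:

// cache.ts reads only the bucket name from the environment the action exports
const AWS_BUCKET = process.env.AWS_BUCKET;
if (!AWS_BUCKET) {
    throw new Error("AWS_BUCKET not set")
}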
6 changes: 3 additions & 3 deletions .github/workflows/hole-punch-interop.yml
@@ -28,7 +28,7 @@ jobs:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/run-interop-hole-punch-test
         with:
-          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
+          s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
           worker-count: 16
8 changes: 4 additions & 4 deletions .github/workflows/transport-interop.yml
@@ -18,12 +18,12 @@ jobs:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/run-transport-interop-test
         with:
-          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
+          s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
           worker-count: 16
   build-without-secrets:
-    runs-on: ubuntu-latest
+    runs-on: ['self-hosted', 'linux', 'x64', '4xlarge'] # https://github.com/pl-strflt/tf-aws-gh-runner/blob/main/runners.tf
     steps:
       - uses: actions/checkout@v3
       # Purposely not using secrets to replicate how forks will behave.
5 changes: 5 additions & 0 deletions funding.json
@@ -0,0 +1,5 @@
+{
+  "opRetro": {
+    "projectId": "0x966804cb492e1a4bde5d781a676a44a23d69aa5dd2562fa7a4f95bb606021c8b"
+  }
+}
36 changes: 20 additions & 16 deletions hole-punch-interop/helpers/cache.ts
@@ -1,8 +1,9 @@
-const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
+const AWS_BUCKET = process.env.AWS_BUCKET;
 const scriptDir = __dirname;
 
 import * as crypto from 'crypto';
 import * as fs from 'fs';
+import * as os from 'os';
 import * as path from 'path';
 import * as child_process from 'child_process';
 import ignore, { Ignore } from 'ignore'
@@ -76,10 +77,14 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
     if (mode == Mode.PushCache) {
         console.log("Pushing cache")
         try {
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
-            if (res.ok) {
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
+            }
+            try {
+                child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
                 console.log("Cache already exists")
-            } else {
+            } catch (e) {
+                console.log("Cache doesn't exist", e)
                 // Read image id from image.json
                 const imageID = JSON.parse(fs.readFileSync(path.join(dir, 'image.json')).toString()).imageID;
                 console.log(`Pushing cache for ${dir}: ${imageID}`)
@@ -96,18 +101,17 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
         console.log("Loading cache")
         let cacheHit = false
         try {
-            // Check if the cache exists
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
-            if (res.ok) {
-                const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
-                const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
-                if (loadedImageId) {
-                    console.log(`Cache hit for ${loadedImageId}`);
-                    fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
-                    cacheHit = true
-                }
-            } else {
-                console.log("Cache not found")
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
             }
+            const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
+            const archivePath = path.join(cachePath, 'archive.tar.gz')
+            const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
+            const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
+            if (loadedImageId) {
+                console.log(`Cache hit for ${loadedImageId}`);
+                fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
+                cacheHit = true
+            }
         } catch (e) {
             console.log("Cache not found:", e)
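The push path above swaps the unauthenticated HTTPS HEAD probe for `aws s3 ls`, which exits non-zero when the key is absent; execSync surfaces that as a throw, so the catch branch is the cache-miss path. A self-contained sketch of the pattern (bucket and key below are placeholders, not values from this repo):

import * as child_process from 'child_process';

// `aws s3 ls` exits non-zero for a missing key; execSync turns that into a throw.
function cacheExists(bucket: string, key: string): boolean {
    try {
        child_process.execSync(`aws s3 ls s3://${bucket}/${key}`, { stdio: 'pipe' })
        return true
    } catch {
        return false
    }
}

// e.g. cacheExists('example-bucket', 'imageCache/abc123-x86_64.tar.gz')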
36 changes: 20 additions & 16 deletions transport-interop/helpers/cache.ts
@@ -1,8 +1,9 @@
-const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
+const AWS_BUCKET = process.env.AWS_BUCKET;
 const scriptDir = __dirname;
 
 import * as crypto from 'crypto';
 import * as fs from 'fs';
+import * as os from 'os';
 import * as path from 'path';
 import * as child_process from 'child_process';
 import ignore, { Ignore } from 'ignore'
@@ -65,10 +66,14 @@ switch (modeStr) {
     if (mode == Mode.PushCache) {
         console.log("Pushing cache")
        try {
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, { method: "HEAD" })
-            if (res.ok) {
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
+            }
+            try {
+                child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
                 console.log("Cache already exists")
-            } else {
+            } catch (e) {
+                console.log("Cache doesn't exist", e)
                 // Read image id from image.json
                 const imageID = JSON.parse(fs.readFileSync(path.join(implFolder, 'image.json')).toString()).imageID;
                 console.log(`Pushing cache for ${impl}: ${imageID}`)
@@ -85,18 +90,17 @@ switch (modeStr) {
         console.log("Loading cache")
         let cacheHit = false
         try {
-            // Check if the cache exists
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, { method: "HEAD" })
-            if (res.ok) {
-                const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
-                const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
-                if (loadedImageId) {
-                    console.log(`Cache hit for ${loadedImageId}`);
-                    fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n");
-                    cacheHit = true
-                }
-            } else {
-                console.log("Cache not found")
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
             }
+            const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
+            const archivePath = path.join(cachePath, 'archive.tar.gz')
+            const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
+            const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
+            if (loadedImageId) {
+                console.log(`Cache hit for ${loadedImageId}`);
+                fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n");
+                cacheHit = true
+            }
         } catch (e) {
             console.log("Cache not found:", e)
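Both helpers keep the same regex for parsing `docker image load` output, which Docker prints in one of two forms depending on whether the loaded archive carries a tag. A small sketch of what capture group 2 yields (the sample strings are illustrative, not real output from this pipeline):

// Group 2 holds the tag or the ID, whichever form Docker prints.
const pattern = /Loaded image( ID)?: (.*)/
console.log("Loaded image: alpine:3.19".match(pattern)?.[2])        // -> alpine:3.19
console.log("Loaded image ID: sha256:4a1c".match(pattern)?.[2])     // -> sha256:4a1c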
