Record features referenced by tarball URI
chrmarti committed Jul 14, 2023
1 parent 75ee28b commit 906a868
Showing 9 changed files with 56 additions and 24 deletions.
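The diff below records Features referenced by a plain tarball URI in the lockfile and verifies their digest on subsequent fetches. As orientation, here is a minimal TypeScript sketch of the lockfile shape these changes rely on; it is inferred from the lockfile?.features[uri]?.integrity lookups and the updated test fixtures, not copied from the actual declaration in src/spec-configuration/lockfile.ts.

// Sketch only: field names beyond those visible in this diff are assumptions.
interface LockfileEntrySketch {
	version: string;
	resolved: string;   // OCI reference pinned by digest, or the tarball URI itself
	integrity: string;  // 'sha256:<hex>' digest of the fetched artifact
	dependsOn?: string[];
}

interface LockfileSketch {
	features: Record<string, LockfileEntrySketch>;
}

// Example entry for a tarball-referenced Feature, mirroring the updated test fixture below.
const example: LockfileSketch = {
	features: {
		'https://github.com/codspace/tgz-features-with-dependson/releases/download/0.0.2/devcontainer-feature-A.tgz': {
			version: '2.0.1',
			resolved: 'https://github.com/codspace/tgz-features-with-dependson/releases/download/0.0.2/devcontainer-feature-A.tgz',
			integrity: 'sha256:f2dd5be682cceedb5497f9a734b5d5e7834424ade75b8cc700927242585ec671',
			dependsOn: ['ghcr.io/codspace/dependson/E'],
		},
	},
};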
23 changes: 15 additions & 8 deletions src/spec-configuration/containerFeaturesConfiguration.ts
@@ -553,7 +553,7 @@ export async function generateFeaturesConfig(params: ContainerFeatureInternalPar
};

output.write('--- Processing User Features ----', LogLevel.Trace);
- const featureSets = await computeDependsOnInstallationOrder(params, processFeature, userFeatures, config);
+ const featureSets = await computeDependsOnInstallationOrder(params, processFeature, userFeatures, config, lockfile);
if (!featureSets) {
throw new Error('Failed to compute Feature installation order!');
}
@@ -566,7 +566,7 @@ export async function generateFeaturesConfig(params: ContainerFeatureInternalPar

// Fetch features, stage into the appropriate build folder, and read the feature's devcontainer-feature.json
output.write('--- Fetching User Features ----', LogLevel.Trace);
- await fetchFeatures(params, featuresConfig, locallyCachedFeatureSet, dstFolder, localFeaturesFolder, ociCacheDir);
+ await fetchFeatures(params, featuresConfig, locallyCachedFeatureSet, dstFolder, localFeaturesFolder, ociCacheDir, lockfile);

await logFeatureAdvisories(params, featuresConfig);
await writeLockfile(params, config, featuresConfig);
@@ -1027,7 +1027,7 @@ export async function processFeatureIdentifier(params: CommonParams, configPath:
// throw new Error(`Unsupported feature source type: ${type}`);
}

- async function fetchFeatures(params: { extensionPath: string; cwd: string; output: Log; env: NodeJS.ProcessEnv }, featuresConfig: FeaturesConfig, localFeatures: FeatureSet, dstFolder: string, localFeaturesFolder: string, ociCacheDir: string) {
+ async function fetchFeatures(params: { extensionPath: string; cwd: string; output: Log; env: NodeJS.ProcessEnv }, featuresConfig: FeaturesConfig, localFeatures: FeatureSet, dstFolder: string, localFeaturesFolder: string, ociCacheDir: string, lockfile: Lockfile | undefined) {
const featureSets = featuresConfig.featureSets;
for (let idx = 0; idx < featureSets.length; idx++) { // Index represents the previously computed installation order.
const featureSet = featureSets[idx];
Expand Down Expand Up @@ -1106,7 +1106,7 @@ async function fetchFeatures(params: { extensionPath: string; cwd: string; outpu
const headers = getRequestHeaders(params, featureSet.sourceInformation);

// Ordered list of tarballUris to attempt to fetch from.
- let tarballUris: string[] = [];
+ let tarballUris: (string | { uri: string; digest?: string })[] = [];

if (sourceInfoType === 'github-repo') {
output.write('Determining tarball URI for provided github repo.', LogLevel.Trace);
@@ -1129,16 +1129,20 @@ async function fetchFeatures(params: { extensionPath: string; cwd: string; outpu

} else {
// We have a plain ol' tarball URI, since we aren't in the github-repo case.
- tarballUris.push(featureSet.sourceInformation.tarballUri);
+ const uri = featureSet.sourceInformation.tarballUri;
+ const digest = lockfile?.features[uri]?.integrity;
+ tarballUris.push({ uri, digest });
}

// Attempt to fetch from 'tarballUris' in order, until one succeeds.
let res: { computedDigest: string } | undefined;
for (const tarballUri of tarballUris) {
- res = await fetchContentsAtTarballUri(params, tarballUri, featCachePath, headers, dstFolder);
+ const uri = typeof tarballUri === 'string' ? tarballUri : tarballUri.uri;
+ const digest = typeof tarballUri === 'string' ? undefined : tarballUri.digest;
+ res = await fetchContentsAtTarballUri(params, uri, digest, featCachePath, headers, dstFolder);

if (res) {
- output.write(`Succeeded fetching ${tarballUri}`, LogLevel.Trace);
+ output.write(`Succeeded fetching ${uri}`, LogLevel.Trace);
if (!(await applyFeatureConfigToFeature(output, featureSet, feature, featCachePath, res.computedDigest))) {
const err = `Failed to parse feature '${featureDebugId}'. Please check your devcontainer.json 'features' attribute.`;
throw new Error(err);
@@ -1159,7 +1163,7 @@ async function fetchFeatures(params: { extensionPath: string; cwd: string; outpu
}
}

- export async function fetchContentsAtTarballUri(params: { output: Log; env: NodeJS.ProcessEnv }, tarballUri: string, featCachePath: string, headers: { 'user-agent': string; 'Authorization'?: string; 'Accept'?: string } | undefined, dstFolder: string, metadataFile?: string): Promise<{ computedDigest: string; metadata: {} | undefined } | undefined> {
+ export async function fetchContentsAtTarballUri(params: { output: Log; env: NodeJS.ProcessEnv }, tarballUri: string, expectedDigest: string | undefined, featCachePath: string, headers: { 'user-agent': string; 'Authorization'?: string; 'Accept'?: string } | undefined, dstFolder: string, metadataFile?: string): Promise<{ computedDigest: string; metadata: {} | undefined } | undefined> {
const { output } = params;
const tempTarballPath = path.join(dstFolder, 'temp.tgz');
try {
@@ -1179,6 +1183,9 @@ export async function fetchContentsAtTarballUri(params: { output: Log; env: Node
}

const computedDigest = `sha256:${crypto.createHash('sha256').update(tarball).digest('hex')}`;
+ if (expectedDigest && computedDigest !== expectedDigest) {
+ throw new Error(`Digest did not match for ${tarballUri}.`);
+ }

// Filter what gets emitted from the tar.extract().
const filter = (file: string, _: tar.FileStat) => {
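The digest check added to fetchContentsAtTarballUri above boils down to hashing the downloaded archive and comparing the result against the integrity value recorded in the lockfile. A standalone sketch of that check; verifyTarballDigest is a hypothetical helper name, not part of the CLI:

import * as crypto from 'crypto';

// Computes the sha256 digest of a downloaded tarball and, when the lockfile supplies an
// expected value, fails the fetch on a mismatch — mirroring the hunk above.
function verifyTarballDigest(tarball: Buffer, tarballUri: string, expectedDigest: string | undefined): string {
	const computedDigest = `sha256:${crypto.createHash('sha256').update(tarball).digest('hex')}`;
	if (expectedDigest && computedDigest !== expectedDigest) {
		throw new Error(`Digest did not match for ${tarballUri}.`);
	}
	return computedDigest;
}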
20 changes: 13 additions & 7 deletions src/spec-configuration/containerFeaturesOrder.ts
@@ -14,6 +14,7 @@ import { DevContainerFeature } from './configuration';
import { CommonParams, OCIRef } from './containerCollectionsOCI';
import { isLocalFile, readLocalFile } from '../spec-utils/pfs';
import { fetchOCIFeature } from './containerFeaturesOCI';
+ import { Lockfile } from './lockfile';

interface FNode {
type: 'user-provided' | 'override' | 'resolved';
@@ -345,7 +346,8 @@ async function _buildDependencyGraph(
params: CommonParams,
processFeature: (userFeature: DevContainerFeature) => Promise<FeatureSet | undefined>,
worklist: FNode[],
- acc: FNode[]): Promise<DependencyGraph> {
+ acc: FNode[],
+ lockfile: Lockfile | undefined): Promise<DependencyGraph> {
const { output } = params;

while (worklist.length > 0) {
@@ -390,7 +392,9 @@ async function _buildDependencyGraph(
break;

case 'direct-tarball':
- metadata = await getTgzFeatureMetadata(params, current);
+ const tarballUri = (processedFeature.sourceInformation as DirectTarballSourceInformation).tarballUri;
+ const expectedDigest = lockfile?.features[tarballUri]?.integrity;
+ metadata = await getTgzFeatureMetadata(params, current, expectedDigest);
break;

default:
@@ -504,7 +508,7 @@ async function getOCIFeatureMetadata(params: CommonParams, node: FNode): Promise
return;
}

- async function getTgzFeatureMetadata(params: CommonParams, node: FNode): Promise<Feature | undefined> {
+ async function getTgzFeatureMetadata(params: CommonParams, node: FNode, expectedDigest: string | undefined): Promise<Feature | undefined> {
const { output } = params;

// TODO: Implement a caching layer here!
@@ -516,7 +520,7 @@ async function getTgzFeatureMetadata(params: CommonParams, node: FNode): Promise
}

const tmp = path.join(os.tmpdir(), crypto.randomUUID());
- const result = await fetchContentsAtTarballUri(params, srcInfo.tarballUri, tmp, undefined, tmp, DEVCONTAINER_FEATURE_FILE_NAME);
+ const result = await fetchContentsAtTarballUri(params, srcInfo.tarballUri, expectedDigest, tmp, undefined, tmp, DEVCONTAINER_FEATURE_FILE_NAME);
if (!result || !result.metadata) {
output.write(`No metadata for Feature '${node.userFeatureId}' from '${srcInfo.tarballUri}'`, LogLevel.Trace);
return;
@@ -532,7 +536,8 @@ export async function buildDependencyGraph(
params: CommonParams,
processFeature: (userFeature: DevContainerFeature) => Promise<FeatureSet | undefined>,
userFeatures: DevContainerFeature[],
- config: { overrideFeatureInstallOrder?: string[] }): Promise<DependencyGraph | undefined> {
+ config: { overrideFeatureInstallOrder?: string[] },
+ lockfile: Lockfile | undefined): Promise<DependencyGraph | undefined> {

const { output } = params;

@@ -550,7 +555,7 @@

output.write(`[* user-provided] ${rootNodes.map(n => n.userFeatureId).join(', ')}`, LogLevel.Trace);

- const { worklist } = await _buildDependencyGraph(params, processFeature, rootNodes, []);
+ const { worklist } = await _buildDependencyGraph(params, processFeature, rootNodes, [], lockfile);

output.write(`[* resolved worklist] ${worklist.map(n => n.userFeatureId).join(', ')}`, LogLevel.Trace);

@@ -568,12 +573,13 @@ export async function computeDependsOnInstallationOrder(
processFeature: (userFeature: DevContainerFeature) => Promise<FeatureSet | undefined>,
userFeatures: DevContainerFeature[],
config: { overrideFeatureInstallOrder?: string[] },
+ lockfile?: Lockfile,
precomputedGraph?: DependencyGraph): Promise<FeatureSet[] | undefined> {

const { output } = params;

// Build dependency graph and resolves all to FeatureSets.
- const graph = precomputedGraph ?? await buildDependencyGraph(params, processFeature, userFeatures, config);
+ const graph = precomputedGraph ?? await buildDependencyGraph(params, processFeature, userFeatures, config, lockfile);
if (!graph) {
return;
}
8 changes: 5 additions & 3 deletions src/spec-configuration/lockfile.ts
@@ -6,7 +6,7 @@
import * as path from 'path';
import { DevContainerConfig } from './configuration';
import { readLocalFile, writeLocalFile } from '../spec-utils/pfs';
- import { ContainerFeatureInternalParams, FeatureSet, FeaturesConfig, OCISourceInformation } from './containerFeaturesConfiguration';
+ import { ContainerFeatureInternalParams, DirectTarballSourceInformation, FeatureSet, FeaturesConfig, OCISourceInformation } from './containerFeaturesConfiguration';


export interface Lockfile {
@@ -28,13 +28,15 @@ export async function writeLockfile(params: ContainerFeatureInternalParams, conf

const lockfile: Lockfile = featuresConfig.featureSets
.map(f => [f, f.sourceInformation] as const)
- .filter((tup): tup is [FeatureSet, OCISourceInformation] => tup[1].type === 'oci')
+ .filter((tup): tup is [FeatureSet, OCISourceInformation | DirectTarballSourceInformation] => ['oci', 'direct-tarball'].indexOf(tup[1].type) !== -1)
.map(([set, source]) => {
const dependsOn = Object.keys(set.features[0].dependsOn || {});
return {
id: source.userFeatureId,
version: set.features[0].version!,
- resolved: `${source.featureRef.registry}/${source.featureRef.path}@${set.computedDigest}`,
+ resolved: source.type === 'oci' ?
+ `${source.featureRef.registry}/${source.featureRef.path}@${set.computedDigest}` :
+ source.tarballUri,
integrity: set.computedDigest!,
dependsOn: dependsOn.length ? dependsOn : undefined,
};
2 changes: 1 addition & 1 deletion src/spec-node/featuresCLI/info.ts
@@ -105,7 +105,7 @@ async function featuresInfo({
const processFeature = async (_userFeature: DevContainerFeature) => {
return await processFeatureIdentifier(params, undefined, '', _userFeature);
};
- const graph = await buildDependencyGraph(params, processFeature, [{ userFeatureId: featureId, options: {} }], { overrideFeatureInstallOrder: undefined });
+ const graph = await buildDependencyGraph(params, processFeature, [{ userFeatureId: featureId, options: {} }], { overrideFeatureInstallOrder: undefined }, undefined);
const graph = await buildDependencyGraph(params, processFeature, [{ userFeatureId: featureId, options: {} }], { overrideFeatureInstallOrder: undefined }, undefined);
output.write(JSON.stringify(graph, undefined, 4), LogLevel.Trace);
if (!graph) {
output.write(`Could not build dependency graph.`, LogLevel.Error);
8 changes: 5 additions & 3 deletions src/spec-node/featuresCLI/resolveDependencies.ts
@@ -9,6 +9,7 @@ import { isLocalFile, readLocalFile } from '../../spec-utils/pfs';
import { DevContainerConfig, DevContainerFeature } from '../../spec-configuration/configuration';
import { buildDependencyGraph, computeDependsOnInstallationOrder, generateMermaidDiagram } from '../../spec-configuration/containerFeaturesOrder';
import { OCISourceInformation, processFeatureIdentifier, userFeaturesToArray } from '../../spec-configuration/containerFeaturesConfiguration';
+ import { readLockfile } from '../../spec-configuration/lockfile';

interface JsonOutput {
installOrder?: {
@@ -82,15 +83,16 @@ async function featuresResolveDependencies({
env: process.env,
};

+ const lockfile = await readLockfile(config);
const processFeature = async (_userFeature: DevContainerFeature) => {
- return await processFeatureIdentifier(params, configPath, workspaceFolder, _userFeature);
+ return await processFeatureIdentifier(params, configPath, workspaceFolder, _userFeature, lockfile);
};

- const graph = await buildDependencyGraph(params, processFeature, userFeaturesConfig, config);
+ const graph = await buildDependencyGraph(params, processFeature, userFeaturesConfig, config, lockfile);
const worklist = graph?.worklist!;
console.log(generateMermaidDiagram(params, worklist));

- const installOrder = await computeDependsOnInstallationOrder(params, processFeature, userFeaturesConfig, config, graph);
+ const installOrder = await computeDependsOnInstallationOrder(params, processFeature, userFeaturesConfig, config, lockfile, graph);

if (!installOrder) {
// Bold
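Taken together, the CLI reads the lockfile once and threads it through identifier processing, graph building, and installation-order computation. A condensed sketch of that flow using the signatures from this diff; the wrapper function is hypothetical, the CommonParams import path is assumed, and the real featuresResolveDependencies additionally emits the mermaid diagram and handles errors:

import { DevContainerConfig, DevContainerFeature } from '../../spec-configuration/configuration';
import { CommonParams } from '../../spec-configuration/containerCollectionsOCI'; // path assumed
import { buildDependencyGraph, computeDependsOnInstallationOrder } from '../../spec-configuration/containerFeaturesOrder';
import { processFeatureIdentifier } from '../../spec-configuration/containerFeaturesConfiguration';
import { readLockfile } from '../../spec-configuration/lockfile';

// Hypothetical wrapper showing how the lockfile flows into dependency resolution.
async function resolveWithLockfile(params: CommonParams, config: DevContainerConfig, configPath: string, workspaceFolder: string, userFeaturesConfig: DevContainerFeature[]) {
	const lockfile = await readLockfile(config);
	const processFeature = async (userFeature: DevContainerFeature) =>
		processFeatureIdentifier(params, configPath, workspaceFolder, userFeature, lockfile);
	const graph = await buildDependencyGraph(params, processFeature, userFeaturesConfig, config, lockfile);
	return computeDependsOnInstallationOrder(params, processFeature, userFeaturesConfig, config, lockfile, graph);
}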
@@ -2,6 +2,7 @@
"image": "mcr.microsoft.com/devcontainers/base:ubuntu",
"features": {
"ghcr.io/codspace/dependson/A:2": {},
"ghcr.io/codspace/dependson/E:1": {}
"ghcr.io/codspace/dependson/E:1": {},
"https://github.com/codspace/tgz-features-with-dependson/releases/download/0.0.2/devcontainer-feature-A.tgz": {}
}
}
@@ -17,6 +17,14 @@
"version": "1.0.0",
"resolved": "ghcr.io/codspace/dependson/e@sha256:90b84127edab28ecb169cd6c6f2101ce0ea1d77589cee01951fec7f879f3a11c",
"integrity": "sha256:90b84127edab28ecb169cd6c6f2101ce0ea1d77589cee01951fec7f879f3a11c"
+ },
+ "https://github.com/codspace/tgz-features-with-dependson/releases/download/0.0.2/devcontainer-feature-A.tgz": {
+ "version": "2.0.1",
+ "resolved": "https://github.com/codspace/tgz-features-with-dependson/releases/download/0.0.2/devcontainer-feature-A.tgz",
+ "integrity": "sha256:f2dd5be682cceedb5497f9a734b5d5e7834424ade75b8cc700927242585ec671",
+ "dependsOn": [
+ "ghcr.io/codspace/dependson/E"
+ ]
}
}
}
@@ -2,6 +2,7 @@
"image": "mcr.microsoft.com/devcontainers/base:ubuntu",
"features": {
"ghcr.io/codspace/features/flower:1.0.0": {},
"ghcr.io/codspace/features/color:1.0.4": {}
"ghcr.io/codspace/features/color:1.0.4": {},
"https://github.com/codspace/features/releases/download/tarball02/devcontainer-feature-docker-in-docker.tgz": {}
}
}
@@ -9,6 +9,11 @@
"version": "1.0.0",
"resolved": "ghcr.io/codspace/features/flower@sha256:c9cc1ac636b9ef595512b5ca7ecb3a35b7d3499cb6f86372edec76ae0cd71d43",
"integrity": "sha256:c9cc1ac636b9ef595512b5ca7ecb3a35b7d3499cb6f86372edec76ae0cd71d43"
+ },
+ "https://github.com/codspace/features/releases/download/tarball02/devcontainer-feature-docker-in-docker.tgz": {
+ "version": "1.0.0",
+ "resolved": "https://github.com/codspace/features/releases/download/tarball02/devcontainer-feature-docker-in-docker.tgz",
+ "integrity": "sha256:9cf3f2a17c1bb2b599b6027cfa975d2fb28234df88ba33ff5e276fa052aac7ae"
}
}
}
