diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 000000000..60000face
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,23 @@
+{
+	// Use IntelliSense to learn about possible attributes.
+	// Hover to view descriptions of existing attributes.
+	// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+	"version": "0.2.0",
+	"configurations": [
+		{
+			"type": "node",
+			"request": "launch",
+			"name": "Launch cli - up",
+			"program": "${workspaceFolder}/src/spec-node/devContainersSpecCLI.ts",
+			"cwd": "${workspaceFolder}",
+			"args": [
+				"up",
+				"--workspace-folder",
+				"../devcontainers-features",
+				"--log-level",
+				"debug",
+			],
+			"console": "integratedTerminal",
+		}
+	]
+}
\ No newline at end of file
diff --git a/package.json b/package.json
index 51aba232d..7b4911d84 100644
--- a/package.json
+++ b/package.json
@@ -41,6 +41,7 @@
	"@types/js-yaml": "^4.0.5",
	"@types/mocha": "^9.1.0",
	"@types/node": "^16.11.7",
+	"@types/ncp": "^2.0.5",
	"@types/pull-stream": "^3.6.2",
	"@types/semver": "^7.3.9",
	"@types/shell-quote": "^1.7.1",
@@ -65,6 +66,7 @@
	"vinyl-fs": "^3.0.3"
},
"dependencies": {
+	"ncp": "^2.0.0",
	"follow-redirects": "^1.14.8",
	"js-yaml": "^4.1.0",
	"jsonc-parser": "^3.0.0",
diff --git a/src/spec-configuration/configuration.ts b/src/spec-configuration/configuration.ts
index b182aedd5..2271b9c38 100644
--- a/src/spec-configuration/configuration.ts
+++ b/src/spec-configuration/configuration.ts
@@ -26,6 +26,11 @@ export interface HostRequirements {
	storage?: string;
}

+export interface DevContainerFeature {
+	id: string;
+	options: boolean | string | Record<string, boolean | string | undefined>;
+}
+
export interface DevContainerFromImageConfig {
	configFilePath: URI;
	image: string;
@@ -55,6 +60,7 @@
	updateRemoteUserUID?: boolean;
	userEnvProbe?: UserEnvProbe;
	features?: Record<string, string | boolean | Record<string, string | boolean>>;
+	overrideFeatureInstallOrder?: string[];
	hostRequirements?: HostRequirements;
}

@@ -86,6 +92,7 @@ export type DevContainerFromDockerfileConfig = {
	updateRemoteUserUID?: boolean;
	userEnvProbe?: UserEnvProbe;
	features?: Record<string, string | boolean | Record<string, string | boolean>>;
+	overrideFeatureInstallOrder?: string[];
	hostRequirements?: HostRequirements;
} & (
	{
@@ -133,6 +140,7 @@ export interface DevContainerFromDockerComposeConfig {
	updateRemoteUserUID?: boolean;
	userEnvProbe?: UserEnvProbe;
	features?: Record<string, string | boolean | Record<string, string | boolean>>;
+	overrideFeatureInstallOrder?: string[];
	hostRequirements?: HostRequirements;
}
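For orientation (a sketch, not part of the diff): the new `overrideFeatureInstallOrder` property pins which features install first; entries are moved to the front in the listed order, and the remaining features keep the automatically computed order. A hypothetical configuration using the interfaces above, with made-up image and feature ids:

import { URI } from 'vscode-uri';
import { DevContainerFromImageConfig } from './configuration';

// Hypothetical example: 'github-cli' and 'docker-in-docker' are placeholder feature ids.
const exampleConfig: DevContainerFromImageConfig = {
	configFilePath: URI.parse('file:///workspace/.devcontainer/devcontainer.json'),
	image: 'ubuntu',
	features: { 'github-cli': 'latest', 'docker-in-docker': 'latest' },
	// Install 'github-cli' before 'docker-in-docker', ahead of any 'installAfter' hints.
	overrideFeatureInstallOrder: ['github-cli', 'docker-in-docker'],
};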
diff --git a/src/spec-configuration/containerFeaturesConfiguration.ts b/src/spec-configuration/containerFeaturesConfiguration.ts
index bad8490fa..7f4c95fd8 100644
--- a/src/spec-configuration/containerFeaturesConfiguration.ts
+++ b/src/spec-configuration/containerFeaturesConfiguration.ts
@@ -5,20 +5,32 @@
import * as jsonc from 'jsonc-parser';
import * as path from 'path';
-import * as semver from 'semver';
import * as URL from 'url';
import * as tar from 'tar';
-import { DevContainerConfig } from './configuration';
-import { mkdirpLocal, readLocalFile, rmLocal, writeLocalFile } from '../spec-utils/pfs';
+import { existsSync } from 'fs';
+import { DevContainerConfig, DevContainerFeature } from './configuration';
+import { mkdirpLocal, readLocalFile, rmLocal, writeLocalFile, cpDirectoryLocal } from '../spec-utils/pfs';
import { Log, LogLevel } from '../spec-utils/log';
import { request } from '../spec-utils/httpRequest';
+import { computeFeatureInstallationOrder } from './containerFeaturesOrder';

-const ASSET_NAME = 'devcontainer-features.tgz';
+
+const V1_ASSET_NAME = 'devcontainer-features.tgz';

export interface Feature {
	id: string;
	name: string;
+	description?: string;
+	filename?: string;
+	runApp?: string;
+	runParams?: string;
+	infoString?: string;
+	internalVersion?: string; // set programmatically
+	tempLocalPath?: string;
+	consecutiveId?: string;
+	install?: Record<string, string>;
	documentationURL?: string;
+	licenseURL?: string;
	options?: Record<string, FeatureOption>;
	buildArg?: string; // old properties for temporary compatibility
	containerEnv?: Record<string, string>;
@@ -28,6 +40,7 @@ export interface Feature {
	capAdd?: string[];
	securityOpt?: string[];
	entrypoint?: string;
+	installAfter?: string[];
	include?: string[];
	exclude?: string[];
	value: boolean | string | Record<string, boolean | string | undefined>; // set programmatically
@@ -99,6 +112,7 @@ export interface FilePathSourceInformation extends BaseSourceInformation {

export interface FeatureSet {
	features: Feature[];
+	internalVersion?: string;
	sourceInformation: SourceInformation;
}

@@ -140,6 +154,12 @@ export function collapseFeaturesConfig(original: FeaturesConfig): CollapsedFeatu

export const multiStageBuildExploration = false;

+// Counter to ensure that no two folders are the same even if we are executing the same feature multiple times.
+let counter = 1;
+function getCounter() {
+	return counter++;
+}
+
const isTsnode = path.basename(process.argv[0]) === 'ts-node' || process.argv.indexOf('ts-node/register') !== -1;

export function getContainerFeaturesFolder(_extensionPath: string | { distFolder: string }) {
@@ -156,13 +176,13 @@ export function getSourceInfoString(srcInfo: SourceInformation): string {
	const { type } = srcInfo;
	switch (type) {
		case 'local-cache':
-			return 'local-cache';
+			return 'local-cache-' + getCounter();
		case 'direct-tarball':
-			return Buffer.from(srcInfo.tarballUri).toString('base64');
+			return srcInfo.tarballUri + getCounter();
		case 'github-repo':
-			return `github-${srcInfo.owner}-${srcInfo.repo}-${srcInfo.isLatest ? 'latest' : srcInfo.tag}`;
+			return `github-${srcInfo.owner}-${srcInfo.repo}-${srcInfo.isLatest ? 'latest' : srcInfo.tag}-${getCounter()}`;
		case 'file-path':
-			return Buffer.from(srcInfo.filePath).toString('base64');
+			return srcInfo.filePath + '-' + getCounter();
	}
}

@@ -192,17 +212,48 @@ USER $_DEV_CONTAINERS_IMAGE_USER
export function getFeatureLayers(featuresConfig: FeaturesConfig) {
	let result = '';
-	const folders = (featuresConfig.featureSets || []).map(x => getSourceInfoString(x.sourceInformation));
+
+	// Features version 1
+	const folders = (featuresConfig.featureSets || []).filter(y => y.internalVersion !== '2').map(x => x.features[0].consecutiveId);
	folders.forEach(folder => {
		result += `RUN cd /tmp/build-features/${folder} \\
&& chmod +x ./install.sh \\
&& ./install.sh

`;
+	});
+	// Features version 2
+	featuresConfig.featureSets.filter(y => y.internalVersion === '2').forEach(featureSet => {
+		featureSet.features.forEach(feature => {
+			result += generateContainerEnvs(feature);
+			result += `

+RUN cd /tmp/build-features/${feature.consecutiveId} \\
+&& export $(cat devcontainer-features.env | xargs) \\
+&& chmod +x ./${feature.runParams} \\
+&& ./${feature.runParams}

+`;
+		})
	});
	return result;
}

+// Features version two export their environment variables as part of the Dockerfile to make them available to subsequent features.
+export function generateContainerEnvs(feature: Feature) {
+	let result = '';
+	if(!feature.containerEnv)
+	{
+		return result;
+	}
+	let keys = Object.keys(feature.containerEnv);
+	result = keys.map(k => `ENV ${k}=${feature.containerEnv![k]}`).join('\n');
+
+	return result;
+}
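Illustrative only (assumed values, not from the diff): for a hypothetical v2 feature with consecutiveId 'helloworld_1', the default runParams of 'install.sh', and containerEnv { FOO: 'bar' }, the two functions above would emit roughly this Dockerfile fragment (the `\\` in the template literal becomes a single `\` in the output):

ENV FOO=bar

RUN cd /tmp/build-features/helloworld_1 \
&& export $(cat devcontainer-features.env | xargs) \
&& chmod +x ./install.sh \
&& ./install.sh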
+
+const allowedFeatureIdRegex = new RegExp('^[a-zA-Z0-9_-]*$');
+
// Parses a declared feature in user's devcontainer file into
// a usable URI to download remote features.
// RETURNS
// {
//      id               <----- The ID of the feature in the feature set.
//      sourceInformation <----- Source information (is this locally cached, a GitHub remote feature, etc..), including tarballUri if applicable.
// }
//
-export function parseFeatureIdentifier(input: string, output: Log): { id: string; sourceInformation: SourceInformation } | undefined {
-	// A identifier takes this form:
-	//      (0)  <feature>
-	//      (1)  <owner>/<repo>/<feature>@version
-	//      (2)  https://<../URI/..>/devcontainer-features.tgz#<feature>
-	//      (3)  ./<path>#<feature>  -or-  ../<path>#<feature>  -or-  /<path>#<feature>
-	//
-	//  (0) This is a locally cached feature. The function should return `undefined` for tarballUrl
-	//
-	//  (1) Our "registry" is backed by GitHub public repositories (or repos visible with the environment's GITHUB_TOKEN).
-	//      Say organization 'octocat' has a repo titled 'myfeatures' with a set of feature definitions.
-	//      One of the [1..n] features in this repo has an id of 'helloworld'.
-	//
-	//      eg: octocat/myfeatures/helloworld
-	//
-	//      The above example assumes the 'latest' GitHub release, and internally will
-	//      fetch the devcontainer-features.tgz artifact from that release.
-	//      To specify a certain release tag, append the tag with an @ symbol
-	//
-	//      eg: octocat/myfeatures/helloworld@v0.0.2
-	//
-	//  (2) A fully-qualified https URI to a devcontainer-features.tgz file can be provided instead
-	//      of a using the GitHub registry "shorthand". Note this is identified by a
-	//      s.StartsWith("https://" || "http://").
-	//
-	//      eg: https://example.com/../../devcontainer-features.tgz#helloworld
-	//
-	//  (3) This is a local path to a directory on disk following the expected file convention
-	//      The path can either be:
-	//          -  a relative file path to the .devcontainer file (prepended by a ./ or ../)
-	//          -  an absolute file path (prepended by a /)
-	//
-	//      No version can be provided, as the directory is copied 'as is' and is inherently taking the 'latest'
-
-	// Regexes
-	const allowedFeatureIdRegex = new RegExp('^[a-zA-Z0-9_-]*$');
-
-	// Case (0): Cached feature
-	if (!input.includes('/')) {
-		output.write(`[${input}] - No slash, must be locally cached feature.`, LogLevel.Trace);
-		return {
-			id: input,
-			sourceInformation: { type: 'local-cache' },
-		};
-	}
-
-	// Case (2): Direct URI to a tgz
-	if (input.startsWith('http://') || input.startsWith('https://')) {
-		output.write(`[${input}] - Direct URI`, LogLevel.Trace);
-
-		// Trim any trailing slash character to make parsing easier.
-		// A slash at the end of the direct tgz identifier is not important.
-		input = input.replace(/\/+$/, '');
-
-		// Parse out feature ID by splitting on final slash character.
-		const featureIdDelimiter = input.lastIndexOf('#');
-		const id = input.substring(featureIdDelimiter + 1);
-		// Ensure feature id only contains the expected set of characters.
-		if (id === '' || !allowedFeatureIdRegex.test(id)) {
-			output.write(`Parse error. Specify a feature id with alphanumeric, dash, or underscore characters. Provided: ${id}.`, LogLevel.Error);
-			return undefined;
-		}
-		const tarballUri =
-			new URL.URL(input.substring(0, featureIdDelimiter))
-				.toString();
-
-		output.write(`[${input}] - uri: ${tarballUri} , id: ${id}`, LogLevel.Trace);
-		return {
-			id,
-			sourceInformation: { 'type': 'direct-tarball', tarballUri }
-		};
-	}
-
-	// Case (3): Local disk relative/absolute path to directory
-	if (input.startsWith('/') || input.startsWith('./') || input.startsWith('../')) {
-		// Currently unimplemented.
-		return undefined;
-
-		// const splitOnHash = input.split('#');
-		// if (!splitOnHash || splitOnHash.length !== 2) {
-		//     output.write(`Parse error. Relative or absolute path to directory should be of the form: <path>#<feature>`, LogLevel.Error);
-		//     return undefined;
-		// }
-		// const filePath = splitOnHash[0];
-		// const id = splitOnHash[1];
-		// if (!allowedFeatureIdRegex.test(id)) {
-		//     output.write(`Parse error. Specify a feature id with alphanumeric, dash, or underscore characters. Provided: ${id}.`, LogLevel.Error);
-		//     return undefined;
-		// }
-		// return {
-		//     id,
-		//     sourceInformation: { 'type': 'file-path', filePath, isRelative: input.startsWith('./') }
-		// };
-	}
-
-	// Must be case (1) - GH
-	let version = 'latest';
-	let splitOnAt = input.split('@');
-	if (splitOnAt.length > 2) {
-		output.write(`Parse error. Use the '@' symbol only to designate a version tag.`, LogLevel.Error);
-		return undefined;
-	}
-	if (splitOnAt.length === 2) {
-		output.write(`[${input}] has version ${splitOnAt[1]}`, LogLevel.Trace);
-		version = splitOnAt[1];
-	}
-	// Remaining info must be in the first part of the split.
-	const featureBlob = splitOnAt[0];
-	const splitOnSlash = featureBlob.split('/');
-	// We expect all GitHub/registry features to follow the triple slash pattern at this point
-	// eg: <owner>/<repo>/<feature>
-	if (splitOnSlash.length !== 3 || splitOnSlash.some(x => x === '') || !allowedFeatureIdRegex.test(splitOnSlash[2])) {
-		output.write(`Invalid parse for GitHub/registry feature identifier. Follow format: '<owner>/<repo>/<feature>'`, LogLevel.Error);
-		return undefined;
-	}
-	const owner = splitOnSlash[0];
-	const repo = splitOnSlash[1];
-	const id = splitOnSlash[2];
-
-	// Return expected tarball URI for a latest release on the parsed repo.
-	const ghSrcInfo = createGitHubSourceInformation({ owner, repo, tag: version });
-	return {
-		id,
-		sourceInformation: ghSrcInfo
-	};
-}
-
-export function createGitHubSourceInformation(params: GithubSourceInformationInput): GithubSourceInformation {
-	const { owner, repo, tag } = params;
-	if (tag === 'latest') {
-		return {
-			type: 'github-repo',
-			apiUri: `https://api.github.com/repos/${owner}/${repo}/releases/latest`,
-			unauthenticatedUri: `https://github.com/${owner}/${repo}/releases/latest/download/${ASSET_NAME}`,
-			owner,
-			repo,
-			isLatest: true
-		};
-	} else {
-		// We must have a tag, return a tarball URI for the tagged version.
-		return {
-			type: 'github-repo',
-			apiUri: `https://api.github.com/repos/${owner}/${repo}/releases/tags/${tag}`,
-			unauthenticatedUri: `https://github.com/${owner}/${repo}/releases/download/${tag}/${ASSET_NAME}`,
-			owner,
-			repo,
-			tag,
-			isLatest: false
-		};
-	}
-}
-
const cleanupIterationFetchAndMerge = async (tempTarballPath: string, output: Log) => {
	// Non-fatal, will just get overwritten if we don't do the cleaned up.
@@ -395,186 +294,22 @@ function getRequestHeaders(sourceInformation: SourceInformation, env: NodeJS.Pro
	return headers;
}

-async function fetchAndMergeRemoteFeaturesAsync(params: { extensionPath: string; output: Log; env: NodeJS.ProcessEnv }, featuresConfig: FeaturesConfig, config: DevContainerConfig) {
-
-	const { output, env } = params;
-	const { dstFolder } = featuresConfig;
-	let buildFoldersCreatedAlready: String[] = [];
-
-	// The requested features from the user's devcontainer
-	const features = config.features;
-	if (!features || !Object.keys(features).length) {
-		return undefined;
-	}
-
-	// We need a dstFolder to know where to download remote resources to
-	if (!dstFolder) {
-		return undefined;
-	}
-
-	const tempTarballPath = path.join(dstFolder, ASSET_NAME);
-
-	output.write(`Preparing to parse declared features and fetch remote features.`);
-
-	for await (const id of Object.keys(features)) {
-		const remoteFeatureParsed = parseFeatureIdentifier(id, output);
-
-		if (remoteFeatureParsed === undefined) {
-			output.write(`Failed to parse key: ${id}`, LogLevel.Error);
-			// Failed to parse.
-			// TODO: Should be more fatal.
-			continue;
-		}
-
-		// -- Next section handles each possible type of "SourceInformation"
-
-		const featureName = remoteFeatureParsed.id;
-		const sourceInformation = remoteFeatureParsed.sourceInformation;
-		const sourceType = sourceInformation.type;
-
-		if (sourceType === 'local-cache') {
-			output.write(`Detected local feature set. Continuing...`);
-			continue;
-		}
-
-		const buildFolderName = getSourceInfoString(remoteFeatureParsed.sourceInformation);
-		// Calculate some predictable caching paths.
-		// Don't create the folder on-disk until we need it.
-		const featCachePath = path.join(dstFolder, buildFolderName);
-
-		// Break out earlier if already copied over remote features to dstFolder
-		const alreadyExists = buildFoldersCreatedAlready.some(x => x === buildFolderName);
-		if (alreadyExists) {
-			output.write(`Already pulled remote resource for '${buildFolderName}'. No need to re-fetch.`); //TODO: not true, might have been updated on the repo since if we pulled `local`. Should probably use commit SHA?
-			continue;
-		}
-
-		output.write(`Fetching: featureSet = ${buildFolderName}, feature = ${featureName}, Type = ${sourceType}`);
-
-		if (sourceType === 'file-path') {
-			output.write(`Local file-path to features on disk is unimplemented. Continuing...`);
-			continue;
-		} else {
-			let tarballUri: string | undefined = undefined;
-			const headers = getRequestHeaders(sourceInformation, env, output);
-
-			// If this is 'github-repo', we need to do an API call to fetch the appropriate asset's tarballUri
-			if (sourceType === 'github-repo') {
-				output.write('Determining tarball URI for provided github repo.', LogLevel.Trace);
-				if (headers.Authorization && headers.Authorization !== '') {
-					output.write('Authenticated. Fetching from GH API.', LogLevel.Trace);
-					tarballUri = await askGitHubApiForTarballUri(sourceInformation, headers, output);
-					headers.Accept = 'Accept: application/octet-stream';
-				} else {
-					output.write('Not authenticated. Fetching from unauthenticated uri', LogLevel.Trace);
-					tarballUri = sourceInformation.unauthenticatedUri;
-				}
-			} else if (sourceType === 'direct-tarball') {
-				tarballUri = sourceInformation.tarballUri;
-			} else {
-				output.write(`Unhandled source type: ${sourceType}`, LogLevel.Error);
-				continue; // TODO: Should be more fatal?
-			}
-
-			// uri direct to the tarball either acquired at this point, or failed.
-			if (tarballUri !== undefined && tarballUri !== '') {
-				const options = {
-					type: 'GET',
-					url: tarballUri,
-					headers
-				};
-				output.write(`Fetching tarball at ${options.url}`);
-				output.write(`Headers: ${JSON.stringify(options)}`, LogLevel.Trace);
-				const tarball = await request(options, output);
-
-				if (!tarball || tarball.length === 0) {
-					output.write(`Did not receive a response from tarball download URI`, LogLevel.Error);
-					// Continue loop to the next remote feature.
-					// TODO: Should be more fatal.
-					await cleanupIterationFetchAndMerge(tempTarballPath, output);
-					continue;
-				}
-
-				// Filter what gets emitted from the tar.extract().
-				const filter = (file: string, _: tar.FileStat) => {
-					// Don't include .dotfiles or the archive itself.
-					if (file.startsWith('./.') || file === `./${ASSET_NAME}` || file === './.') {
-						return false;
-					}
-					return true;
-				};
-
-				output.write(`Preparing to unarchive received tgz.`, LogLevel.Trace);
-				// Create the directory to cache this feature-set in.
-				await mkdirpLocal(featCachePath);
-				await writeLocalFile(tempTarballPath, tarball);
-				await tar.x(
-					{
-						file: tempTarballPath,
-						cwd: featCachePath,
-						filter
-					}
-				);
-
-			} else {
-				output.write(`Could not fetch features from constructed tarball URL`, LogLevel.Error);
-				// Continue loop to the next remote feature.
-				// TODO: Should be more fatal.
-				await cleanupIterationFetchAndMerge(tempTarballPath, output);
-				continue;
-			}
-		}
-
-		// -- Whichever modality the feature-set was stored, at this point that process of retrieving and extracting a feature-set has completed successfully.
-		//    Now, load in the devcontainer-features.json from the `featureCachePath` and continue merging into the featuresConfig.
-
-		output.write('Attempting to load devcontainer-features.json', LogLevel.Trace);
-		let newFeaturesSet: FeatureSet | undefined = await loadFeaturesJsonFromDisk(featCachePath, output);
-
-		if (!newFeaturesSet || !newFeaturesSet.features || newFeaturesSet.features.length === 0) {
-			output.write(`Unable to parse received devcontainer-features.json.`, LogLevel.Error);
-			// TODO: Should be more fatal?
-			await cleanupIterationFetchAndMerge(tempTarballPath, output);
-			continue;
-		}
-		output.write(`Done loading FeatureSet ${buildFolderName} into from disk into memory`, LogLevel.Trace);
-
-		// Merge sourceInformation if the remote featureSet provides one.
-		// Priority is to maintain the values we had calculated previously.
-		if (newFeaturesSet.sourceInformation) {
-			newFeaturesSet = {
-				...newFeaturesSet,
-				sourceInformation: { ...newFeaturesSet.sourceInformation, ...sourceInformation },
-			};
-		}
-		output.write(`Merged sourceInfomation`, LogLevel.Trace);
-
-		// Add this new feature set to our featuresConfig
-		featuresConfig.featureSets.push(newFeaturesSet);
-		// Remember that we've succeeded in fetching this featureSet
-		buildFoldersCreatedAlready.push(buildFolderName);
-
-		// Clean-up
-		await cleanupIterationFetchAndMerge(tempTarballPath, output);
-		output.write(`Succeeded in fetching feature set ${buildFolderName}`, LogLevel.Trace);
-	}
-
-	// Return updated featuresConfig
-	return featuresConfig;
-}
-
-
-async function askGitHubApiForTarballUri(sourceInformation: GithubSourceInformation, headers: { 'user-agent': string; 'Authorization'?: string; 'Accept'?: string }, output: Log) {
+async function askGitHubApiForTarballUri(sourceInformation: GithubSourceInformation, feature: Feature, headers: { 'user-agent': string; 'Authorization'?: string; 'Accept'?: string }, output: Log) {
	const options = {
		type: 'GET',
		url: sourceInformation.apiUri,
		headers
	};
+
	const apiInfo: GitHubApiReleaseInfo = JSON.parse(((await request(options, output)).toString()));
	if (apiInfo) {
-		const asset = apiInfo.assets.find(a => a.name === ASSET_NAME);
+		const asset =
+			apiInfo.assets.find(a => a.name === `${feature.id}.tgz`) // v2
+			|| apiInfo.assets.find(a => a.name === V1_ASSET_NAME) // v1
+			|| undefined;
+
		if (asset && asset.url) {
-			output.write(`Found url to fetch release artifact ${asset.name}. Asset of size ${asset.size} has been downloaded ${asset.download_count} times and was last updated at ${asset.updated_at}`);
+			output.write(`Found url to fetch release artifact '${asset.name}'. Asset of size ${asset.size} has been downloaded ${asset.download_count} times and was last updated at ${asset.updated_at}`);
			return asset.url;
		} else {
			output.write('Unable to fetch release artifact URI from GitHub API', LogLevel.Error);
@@ -584,7 +319,7 @@ async function askGitHubApiForTarballUri(sourceInformat
	return undefined;
}

-export async function loadFeaturesJson(jsonBuffer: Buffer, output: Log): Promise<FeatureSet | undefined> {
+export async function loadFeaturesJson(jsonBuffer: Buffer, filePath: string, output: Log): Promise<FeatureSet | undefined> {
	if (jsonBuffer.length === 0) {
		output.write('Parsed featureSet is empty.', LogLevel.Error);
		return undefined;
@@ -595,15 +330,16 @@ export async function loadFeaturesJson(jsonBuffer: Buffer, output: Log): Promise<FeatureSet | undefined> {
		output.write('Parsed featureSet contains no features.', LogLevel.Error);
		return undefined;
	}
-	output.write(`Loaded devcontainer-features.json declares ${featureSet.features.length} features and ${(!!featureSet.sourceInformation) ? 'contains' : 'does not contain'} explicit source info.`,
+	output.write(`Loaded ${filePath}, which declares ${featureSet.features.length} features and ${(!!featureSet.sourceInformation) ? 'contains' : 'does not contain'} explicit source info.`,
		LogLevel.Trace);

	return updateFromOldProperties(featureSet);
}

export async function loadFeaturesJsonFromDisk(pathToDirectory: string, output: Log): Promise<FeatureSet | undefined> {
-	const jsonBuffer: Buffer = await readLocalFile(path.join(pathToDirectory, 'devcontainer-features.json'));
-	return loadFeaturesJson(jsonBuffer, output);
+	const filePath = path.join(pathToDirectory, 'devcontainer-features.json');
+	const jsonBuffer: Buffer = await readLocalFile(filePath);
+	return loadFeaturesJson(jsonBuffer, filePath, output);
}

function updateFromOldProperties(original: T): T {
@@ -638,11 +374,23 @@ function updateFromOldProperties
-export async function generateFeaturesConfig(params: { extensionPath: string; output: Log; env: NodeJS.ProcessEnv }, dstFolder: string, config: DevContainerConfig, imageLabelDetails: () => Promise<{ definition?: string; version?: string }>, getLocalFolder: (d: string) => string) {
+export async function generateFeaturesConfig(params: { extensionPath: string; cwd: string, output: Log; env: NodeJS.ProcessEnv }, dstFolder: string, config: DevContainerConfig, imageLabelDetails: () => Promise<{ definition?: string; version?: string }>, getLocalFolder: (d: string) => string) {
	const { output } = params;

-	const userDeclaredFeatures = config.features;
-	if (!userDeclaredFeatures || !Object.keys(userDeclaredFeatures).length) {
+	if (!config.features)
+	{
+		return undefined;
+	}
+
+	if(!imageLabelDetails)
+	{
+		return undefined;
+	}
+
+	// Converts from object notation to array notation.
+	const userFeatures = convertOldFeatures(params, config);
+
+	if(!userFeatures) {
		return undefined;
	}

@@ -653,123 +401,447 @@ export async function generateFeaturesConfig(params: { extensionPath: string; ou
		dstFolder
	};

+	// load local cache of features;
+	// TODO: Update so that cached features are always version 2
	let locallyCachedFeatureSet = await loadFeaturesJsonFromDisk(getLocalFolder(params.extensionPath), output); // TODO: Pass dist folder instead to also work with the devcontainer.json support package.
	if (!locallyCachedFeatureSet) {
		output.write('Failed to load locally cached features', LogLevel.Error);
		return undefined;
	}

-	// Add in the locally cached features
-	locallyCachedFeatureSet = {
-		...locallyCachedFeatureSet,
-		sourceInformation: { 'type': 'local-cache' },
-	};
+	// Read features and get the type.
+	output.write('--- Processing User Features ----', LogLevel.Trace);
+	featuresConfig = await processUserFeatures(params.output, userFeatures, featuresConfig);

-	// Push feature set to FeaturesConfig
-	featuresConfig.featureSets.push(locallyCachedFeatureSet);
+	// Fetch features and get version information
+	output.write('--- Fetching User Features ----', LogLevel.Trace);
+	await fetchFeatures(params, featuresConfig, locallyCachedFeatureSet, dstFolder);

-	// Parse, fetch, and merge information on remote features (if any).
-	// TODO: right now if anything fails in this method and we return `undefined`, we fallback to just the prior state of featureConfig (locally cached only). Is that what we want??
-	featuresConfig = await fetchAndMergeRemoteFeaturesAsync(params, featuresConfig, config) ?? featuresConfig;
+	const orderedFeatures = computeFeatureInstallationOrder(config, featuresConfig.featureSets);

-	// Run filtering and include user options into config.
-	featuresConfig = await doReadUserDeclaredFeatures(params, config, featuresConfig, imageLabelDetails);
-	if (featuresConfig.featureSets.every(set =>
-		set.features.every(feature => feature.value === false))) {
-		return undefined;
+	output.write('--- Computed order ----', LogLevel.Trace);
+	for (const feature of orderedFeatures) {
+		output.write(`${feature.features[0].id}`, LogLevel.Trace);
	}

+	featuresConfig.featureSets = orderedFeatures;
+
	return featuresConfig;
}

-const getUniqueFeatureId = (id: string, srcInfo: SourceInformation) => `${id}-${getSourceInfoString(srcInfo)}`;
+// Convert features from object syntax to array syntax
+function convertOldFeatures(params: { output: Log }, config: DevContainerConfig): DevContainerFeature[] | undefined
+{
+	params.output.write('');

-// Given an existing featuresConfig, parse the user's features as they declared them in their devcontainer.
-export async function doReadUserDeclaredFeatures(params: { output: Log }, config: DevContainerConfig, featuresConfig: FeaturesConfig, imageLabelDetails: () => Promise<{ definition?: string; version?: string }>) {
+	let userFeatures: DevContainerFeature[] = [];
+	if (Array.isArray(config.features))
+	{
+		userFeatures = config.features;
+	}
+	else {
+		if (!config.features || !Object.keys(config.features).length) {
+			return undefined;
+		}

-	const { output } = params;
-	const { definition, version } = await imageLabelDetails();
-
-	// Map user's declared features to its appropriate feature-set feature.
-	let configFeatures = config.features || {};
-	let userValues: Record<string, string | boolean | Record<string, string | boolean>> = {};
-	for (const feat of Object.keys(configFeatures)) {
-		const { id, sourceInformation } = parseFeatureIdentifier(feat, output) ?? {};
-		if (id && sourceInformation) {
-			const uniqueId = getUniqueFeatureId(id, sourceInformation);
-			userValues[uniqueId] = configFeatures[feat];
-		} else {
-			output.write(`Failed to read user declared feature ${feat}. Skipping.`, LogLevel.Error);
-			continue;
+		for (const userFeatureKey of Object.keys(config.features)) {
+			const userFeatureValue = config.features[userFeatureKey];
+			if(userFeatureKey)
+			{
+				let feature : DevContainerFeature = {
+					id: userFeatureKey,
+					options: userFeatureValue
+				};
+				userFeatures.push(feature);
+			}
		}
	}

-	const included = {} as Record<string, boolean | undefined>;
-	for (const featureSet of featuresConfig.featureSets) {
-		for (const feature of featureSet.features) {
-			updateFeature(feature); // REMOVEME: Temporary migration.
-			const uniqueFeatureId = getUniqueFeatureId(feature.id, featureSet.sourceInformation);
-
-			// Compare the feature to the base definition.
-			if (definition && (feature.exclude || []).some(e => matches(e, definition, version))) {
-				// The feature explicitly excludes the detected base definition
-				feature.included = false;
-			} else if ('include' in feature) {
-				// The feature explicitly includes one or more base definitions
-				// Set the included flag to true IFF we have detected a base definition, and its in the feature's list of includes
-				feature.included = !!definition && (feature.include || []).some(e => matches(e, definition, version));
-			} else {
-				// The feature doesn't define any base definitions to "include" or "exclude" in which we can filter on.
-				// By default, include it.
-				feature.included = true;
			}

-			// Mark feature as with its state of inclusion
-			included[uniqueFeatureId] = included[uniqueFeatureId] || feature.included;

-			// Set the user-defined values from the user's devcontainer onto the feature config.
-			feature.value = userValues[uniqueFeatureId] || false;
		}
	}

+	return userFeatures;
+}
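A rough before/after sketch of this conversion (feature ids are placeholders, not from the diff):

// Object notation in devcontainer.json:
//   { "octocat/myfeatures/helloworld": { "greeting": "hi" }, "docker-in-docker": "latest" }
// becomes the array notation consumed by the rest of the pipeline:
const arrayNotation: DevContainerFeature[] = [
	{ id: 'octocat/myfeatures/helloworld', options: { greeting: 'hi' } },
	{ id: 'docker-in-docker', options: 'latest' },
];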
+
+// Process features contained in devcontainer.json
+// Creates one feature set per feature to aid in supporting the previous structure.
+async function processUserFeatures(output: Log, userFeatures: DevContainerFeature[], featuresConfig: FeaturesConfig) : Promise<FeaturesConfig>
+{
+	userFeatures.forEach(userFeature => {
+		const newFeatureSet = parseFeatureIdentifier(output, userFeature);
+		if(newFeatureSet) {
+			featuresConfig.featureSets.push(newFeatureSet);
+		}
+	}
+	);
+	return featuresConfig;
+}
+
+export function parseFeatureIdentifier(output: Log, userFeature: DevContainerFeature) : FeatureSet | undefined {
+	// An identifier takes this form:
+	//      (0)  <feature>
+	//      (1)  <owner>/<repo>/<feature>@version
+	//      (2)  https://<../URI/..>/devcontainer-features.tgz#<feature>
+	//      (3)  ./<path>#<feature>  -or-  ../<path>#<feature>  -or-  /<path>#<feature>
+	//
+	//  (0) This is a locally cached feature.
+	//
+	//  (1) Our "registry" is backed by GitHub public repositories (or repos visible with the environment's GITHUB_TOKEN).
+	//      Say organization 'octocat' has a repo titled 'myfeatures' with a set of feature definitions.
+	//      One of the [1..n] features in this repo has an id of 'helloworld'.
+	//
+	//      eg: octocat/myfeatures/helloworld
+	//
+	//      The above example assumes the 'latest' GitHub release, and internally will
+	//      fetch the devcontainer-features.tgz artifact from that release.
+	//      To specify a certain release tag, append the tag with an @ symbol
+	//
+	//      eg: octocat/myfeatures/helloworld@v0.0.2
+	//
+	//  (2) A fully-qualified https URI to a devcontainer-features.tgz file can be provided instead
+	//      of using the GitHub registry "shorthand". Note this is identified by a
+	//      s.StartsWith("https://" || "http://").
+	//
+	//      eg: https://example.com/../../devcontainer-features.tgz#helloworld
+	//
+	//  (3) This is a local path to a directory on disk following the expected file convention
+	//      The path can either be:
+	//          -  a relative file path to the .devcontainer file (prepended by a ./ or ../)
+	//          -  an absolute file path (prepended by a /)
+	//
+	//      No version can be provided, as the directory is copied 'as is' and is inherently taking the 'latest'
+
+	output.write(`* Processing feature: ${userFeature.id}`);
+
+	// cached feature
+	if (!userFeature.id.includes('/') && !userFeature.id.includes('\\')) {
+		output.write(`Cached feature found.`);
+
+		let feat: Feature = {
+			id: userFeature.id,
+			name: userFeature.id,
+			value: userFeature.options,
+			included: true,
+		}
+
+		let newFeaturesSet: FeatureSet = {
+			sourceInformation: {
+				type: 'local-cache',
+			},
+			features: [feat],
+		};
+
+		return newFeaturesSet;
+	}
+
+	// remote tar file
+	if (userFeature.id.startsWith('http://') || userFeature.id.startsWith('https://')) {
+		output.write(`Remote tar file found.`);
+		let input = userFeature.id.replace(/\/+$/, '');
+		const featureIdDelimiter = input.lastIndexOf('#');
+		const id = input.substring(featureIdDelimiter + 1);
+
+		if (id === '' || !allowedFeatureIdRegex.test(id)) {
+			output.write(`Parse error. Specify a feature id with alphanumeric, dash, or underscore characters. Provided: ${id}.`, LogLevel.Error);
+			return undefined;
+		}
+
+		const tarballUri = new URL.URL(input.substring(0, featureIdDelimiter)).toString();
+		let feat: Feature = {
+			id: id,
+			name: userFeature.id,
+			value: userFeature.options,
+			included: true,
+		}
+
+		let newFeaturesSet: FeatureSet = {
+			sourceInformation: {
+				type: 'direct-tarball',
+				tarballUri: tarballUri
+			},
+			features: [feat],
+		};
+
+		return newFeaturesSet;
+	}
+
+	// local disk
+	const userFeaturePath = path.parse(userFeature.id);
+	// If it's a valid path
+	if (userFeature.id.startsWith('./') || userFeature.id.startsWith('../') || (userFeaturePath && path.isAbsolute(userFeature.id))) {
+		//if (userFeaturePath && ((path.isAbsolute(userFeature.id) && existsSync(userFeature.id)) || !path.isAbsolute(userFeature.id))) {
+		output.write(`Local disk feature.`);
+		const filePath = userFeature.id;
+		const id = userFeaturePath.name;
+		const isRelative = !path.isAbsolute(userFeature.id);
+
+		let feat: Feature = {
+			id: id,
+			name: userFeature.id,
+			value: userFeature.options,
+			included: true,
+		}
+
+		let newFeaturesSet: FeatureSet = {
+			sourceInformation: {
+				type: 'file-path',
+				filePath,
+				isRelative: isRelative
+			},
+			features: [feat],
+		};
+
+		return newFeaturesSet;
+	}
+
+	output.write(`Github feature.`);
+	// Github repository source.
+	let version = 'latest';
+	let splitOnAt = userFeature.id.split('@');
+	if (splitOnAt.length > 2) {
+		output.write(`Parse error. Use the '@' symbol only to designate a version tag.`, LogLevel.Error);
+		return undefined;
+	}
+	if (splitOnAt.length === 2) {
+		output.write(`[${userFeature.id}] has version ${splitOnAt[1]}`, LogLevel.Trace);
+		version = splitOnAt[1];
+	}
+
+	// Remaining info must be in the first part of the split.
+	const featureBlob = splitOnAt[0];
+	const splitOnSlash = featureBlob.split('/');
+	// We expect all GitHub/registry features to follow the triple slash pattern at this point
+	// eg: <owner>/<repo>/<feature>
+	if (splitOnSlash.length !== 3 || splitOnSlash.some(x => x === '') || !allowedFeatureIdRegex.test(splitOnSlash[2])) {
+		output.write(`Invalid parse for GitHub/registry feature identifier. Follow format: '<owner>/<repo>/<feature>'`, LogLevel.Error);
+		return undefined;
+	}
+	const owner = splitOnSlash[0];
+	const repo = splitOnSlash[1];
+	const id = splitOnSlash[2];
+
+	let feat: Feature = {
+		id: id,
+		name: userFeature.id,
+		value: userFeature.options,
+		included: true,
+	};
+
+	if (version === 'latest') {
+		let newFeaturesSet: FeatureSet = {
+			sourceInformation: {
+				type: 'github-repo',
+				apiUri: `https://api.github.com/repos/${owner}/${repo}/releases/latest`,
+				unauthenticatedUri: `https://github.com/${owner}/${repo}/releases/latest/download`, // v1/v2 implementations append name of relevant asset
+				owner,
+				repo,
+				isLatest: true
+			},
+			features: [feat],
+		};
+		return newFeaturesSet;
+	} else {
+		// We must have a tag, return a tarball URI for the tagged version.
+		let newFeaturesSet: FeatureSet = {
+			sourceInformation: {
+				type: 'github-repo',
+				apiUri: `https://api.github.com/repos/${owner}/${repo}/releases/tags/${version}`,
+				unauthenticatedUri: `https://github.com/${owner}/${repo}/releases/download/${version}`, // v1/v2 implementations append name of relevant asset
+				owner,
+				repo,
+				tag: version,
+				isLatest: false
+			},
+			features: [feat],
+		};
+		return newFeaturesSet;
+	}
+}
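A hedged illustration of the parse results (inputs are hypothetical; `output` is assumed to be a Log instance):

// 'octocat/myfeatures/helloworld@v0.0.2' -> sourceInformation { type: 'github-repo',
//   owner: 'octocat', repo: 'myfeatures', tag: 'v0.0.2', isLatest: false }
const gh = parseFeatureIdentifier(output, { id: 'octocat/myfeatures/helloworld@v0.0.2', options: {} });

// 'helloworld' (no slash) -> sourceInformation { type: 'local-cache' }
const cached = parseFeatureIdentifier(output, { id: 'helloworld', options: {} });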
+
+async function fetchFeatures(params: { extensionPath: string; cwd: string; output: Log; env: NodeJS.ProcessEnv }, featuresConfig: FeaturesConfig, localFeatures: FeatureSet, dstFolder: string) {
+	for (const featureSet of featuresConfig.featureSets) {
+		try {
+			if (!featureSet || !featureSet.features || !featureSet.sourceInformation)
+			{
+				continue;
+			}
+
+			if (!localFeatures) {
+				continue;
+			}
+
+			const feature = featureSet.features[0];
+			const consecutiveId = feature.id + '_' + getCounter();
+			// Calculate some predictable caching paths.
+			const featCachePath = path.join(dstFolder, consecutiveId);
+			const sourceInfoType = featureSet.sourceInformation?.type;
+
+			feature.infoString = featCachePath;
+			feature.consecutiveId = consecutiveId;
+
+			const featureDebugId = `${feature.consecutiveId}_${sourceInfoType}`;
+			params.output.write(`* Fetching feature: ${featureDebugId}`);
+
+			if (sourceInfoType === 'local-cache') {
+				// create copy of the local features to set the environment variables for them.
+				await mkdirpLocal(featCachePath);
+				await cpDirectoryLocal(path.join(dstFolder, 'local-cache'), featCachePath);
+
+				await parseDevContainerFeature(featureSet, feature, featCachePath);
+
+				if (featureSet.internalVersion !== '2') {
+					const local = localFeatures.features.find(x => x.id === feature.id);
+					feature.buildArg = local?.buildArg;
+					feature.options = local?.options;
+				}
+				continue;
+			}
+
+			if (sourceInfoType === 'file-path') {
+				params.output.write(`Detected local file path`, LogLevel.Trace);
+
+				const executionPath = featureSet.sourceInformation.isRelative ? path.join(params.cwd, featureSet.sourceInformation.filePath) : featureSet.sourceInformation.filePath;
+
+				await parseDevContainerFeature(featureSet, feature, executionPath);
+				await mkdirpLocal(featCachePath);
+				await cpDirectoryLocal(executionPath, featCachePath);
+				continue;
+			}
+
+			params.output.write(`Detected tarball`, LogLevel.Trace);
+			const headers = getRequestHeaders(featureSet.sourceInformation, params.env, params.output);
+
+			// Ordered list of tarballUris to attempt to fetch from.
+			let tarballUris: string[] = [];
+
+			if (sourceInfoType === 'github-repo') {
+				params.output.write('Determining tarball URI for provided github repo.', LogLevel.Trace);
+				if (headers.Authorization && headers.Authorization !== '') {
+					params.output.write('GITHUB_TOKEN available. Attempting to fetch via GH API.', LogLevel.Info);
+					const authenticatedGithubTarballUri = await askGitHubApiForTarballUri(featureSet.sourceInformation, feature, headers, params.output);
+
+					if (authenticatedGithubTarballUri) {
+						tarballUris.push(authenticatedGithubTarballUri);
+					} else {
+						params.output.write('Failed to generate authenticated tarball URI for provided feature, despite a GitHub token present', LogLevel.Warning);
+					}
+					headers.Accept = 'Accept: application/octet-stream';
+				}
+
+				// Always add the unauthenticated URIs as fallback options.
+				params.output.write('Appending unauthenticated URIs for v2 and then v1', LogLevel.Trace);
+				tarballUris.push(`${featureSet.sourceInformation.unauthenticatedUri}/${feature.id}.tgz`);
+				tarballUris.push(`${featureSet.sourceInformation.unauthenticatedUri}/${V1_ASSET_NAME}`);
+
+			} else {
+				// We have a plain ol' tarball URI, since we aren't in the github-repo case.
+				tarballUris.push(featureSet.sourceInformation.tarballUri);
+			}
+
+			// Attempt to fetch from 'tarballUris' in order, until one succeeds.
+			let didSucceed: boolean = false;
+			for (const tarballUri of tarballUris) {
+				didSucceed = await fetchContentsAtTarballUri(tarballUri, featCachePath, headers, dstFolder, params.output);
+
+				if (didSucceed) {
+					params.output.write(`Succeeded fetching ${tarballUri}`, LogLevel.Trace);
+					await parseDevContainerFeature(featureSet, feature, featCachePath);
+					break;
+				}
+			}
+
+			if (!didSucceed) {
+				const msg = `(!) Failed to fetch tarball for ${featureDebugId} after attempting ${tarballUris.length} possibilities.`;
+				params.output.write(msg, LogLevel.Error);
+				throw new Error(msg);
+			}
+		}
+		catch (e) {
+			params.output.write(`(!) ERR: Failed to fetch feature: ${e?.message ?? ''} `, LogLevel.Error);
+			// TODO: Should this be more fatal?
+		}
+	}
+}
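For a hypothetical feature 'octocat/myfeatures/helloworld' at the latest release, the unauthenticated fallback list assembled above would be (v2 asset name first, then the v1 archive):

//   https://github.com/octocat/myfeatures/releases/latest/download/helloworld.tgz
//   https://github.com/octocat/myfeatures/releases/latest/download/devcontainer-features.tgz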
+async function fetchContentsAtTarballUri(tarballUri: string, featCachePath: string, headers: { 'user-agent': string; 'Authorization'?: string; 'Accept'?: string }, dstFolder: string, output: Log): Promise<boolean> {
+	const tempTarballPath = path.join(dstFolder, 'temp.tgz');
+	try {
+		const options = {
+			type: 'GET',
+			url: tarballUri,
+			headers
+		};
+		output.write(`Fetching tarball at ${options.url}`);
+		output.write(`Headers: ${JSON.stringify(options)}`, LogLevel.Trace);
+		const tarball = await request(options, output);
+
+		if (!tarball || tarball.length === 0) {
+			output.write(`Did not receive a response from tarball download URI: ${tarballUri}`, LogLevel.Trace);
+			return false;
+		}
+
+		// Filter what gets emitted from the tar.extract().
+		const filter = (file: string, _: tar.FileStat) => {
+			// Don't include .dotfiles or the archive itself.
+			if (file.startsWith('./.') || file === `./${V1_ASSET_NAME}` || file === './.') {
+				return false;
+			}
+			return true;
+		};
+
+		output.write(`Preparing to unarchive received tgz from ${tempTarballPath} -> ${featCachePath}.`, LogLevel.Trace);
+		// Create the directory to cache this feature-set in.
+		await mkdirpLocal(featCachePath);
+		await writeLocalFile(tempTarballPath, tarball);
+		await tar.x(
+			{
+				file: tempTarballPath,
+				cwd: featCachePath,
+				filter
+			}
+		);
+
+		await cleanupIterationFetchAndMerge(tempTarballPath, output);
+
+		return true;
+	} catch (e) {
+		output.write(`Caught failure when fetching from URI '${tarballUri}': ${e}`, LogLevel.Trace);
+		await cleanupIterationFetchAndMerge(tempTarballPath, output);
+		return false;
+	}
+}
+
+
+async function parseDevContainerFeature(featureSet: FeatureSet, feature: Feature, featCachePath: string) {
+	// Read version information.
+	const jsonPath = path.join(featCachePath, 'devcontainer-feature.json');
+	const innerPath = path.join(featCachePath, feature.id);
+	const innerJsonPath = path.join(innerPath, 'devcontainer-feature.json');
+
+	let foundPath: string | undefined;
+
+	if (existsSync(jsonPath)) {
+		foundPath = jsonPath;
+	} else if (existsSync(innerJsonPath)) {
+		foundPath = innerJsonPath;
+		feature.infoString = innerPath;
+	}
+
+	if (foundPath) {
+		const jsonString: Buffer = await readLocalFile(foundPath);
+		const featureJson = jsonc.parse(jsonString.toString());
+		feature.runApp = '';
+		feature.runParams = 'install.sh';
+		if (featureJson.install) {
+			feature.runApp = featureJson.install!.app ?? '';
+			feature.runParams = featureJson.install!.file ?? 'install.sh';
+		}
+
+		feature.containerEnv = featureJson.containerEnv;
+		featureSet.internalVersion = '2';
+		feature.buildArg = featureJson.buildArg;
+		feature.options = featureJson.options;
+		feature.installAfter = featureJson.installAfter;
+	} else {
+		featureSet.internalVersion = '1';
+	}
+}
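Illustrative only: the rough shape of a v2 devcontainer-feature.json that this parser accepts, inferred from the fields read above (all values are hypothetical):

{
	"id": "helloworld",
	"install": { "app": "", "file": "install.sh" },
	"containerEnv": { "FOO": "bar" },
	"installAfter": ["common"]
}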

export function getFeatureMainProperty(feature: Feature) {
diff --git a/src/spec-configuration/containerFeaturesOrder.ts b/src/spec-configuration/containerFeaturesOrder.ts
new file mode 100644
index 000000000..d143d5dd7
--- /dev/null
+++ b/src/spec-configuration/containerFeaturesOrder.ts
@@ -0,0 +1,111 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+
+import { ContainerError } from '../spec-common/errors';
+import { FeatureSet } from '../spec-configuration/containerFeaturesConfiguration';
+import { DevContainerConfig } from './configuration';
+
+interface FeatureNode {
+	feature: FeatureSet;
+	before: Set<FeatureNode>;
+	after: Set<FeatureNode>;
+}
+
+export function computeFeatureInstallationOrder(config: DevContainerConfig, features: FeatureSet[]) {
+
+	if (config.overrideFeatureInstallOrder) {
+		return computeOverrideInstallationOrder(config, features);
+	}
+	else {
+		return computeInstallationOrder(features);
+	}
+}
+
+// Exported for unit tests.
+export function computeOverrideInstallationOrder(config: DevContainerConfig, features: FeatureSet[]) {
+	// Starts with the automatic installation order.
+	const automaticOrder = computeInstallationOrder(features);
+
+	// Moves the explicitly configured features to the beginning.
+	const orderedFeatures = [];
+	for (const featureId of config.overrideFeatureInstallOrder!) {
+		const feature = automaticOrder.find(feature => feature.features[0].name === featureId);
+		if (!feature) {
+			throw new ContainerError({ description: `Feature ${featureId} not found` });
+		}
+		orderedFeatures.push(feature);
+		features.splice(features.indexOf(feature), 1);
+	}
+
+	return orderedFeatures.concat(features);
+}
+
+// Exported for unit tests.
+export function computeInstallationOrder(features: FeatureSet[]) {
+	const nodesById = features.map<FeatureNode>(feature => ({
+		feature,
+		before: new Set(),
+		after: new Set(),
+	})).reduce((map, feature) => map.set(feature.feature.features[0].id, feature), new Map<string, FeatureNode>());
+
+	const nodes = [...nodesById.values()];
+
+	for (const later of nodes) {
+		for (const firstId of later.feature.features[0].installAfter || []) {
+			const first = nodesById.get(firstId);
+			// soft dependencies
+			if (first) {
+				later.after.add(first);
+				first.before.add(later);
+			}
+		}
+	}
+
+	const { roots, islands } = nodes.reduce((prev, node) => {
+		if (node.after.size === 0) {
+			if (node.before.size === 0) {
+				prev.islands.push(node);
+			} else {
+				prev.roots.push(node);
+			}
+		}
+		return prev;
+	}, { roots: [] as FeatureNode[], islands: [] as FeatureNode[] });
+
+	const orderedFeatures = [];
+	let current = roots;
+	while (current.length) {
+		const next = [];
+		for (const first of current) {
+			for (const later of first.before) {
+				later.after.delete(first);
+				if (later.after.size === 0) {
+					next.push(later);
+				}
+			}
+		}
+		orderedFeatures.push(
+			...current.map(node => node.feature)
+				.sort((a, b) => a.features[0].id.localeCompare(b.features[0].id)) // stable order
+		);
+		current = next;
+	}
+
+	orderedFeatures.push(
+		...islands.map(node => node.feature)
+			.sort((a, b) => a.features[0].id.localeCompare(b.features[0].id)) // stable order
+	);
+
+	const missing = new Set(nodesById.keys());
+	for (const feature of orderedFeatures) {
+		missing.delete(feature.features[0].id);
+	}
+
+	if (missing.size !== 0) {
+		throw new ContainerError({ description: `Features declare cyclic dependency: ${[...missing].join(', ')}` });
+	}
+
+	return orderedFeatures;
+}
\ No newline at end of file
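A minimal usage sketch of the topological ordering (it mirrors the unit tests added later in this diff; import paths are assumptions for the sketch):

import { FeatureSet } from '../spec-configuration/containerFeaturesConfiguration';
import { computeInstallationOrder } from '../spec-configuration/containerFeaturesOrder';

// Each FeatureSet carries a single feature; 'B' declares it installs after 'A'.
const makeSet = (id: string, ...installAfter: string[]): FeatureSet => ({
	sourceInformation: { type: 'local-cache' },
	features: [{ id, name: id, installAfter, value: true, included: true }],
});

const ordered = computeInstallationOrder([makeSet('B', 'A'), makeSet('A')]);
console.log(ordered.map(s => s.features[0].id)); // -> ['A', 'B']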
diff --git a/src/spec-configuration/tsconfig.json b/src/spec-configuration/tsconfig.json
index eff319378..2b22c3542 100644
--- a/src/spec-configuration/tsconfig.json
+++ b/src/spec-configuration/tsconfig.json
@@ -3,6 +3,9 @@
	"references": [
		{
			"path": "../spec-utils"
+		},
+		{
+			"path": "../spec-common"
		}
	]
}
\ No newline at end of file
diff --git a/src/spec-node/containerFeatures.ts b/src/spec-node/containerFeatures.ts
index d1d53a211..b8dac0eee 100644
--- a/src/spec-node/containerFeatures.ts
+++ b/src/spec-node/containerFeatures.ts
@@ -98,11 +98,19 @@ export async function extendImage(params: DockerResolverParameters, config: DevC

export async function getExtendImageBuildInfo(params: DockerResolverParameters, config: DevContainerConfig, baseName: string, imageUser: string, imageLabelDetails: () => Promise<{ definition: string | undefined; version: string | undefined }>) {

-	const featuresConfig = await generateFeaturesConfig(params.common, (await createFeaturesTempFolder(params.common)), config, imageLabelDetails, getContainerFeaturesFolder);
+	// Creates the folder where the working files will be set up.
+	const tempFolder = await createFeaturesTempFolder(params.common);
+
+	// Extracts the local cache of features.
+	await createLocalFeatures(params, tempFolder);
+
+	// Processes the user's configuration.
+	const featuresConfig = await generateFeaturesConfig(params.common, tempFolder, config, imageLabelDetails, getContainerFeaturesFolder);
	if (!featuresConfig) {
		return null;
	}

+	// Generates the end configuration.
	const collapsedFeaturesConfig = collapseFeaturesConfig(featuresConfig);
	const featureBuildInfo = await getContainerFeaturesBuildInfo(params, featuresConfig, baseName, imageUser);
	if (!featureBuildInfo) {
@@ -116,27 +124,26 @@ export async function getExtendImageBuildInfo(params: DockerResolverParameters,

export function generateContainerEnvs(featuresConfig: FeaturesConfig) {
	let result = '';
	for (const fSet of featuresConfig.featureSets) {
-		result += fSet.features
-			.filter(f => (includeAllConfiguredFeatures || f.included) && f.value)
-			.reduce((envs, f) => envs.concat(Object.keys(f.containerEnv || {})
-				.map(k => `ENV ${k}=${f.containerEnv![k]}`)), [] as string[])
-			.join('\n');
+		// We only need to generate these ENV references for the initial features specification.
+		if(fSet.internalVersion !== '2')
+		{
+			result += fSet.features
+				.filter(f => (includeAllConfiguredFeatures || f.included) && f.value)
+				.reduce((envs, f) => envs.concat(Object.keys(f.containerEnv || {})
+					.map(k => `ENV ${k}=${f.containerEnv![k]}`)), [] as string[])
+				.join('\n');
+		}
	}
	return result;
}

-async function getContainerFeaturesBuildInfo(params: DockerResolverParameters, featuresConfig: FeaturesConfig, baseName: string, imageUser: string): Promise<{ dstFolder: string; dockerfileContent: string; dockerfilePrefixContent: string; buildArgs: Record<string, string>; buildKitContexts: Record<string, string> } | null> {
+async function createLocalFeatures(params: DockerResolverParameters, dstFolder: string)
+{
	const { common } = params;
	const { cliHost, output } = common;
-	const { dstFolder } = featuresConfig;
-
-	if (!dstFolder || dstFolder === '') {
-		output.write('dstFolder is undefined or empty in addContainerFeatures', LogLevel.Error);
-		return null;
-	}

-	// Calculate name of the build folder where localcache has been copied to.
-	const localCacheBuildFolderName = getSourceInfoString({ type: 'local-cache' });
+	// Name of the local cache folder inside the working directory
+	const localCacheBuildFolderName = 'local-cache';

	const srcFolder = getContainerFeaturesFolder(common.extensionPath);
	output.write(`local container features stored at: ${srcFolder}`);
@@ -170,6 +177,17 @@ async function getContainerFeaturesBuildInfo(params: DockerResolverParameters, f
	create.pipe(extract.stdin);
	await extract.exit;
	await createExit; // Allow errors to surface.
+}
+
+async function getContainerFeaturesBuildInfo(params: DockerResolverParameters, featuresConfig: FeaturesConfig, baseName: string, imageUser: string): Promise<{ dstFolder: string; dockerfileContent: string; dockerfilePrefixContent: string; buildArgs: Record<string, string>; buildKitContexts: Record<string, string> } | null> {
+	const { common } = params;
+	const { cliHost, output } = common;
+	const { dstFolder } = featuresConfig;
+
+	if (!dstFolder || dstFolder === '') {
+		output.write('dstFolder is undefined or empty in addContainerFeatures', LogLevel.Error);
+		return null;
+	}

	const buildStageScripts = await Promise.all(featuresConfig.featureSets
		.map(featureSet => multiStageBuildExploration ?
			featureSet.features
@@ -212,28 +230,43 @@ ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
`;

	// Build devcontainer-features.env file(s) for each features source folder
-	await Promise.all([...featuresConfig.featureSets].map(async (featureSet, i) => {
-		const featuresEnv = ([] as string[]).concat(
-			...featureSet.features
-				.filter(f => (includeAllConfiguredFeatures || f.included) && f.value && !buildStageScripts[i][f.id]?.hasAcquire)
-				.map(getFeatureEnvVariables)
-		).join('\n');
-		const envPath = cliHost.path.join(dstFolder, getSourceInfoString(featureSet.sourceInformation), 'devcontainer-features.env'); // next to install.sh
-		await Promise.all([
-			cliHost.writeFile(envPath, Buffer.from(featuresEnv)),
-			...featureSet.features
+	let i = 0;
+	for await (const fSet of featuresConfig.featureSets) {
+		if(fSet.internalVersion === '2')
+		{
+			for await (const fe of fSet.features) {
+				if (fe.infoString)
+				{
+					fe.internalVersion = '2';
+					const envPath = cliHost.path.join(fe.infoString, 'devcontainer-features.env');
+					const variables = getFeatureEnvVariables(fe);
+					await cliHost.writeFile(envPath, Buffer.from(variables.join('\n')));
+				}
+			}
+		} else {
+			const featuresEnv = ([] as string[]).concat(
+				...fSet.features
+					.filter(f => (includeAllConfiguredFeatures || f.included) && f.value && !buildStageScripts[i][f.id]?.hasAcquire)
+					.map(getFeatureEnvVariables)
+			).join('\n');
+			const envPath = cliHost.path.join(fSet.features[0].infoString!, 'devcontainer-features.env');
+			await Promise.all([
+				cliHost.writeFile(envPath, Buffer.from(featuresEnv)),
+				...fSet.features
					.filter(f => (includeAllConfiguredFeatures || f.included) && f.value && buildStageScripts[i][f.id]?.hasAcquire)
					.map(f => {
-						const featuresEnv = [
-							...getFeatureEnvVariables(f),
-							`_BUILD_ARG_${getSafeId(f.id)}_TARGETPATH=${path.posix.join('/usr/local/devcontainer-features', getSourceInfoString(featureSet.sourceInformation), f.id)}`
-						]
-							.join('\n');
-						const envPath = cliHost.path.join(dstFolder, getSourceInfoString(featureSet.sourceInformation), 'features', f.id, 'devcontainer-features.env'); // next to bin/acquire
-						return cliHost.writeFile(envPath, Buffer.from(featuresEnv));
-					})
-		]);
-	}));
+						const featuresEnv = [
+							...getFeatureEnvVariables(f),
+							`_BUILD_ARG_${getSafeId(f.id)}_TARGETPATH=${path.posix.join('/usr/local/devcontainer-features', getSourceInfoString(fSet.sourceInformation), f.id)}`
+						]
+							.join('\n');
+						const envPath = cliHost.path.join(dstFolder, getSourceInfoString(fSet.sourceInformation), 'features', f.id, 'devcontainer-features.env'); // next to bin/acquire
+						return cliHost.writeFile(envPath, Buffer.from(featuresEnv));
+					})
+			]);
+		}
+		i++;
+	}

	// For non-BuildKit, build the temporary image for the container-features content
	if (!useBuildKitBuildContexts) {
@@ -302,15 +335,28 @@ function getFeatureEnvVariables(f: Feature) {
	const values = getFeatureValueObject(f);
	const idSafe = getSafeId(f.id);
	const variables = [];
-	if (values) {
-		variables.push(...Object.keys(values)
-			.map(name => `_BUILD_ARG_${idSafe}_${getSafeId(name)}="${values[name]}"`));
-		variables.push(`_BUILD_ARG_${idSafe}=true`);
-	}
-	if (f.buildArg) {
-		variables.push(`${f.buildArg}=${getFeatureMainValue(f)}`);
-	}
-	return variables;
+
+	if(f.internalVersion !== '2')
+	{
+		if (values) {
+			variables.push(...Object.keys(values)
+				.map(name => `_BUILD_ARG_${idSafe}_${getSafeId(name)}="${values[name]}"`));
+			variables.push(`_BUILD_ARG_${idSafe}=true`);
+		}
+		if (f.buildArg) {
+			variables.push(`${f.buildArg}=${getFeatureMainValue(f)}`);
+		}
+		return variables;
+	} else {
+		if (values) {
+			variables.push(...Object.keys(values)
+				.map(name => `${getSafeId(name)}="${values[name]}"`));
+		}
+		if (f.buildArg) {
+			variables.push(`${f.buildArg}=${getFeatureMainValue(f)}`);
+		}
+		return variables;
+	}
}
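A sketch of the resulting env lines for a hypothetical feature id 'helloworld' with a single option version = 'latest' (assuming getSafeId upper-snake-cases names, as its tests suggest):

// v1 (internalVersion !== '2'):
//   _BUILD_ARG_HELLOWORLD_VERSION="latest"
//   _BUILD_ARG_HELLOWORLD=true
// v2 (plain names, written to devcontainer-features.env):
//   VERSION="latest"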
+ *--------------------------------------------------------------------------------------------*/
+
+import * as assert from 'assert';
+import { ContainerError } from '../../spec-common/errors';
+import { FeatureSet } from '../../spec-configuration/containerFeaturesConfiguration';
+import { computeInstallationOrder, computeOverrideInstallationOrder } from '../../spec-configuration/containerFeaturesOrder';
+import { URI } from 'vscode-uri';
+
+describe('Container features install order', () => {
+
+ it('has stable order among independent features', () => {
+ assert.deepEqual(
+ computeInstallationOrder([
+ installAfter('C'),
+ installAfter('A'),
+ installAfter('B'),
+ ]).map(f => f.features[0].id),
+ ['A', 'B', 'C']
+ );
+ });
+
+ it('orders "installAfter" first in breadth-first order (tree)', () => {
+ assert.deepEqual(
+ computeInstallationOrder([
+ installAfter('A', 'B'),
+ installAfter('B', 'C'),
+ installAfter('C'),
+ installAfter('D', 'E'),
+ installAfter('E', 'C'),
+ ]).map(f => f.features[0].id),
+ ['C', 'B', 'E', 'A', 'D']
+ );
+ });
+
+ it('orders "installAfter" first in breadth-first order (DAG)', () => {
+ assert.deepEqual(
+ computeInstallationOrder([
+ installAfter('A', 'B', 'C'),
+ installAfter('B', 'C'),
+ installAfter('C'),
+ installAfter('D', 'C'),
+ ]).map(f => f.features[0].id),
+ ['C', 'B', 'D', 'A']
+ );
+ });
+
+ it('treats "installAfter" as a soft dependency', () => {
+ assert.deepEqual(
+ computeInstallationOrder([
+ installAfter('A', 'B', 'C'),
+ installAfter('B'),
+ ]).map(f => f.features[0].id),
+ ['B', 'A']
+ );
+ });
+
+ it('orders independent features last', () => {
+ assert.deepEqual(
+ computeInstallationOrder([
+ installAfter('A'),
+ installAfter('B', 'C'),
+ installAfter('C'),
+ ]).map(f => f.features[0].id),
+ ['C', 'B', 'A']
+ );
+ });
+
+ it('detects cycles', () => {
+ try {
+ computeInstallationOrder([
+ installAfter('A', 'B'),
+ installAfter('B'),
+ installAfter('C', 'D'),
+ installAfter('D', 'C'),
+ ]);
+ assert.fail('Cyclic dependency not detected.');
+ } catch (err) {
+ assert.ok(err instanceof ContainerError);
+ assert.ok(err.message.indexOf('cyclic') !== -1);
+ }
+ });
+
+ it('respects OverrideConfig', () => {
+ assert.deepEqual(
+ computeOverrideInstallationOrder(
+ { image: 'ubuntu', configFilePath: URI.from({ 'scheme': 'https' }), overrideFeatureInstallOrder: ['A', 'B', 'C'] },
+ [
+ installAfter('A', 'C'),
+ installAfter('B', 'C'),
+ installAfter('C', 'D'),
+ ]).map(f => f.features[0].id),
+ ['A', 'B', 'C']
+ );
+ });
+
+ function installAfter(id: string, ...installAfter: string[]): FeatureSet {
+ return {
+ sourceInformation: {
+ type: 'local-cache',
+ },
+ features: [{
+ id,
+ name: id,
+ installAfter,
+ value: true,
+ included: true,
+ }],
+ };
+ }
+});
\ No newline at end of file
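This diff does not include containerFeaturesOrder.ts itself, but the tests above pin down the contract of computeInstallationOrder. One implementation consistent with them (an illustrative sketch, not the shipped code): features that neither declare installAfter nor are referenced by one sort last; the rest are emitted in topological rounds, alphabetically within each round; a round that makes no progress indicates a cycle.

interface OrderNode { id: string; installAfter?: string[]; }

function computeOrderSketch(features: OrderNode[]): string[] {
	const ids = new Set(features.map(f => f.id));
	// soft dependency: installAfter entries naming no known feature are ignored
	const deps = (f: OrderNode) => (f.installAfter ?? []).filter(d => ids.has(d));
	const referenced = new Set(features.flatMap(deps));
	const connected = (f: OrderNode) => referenced.has(f.id) || deps(f).length > 0;
	const byId = (a: OrderNode, b: OrderNode) => a.id.localeCompare(b.id);

	let pending = features.filter(connected).sort(byId);
	const isolated = features.filter(f => !connected(f)).sort(byId);
	const done = new Set<string>();
	const result: string[] = [];
	while (pending.length) {
		const round = pending.filter(f => deps(f).every(d => done.has(d)));
		if (!round.length) {
			// the real implementation raises a ContainerError whose message mentions 'cyclic'
			throw new Error('Detected cyclic dependency between features.');
		}
		round.forEach(f => { result.push(f.id); done.add(f.id); });
		pending = pending.filter(f => !done.has(f.id));
	}
	return result.concat(isolated.map(f => f.id));
}

diff --git a/src/test/container-features/generateFeaturesConfig.offline.test.ts b/src/test/container-features/generateFeaturesConfig.offline.test.ts
index 74e6e9d09..605e18d1c 100644
--- a/src/test/container-features/generateFeaturesConfig.offline.test.ts
+++ b/src/test/container-features/generateFeaturesConfig.offline.test.ts
@@ -14,7 +14,7 @@ describe('validate (offline) generateFeaturesConfig()', function () {
// Setup
const env = { 'SOME_KEY': 'SOME_VAL' };
- const params = { extensionPath: '', output, env, persistedFolder: '' };
+ const params = { extensionPath: '', cwd: '', output, env, persistedFolder: '' };
// Mocha executes with the root of the project as the cwd.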
const localFeaturesFolder = (_: string) => {
@@ -38,15 +38,17 @@ describe('validate (offline) generateFeaturesConfig()', function () {
const tmpFolder: string = path.join(getLocalCacheFolder(), 'container-features', `${version}-${Date.now()}`);
await mkdirpLocal(tmpFolder);
+
const config: DevContainerConfig = {
configFilePath: URI.from({ 'scheme': 'https' }),
dockerFile: '.',
features: {
first: {
- 'version': 'latest',
- 'option1': true
+ 'version': 'latest'
+ },
+ second: {
+ 'value': true
},
- third: 'latest'
},
};
@@ -55,22 +57,16 @@ describe('validate (offline) generateFeaturesConfig()', function () {
assert.fail();
}
- const localFeatureSet = (featuresConfig?.featureSets.find(set => set.sourceInformation.type === 'local-cache'));
- assert.exists(localFeatureSet);
- assert.strictEqual(localFeatureSet?.features.length, 3);
+ assert.strictEqual(featuresConfig?.featureSets.length, 2);
- const first = localFeatureSet?.features.find((f) => f.id === 'first');
+ const first = featuresConfig.featureSets[0].features.find((f) => f.id === 'first');
assert.exists(first);
- const second = localFeatureSet?.features.find((f) => f.id === 'second');
+ const second = featuresConfig.featureSets[1].features.find((f) => f.id === 'second');
assert.exists(second);
- const third = localFeatureSet?.features.find((f) => f.id === 'third');
- assert.exists(third);
-
assert.isObject(first?.value);
- assert.isBoolean(second?.value);
- assert.isString(third?.value);
+ assert.isObject(second?.value);
// -- Test containerFeatures.ts helper functions
@@ -83,7 +79,11 @@ describe('validate (offline) generateFeaturesConfig()', function () {
// getFeatureLayers
const actualLayers = await getFeatureLayers(featuresConfig);
- const expectedLayers = `RUN cd /tmp/build-features/local-cache \\
+ const expectedLayers = `RUN cd /tmp/build-features/first_1 \\
+&& chmod +x ./install.sh \\
+&& ./install.sh
+
+RUN cd /tmp/build-features/second_2 \\
&& chmod +x ./install.sh \\
&& ./install.sh
@@ -93,4 +93,4 @@
-});
+});
\ No newline at end of file
diff --git a/src/test/container-features/helpers.offline.test.ts b/src/test/container-features/helpers.offline.test.ts
index 10e50325c..c33d811be 100644
--- a/src/test/container-features/helpers.offline.test.ts
+++ b/src/test/container-features/helpers.offline.test.ts
@@ -1,4 +1,5 @@
import { assert } from 'chai';
+import { DevContainerFeature } from '../../spec-configuration/configuration';
import { getSourceInfoString, parseFeatureIdentifier, SourceInformation } from '../../spec-configuration/containerFeaturesConfiguration';
import { getSafeId } from '../../spec-node/containerFeatures';
import { createPlainLog, LogLevel, makeLog } from '../../spec-utils/log';
@@ -34,120 +35,194 @@ describe('getIdSafe should return safe environment variable name', function () {
describe('validate function parseRemoteFeatureToDownloadUri', function () {
- // -- Valid
+ // -- Valid
it('should parse local features and return an undefined tarballUrl', async function () {
- const result = parseFeatureIdentifier('helloworld', output);
+ const feature: DevContainerFeature = {
+ id: 'helloworld',
+ options: {},
+ }
+
+ const result = parseFeatureIdentifier(output, feature);
assert.exists(result);
- assert.strictEqual(result?.id, 'helloworld');
+ assert.strictEqual(result?.features[0].id, 'helloworld');
assert.strictEqual(result?.sourceInformation.type, 'local-cache');
});
it('should parse gitHub without version', async function () {
- const
result = parseFeatureIdentifier('octocat/myfeatures/helloworld', output); + const feature: DevContainerFeature = { + id: 'octocat/myfeatures/helloworld', + options: {}, + } + const result = parseFeatureIdentifier(output, feature); assert.exists(result); - assert.strictEqual(result?.id, 'helloworld'); - assert.deepEqual(result?.sourceInformation, { - type: 'github-repo', - owner: 'octocat', - repo: 'myfeatures', - apiUri: 'https://api.github.com/repos/octocat/myfeatures/releases/latest', - unauthenticatedUri: 'https://github.com/octocat/myfeatures/releases/latest/download/devcontainer-features.tgz', - isLatest: true - }); + assert.strictEqual(result?.features[0].id, 'helloworld'); + assert.deepEqual(result?.sourceInformation, { type: 'github-repo', + owner: 'octocat', + repo: 'myfeatures', + apiUri: 'https://api.github.com/repos/octocat/myfeatures/releases/latest', + unauthenticatedUri: 'https://github.com/octocat/myfeatures/releases/latest/download', + isLatest: true + }); }); it('should parse gitHub with version', async function () { - const result = parseFeatureIdentifier('octocat/myfeatures/helloworld@v0.0.4', output); + const feature: DevContainerFeature = { + id: 'octocat/myfeatures/helloworld@v0.0.4', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.exists(result); - assert.strictEqual(result?.id, 'helloworld'); - assert.deepEqual(result?.sourceInformation, { - type: 'github-repo', - owner: 'octocat', - repo: 'myfeatures', - tag: 'v0.0.4', - apiUri: 'https://api.github.com/repos/octocat/myfeatures/releases/tags/v0.0.4', - unauthenticatedUri: 'https://github.com/octocat/myfeatures/releases/download/v0.0.4/devcontainer-features.tgz', - isLatest: false - }); + assert.strictEqual(result?.features[0].id, 'helloworld'); + assert.deepEqual(result?.sourceInformation, { type: 'github-repo', + owner: 'octocat', + repo: 'myfeatures', + tag: 'v0.0.4', + apiUri: 'https://api.github.com/repos/octocat/myfeatures/releases/tags/v0.0.4', + unauthenticatedUri: 'https://github.com/octocat/myfeatures/releases/download/v0.0.4', + isLatest: false + }); }); it('should parse generic tar', async function () { - const result = parseFeatureIdentifier('https://example.com/some/long/path/devcontainer-features.tgz#helloworld', output); + const feature: DevContainerFeature = { + id: 'https://example.com/some/long/path/devcontainer-features.tgz#helloworld', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.exists(result); - assert.strictEqual(result?.id, 'helloworld'); + assert.strictEqual(result?.features[0].id, 'helloworld'); assert.deepEqual(result?.sourceInformation, { type: 'direct-tarball', tarballUri: 'https://example.com/some/long/path/devcontainer-features.tgz' }); }); it('should parse when provided a local-filesystem relative path', async function () { - const result = parseFeatureIdentifier('./some/long/path/to/features#helloworld', output); - assert.notExists(result); - // assert.exists(result); - // assert.strictEqual(result?.id, 'helloworld'); - // assert.deepEqual(result?.sourceInformation, { type: 'file-path', filePath: './some/long/path/to/features', isRelative: true }); + const feature: DevContainerFeature = { + id: './some/long/path/to/helloworld', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); + assert.exists(result); + assert.strictEqual(result?.features[0].id, 'helloworld'); + assert.deepEqual(result?.sourceInformation, { type: 'file-path', filePath: './some/long/path/to/helloworld', 
isRelative: true }); }); it('should parse when provided a local-filesystem relative path, starting with ../', async function () { - const result = parseFeatureIdentifier('../some/long/path/to/features#helloworld', output); - assert.notExists(result); - // assert.exists(result); - // assert.strictEqual(result?.id, 'helloworld'); - // assert.deepEqual(result?.sourceInformation, { type: 'file-path', filePath: '../some/long/path/to/features', isRelative: true }); + const feature: DevContainerFeature = { + id: '../some/long/path/to/helloworld', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); + + assert.exists(result); + assert.strictEqual(result?.features[0].id, 'helloworld'); + assert.deepEqual(result?.sourceInformation, { type: 'file-path', filePath: '../some/long/path/to/helloworld', isRelative: true }); }); it('should parse when provided a local-filesystem absolute path', async function () { - const result = parseFeatureIdentifier('/some/long/path/to/features#helloworld', output); - assert.notExists(result); - // assert.exists(result); - // assert.strictEqual(result?.id, 'helloworld'); - // assert.deepEqual(result?.sourceInformation, { type: 'file-path', filePath: '/some/long/path/to/features', isRelative: false }); + const feature: DevContainerFeature = { + id: '/some/long/path/to/helloworld', + options: {}, + } + const result = parseFeatureIdentifier(output, feature); + assert.exists(result); + assert.strictEqual(result?.features[0].id, 'helloworld'); + assert.deepEqual(result?.sourceInformation, { type: 'file-path', filePath: '/some/long/path/to/helloworld', isRelative: false }); }); // -- Invalid it('should fail parsing a generic tar with no feature and trailing slash', async function () { - const result = parseFeatureIdentifier('https://example.com/some/long/path/devcontainer-features.tgz/', output); + const feature: DevContainerFeature = { + id: 'https://example.com/some/long/path/devcontainer-features.tgz/', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.notExists(result); }); it('should not parse gitHub without triple slash', async function () { - const result = parseFeatureIdentifier('octocat/myfeatures#helloworld', output); + const feature: DevContainerFeature = { + id: 'octocat/myfeatures#helloworld', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.notExists(result); }); it('should fail parsing a generic tar with no feature and no trailing slash', async function () { - const result = parseFeatureIdentifier('https://example.com/some/long/path/devcontainer-features.tgz', output); + const feature: DevContainerFeature = { + id: 'https://example.com/some/long/path/devcontainer-features.tgz', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.notExists(result); }); it('should fail parsing a generic tar with a hash but no feature', async function () { - const result = parseFeatureIdentifier('https://example.com/some/long/path/devcontainer-features.tgz#', output); + const feature: DevContainerFeature = { + id: 'https://example.com/some/long/path/devcontainer-features.tgz#', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.notExists(result); }); it('should fail parsing a marketplace shorthand with only two segments and a hash with no feature', async function () { - const result = parseFeatureIdentifier('octocat/myfeatures#', output); + const feature: DevContainerFeature = { + id: 'octocat/myfeatures#', + 
options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.notExists(result); }); it('should fail parsing a marketplace shorthand with only two segments (no feature)', async function () { - const result = parseFeatureIdentifier('octocat/myfeatures', output); + const feature: DevContainerFeature = { + id: 'octocat/myfeatures', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.notExists(result); }); it('should fail parsing a marketplace shorthand with an invalid feature name (1)', async function () { - const result = parseFeatureIdentifier('octocat/myfeatures/@mycoolfeature', output); + const feature: DevContainerFeature = { + id: 'octocat/myfeatures/@mycoolfeature', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.notExists(result); }); it('should fail parsing a marketplace shorthand with an invalid feature name (2)', async function () { - const result = parseFeatureIdentifier('octocat/myfeatures/MY_$UPER_COOL_FEATURE', output); + const feature: DevContainerFeature = { + id: 'octocat/myfeatures/MY_$UPER_COOL_FEATURE', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.notExists(result); }); it('should fail parsing a marketplace shorthand with only two segments, no hash, and with a version', async function () { - const result = parseFeatureIdentifier('octocat/myfeatures@v0.0.1', output); + const feature: DevContainerFeature = { + id: 'octocat/myfeatures@v0.0.1', + options: {}, + } + + const result = parseFeatureIdentifier(output, feature); assert.notExists(result); }); }); @@ -160,7 +235,7 @@ describe('validate function getSourceInfoString', function () { type: 'local-cache' }; const output = getSourceInfoString(srcInfo); - assert.strictEqual(output, 'local-cache'); + assert.include(output, 'local-cache'); }); it('should work for github-repo without a tag (implicit latest)', async function () { @@ -170,10 +245,10 @@ describe('validate function getSourceInfoString', function () { repo: 'mobileapp', isLatest: true, apiUri: 'https://api.github.com/repos/bob/mobileapp/releases/latest', - unauthenticatedUri: 'https://github.com/bob/mobileapp/releases/latest/download/devcontainer-features.tgz' + unauthenticatedUri: 'https://github.com/bob/mobileapp/releases/latest/download' }; const output = getSourceInfoString(srcInfo); - assert.strictEqual(output, 'github-bob-mobileapp-latest'); + assert.include(output, 'github-bob-mobileapp-latest'); }); it('should work for github-repo with a tag', async function () { @@ -184,9 +259,9 @@ describe('validate function getSourceInfoString', function () { tag: 'v0.0.4', isLatest: false, apiUri: 'https://api.github.com/repos/bob/mobileapp/releases/tags/v0.0.4', - unauthenticatedUri: 'https://github.com/bob/mobileapp/releases/download/v0.0.4/devcontainer-features.tgz' + unauthenticatedUri: 'https://github.com/bob/mobileapp/releases/download/v0.0.4' }; const output = getSourceInfoString(srcInfo); - assert.strictEqual(output, 'github-bob-mobileapp-v0.0.4'); + assert.include(output, 'github-bob-mobileapp-v0.0.4'); }); }); \ No newline at end of file diff --git a/tsconfig.base.json b/tsconfig.base.json index 77b55edf1..2bfc3ce9a 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -13,6 +13,7 @@ "noUnusedLocals": true, "noUnusedParameters": true, "useUnknownInCatchVariables": false, - "newLine": "LF" + "newLine": "LF", + "sourceMap": true } } \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index acf2e145e..ba8b7f5a2 
100644 --- a/yarn.lock +++ b/yarn.lock @@ -151,6 +151,13 @@ resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-9.1.0.tgz#baf17ab2cca3fcce2d322ebc30454bff487efad5" integrity sha512-QCWHkbMv4Y5U9oW10Uxbr45qMMSzl4OzijsozynUAgx3kEHUdXB00udx2dWDQ7f2TU2a2uuiFaRZjCe3unPpeg== +"@types/ncp@^2.0.5": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@types/ncp/-/ncp-2.0.5.tgz#5c53b229a321946102a188b603306162137f4fb9" + integrity sha512-ocK0p8JuFmX7UkMabFPjY0F7apPvQyLWt5qtdvuvQEBz9i4m2dbzV+6L1zNaUp042RfnL6pHnxDE53OH6XQ9VQ== + dependencies: + "@types/node" "*" + "@types/node@*": version "14.14.41" resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.41.tgz#d0b939d94c1d7bd53d04824af45f1139b8c45615" @@ -1984,6 +1991,11 @@ natural-compare@^1.4.0: resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= +ncp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ncp/-/ncp-2.0.0.tgz#195a21d6c46e361d2fb1281ba38b91e9df7bdbb3" + integrity sha1-GVoh1sRuNh0vsSgbo4uR6d9727M= + nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
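Taken together, the reworked helpers.offline tests document the feature-identifier grammar that parseFeatureIdentifier now resolves from a DevContainerFeature. A condensed classifier capturing the same rules (hypothetical illustration; the real logic lives in containerFeaturesConfiguration.ts):

type ParsedSource =
	| { type: 'local-cache'; id: string }
	| { type: 'file-path'; id: string; filePath: string; isRelative: boolean }
	| { type: 'direct-tarball'; id: string; tarballUri: string }
	| { type: 'github-repo'; id: string; owner: string; repo: string; tag?: string };

function classifyFeatureId(id: string): ParsedSource | undefined {
	if (id.startsWith('http://') || id.startsWith('https://')) {
		// generic tarball: requires a '#feature' suffix naming the feature inside the archive
		const [uri, feature] = id.split('#');
		return feature && uri.endsWith('.tgz') ? { type: 'direct-tarball', id: feature, tarballUri: uri } : undefined;
	}
	if (id.startsWith('./') || id.startsWith('../') || id.startsWith('/')) {
		// local folder: the last path segment doubles as the feature id
		return { type: 'file-path', id: id.split('/').pop()!, filePath: id, isRelative: !id.startsWith('/') };
	}
	const segments = id.split('/');
	if (segments.length === 1) {
		return { type: 'local-cache', id };
	}
	if (segments.length === 3) {
		// 'owner/repo/feature' with an optional '@tag'; feature names are plain word characters
		const [owner, repo, featureRef] = segments;
		const [feature, tag] = featureRef.split('@');
		return /^[\w-]+$/.test(feature) ? { type: 'github-repo', id: feature, owner, repo, tag } : undefined;
	}
	return undefined; // e.g. 'octocat/myfeatures' or 'octocat/myfeatures@v0.0.1'
}

Note how the getSourceInfoString and unauthenticatedUri assertions above also reflect that release assets are now addressed by download directory ('releases/latest/download') rather than by the single devcontainer-features.tgz asset name.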