Skip to content

Commit

Permalink
feat: log re-optimization reasons (#15339)
Browse files Browse the repository at this point in the history
  • Loading branch information
antfu authored Dec 13, 2023
1 parent 2b39fe6 commit b1a6c84
Showing 1 changed file with 75 additions and 32 deletions.
107 changes: 75 additions & 32 deletions packages/vite/src/node/optimizer/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -191,6 +191,16 @@ export interface DepOptimizationMetadata {
* This is checked on server startup to avoid unnecessary re-bundles.
*/
hash: string
/**
* This hash is determined by dependency lockfiles.
* This is checked on server startup to avoid unnecessary re-bundles.
*/
lockfileHash: string
/**
* This hash is determined by user config.
* This is checked on server startup to avoid unnecessary re-bundles.
*/
configHash: string
/**
* The browser hash is determined by the main hash plus additional dependencies
* discovered at runtime. This is used to invalidate browser requests to
Expand Down Expand Up @@ -310,9 +320,11 @@ export function initDepsOptimizerMetadata(
ssr: boolean,
timestamp?: string,
): DepOptimizationMetadata {
const hash = getDepHash(config, ssr)
const { lockfileHash, configHash, hash } = getDepHash(config, ssr)
return {
hash,
lockfileHash,
configHash,
browserHash: getOptimizedBrowserHash(hash, {}, timestamp),
optimized: {},
chunks: {},
Expand Down Expand Up @@ -363,11 +375,21 @@ export async function loadCachedDepOptimizationMetadata(
)
} catch (e) {}
// hash is consistent, no need to re-bundle
if (cachedMetadata && cachedMetadata.hash === getDepHash(config, ssr)) {
log?.('Hash is consistent. Skipping. Use --force to override.')
// Nothing to commit or cancel as we are using the cache, we only
// need to resolve the processing promise so requests can move on
return cachedMetadata
if (cachedMetadata) {
if (cachedMetadata.lockfileHash !== getLockfileHash(config, ssr)) {
config.logger.info(
'Re-optimizing dependencies because lockfile has changed',
)
} else if (cachedMetadata.configHash !== getConfigHash(config, ssr)) {
config.logger.info(
'Re-optimizing dependencies because vite config has changed',
)
} else {
log?.('Hash is consistent. Skipping. Use --force to override.')
// Nothing to commit or cancel as we are using the cache, we only
// need to resolve the processing promise so requests can move on
return cachedMetadata
}
}
} else {
config.logger.info('Forced re-optimization of dependencies')
Expand Down Expand Up @@ -417,7 +439,7 @@ export function toDiscoveredDependencies(
timestamp?: string,
): Record<string, OptimizedDepInfo> {
const browserHash = getOptimizedBrowserHash(
getDepHash(config, ssr),
getDepHash(config, ssr).hash,
deps,
timestamp,
)
Expand Down Expand Up @@ -975,17 +997,15 @@ function parseDepsOptimizerMetadata(
jsonMetadata: string,
depsCacheDir: string,
): DepOptimizationMetadata | undefined {
const { hash, browserHash, optimized, chunks } = JSON.parse(
jsonMetadata,
(key: string, value: string) => {
const { hash, lockfileHash, configHash, browserHash, optimized, chunks } =
JSON.parse(jsonMetadata, (key: string, value: string) => {
// Paths can be absolute or relative to the deps cache dir where
// the _metadata.json is located
if (key === 'file' || key === 'src') {
return normalizePath(path.resolve(depsCacheDir, value))
}
return value
},
)
})
if (
!chunks ||
Object.values(optimized).some((depInfo: any) => !depInfo.fileHash)
Expand All @@ -995,6 +1015,8 @@ function parseDepsOptimizerMetadata(
}
const metadata = {
hash,
lockfileHash,
configHash,
browserHash,
optimized: {},
discovered: {},
Expand Down Expand Up @@ -1029,10 +1051,13 @@ function stringifyDepsOptimizerMetadata(
metadata: DepOptimizationMetadata,
depsCacheDir: string,
) {
const { hash, browserHash, optimized, chunks } = metadata
const { hash, configHash, lockfileHash, browserHash, optimized, chunks } =
metadata
return JSON.stringify(
{
hash,
configHash,
lockfileHash,
browserHash,
optimized: Object.fromEntries(
Object.values(optimized).map(
Expand Down Expand Up @@ -1187,27 +1212,11 @@ const lockfileFormats = [
})
const lockfileNames = lockfileFormats.map((l) => l.name)

export function getDepHash(config: ResolvedConfig, ssr: boolean): string {
const lockfilePath = lookupFile(config.root, lockfileNames)
let content = lockfilePath ? fs.readFileSync(lockfilePath, 'utf-8') : ''
if (lockfilePath) {
const lockfileName = path.basename(lockfilePath)
const { checkPatches } = lockfileFormats.find(
(f) => f.name === lockfileName,
)!
if (checkPatches) {
// Default of https://github.com/ds300/patch-package
const fullPath = path.join(path.dirname(lockfilePath), 'patches')
const stat = tryStatSync(fullPath)
if (stat?.isDirectory()) {
content += stat.mtimeMs.toString()
}
}
}
// also take config into account
function getConfigHash(config: ResolvedConfig, ssr: boolean): string {
// Take config into account
// only a subset of config options that can affect dep optimization
const optimizeDeps = getDepOptimizationConfig(config, ssr)
content += JSON.stringify(
const content = JSON.stringify(
{
mode: process.env.NODE_ENV || config.mode,
root: config.root,
Expand Down Expand Up @@ -1238,6 +1247,40 @@ export function getDepHash(config: ResolvedConfig, ssr: boolean): string {
return getHash(content)
}

/**
 * Hash the contents of the nearest lockfile (npm/yarn/pnpm/bun) so that a
 * dependency change invalidates the optimized-deps cache.
 *
 * For package managers whose lockfile format supports patching (see
 * `lockfileFormats`), the mtime of the sibling `patches` directory — the
 * default location used by https://github.com/ds300/patch-package — is
 * folded into the hashed content as well, so editing a patch also triggers
 * a re-optimization.
 *
 * @param config - resolved Vite config; `config.root` is where the lockfile
 *   lookup starts
 * @param ssr - unused here; kept so the signature mirrors `getConfigHash`
 * @returns hash string of the lockfile (plus patches-dir mtime, if any);
 *   hash of the empty string when no lockfile is found
 */
function getLockfileHash(config: ResolvedConfig, ssr: boolean): string {
  const lockfilePath = lookupFile(config.root, lockfileNames)
  let content = lockfilePath ? fs.readFileSync(lockfilePath, 'utf-8') : ''
  if (lockfilePath) {
    // Safe `!`: lockfileNames is derived from lockfileFormats, so the
    // basename of any found lockfile is guaranteed to match an entry.
    const format = lockfileFormats.find(
      (f) => f.name === path.basename(lockfilePath),
    )!
    if (format.checkPatches) {
      // Default of https://github.com/ds300/patch-package
      const patchesDir = path.join(path.dirname(lockfilePath), 'patches')
      const stat = tryStatSync(patchesDir)
      if (stat?.isDirectory()) {
        content += stat.mtimeMs.toString()
      }
    }
  }
  return getHash(content)
}

/**
 * Compute the three hashes that gate dependency re-optimization:
 *
 * - `lockfileHash`: changes when the lockfile (or patches dir) changes
 * - `configHash`: changes when the dep-optimization-relevant config changes
 * - `hash`: combined hash of the two above, the overall cache key
 *
 * Keeping the two inputs separate lets the caller log *why* a re-bundle
 * was triggered instead of only detecting that one is needed.
 *
 * @param config - resolved Vite config
 * @param ssr - whether hashing for the SSR environment
 * @returns the combined hash together with its two constituents
 */
function getDepHash(
  config: ResolvedConfig,
  ssr: boolean,
): { lockfileHash: string; configHash: string; hash: string } {
  const lockfileHash = getLockfileHash(config, ssr)
  const configHash = getConfigHash(config, ssr)
  return {
    hash: getHash(lockfileHash + configHash),
    lockfileHash,
    configHash,
  }
}

function getOptimizedBrowserHash(
hash: string,
deps: Record<string, string>,
Expand Down

0 comments on commit b1a6c84

Please sign in to comment.