feat: log re-optimization reasons #15339

Merged · 6 commits · Dec 13, 2023
107 changes: 75 additions & 32 deletions packages/vite/src/node/optimizer/index.ts
@@ -191,6 +191,16 @@ export interface DepOptimizationMetadata {
    * This is checked on server startup to avoid unnecessary re-bundles.
    */
   hash: string
+  /**
+   * This hash is determined by dependency lockfiles.
+   * This is checked on server startup to avoid unnecessary re-bundles.
+   */
+  lockfileHash: string
+  /**
+   * This hash is determined by user config.
+   * This is checked on server startup to avoid unnecessary re-bundles.
+   */
+  configHash: string
   /**
    * The browser hash is determined by the main hash plus additional dependencies
    * discovered at runtime. This is used to invalidate browser requests to
@@ -310,9 +320,11 @@ export function initDepsOptimizerMetadata(
   ssr: boolean,
   timestamp?: string,
 ): DepOptimizationMetadata {
-  const hash = getDepHash(config, ssr)
+  const { lockfileHash, configHash, hash } = getDepHash(config, ssr)
   return {
     hash,
+    lockfileHash,
+    configHash,
     browserHash: getOptimizedBrowserHash(hash, {}, timestamp),
     optimized: {},
     chunks: {},
@@ -363,11 +375,21 @@ export async function loadCachedDepOptimizationMetadata(
       )
     } catch (e) {}
     // hash is consistent, no need to re-bundle
-    if (cachedMetadata && cachedMetadata.hash === getDepHash(config, ssr)) {
-      log?.('Hash is consistent. Skipping. Use --force to override.')
-      // Nothing to commit or cancel as we are using the cache, we only
-      // need to resolve the processing promise so requests can move on
-      return cachedMetadata
+    if (cachedMetadata) {
+      if (cachedMetadata.lockfileHash !== getLockfileHash(config, ssr)) {
+        config.logger.info(
+          'Re-optimizing dependencies because lockfile has changed',
+        )
+      } else if (cachedMetadata.configHash !== getConfigHash(config, ssr)) {
+        config.logger.info(
+          'Re-optimizing dependencies because vite config has changed',
+        )
+      } else {

Comment on lines +379 to +387 (Member):
Just a note: we don't need to account for the transition when cachedMetadata.lockfileHash and cachedMetadata.configHash are undefined. In that case, the log will be that the lockfile has changed (because of the Vite update), so we are good.

+        log?.('Hash is consistent. Skipping. Use --force to override.')
+        // Nothing to commit or cancel as we are using the cache, we only
+        // need to resolve the processing promise so requests can move on
+        return cachedMetadata
+      }
     }
   } else {
     config.logger.info('Forced re-optimization of dependencies')
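
The reviewer's note above can be illustrated with a minimal TypeScript sketch (not part of this diff; all names and values below are invented for illustration). Metadata written by an older Vite carries no lockfileHash, so the strict inequality against the freshly computed hash is true and the lockfile reason is logged once after an upgrade:

// Illustrative sketch only -- not part of this diff.
interface CachedMeta {
  hash: string
  lockfileHash?: string // absent in metadata written by an older Vite
  configHash?: string
}

const cachedMetadata: CachedMeta = { hash: 'a1b2c3d4' } // pre-upgrade cache
const currentLockfileHash = 'e5f6a7b8' // stands in for getLockfileHash()

if (cachedMetadata.lockfileHash !== currentLockfileHash) {
  // undefined !== 'e5f6a7b8', so the first branch fires and the user sees
  // the "lockfile has changed" reason right after upgrading Vite
  console.info('Re-optimizing dependencies because lockfile has changed')
}
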
@@ -417,7 +439,7 @@ export function toDiscoveredDependencies(
   timestamp?: string,
 ): Record<string, OptimizedDepInfo> {
   const browserHash = getOptimizedBrowserHash(
-    getDepHash(config, ssr),
+    getDepHash(config, ssr).hash,
     deps,
     timestamp,
   )
@@ -975,17 +997,15 @@ function parseDepsOptimizerMetadata(
   jsonMetadata: string,
   depsCacheDir: string,
 ): DepOptimizationMetadata | undefined {
-  const { hash, browserHash, optimized, chunks } = JSON.parse(
-    jsonMetadata,
-    (key: string, value: string) => {
+  const { hash, lockfileHash, configHash, browserHash, optimized, chunks } =
+    JSON.parse(jsonMetadata, (key: string, value: string) => {
       // Paths can be absolute or relative to the deps cache dir where
       // the _metadata.json is located
       if (key === 'file' || key === 'src') {
         return normalizePath(path.resolve(depsCacheDir, value))
       }
       return value
-    },
-  )
+    })
   if (
     !chunks ||
     Object.values(optimized).some((depInfo: any) => !depInfo.fileHash)
@@ -995,6 +1015,8 @@
   }
   const metadata = {
     hash,
+    lockfileHash,
+    configHash,
     browserHash,
     optimized: {},
     discovered: {},
@@ -1029,10 +1051,13 @@ function stringifyDepsOptimizerMetadata(
   metadata: DepOptimizationMetadata,
   depsCacheDir: string,
 ) {
-  const { hash, browserHash, optimized, chunks } = metadata
+  const { hash, configHash, lockfileHash, browserHash, optimized, chunks } =
+    metadata
   return JSON.stringify(
     {
       hash,
+      configHash,
+      lockfileHash,
       browserHash,
       optimized: Object.fromEntries(
         Object.values(optimized).map(
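
For orientation, here is a hedged sketch of what the persisted _metadata.json could look like after this change; hash values are invented and the per-dependency entries are elided:

// Hypothetical example of the persisted metadata shape -- values invented.
const exampleMetadata = {
  hash: '2f1e0d9c', // combined hash: getHash(lockfileHash + configHash)
  configHash: '8a7b6c5d', // derived from the dep-optimization config subset
  lockfileHash: '4e3f2a1b', // derived from the lockfile (+ patches mtime)
  browserHash: '9c8d7e6f',
  optimized: {}, // per-dependency entries (file, fileHash, ...) elided
  chunks: {}, // shared chunk entries elided
}
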
@@ -1187,27 +1212,11 @@ const lockfileFormats = [
 })
 const lockfileNames = lockfileFormats.map((l) => l.name)
 
-export function getDepHash(config: ResolvedConfig, ssr: boolean): string {
-  const lockfilePath = lookupFile(config.root, lockfileNames)
-  let content = lockfilePath ? fs.readFileSync(lockfilePath, 'utf-8') : ''
-  if (lockfilePath) {
-    const lockfileName = path.basename(lockfilePath)
-    const { checkPatches } = lockfileFormats.find(
-      (f) => f.name === lockfileName,
-    )!
-    if (checkPatches) {
-      // Default of https://github.com/ds300/patch-package
-      const fullPath = path.join(path.dirname(lockfilePath), 'patches')
-      const stat = tryStatSync(fullPath)
-      if (stat?.isDirectory()) {
-        content += stat.mtimeMs.toString()
-      }
-    }
-  }
-  // also take config into account
+function getConfigHash(config: ResolvedConfig, ssr: boolean): string {
+  // Take config into account
   // only a subset of config options that can affect dep optimization
   const optimizeDeps = getDepOptimizationConfig(config, ssr)
-  content += JSON.stringify(
+  const content = JSON.stringify(
     {
       mode: process.env.NODE_ENV || config.mode,
       root: config.root,
@@ -1238,6 +1247,40 @@ export function getDepHash(config: ResolvedConfig, ssr: boolean): string {
   return getHash(content)
 }
 
+function getLockfileHash(config: ResolvedConfig, ssr: boolean): string {
+  const lockfilePath = lookupFile(config.root, lockfileNames)
+  let content = lockfilePath ? fs.readFileSync(lockfilePath, 'utf-8') : ''
+  if (lockfilePath) {
+    const lockfileName = path.basename(lockfilePath)
+    const { checkPatches } = lockfileFormats.find(
+      (f) => f.name === lockfileName,
+    )!
+    if (checkPatches) {
+      // Default of https://github.com/ds300/patch-package
+      const fullPath = path.join(path.dirname(lockfilePath), 'patches')
+      const stat = tryStatSync(fullPath)
+      if (stat?.isDirectory()) {
+        content += stat.mtimeMs.toString()
+      }
+    }
+  }
+  return getHash(content)
+}
+
+function getDepHash(
+  config: ResolvedConfig,
+  ssr: boolean,
+): { lockfileHash: string; configHash: string; hash: string } {
+  const lockfileHash = getLockfileHash(config, ssr)
+  const configHash = getConfigHash(config, ssr)
+  const hash = getHash(lockfileHash + configHash)
+  return {
+    hash,
+    lockfileHash,
+    configHash,
+  }
+}
+
 function getOptimizedBrowserHash(
   hash: string,
   deps: Record<string, string>,
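
Taken together, the refactor keeps the old invalidation behavior while enabling the new log messages: the combined hash changes whenever either input hash changes, so the single hash field still invalidates the cache as before, and the split only makes it possible to report which input changed. A rough standalone sketch of that relationship, using a stand-in for Vite's internal getHash helper (not the actual implementation):

// Standalone sketch; `getHash` here is an assumed content-hash helper.
import { createHash } from 'node:crypto'

const getHash = (text: string): string =>
  createHash('sha256').update(text).digest('hex').substring(0, 8)

const lockfileHash = getHash('...lockfile contents...')
const configHash = getHash('...relevant config serialized as JSON...')

// Changing either input changes `hash`, so cache invalidation is unchanged;
// only the reason reported to the user is new.
const hash = getHash(lockfileHash + configHash)
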