perf: non-blocking write of optimized dep files (#12603)
Co-authored-by: bluwy <[email protected]>
patak-dev and bluwy authored Mar 27, 2023
1 parent c881971 commit 2f5f968
Showing 5 changed files with 146 additions and 50 deletions.
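
The change follows one pattern: serve the freshly pre-bundled files from memory and flush them to disk in the background, dropping the in-memory copies once the write has settled. A minimal TypeScript sketch of that pattern, using hypothetical names (commitNonBlocking, inMemoryFiles, loadFile) rather than Vite's actual API:

import path from 'node:path'
import { promises as fsp } from 'node:fs'

// Files served from memory until the background write has settled.
const inMemoryFiles = new Map<string, string>()

function commitNonBlocking(cacheDir: string, files: Map<string, string>): void {
  // Make the new files available to readers immediately...
  files.forEach((content, name) => {
    inMemoryFiles.set(path.join(cacheDir, name), content)
  })
  // ...and write them to disk without blocking the caller.
  setTimeout(async () => {
    await fsp.mkdir(cacheDir, { recursive: true })
    await Promise.all(
      Array.from(files, ([name, content]) =>
        fsp.writeFile(path.join(cacheDir, name), content),
      ),
    )
    // Once everything is on disk, drop the in-memory copies after a grace period.
    setTimeout(() => {
      files.forEach((_, name) => inMemoryFiles.delete(path.join(cacheDir, name)))
    }, 5000)
  }, 0)
}

// Readers prefer the in-memory copy and fall back to disk.
async function loadFile(filePath: string): Promise<string> {
  return inMemoryFiles.get(filePath) ?? fsp.readFile(filePath, 'utf-8')
}
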
163 changes: 131 additions & 32 deletions packages/vite/src/node/optimizer/index.ts
@@ -27,6 +27,7 @@ import { transformWithEsbuild } from '../plugins/esbuild'
import { ESBUILD_MODULES_TARGET } from '../constants'
import { resolvePackageData } from '../packages'
import type { ViteDevServer } from '../server'
import type { Logger } from '../logger'
import { esbuildCjsExternalPlugin, esbuildDepPlugin } from './esbuildDepPlugin'
import { scanImports } from './scan'
export {
@@ -359,6 +360,11 @@ export async function loadCachedDepOptimizationMetadata(

const depsCacheDir = getDepsCacheDir(config, ssr)

// If the lock timed out, we cancel and return undefined
if (!(await waitOptimizerWriteLock(depsCacheDir, config.logger))) {
return
}

if (!force) {
let cachedMetadata: DepOptimizationMetadata | undefined
try {
@@ -587,50 +593,82 @@ export function runOptimizeDeps(
`Dependencies bundled in ${(performance.now() - start).toFixed(2)}ms`,
)

return {
metadata,
async commit() {
// Write this run of pre-bundled dependencies to the deps cache

// Get a list of old files in the deps directory to delete the stale ones
const oldFilesPaths: string[] = []
if (!fs.existsSync(depsCacheDir)) {
fs.mkdirSync(depsCacheDir, { recursive: true })
} else {
oldFilesPaths.push(
...(await fsp.readdir(depsCacheDir)).map((f) =>
path.join(depsCacheDir, f),
),
)
}
// Write this run of pre-bundled dependencies to the deps cache
async function commitFiles() {
// Get a list of old files in the deps directory to delete the stale ones
const oldFilesPaths: string[] = []
// File used to tell other processes that we're writing the deps cache directory
const writingFilePath = path.resolve(depsCacheDir, '_writing')

if (
!fs.existsSync(depsCacheDir) ||
!(await waitOptimizerWriteLock(depsCacheDir, config.logger)) // unlock timed out
) {
fs.mkdirSync(depsCacheDir, { recursive: true })
fs.writeFileSync(writingFilePath, '')
} else {
fs.writeFileSync(writingFilePath, '')
oldFilesPaths.push(
...(await fsp.readdir(depsCacheDir)).map((f) =>
path.join(depsCacheDir, f),
),
)
}

const newFilesPaths = new Set<string>()
const files: Promise<void>[] = []
const write = (filePath: string, content: string) => {
newFilesPaths.add(filePath)
files.push(fsp.writeFile(filePath, content))
}
const newFilesPaths = new Set<string>()
newFilesPaths.add(writingFilePath)
const files: Promise<void>[] = []
const write = (filePath: string, content: string | Uint8Array) => {
newFilesPaths.add(filePath)
files.push(fsp.writeFile(filePath, content))
}

// a hint for Node.js
// all files in the cache directory should be recognized as ES modules
write(
path.resolve(depsCacheDir, 'package.json'),
'{\n "type": "module"\n}\n',
)

write(
path.join(depsCacheDir, '_metadata.json'),
stringifyDepsOptimizerMetadata(metadata, depsCacheDir),
)

for (const outputFile of result.outputFiles!)
write(outputFile.path, outputFile.text)
for (const outputFile of result.outputFiles!)
write(outputFile.path, outputFile.contents)

// Clean up old files in the background
for (const filePath of oldFilesPaths)
if (!newFilesPaths.has(filePath)) fs.unlink(filePath, () => {}) // ignore errors

await Promise.all(files)

// Successful write
fsp.unlink(writingFilePath)

setTimeout(() => {
// Free up memory; these files aren't going to be re-requested because
// the requests are cached. If they are, they can be read from disk.
optimizedDepsCache.delete(metadata)
}, 5000)
}

return {
metadata,
async commit() {
// Keep the output files in memory while we write them to disk in the
// background. These files are going to be sent right away to the browser
optimizedDepsCache.set(
metadata,
new Map(
result.outputFiles!.map((f) => [normalizePath(f.path), f.text]),
),
)

// No need to wait, files are written in the background
setTimeout(commitFiles, 0)
},
cancel: () => {},
}
@@ -1291,3 +1329,64 @@ export async function optimizedDepNeedsInterop(
}
return depInfo?.needsInterop
}

const optimizedDepsCache = new WeakMap<
DepOptimizationMetadata,
Map<string, string>
>()
export async function loadOptimizedDep(
file: string,
depsOptimizer: DepsOptimizer,
): Promise<string> {
const outputFiles = optimizedDepsCache.get(depsOptimizer.metadata)
if (outputFiles) {
const outputFile = outputFiles.get(file)
if (outputFile) return outputFile
}
return fsp.readFile(file, 'utf-8')
}

/**
 * Processes that write to the deps cache directory add a `_writing` lock file
 * to inform other processes. Before doing any work on the directory, they can
 * wait for this file to be removed to know it's ready.
*
* Returns true if successfully waited for unlock, false if lock timed out.
*/
async function waitOptimizerWriteLock(depsCacheDir: string, logger: Logger) {
const writingPath = path.join(depsCacheDir, '_writing')
const tryAgainMs = 100

// if _writing exists, we wait for a maximum of 500ms before assuming something
// is not right
let maxWaitTime = 500
let waited = 0
let filesLength: number

while (fs.existsSync(writingPath)) {
// on the first run, we check the number of files it started with for later use
filesLength ??= (await fsp.readdir(depsCacheDir)).length

await new Promise((r) => setTimeout(r, tryAgainMs))
waited += tryAgainMs

if (waited >= maxWaitTime) {
const newFilesLength = (await fsp.readdir(depsCacheDir)).length

// after 500ms, if the number of files is the same, assume the previous process
// terminated and didn't clean up the `_writing` lock. Clear the directory.
if (filesLength === newFilesLength) {
logger.info('Outdated deps cache, forcing re-optimization...')
await fsp.rm(depsCacheDir, { recursive: true, force: true })
return false
}
// new files were saved, wait a bit longer to decide again.
else {
maxWaitTime += 500
filesLength = newFilesLength
}
}
}

return true
}
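
The `_writing` handshake above has two sides: the writer drops a lock file into the cache directory before writing and removes it once the write succeeds, while readers poll for the lock to disappear and treat a long-lived lock as stale. A condensed sketch with hypothetical helper names (writeWithLock, waitForLock), leaving out the file-count heuristic that waitOptimizerWriteLock uses to extend its deadline:

import fs from 'node:fs'
import path from 'node:path'

const LOCK_FILE = '_writing'

// Writer side: take the lock before touching the cache dir, drop it on success.
async function writeWithLock(
  cacheDir: string,
  write: () => Promise<void>,
): Promise<void> {
  const lockPath = path.join(cacheDir, LOCK_FILE)
  fs.mkdirSync(cacheDir, { recursive: true })
  fs.writeFileSync(lockPath, '')
  await write()
  await fs.promises.unlink(lockPath)
}

// Reader side: poll until the lock disappears; give up after a timeout so a
// crashed writer doesn't block the cache forever.
async function waitForLock(cacheDir: string, timeoutMs = 500): Promise<boolean> {
  const lockPath = path.join(cacheDir, LOCK_FILE)
  let waited = 0
  while (fs.existsSync(lockPath)) {
    if (waited >= timeoutMs) return false
    await new Promise((resolve) => setTimeout(resolve, 100))
    waited += 100
  }
  return true
}
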
2 changes: 1 addition & 1 deletion packages/vite/src/node/optimizer/optimizer.ts
@@ -195,7 +195,7 @@ async function createDepsOptimizer(
const deps: Record<string, string> = {}
await addManuallyIncludedOptimizeDeps(deps, config, ssr)

const discovered = await toDiscoveredDependencies(
const discovered = toDiscoveredDependencies(
config,
deps,
ssr,
11 changes: 7 additions & 4 deletions packages/vite/src/node/plugins/optimizedDeps.ts
@@ -1,10 +1,13 @@
import { promises as fs } from 'node:fs'
import colors from 'picocolors'
import type { ResolvedConfig } from '..'
import type { Plugin } from '../plugin'
import { DEP_VERSION_RE } from '../constants'
import { cleanUrl, createDebugger } from '../utils'
import { getDepsOptimizer, optimizedDepInfoFromFile } from '../optimizer'
import {
getDepsOptimizer,
loadOptimizedDep,
optimizedDepInfoFromFile,
} from '../optimizer'

export const ERR_OPTIMIZE_DEPS_PROCESSING_ERROR =
'ERR_OPTIMIZE_DEPS_PROCESSING_ERROR'
@@ -67,7 +70,7 @@ export function optimizedDepsPlugin(config: ResolvedConfig): Plugin {
// load hooks to avoid race conditions, once processing is resolved,
// we are sure that the file has been properly saved to disk
try {
return await fs.readFile(file, 'utf-8')
return loadOptimizedDep(file, depsOptimizer)
} catch (e) {
// Outdated non-entry points (CHUNK), loaded after a rerun
throwOutdatedRequest(id)
@@ -128,7 +131,7 @@ export function optimizedDepsBuildPlugin(config: ResolvedConfig): Plugin {
// load hooks to avoid race conditions, once processing is resolved,
// we are sure that the file has been properly saved to disk

return await fs.readFile(file, 'utf-8')
return loadOptimizedDep(file, depsOptimizer)
},
}
}
15 changes: 5 additions & 10 deletions packages/vite/src/node/server/index.ts
@@ -341,10 +341,9 @@ export async function createServer(
): Promise<ViteDevServer> {
const config = await resolveConfig(inlineConfig, 'serve')

// start optimizer in the background
let depsOptimizerReady: Promise<void> | undefined
if (isDepsOptimizerEnabled(config, false)) {
depsOptimizerReady = initDepsOptimizer(config)
// start optimizer in the background, we still need to await the setup
await initDepsOptimizer(config)
}

const { root, server: serverConfig } = config
@@ -665,13 +664,9 @@

// when the optimizer is ready, hook server so that it can reload the page
// or invalidate the module graph when needed
if (depsOptimizerReady) {
depsOptimizerReady.then(() => {
const depsOptimizer = getDepsOptimizer(config)
if (depsOptimizer) {
depsOptimizer.server = server
}
})
const depsOptimizer = getDepsOptimizer(config)
if (depsOptimizer) {
depsOptimizer.server = server
}

if (!middlewareMode && httpServer) {
5 changes: 2 additions & 3 deletions packages/vite/src/node/server/middlewares/transform.ts
@@ -1,4 +1,3 @@
import { promises as fs } from 'node:fs'
import path from 'node:path'
import type { Connect } from 'dep-types/connect'
import colors from 'picocolors'
@@ -34,7 +33,7 @@ import {
ERR_OPTIMIZE_DEPS_PROCESSING_ERROR,
ERR_OUTDATED_OPTIMIZED_DEP,
} from '../../plugins/optimizedDeps'
import { getDepsOptimizer } from '../../optimizer'
import { getDepsOptimizer, loadOptimizedDep } from '../../optimizer'

const debugCache = createDebugger('vite:cache')
const isDebug = !!process.env.DEBUG
@@ -81,7 +80,7 @@ export function transformMiddleware(
ensureVolumeInPath(path.resolve(root, url.slice(1))),
)
try {
const map = await fs.readFile(mapFile, 'utf-8')
const map = await loadOptimizedDep(mapFile, depsOptimizer)
return send(req, res, map, 'json', {
headers: server.config.server.headers,
})
