From 8b1663cceb0e26a773e1e30d433d6c012d9258b8 Mon Sep 17 00:00:00 2001 From: Mary Date: Sat, 31 Jan 2026 14:40:40 +0700 Subject: [PATCH 1/2] wip --- app/bundler/events.ts | 6 + app/bundler/lib/bundler.ts | 257 +++++++++++ app/bundler/lib/errors.ts | 81 ++++ app/bundler/lib/fetch.test.ts | 272 +++++++++++ app/bundler/lib/fetch.ts | 210 +++++++++ app/bundler/lib/hoist.test.ts | 459 +++++++++++++++++++ app/bundler/lib/hoist.ts | 144 ++++++ app/bundler/lib/installed-packages.test.ts | 116 +++++ app/bundler/lib/installed-packages.ts | 126 ++++++ app/bundler/lib/module-type.test.ts | 230 ++++++++++ app/bundler/lib/module-type.ts | 395 ++++++++++++++++ app/bundler/lib/registry.ts | 103 +++++ app/bundler/lib/resolve.test.ts | 496 +++++++++++++++++++++ app/bundler/lib/resolve.ts | 308 +++++++++++++ app/bundler/lib/subpaths.ts | 289 ++++++++++++ app/bundler/lib/types.ts | 202 +++++++++ app/bundler/lib/utils.ts | 13 + app/bundler/lib/worker-entry.ts | 200 +++++++++ app/bundler/types.ts | 193 ++++++++ app/bundler/worker-client.ts | 136 ++++++ app/components/ImpactAnalyzer.vue | 141 ++++++ app/components/ImpactBundle.vue | 273 ++++++++++++ app/components/ImpactDependencyBar.vue | 208 +++++++++ app/components/ImpactInstall.vue | 172 +++++++ app/pages/[...package].vue | 44 +- app/pages/impact/[...path].vue | 143 ++++++ i18n/locales/en.json | 44 ++ nuxt.config.ts | 20 + package.json | 6 + pnpm-lock.yaml | 351 ++++++++++++++- 30 files changed, 5631 insertions(+), 7 deletions(-) create mode 100644 app/bundler/events.ts create mode 100644 app/bundler/lib/bundler.ts create mode 100644 app/bundler/lib/errors.ts create mode 100644 app/bundler/lib/fetch.test.ts create mode 100644 app/bundler/lib/fetch.ts create mode 100644 app/bundler/lib/hoist.test.ts create mode 100644 app/bundler/lib/hoist.ts create mode 100644 app/bundler/lib/installed-packages.test.ts create mode 100644 app/bundler/lib/installed-packages.ts create mode 100644 app/bundler/lib/module-type.test.ts create mode 100644 
app/bundler/lib/module-type.ts create mode 100644 app/bundler/lib/registry.ts create mode 100644 app/bundler/lib/resolve.test.ts create mode 100644 app/bundler/lib/resolve.ts create mode 100644 app/bundler/lib/subpaths.ts create mode 100644 app/bundler/lib/types.ts create mode 100644 app/bundler/lib/utils.ts create mode 100644 app/bundler/lib/worker-entry.ts create mode 100644 app/bundler/types.ts create mode 100644 app/bundler/worker-client.ts create mode 100644 app/components/ImpactAnalyzer.vue create mode 100644 app/components/ImpactBundle.vue create mode 100644 app/components/ImpactDependencyBar.vue create mode 100644 app/components/ImpactInstall.vue create mode 100644 app/pages/impact/[...path].vue diff --git a/app/bundler/events.ts b/app/bundler/events.ts new file mode 100644 index 000000000..a2bfd07e3 --- /dev/null +++ b/app/bundler/events.ts @@ -0,0 +1,6 @@ +import type { ProgressMessage } from './types' + +/** + * emitted during package initialization and bundling. + */ +export const progress = createEventHook() diff --git a/app/bundler/lib/bundler.ts b/app/bundler/lib/bundler.ts new file mode 100644 index 000000000..562ed9dff --- /dev/null +++ b/app/bundler/lib/bundler.ts @@ -0,0 +1,257 @@ +import { encodeUtf8, getUtf8Length } from '@atcute/uint8array' +import * as zstd from '@bokuweb/zstd-wasm' +import { rolldown } from '@rolldown/browser' +import { memfs } from '@rolldown/browser/experimental' + +import { progress } from '../events' +import type { BundleChunk, BundleOptions, BundleResult } from '../types' + +import { BundleError } from './errors' +import { analyzeModule } from './module-type' + +const { volume } = memfs! + +// #region helpers + +const VIRTUAL_ENTRY_ID = '\0virtual:entry' + +/** + * get compressed size using a compression stream. 
+ */ +async function getCompressedSize(code: string, format: CompressionFormat): Promise { + const { readable, writable } = new CompressionStream(format) + + { + const writer = writable.getWriter() + writer.write(encodeUtf8(code)) + writer.close() + } + + let size = 0 + { + const reader = readable.getReader() + while (true) { + // oxlint-disable-next-line no-await-in-loop + const { done, value: chunk } = await reader.read() + if (done) { + break + } + + size += chunk.byteLength + } + } + + return size +} + +/** + * get gzip size using compression stream. + */ +function getGzipSize(code: string): Promise { + return getCompressedSize(code, 'gzip') +} + +/** + * whether brotli compression is supported. + * - `undefined`: not yet checked + * - `true`: supported + * - `false`: not supported + */ +let isBrotliSupported: boolean | undefined + +/** + * get brotli size using compression stream, if supported. + * returns `undefined` if brotli is not supported by the browser. + */ +async function getBrotliSize(code: string): Promise { + if (isBrotliSupported === false) { + return undefined + } + + if (isBrotliSupported === undefined) { + try { + // @ts-expect-error 'brotli' is not in the type definition yet + const size = await getCompressedSize(code, 'brotli') + isBrotliSupported = true + return size + } catch { + isBrotliSupported = false + return undefined + } + } + + // @ts-expect-error 'brotli' is not in the type definition yet + return getCompressedSize(code, 'brotli') +} + +/** + * get zstd-compressed size using WASM. + */ +function getZstdSize(code: string): number { + const encoded = encodeUtf8(code) + const compressed = zstd.compress(encoded) + return compressed.byteLength +} + +// #endregion + +// #region core + +/** + * bundles a subpath from a package that's already loaded in rolldown's memfs. 
+ * + * @param packageName the package name (e.g., "react") + * @param subpath the export subpath to bundle (e.g., ".", "./utils") + * @param selectedExports specific exports to include, or null for all + * @param options bundling options + * @returns bundle result with chunks, sizes, and exported names + */ +export async function bundlePackage( + packageName: string, + subpath: string, + selectedExports: string[] | null, + options: BundleOptions, +): Promise { + // track whether module is CJS (set in load hook) + let isCjs = false + + // bundle with rolldown + const bundle = await rolldown({ + input: { main: VIRTUAL_ENTRY_ID }, + cwd: '/', + external: options.rolldown?.external, + plugins: [ + { + name: 'virtual-entry', + resolveId(id: string) { + if (id === VIRTUAL_ENTRY_ID) { + return id + } + }, + async load(id: string) { + if (id !== VIRTUAL_ENTRY_ID) { + return + } + + const importPath = subpath === '.' ? packageName : `${packageName}${subpath.slice(1)}` + + // resolve the entry module + const resolved = await this.resolve(importPath) + if (!resolved) { + throw new BundleError(`failed to resolve entry module: ${importPath}`) + } + + // JSON files only have a default export + if (resolved.id.endsWith('.json')) { + return `export { default } from '${importPath}';\n` + } + + // read the source file + let source: string + try { + source = volume.readFileSync(resolved.id, 'utf8') as string + } catch { + throw new BundleError(`failed to read entry module: ${resolved.id}`) + } + + // parse and analyze the module + let ast + try { + ast = this.parse(source) + } catch { + throw new BundleError(`failed to parse entry module: ${resolved.id}`) + } + + const moduleInfo = analyzeModule(ast) + isCjs = moduleInfo.type === 'cjs' + + // CJS modules can't be tree-shaken effectively, just re-export default + if (moduleInfo.type === 'cjs') { + return `export { default } from '${importPath}';\n` + } + + // unknown/side-effects only modules have no exports + if (moduleInfo.type === 
'unknown') { + return `export {} from '${importPath}';\n` + } + + // ESM module handling + if (selectedExports === null) { + // re-export everything + let code = `export * from '${importPath}';\n` + if (moduleInfo.hasDefaultExport) { + code += `export { default } from '${importPath}';\n` + } + return code + } + + // specific exports selected (empty array = export nothing) + // quote names to handle non-identifier exports + const quoted = selectedExports.map(e => JSON.stringify(e)) + return `export { ${quoted.join(', ')} } from '${importPath}';\n` + }, + }, + ], + }) + + const output = await bundle.generate({ + format: 'esm', + minify: options.rolldown?.minify ?? true, + }) + + // process all chunks + const rawChunks = output.output.filter(o => o.type === 'chunk') + + progress.trigger({ type: 'progress', kind: 'compress' }) + + const chunks: BundleChunk[] = await Promise.all( + rawChunks.map(async chunk => { + const code = chunk.code + const size = getUtf8Length(code) + const [gzipSize, brotliSize, zstdSize] = await Promise.all([ + getGzipSize(code), + getBrotliSize(code), + getZstdSize(code), + ]) + + return { + fileName: chunk.fileName, + code, + size, + gzipSize, + brotliSize, + zstdSize, + isEntry: chunk.isEntry, + exports: chunk.exports || [], + } + }), + ) + + // find entry chunk for exports + const entryChunk = chunks.find(c => c.isEntry) + if (!entryChunk) { + throw new BundleError('no entry chunk found in bundle output') + } + + // aggregate sizes + const totalSize = chunks.reduce((acc, c) => acc + c.size, 0) + const totalGzipSize = chunks.reduce((acc, c) => acc + c.gzipSize, 0) + const totalBrotliSize = isBrotliSupported + ? 
chunks.reduce((acc, c) => acc + c.brotliSize!, 0) + : undefined + const totalZstdSize = chunks.reduce((acc, c) => acc + c.zstdSize!, 0) + + await bundle.close() + + return { + chunks, + size: totalSize, + gzipSize: totalGzipSize, + brotliSize: totalBrotliSize, + zstdSize: totalZstdSize, + exports: entryChunk.exports, + isCjs, + } +} + +// #endregion diff --git a/app/bundler/lib/errors.ts b/app/bundler/lib/errors.ts new file mode 100644 index 000000000..b8149e587 --- /dev/null +++ b/app/bundler/lib/errors.ts @@ -0,0 +1,81 @@ +/** + * base class for all teardown errors. + */ +export class TeardownError extends Error { + constructor(message: string) { + super(message) + this.name = 'TeardownError' + } +} + +/** + * thrown when a package cannot be found in the registry. + */ +export class PackageNotFoundError extends TeardownError { + readonly packageName: string + readonly registry: string + + constructor(packageName: string, registry: string) { + super(`package not found: ${packageName}`) + this.name = 'PackageNotFoundError' + this.packageName = packageName + this.registry = registry + } +} + +/** + * thrown when no version of a package satisfies the requested range. + */ +export class NoMatchingVersionError extends TeardownError { + readonly packageName: string + readonly range: string + + constructor(packageName: string, range: string) { + super(`no version of ${packageName} satisfies ${range}`) + this.name = 'NoMatchingVersionError' + this.packageName = packageName + this.range = range + } +} + +/** + * thrown when a package specifier is malformed. + */ +export class InvalidSpecifierError extends TeardownError { + readonly specifier: string + + constructor(specifier: string, reason?: string) { + super( + reason ? `invalid specifier: ${specifier} (${reason})` : `invalid specifier: ${specifier}`, + ) + this.name = 'InvalidSpecifierError' + this.specifier = specifier + } +} + +/** + * thrown when a network request fails. 
+ */ +export class FetchError extends TeardownError { + readonly url: string + readonly status: number + readonly statusText: string + + constructor(url: string, status: number, statusText: string) { + super(`fetch failed: ${status} ${statusText}`) + this.name = 'FetchError' + this.url = url + this.status = status + this.statusText = statusText + } +} + +/** + * thrown when bundling fails. + */ +export class BundleError extends TeardownError { + constructor(message: string) { + super(message) + this.name = 'BundleError' + } +} diff --git a/app/bundler/lib/fetch.test.ts b/app/bundler/lib/fetch.test.ts new file mode 100644 index 000000000..d1ba73e28 --- /dev/null +++ b/app/bundler/lib/fetch.test.ts @@ -0,0 +1,272 @@ +import { Volume } from 'memfs' +import { describe, expect, it } from 'vitest' + +import { DEFAULT_EXCLUDE_PATTERNS, fetchPackagesToVolume } from './fetch' +import { hoist } from './hoist' +import { resolve } from './resolve' + +// #region fetchPackagesToVolume + +describe('fetchPackagesToVolume', () => { + describe('basic fetching', () => { + it('fetches and extracts a simple package with dependencies', async () => { + const result = await resolve(['is-odd@3.0.1'], { installPeers: false }) + const hoisted = hoist(result.roots) + const volume = new Volume() + await fetchPackagesToVolume(hoisted, volume) + + // verify is-odd + const isOddPackageJson = volume.readFileSync('/node_modules/is-odd/package.json', 'utf8') + const json = JSON.parse(isOddPackageJson as string) + expect(json.name).toBe('is-odd') + + // verify dependency (is-number) + const isNumberPackageJson = volume.readFileSync( + '/node_modules/is-number/package.json', + 'utf8', + ) + expect(isNumberPackageJson).toBeDefined() + }) + + it('respects concurrency limit', async () => { + const result = await resolve(['is-odd@3.0.1'], { installPeers: false }) + const hoisted = hoist(result.roots) + const volume = new Volume() + + await fetchPackagesToVolume(hoisted, volume, { concurrency: 1 }) + 
expect(Object.keys(volume.toJSON()).length).toBeGreaterThan(0) + }) + }) + + describe('scoped packages', () => { + it('fetches scoped packages correctly', async () => { + const result = await resolve(['@babel/parser@7.23.0'], { installPeers: false }) + const hoisted = hoist(result.roots) + const volume = new Volume() + await fetchPackagesToVolume(hoisted, volume) + + const packageJson = volume.readFileSync('/node_modules/@babel/parser/package.json', 'utf8') + const json = JSON.parse(packageJson as string) + expect(json.name).toBe('@babel/parser') + }) + }) + + describe('nested packages', () => { + it('handles version conflicts with nested packages', async () => { + // is-odd depends on is-number@6.x, but we also request is-number@7.x + const result = await resolve(['is-odd@3.0.1', 'is-number@7.0.0'], { installPeers: false }) + const hoisted = hoist(result.roots) + const volume = new Volume() + await fetchPackagesToVolume(hoisted, volume) + + // is-number@7 at root (explicitly requested) + const rootIsNumber = volume.readFileSync('/node_modules/is-number/package.json', 'utf8') + const rootJson = JSON.parse(rootIsNumber as string) + expect(rootJson.version).toBe('7.0.0') + + // is-number@6 nested under is-odd + const files = volume.toJSON() + const nestedIsNumber = Object.keys(files).find(p => + p.includes('/is-odd/node_modules/is-number/'), + ) + expect(nestedIsNumber).toBeDefined() + }) + }) + + describe('file exclusions', () => { + it('excludes files matching default patterns', async () => { + const result = await resolve(['is-odd@3.0.1'], { installPeers: false }) + const hoisted = hoist(result.roots) + const volume = new Volume() + await fetchPackagesToVolume(hoisted, volume) + + const files = volume.toJSON() + for (const path of Object.keys(files)) { + const filename = path.split('/').pop()! 
+ expect(filename.toUpperCase()).not.toMatch(/^README/) + expect(filename.toUpperCase()).not.toMatch(/^LICENSE/) + } + }) + + it('can disable exclusions with empty array', async () => { + const result = await resolve(['is-odd@3.0.1'], { installPeers: false }) + const hoisted = hoist(result.roots) + + const volumeNoExclude = new Volume() + await fetchPackagesToVolume(hoisted, volumeNoExclude, { exclude: [] }) + + const volumeWithExclude = new Volume() + await fetchPackagesToVolume(hoisted, volumeWithExclude) + + const noExcludeCount = Object.keys(volumeNoExclude.toJSON()).length + const withExcludeCount = Object.keys(volumeWithExclude.toJSON()).length + expect(noExcludeCount).toBeGreaterThanOrEqual(withExcludeCount) + }) + }) +}) + +// #endregion + +// #region unpackedSize calculation + +describe('unpackedSize calculation', () => { + it('populates unpackedSize from tarball when registry does not provide it', async () => { + // JSR packages don't have unpackedSize in registry metadata + const result = await resolve(['jsr:@luca/flag@1.0.1']) + const hoisted = hoist(result.roots) + const volume = new Volume() + + const rootNode = hoisted.root.get('@luca/flag')! + expect(rootNode.unpackedSize).toBeUndefined() + + await fetchPackagesToVolume(hoisted, volume) + + expect(rootNode.unpackedSize).toBeGreaterThan(0) + }) + + it('preserves registry-provided unpackedSize for npm packages', async () => { + const result = await resolve(['is-odd@3.0.1'], { installPeers: false }) + const hoisted = hoist(result.roots) + const volume = new Volume() + + const rootNode = hoisted.root.get('is-odd')! 
+ const registrySize = rootNode.unpackedSize + expect(registrySize).toBeGreaterThan(0) + + await fetchPackagesToVolume(hoisted, volume) + + expect(rootNode.unpackedSize).toBe(registrySize) + }) + + it('includes excluded files in size calculation', async () => { + const result = await resolve(['is-odd@3.0.1'], { installPeers: false }) + const hoisted = hoist(result.roots) + + // clear registry-provided size to force calculation from tarball + const rootNode = hoisted.root.get('is-odd')! + rootNode.unpackedSize = undefined + + const volume = new Volume() + await fetchPackagesToVolume(hoisted, volume) + + const extractedFiles = volume.toJSON() + const extractedSize = Object.values(extractedFiles).reduce( + (sum, content) => sum + (content as string).length, + 0, + ) + + // tarball size >= extracted size (includes excluded files) + expect(rootNode.unpackedSize).toBeGreaterThanOrEqual(extractedSize) + }) +}) + +// #endregion + +// #region DEFAULT_EXCLUDE_PATTERNS + +describe('DEFAULT_EXCLUDE_PATTERNS', () => { + describe('documentation files', () => { + it('matches README files', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('README.md'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('README'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('readme.txt'))).toBe(true) + }) + + it('matches LICENSE files', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('LICENSE'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('LICENSE.md'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('LICENCE'))).toBe(true) + }) + + it('matches CHANGELOG and HISTORY files', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('CHANGELOG.md'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('HISTORY.md'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('changelog.txt'))).toBe(true) + }) + + it('matches example directories', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => 
p.test('examples/basic.js'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('example/demo.ts'))).toBe(true) + }) + }) + + describe('test files', () => { + it('matches test directories', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('__tests__/foo.js'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('test/index.js'))).toBe(true) + }) + + it('matches test file patterns', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('foo.test.js'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('bar.spec.ts'))).toBe(true) + }) + }) + + describe('config files', () => { + it('matches TypeScript config files', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('tsconfig.json'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('tsconfig.build.json'))).toBe(true) + }) + + it('matches linter configs', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('.eslintrc'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('.eslintrc.json'))).toBe(true) + }) + + it('matches test runner configs', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('jest.config.js'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('vitest.config.ts'))).toBe(true) + }) + + it('matches dot directories', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('.github/workflows/ci.yml'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('.vscode/settings.json'))).toBe(true) + }) + }) + + describe('type definition files', () => { + it('matches TypeScript declaration files', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('index.d.ts'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('types.d.mts'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('lib/foo.d.cts'))).toBe(true) + }) + + it('matches Flow type files', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('index.js.flow'))).toBe(true) + 
expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('types.flow'))).toBe(true) + }) + + it('does not match TypeScript source files', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('index.ts'))).toBe(false) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('index.mts'))).toBe(false) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('index.cts'))).toBe(false) + }) + }) + + describe('source maps', () => { + it('matches source map files', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('index.js.map'))).toBe(true) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('dist/bundle.js.map'))).toBe(true) + }) + }) + + describe('source files (should NOT match)', () => { + it('does not match JavaScript/TypeScript source files', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('index.js'))).toBe(false) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('src/utils.ts'))).toBe(false) + }) + + it('does not match package.json', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('package.json'))).toBe(false) + }) + + it('does not match dist/lib directories', () => { + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('dist/index.js'))).toBe(false) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('lib/index.js'))).toBe(false) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('cjs/index.js'))).toBe(false) + expect(DEFAULT_EXCLUDE_PATTERNS.some(p => p.test('esm/index.js'))).toBe(false) + }) + }) +}) + +// #endregion diff --git a/app/bundler/lib/fetch.ts b/app/bundler/lib/fetch.ts new file mode 100644 index 000000000..4d08c0805 --- /dev/null +++ b/app/bundler/lib/fetch.ts @@ -0,0 +1,210 @@ +import { untar } from '@mary/tar' +import type { Volume } from 'memfs' + +import { progress } from '../events' + +import { FetchError } from './errors' +import type { HoistedNode, HoistedResult } from './types' + +/** + * options for fetching packages. 
+ */ +export interface FetchOptions { + /** max concurrent fetches (default 6) */ + concurrency?: number + /** regex patterns for files to exclude (matched against path after "package/" prefix is stripped) */ + exclude?: RegExp[] +} + +/** + * default patterns for files that are not needed for bundling. + * matches against the file path within the package (e.g., "README.md", "docs/guide.md") + */ +export const DEFAULT_EXCLUDE_PATTERNS: RegExp[] = [ + // docs and meta files + /^README(\..*)?$/i, + /^LICENSE(\..*)?$/i, + /^LICENCE(\..*)?$/i, + /^CHANGELOG(\..*)?$/i, + /^HISTORY(\..*)?$/i, + /^CONTRIBUTING(\..*)?$/i, + /^AUTHORS(\..*)?$/i, + /^SECURITY(\..*)?$/i, + /^CODE_OF_CONDUCT(\..*)?$/i, + /^\.github\//, + /^\.vscode\//, + /^\.idea\//, + /^docs?\//i, + + // test files + /^__tests__\//, + /^tests?\//i, + /^specs?\//i, + /\.(test|spec)\.[jt]sx?$/, + /\.stories\.[jt]sx?$/, + + // config files + /^\..+rc(\..*)?$/, + /^\.editorconfig$/, + /^\.gitignore$/, + /^\.npmignore$/, + /^\.eslint/, + /^\.prettier/, + /^tsconfig(\..+)?\.json$/, + /^jest\.config/, + /^vitest\.config/, + /^rollup\.config/, + /^webpack\.config/, + /^vite\.config/, + /^babel\.config/, + + // source maps (usually not needed in bundling) + /\.map$/, + + // typescript declaration files (not needed for bundling) + /\.d\.[cm]?ts$/, + + // flow type annotations + /\.js\.flow$/, + /\.flow$/, + + // example/demo directories + /^examples?\//i, +] + +/** + * fetches a tarball and writes its contents directly to a volume. + * handles gzip decompression and strips the "package/" prefix from paths. 
+ * + * @param url the tarball URL + * @param destPath the destination path in the volume (e.g., "/node_modules/react") + * @param volume the volume to write to + * @param exclude regex patterns for files to skip + * @returns the total size of extracted files in bytes + */ +async function fetchTarballToVolume( + url: string, + destPath: string, + volume: Volume, + exclude: RegExp[] = [], +): Promise { + const response = await fetch(url) + if (!response.ok) { + throw new FetchError(url, response.status, response.statusText) + } + + const body = response.body + if (!body) { + throw new FetchError(url, 0, 'response has no body') + } + + // decompress gzip -> extract tar + const decompressed = body.pipeThrough(new DecompressionStream('gzip')) + + let totalSize = 0 + + for await (const entry of untar(decompressed)) { + // skip directories + if (entry.type !== 'file') { + continue + } + + // count size from tar header for all files (including excluded ones) + totalSize += entry.size + + // npm tarballs have files under "package/" prefix - strip it + let path = entry.name + if (path.startsWith('package/')) { + path = path.slice(8) + } + + // check if file should be excluded (skip extraction but size already counted) + if (exclude.some(pattern => pattern.test(path))) { + continue + } + + const content = await entry.bytes() + const fullPath = `${destPath}/${path}` + + // ensure parent directories exist + const parentDir = fullPath.slice(0, fullPath.lastIndexOf('/')) + if (!volume.existsSync(parentDir)) { + volume.mkdirSync(parentDir, { recursive: true }) + } + + volume.writeFileSync(fullPath, content) + } + + return totalSize +} + +/** + * fetches all packages in a hoisted result and writes them to a volume. + * uses default exclude patterns to skip unnecessary files. 
+ * + * @param hoisted the hoisted package structure + * @param volume the volume to write to + * @param options fetch options + */ +export async function fetchPackagesToVolume( + hoisted: HoistedResult, + volume: Volume, + options: FetchOptions = {}, +): Promise { + const concurrency = options.concurrency ?? 6 + const exclude = options.exclude ?? DEFAULT_EXCLUDE_PATTERNS + const queue: Array<{ node: HoistedNode; basePath: string }> = [] + + // collect all nodes into a flat queue + function collectNodes(node: HoistedNode, basePath: string): void { + queue.push({ node, basePath }) + if (node.nested.size > 0) { + const nestedBasePath = `${basePath}/${node.name}/node_modules` + for (const nested of node.nested.values()) { + collectNodes(nested, nestedBasePath) + } + } + } + + for (const node of hoisted.root.values()) { + collectNodes(node, '/node_modules') + } + + // process queue with concurrency limit using a simple semaphore pattern + let index = 0 + let completed = 0 + const total = queue.length + + async function worker(): Promise { + while (true) { + const i = index++ + if (i >= queue.length) { + break + } + + const { node, basePath } = queue[i]! 
+ const packagePath = `${basePath}/${node.name}` + + // oxlint-disable-next-line no-await-in-loop + const extractedSize = await fetchTarballToVolume(node.tarball, packagePath, volume, exclude) + + // use extracted size if registry didn't provide unpackedSize (e.g., JSR packages) + if (node.unpackedSize === undefined) { + node.unpackedSize = extractedSize + } + + completed++ + progress.trigger({ + type: 'progress', + kind: 'fetch', + current: completed, + total, + name: node.name, + }) + } + } + + // start concurrent workers + const workers = Array.from({ length: Math.min(concurrency, queue.length) }, () => worker()) + await Promise.all(workers) +} diff --git a/app/bundler/lib/hoist.test.ts b/app/bundler/lib/hoist.test.ts new file mode 100644 index 000000000..8241c4e63 --- /dev/null +++ b/app/bundler/lib/hoist.test.ts @@ -0,0 +1,459 @@ +import { describe, expect, it } from 'vitest' + +import { hoist } from './hoist' +import type { HoistedNode, HoistedResult, ResolvedPackage } from './types' + +// #region test helpers + +/** + * creates a mock resolved package for testing. 
+ * supports: "name", "name@version", "@scope/name", "@scope/name@version" + */ +function pkg(spec: string, deps: ResolvedPackage[] = []): ResolvedPackage { + let name: string + let version = '1.0.0' + + if (spec.startsWith('@')) { + // scoped package: @scope/name or @scope/name@version + const slashIdx = spec.indexOf('/') + const atIdx = spec.indexOf('@', slashIdx) + if (atIdx === -1) { + name = spec + } else { + name = spec.slice(0, atIdx) + version = spec.slice(atIdx + 1) + } + } else if (spec.includes('@')) { + // unscoped with version: name@version + const atIdx = spec.indexOf('@') + name = spec.slice(0, atIdx) + version = spec.slice(atIdx + 1) + } else { + // unscoped without version: name + name = spec + } + + return { + name, + version, + tarball: `https://registry.npmjs.org/${name}/-/${name}-${version}.tgz`, + dependencies: new Map(deps.map(d => [d.name, d])), + } +} + +/** counts packages at root level vs nested */ +function countLevels(paths: string[]): { root: number; nested: number } { + let root = 0 + let nested = 0 + for (const path of paths) { + const depth = (path.match(/node_modules/g) || []).length + if (depth === 1) { + root++ + } else { + nested++ + } + } + return { root, nested } +} + +/** + * converts a hoisted result to a flat list of paths. + * useful for debugging and testing. 
+ * + * @param result the hoisted result + * @returns array of paths like ["node_modules/react", "node_modules/react/node_modules/scheduler"] + */ +function hoistedToPaths(result: HoistedResult): string[] { + const paths: string[] = [] + + function walk(nodes: Map, prefix: string): void { + for (const [name, node] of nodes) { + const path = `${prefix}/${name}` + paths.push(path) + if (node.nested.size > 0) { + walk(node.nested, `${path}/node_modules`) + } + } + } + + walk(result.root, 'node_modules') + // oxlint-disable-next-line unicorn/no-array-sort + return paths.sort() +} + +// #endregion + +// #region basic hoisting + +describe('hoist', () => { + describe('basic hoisting', () => { + it('hoists single dependency to root', () => { + // A -> B => both at root + const b = pkg('B') + const a = pkg('A', [b]) + const result = hoist([a]) + const paths = hoistedToPaths(result) + + expect(paths).toContain('node_modules/A') + expect(paths).toContain('node_modules/B') + expect(countLevels(paths)).toEqual({ root: 2, nested: 0 }) + }) + + it('hoists deep dependency chain to root', () => { + // A -> B -> C -> D => all at root + const d = pkg('D') + const c = pkg('C', [d]) + const b = pkg('B', [c]) + const a = pkg('A', [b]) + const result = hoist([a]) + + expect(countLevels(hoistedToPaths(result))).toEqual({ + root: 4, + nested: 0, + }) + }) + + it('deduplicates shared dependencies', () => { + // A -> C, B -> C => A, B, C all at root (C shared) + const c = pkg('C') + const a = pkg('A', [c]) + const b = pkg('B', [c]) + const result = hoist([a, b]) + const paths = hoistedToPaths(result) + + expect(countLevels(paths)).toEqual({ root: 3, nested: 0 }) + expect(paths.filter(p => p.includes('/C')).length).toBe(1) + }) + + it('handles package with no dependencies', () => { + const a = pkg('A') + const result = hoist([a]) + + expect(countLevels(hoistedToPaths(result))).toEqual({ + root: 1, + nested: 0, + }) + }) + + it('handles empty roots array', () => { + const result = hoist([]) + 
+ expect(result.root.size).toBe(0) + expect(hoistedToPaths(result)).toHaveLength(0) + }) + }) + + // #endregion + + // #region version conflicts + + describe('version conflicts', () => { + it('nests conflicting version under parent', () => { + // A -> B@1, B@2 (root) => B@2 at root, B@1 nested under A + const b1 = pkg('B@1.0.0') + const b2 = pkg('B@2.0.0') + const a = pkg('A', [b1]) + const result = hoist([a, b2]) + const paths = hoistedToPaths(result) + + expect(result.root.get('B')?.version).toBe('2.0.0') + expect(paths).toContain('node_modules/A/node_modules/B') + expect(countLevels(paths)).toEqual({ root: 2, nested: 1 }) + }) + + it('first transitive version wins at root level', () => { + // A -> B@1, C -> B@2 => B@1 hoisted (processed first), B@2 nested under C + const b1 = pkg('B@1.0.0') + const b2 = pkg('B@2.0.0') + const a = pkg('A', [b1]) + const c = pkg('C', [b2]) + const result = hoist([a, c]) + const paths = hoistedToPaths(result) + + expect(result.root.get('B')?.version).toBe('1.0.0') + expect(paths).toContain('node_modules/C/node_modules/B') + }) + + it('root package takes precedence over transitive', () => { + // A@1 (root), B -> A@2 => A@1 at root, A@2 nested under B + const a2 = pkg('A@2.0.0') + const b = pkg('B', [a2]) + const a1 = pkg('A@1.0.0') + const result = hoist([a1, b]) + const paths = hoistedToPaths(result) + + expect(result.root.get('A')?.version).toBe('1.0.0') + expect(paths).toContain('node_modules/B/node_modules/A') + }) + + it('later root package overwrites earlier for same name', () => { + // A@1, A@2 (both roots) => A@2 at root (last wins) + const a1 = pkg('A@1.0.0') + const a2 = pkg('A@2.0.0') + const result = hoist([a1, a2]) + + expect(result.root.get('A')?.version).toBe('2.0.0') + expect(countLevels(hoistedToPaths(result))).toEqual({ + root: 1, + nested: 0, + }) + }) + + it('handles diamond dependency with version conflict', () => { + // A -> C -> D@1, B -> C -> D@2 => C deduplicated, D version conflict handled + const d1 = 
pkg('D@1.0.0') + const d2 = pkg('D@2.0.0') + const c1 = pkg('C', [d1]) + const c2 = pkg('C', [d2]) + const a = pkg('A', [c1]) + const b = pkg('B', [c2]) + const result = hoist([a, b]) + const paths = hoistedToPaths(result) + + // C should be deduplicated (same version) + expect(paths.filter(p => p.endsWith('/C')).length).toBe(1) + }) + + it('preserves require chain when hoisting would break resolution', () => { + // A -> B -> C@1, C@2 (root) => C@2 at root, C@1 nested to satisfy B + const c1 = pkg('C@1.0.0') + const c2 = pkg('C@2.0.0') + const b = pkg('B', [c1]) + const a = pkg('A', [b]) + const result = hoist([a, c2]) + const paths = hoistedToPaths(result) + + expect(result.root.get('C')?.version).toBe('2.0.0') + expect( + paths.some(p => p.includes('/B/node_modules/C') || p.includes('/A/node_modules/C')), + ).toBe(true) + }) + }) + + // #endregion + + // #region cyclic dependencies + + describe('cyclic dependencies', () => { + it('handles simple cycle between two packages', () => { + // A <-> B (mutual dependency) + const a: ResolvedPackage = { + name: 'A', + version: '1.0.0', + tarball: 'https://example.com/a.tgz', + dependencies: new Map(), + } + const b: ResolvedPackage = { + name: 'B', + version: '1.0.0', + tarball: 'https://example.com/b.tgz', + dependencies: new Map([['A', a]]), + } + a.dependencies.set('B', b) + + const result = hoist([a]) + + expect(countLevels(hoistedToPaths(result))).toEqual({ + root: 2, + nested: 0, + }) + }) + + it('handles longer cycle (A -> B -> C -> A)', () => { + const a: ResolvedPackage = { + name: 'A', + version: '1.0.0', + tarball: 'https://example.com/a.tgz', + dependencies: new Map(), + } + const c: ResolvedPackage = { + name: 'C', + version: '1.0.0', + tarball: 'https://example.com/c.tgz', + dependencies: new Map([['A', a]]), + } + const b: ResolvedPackage = { + name: 'B', + version: '1.0.0', + tarball: 'https://example.com/b.tgz', + dependencies: new Map([['C', c]]), + } + a.dependencies.set('B', b) + + const result = 
hoist([a]) + + expect(countLevels(hoistedToPaths(result))).toEqual({ + root: 3, + nested: 0, + }) + }) + + it('handles self-dependency', () => { + const a: ResolvedPackage = { + name: 'A', + version: '1.0.0', + tarball: 'https://example.com/a.tgz', + dependencies: new Map(), + } + a.dependencies.set('A', a) + + const result = hoist([a]) + + expect(countLevels(hoistedToPaths(result))).toEqual({ + root: 1, + nested: 0, + }) + }) + }) + + // #endregion + + // #region deep nesting + + describe('deep nesting', () => { + it('handles multiple levels of conflicts', () => { + // A -> B@1 -> C@1, B@2 -> C@2, C@3 (root) + const c1 = pkg('C@1.0.0') + const c2 = pkg('C@2.0.0') + const c3 = pkg('C@3.0.0') + const b1 = pkg('B@1.0.0', [c1]) + const b2 = pkg('B@2.0.0', [c2]) + const a = pkg('A', [b1]) + const result = hoist([a, b2, c3]) + const paths = hoistedToPaths(result) + + expect(result.root.get('C')?.version).toBe('3.0.0') + expect(result.root.get('B')?.version).toBe('2.0.0') + expect(paths).toContain('node_modules/A/node_modules/B') + }) + + it('nests package that would shadow parent dependency', () => { + // A@1 -> B -> A@2 => A@1 at root, A@2 nested under B + const a2 = pkg('A@2.0.0') + const b = pkg('B', [a2]) + const a1 = pkg('A@1.0.0', [b]) + const result = hoist([a1]) + const paths = hoistedToPaths(result) + + expect(result.root.get('A')?.version).toBe('1.0.0') + expect(paths).toContain('node_modules/B/node_modules/A') + }) + + it('handles deeply nested version conflict chain', () => { + // A -> B@1 -> C -> D@1, E -> B@2 -> C -> D@2 + const d1 = pkg('D@1.0.0') + const d2 = pkg('D@2.0.0') + const c1 = pkg('C', [d1]) + const c2 = pkg('C', [d2]) + const b1 = pkg('B@1.0.0', [c1]) + const b2 = pkg('B@2.0.0', [c2]) + const a = pkg('A', [b1]) + const e = pkg('E', [b2]) + const result = hoist([a, e]) + const paths = hoistedToPaths(result) + + expect(paths).toContain('node_modules/A') + expect(paths).toContain('node_modules/E') + 
expect(result.root.get('B')?.version).toBe('1.0.0') + expect(paths).toContain('node_modules/E/node_modules/B') + }) + }) + + // #endregion + + // #region scoped packages + + describe('scoped packages', () => { + it('hoists scoped packages to root', () => { + const scopedDep = pkg('@scope/dep') + const a = pkg('A', [scopedDep]) + const result = hoist([a]) + const paths = hoistedToPaths(result) + + expect(paths).toContain('node_modules/@scope/dep') + expect(result.root.get('@scope/dep')?.version).toBe('1.0.0') + }) + + it('handles scoped and unscoped packages with same last segment', () => { + // @scope/foo and foo are different packages, no conflict + const scopedFoo = pkg('@scope/foo') + const foo = pkg('foo') + const result = hoist([scopedFoo, foo]) + const paths = hoistedToPaths(result) + + expect(paths).toContain('node_modules/@scope/foo') + expect(paths).toContain('node_modules/foo') + expect(countLevels(paths)).toEqual({ root: 2, nested: 0 }) + }) + + it('handles version conflicts in scoped packages', () => { + const scopedV1 = pkg('@scope/pkg@1.0.0') + const scopedV2 = pkg('@scope/pkg@2.0.0') + const a = pkg('A', [scopedV1]) + const result = hoist([a, scopedV2]) + const paths = hoistedToPaths(result) + + expect(result.root.get('@scope/pkg')?.version).toBe('2.0.0') + expect(paths).toContain('node_modules/A/node_modules/@scope/pkg') + }) + }) + + // #endregion + + // #region metadata tracking + + describe('metadata tracking', () => { + it('maintains correct dependency counts', () => { + const c = pkg('C') + const b = pkg('B', [c]) + const a = pkg('A', [b, c]) + const result = hoist([a]) + + expect(result.root.get('A')?.dependencyCount).toBe(2) + expect(result.root.get('B')?.dependencyCount).toBe(1) + expect(result.root.get('C')?.dependencyCount).toBe(0) + }) + + it('shared transitive at different depths is only hoisted once', () => { + // A -> C -> D, B -> D => D hoisted once + const d = pkg('D') + const c = pkg('C', [d]) + const a = pkg('A', [c]) + const b = 
pkg('B', [d]) + const result = hoist([a, b]) + const paths = hoistedToPaths(result) + + expect(paths.filter(p => p.endsWith('/D'))).toHaveLength(1) + }) + }) + + // #endregion + + // #region hoistedToPaths utility + + describe('hoistedToPaths', () => { + it('returns sorted paths', () => { + const c = pkg('C') + const b = pkg('B') + const a = pkg('A', [b, c]) + const result = hoist([a]) + const paths = hoistedToPaths(result) + + expect(paths).toEqual(paths.toSorted()) + }) + + it('includes nested paths with full hierarchy', () => { + const b2 = pkg('B@2.0.0') + const b1 = pkg('B@1.0.0') + const a = pkg('A', [b1]) + const result = hoist([a, b2]) + const paths = hoistedToPaths(result) + + expect(paths).toContain('node_modules/A') + expect(paths).toContain('node_modules/A/node_modules/B') + expect(paths).toContain('node_modules/B') + }) + }) + + // #endregion +}) diff --git a/app/bundler/lib/hoist.ts b/app/bundler/lib/hoist.ts new file mode 100644 index 000000000..974de56e8 --- /dev/null +++ b/app/bundler/lib/hoist.ts @@ -0,0 +1,144 @@ +import type { HoistedNode, HoistedResult, ResolvedPackage } from './types' + +/** + * attempts to place a package at the root level. + * returns true if placement succeeded, false if there's a conflict. 
+ * + * a conflict occurs when: + * - a different version of the same package is already at root + * + * @param root the current root node_modules map + * @param pkg the package to place + * @returns true if placed at root, false if needs nesting + */ +function tryPlaceAtRoot(root: Map, pkg: ResolvedPackage): boolean { + const existing = root.get(pkg.name) + + if (!existing) { + // no conflict, place at root + root.set(pkg.name, { + name: pkg.name, + version: pkg.version, + tarball: pkg.tarball, + integrity: pkg.integrity, + unpackedSize: pkg.unpackedSize, + dependencyCount: pkg.dependencies.size, + nested: new Map(), + }) + return true + } + + // same version already at root - reuse it + if (existing.version === pkg.version) { + return true + } + + // different version - conflict, needs nesting + return false +} + +/** + * creates a hoisted node from a resolved package. + */ +function createNode(pkg: ResolvedPackage): HoistedNode { + return { + name: pkg.name, + version: pkg.version, + tarball: pkg.tarball, + integrity: pkg.integrity, + unpackedSize: pkg.unpackedSize, + dependencyCount: pkg.dependencies.size, + nested: new Map(), + } +} + +/** + * hoists dependencies as high as possible in the tree. + * follows npm's placement algorithm: + * 1. explicitly requested (root) packages always get placed at root + * 2. transitive dependencies try to hoist to root + * 3. if conflict with a root package, nest under parent + * + * peer dependencies are handled by the resolver - they're added as regular + * dependencies of the package that requested them, so they naturally get + * hoisted to root if no conflict, or nested under the dependent if there's + * a version conflict. this ensures the bundler resolves peers correctly. 
+ * + * @param roots the root packages from resolution + * @returns the hoisted node_modules structure + */ +export function hoist(roots: ResolvedPackage[]): HoistedResult { + const root = new Map() + + // track which packages we've visited to avoid infinite loops + const visited = new Set() + + // track which package names are explicitly requested (root packages) + // these take precedence over transitive dependencies + const rootPackageVersions = new Map() + for (const pkg of roots) { + rootPackageVersions.set(pkg.name, pkg.version) + } + + /** + * recursively process a package's dependencies. + * the package itself should already be placed. + */ + function processDependencies(pkg: ResolvedPackage, node: HoistedNode): void { + for (const dep of pkg.dependencies.values()) { + const depKey = `${dep.name}@${dep.version}` + + // skip if already processed + if (visited.has(depKey)) { + continue + } + visited.add(depKey) + + // check if this dep conflicts with a root package + const rootVersion = rootPackageVersions.get(dep.name) + if (rootVersion !== undefined && rootVersion !== dep.version) { + // conflict with explicit root package - must nest + const nestedNode = createNode(dep) + node.nested.set(dep.name, nestedNode) + processDependencies(dep, nestedNode) + continue + } + + // try to place at root + const placedAtRoot = tryPlaceAtRoot(root, dep) + if (placedAtRoot) { + const rootNode = root.get(dep.name)! 
+ processDependencies(dep, rootNode) + } else { + // conflict at root with another transitive dep - nest + const nestedNode = createNode(dep) + node.nested.set(dep.name, nestedNode) + processDependencies(dep, nestedNode) + } + } + } + + // first pass: place all root packages at root level + // this ensures explicitly requested packages take precedence + for (const rootPkg of roots) { + const key = `${rootPkg.name}@${rootPkg.version}` + if (visited.has(key)) { + continue + } + visited.add(key) + + // root packages always go at root (overwrite if different version exists) + const node = createNode(rootPkg) + root.set(rootPkg.name, node) + } + + // second pass: process dependencies of all root packages + for (const rootPkg of roots) { + const node = root.get(rootPkg.name) + if (node) { + processDependencies(rootPkg, node) + } + } + + return { root } +} diff --git a/app/bundler/lib/installed-packages.test.ts b/app/bundler/lib/installed-packages.test.ts new file mode 100644 index 000000000..a191b745a --- /dev/null +++ b/app/bundler/lib/installed-packages.test.ts @@ -0,0 +1,116 @@ +import { describe, expect, it } from 'vitest' + +import { buildInstalledPackages } from './installed-packages' +import { resolve } from './resolve' + +describe('buildInstalledPackages', () => { + it('builds packages from a simple dependency tree', async () => { + const result = await resolve(['is-odd@3.0.1']) + const packages = buildInstalledPackages(result.roots[0]!, new Set()) + + // should have is-odd and is-number + const names = packages.map(p => p.name) + expect(names).toContain('is-odd') + expect(names).toContain('is-number') + + // is-odd should be level 0, is-number should be level 1 + const isOdd = packages.find(p => p.name === 'is-odd')! + const isNumber = packages.find(p => p.name === 'is-number')! 
+ expect(isOdd.level).toBe(0) + expect(isNumber.level).toBe(1) + + // none should be marked as peer (no peer deps) + expect(packages.every(p => !p.isPeer)).toBe(true) + }) + + it('correctly sets dependents', async () => { + const result = await resolve(['is-odd@3.0.1']) + const packages = buildInstalledPackages(result.roots[0]!, new Set()) + + // is-odd is the root, no dependents + const isOdd = packages.find(p => p.name === 'is-odd')! + expect(isOdd.dependents.length).toBe(0) + + // is-number is depended on by is-odd + const isNumber = packages.find(p => p.name === 'is-number')! + expect(isNumber.dependents.length).toBe(1) + expect(isNumber.dependents[0]!.name).toBe('is-odd') + }) + + it('correctly sets dependencies', async () => { + const result = await resolve(['is-odd@3.0.1']) + const packages = buildInstalledPackages(result.roots[0]!, new Set()) + + // is-odd has 1 dependency (is-number) + const isOdd = packages.find(p => p.name === 'is-odd')! + expect(isOdd.dependencies.length).toBe(1) + expect(isOdd.dependencies[0]!.name).toBe('is-number') + }) + + it('marks peer dependencies correctly', async () => { + // use-sync-external-store has react as a peer dependency + const result = await resolve(['use-sync-external-store@1.2.0']) + const peerDepNames = new Set(['react']) + const packages = buildInstalledPackages(result.roots[0]!, peerDepNames) + + // react and its deps should be marked as peer + const react = packages.find(p => p.name === 'react') + expect(react).toBeDefined() + expect(react!.isPeer).toBe(true) + + // use-sync-external-store should not be marked as peer + const main = packages.find(p => p.name === 'use-sync-external-store')! 
+ expect(main.isPeer).toBe(false) + + // the dependency edge to react should be marked as peer + const reactDep = main.dependencies.find(d => d.name === 'react') + expect(reactDep).toBeDefined() + expect(reactDep!.isPeer).toBe(true) + }) + + it('marks transitive peer deps correctly', async () => { + // use-sync-external-store@1.2.0 has react as peer + // react has loose-envify as a regular dep + // loose-envify should be marked as peer (only reachable through react) + const result = await resolve(['use-sync-external-store@1.2.0']) + const peerDepNames = new Set(['react']) + const packages = buildInstalledPackages(result.roots[0]!, peerDepNames) + + const looseEnvify = packages.find(p => p.name === 'loose-envify') + // loose-envify is a dep of react, which is peer-only + expect(looseEnvify!.isPeer).toBe(true) + }) + + it('marks direct peer deps as peer even when also a transitive dep', async () => { + // if a package is a direct peer dep of root but also reachable through + // a transitive non-peer path, it should still be marked as peer + const result = await resolve(['is-odd@3.0.1']) + + // pretend is-number is also a peer dep (but it's already a regular dep) + const peerDepNames = new Set(['is-number']) + const packages = buildInstalledPackages(result.roots[0]!, peerDepNames) + + // is-number should be marked as peer because it's a direct peer dep of root + const isNumber = packages.find(p => p.name === 'is-number')! 
+ expect(isNumber.isPeer).toBe(true) + }) + + it('marks peer deps as peer when also reachable through transitive deps', async () => { + // graphql-request has graphql as a peer dep + // @graphql-typed-document-node/core (a regular dep) also depends on graphql + // graphql should still be marked as peer since it's a direct peer dep of root + const result = await resolve(['graphql-request@7.4.0']) + const peerDepNames = new Set(['graphql']) + const packages = buildInstalledPackages(result.roots[0]!, peerDepNames) + + // graphql should be marked as peer + const graphql = packages.find(p => p.name === 'graphql') + expect(graphql).toBeDefined() + expect(graphql!.isPeer).toBe(true) + + // @graphql-typed-document-node/core should NOT be marked as peer + const typedDocNode = packages.find(p => p.name === '@graphql-typed-document-node/core') + expect(typedDocNode).toBeDefined() + expect(typedDocNode!.isPeer).toBe(false) + }) +}) diff --git a/app/bundler/lib/installed-packages.ts b/app/bundler/lib/installed-packages.ts new file mode 100644 index 000000000..b08d97e34 --- /dev/null +++ b/app/bundler/lib/installed-packages.ts @@ -0,0 +1,126 @@ +import type { InstalledPackage, PackageRef } from '../types' + +import type { ResolvedPackage } from './types' + +/** + * builds the installed packages list from the resolved dependency tree. + * also identifies which packages are only reachable through peer dependencies. 
 *
 * @param root the root resolved package
 * @param peerDepNames names of the root package's peer dependencies
 * @returns array of installed packages with peer status
 */
export function buildInstalledPackages(
  root: ResolvedPackage,
  // NOTE(review): the type argument appears stripped by the paste; presumably Set<string> — confirm
  peerDepNames: Set,
): InstalledPackage[] {
  // collect all unique packages and compute levels.
  // keyed by "name@version" so distinct versions stay separate entries.
  const packageMap = new Map<
    string,
    {
      pkg: ResolvedPackage
      level: number
      dependents: PackageRef[]
      dependencies: PackageRef[]
    }
  >()

  // track which packages are reachable without going through peer deps
  const reachableWithoutPeers = new Set()

  // first pass: collect packages and compute levels
  {
    // `visited` holds only the CURRENT DFS path (entries are deleted on
    // exit below), so a node reached by several paths is re-walked - that
    // is what lets the shortest `level` and the non-peer reachability win.
    // NOTE(review): path-scoped visiting can re-walk shared subtrees many
    // times on dense dependency graphs — potentially exponential; confirm
    // acceptable for realistic package trees.
    const visited = new Set()

    function walk(pkg: ResolvedPackage, level: number, inPeerSubtree: boolean): void {
      const key = `${pkg.name}@${pkg.version}`

      // update level to shortest path
      const existing = packageMap.get(key)
      if (existing) {
        if (level < existing.level) {
          existing.level = level
        }
      } else {
        packageMap.set(key, { pkg, level, dependents: [], dependencies: [] })
      }

      // track if reachable without peers
      if (!inPeerSubtree) {
        reachableWithoutPeers.add(key)
      }

      // avoid infinite loops from cycles
      if (visited.has(key)) {
        return
      }
      visited.add(key)

      for (const [depName, dep] of pkg.dependencies) {
        // check if this edge goes through a root peer dep; only edges
        // leaving the root itself can be peer edges (peerDepNames describes
        // the root package only)
        const isPeerEdge = pkg === root && peerDepNames.has(depName)
        walk(dep, level + 1, inPeerSubtree || isPeerEdge)
      }

      // pop from the DFS path so other paths may re-enter this node
      visited.delete(key)
    }

    walk(root, 0, false)
  }

  // second pass: build dependency/dependent relationships
  for (const [_key, entry] of packageMap) {
    const pkg = entry.pkg

    for (const [depName, dep] of pkg.dependencies) {
      const depKey = `${dep.name}@${dep.version}`
      const depEntry = packageMap.get(depKey)
      if (!depEntry) {
        continue
      }

      // a dependency edge is flagged as peer only when it leaves the root
      // and names one of the root's declared peer dependencies.
      // note: during resolution, peer deps are added to the dependencies
      // map, so the manifest's original peerDependencies field is not
      // available here - root-level knowledge (peerDepNames) is all we have.
      const isPeerDep = pkg === root && peerDepNames.has(depName)

      // add to this package's dependencies
      entry.dependencies.push({
        name: dep.name,
        version: dep.version,
        isPeer: isPeerDep,
      })

      // add to the dependency's dependents (reverse edge)
      depEntry.dependents.push({
        name: pkg.name,
        version: pkg.version,
        isPeer: isPeerDep,
      })
    }
  }

  // build final array
  const packages: InstalledPackage[] = []
  for (const [key, { pkg, level, dependents, dependencies }] of packageMap) {
    // a package is a peer if:
    // 1. it's a direct peer dependency of the root, OR
    // 2. it's only reachable through peer dependency subtrees
    const isDirectPeerOfRoot = peerDepNames.has(pkg.name)
    const isOnlyReachableThroughPeers = !reachableWithoutPeers.has(key)

    packages.push({
      name: pkg.name,
      version: pkg.version,
      size: pkg.unpackedSize ?? 0,
      path: `node_modules/${pkg.name}`,
      level,
      dependents,
      dependencies,
      isPeer: isDirectPeerOfRoot || isOnlyReachableThroughPeers,
    })
  }

  return packages
}
diff --git a/app/bundler/lib/module-type.test.ts b/app/bundler/lib/module-type.test.ts
new file mode 100644
index 000000000..266ef47b2
--- /dev/null
+++ b/app/bundler/lib/module-type.test.ts
@@ -0,0 +1,230 @@
import { parseAst } from '@rolldown/browser/parseAst'
import { describe, expect, it } from 'vitest'

import { analyzeModule, type ModuleInfo } from './module-type'

function analyze(code: string): ModuleInfo {
  const ast = parseAst(code)
  return analyzeModule(ast)
}

describe('analyzeModule', () => {
  describe('ESM detection', () => {
    it('detects export const', () => {
      const info = analyze('export const foo = 1')
      expect(info.type).toBe('esm')
      expect(info.namedExports).toEqual(['foo'])
      expect(info.hasDefaultExport).toBe(false)
    })

    it('detects export default class', () => {
      const info = analyze('export default 
class {}') + expect(info.type).toBe('esm') + expect(info.hasDefaultExport).toBe(true) + }) + + it('detects export default function', () => { + const info = analyze('export default function() {}') + expect(info.type).toBe('esm') + expect(info.hasDefaultExport).toBe(true) + }) + + it('detects export default expression', () => { + const info = analyze('export default 42') + expect(info.type).toBe('esm') + expect(info.hasDefaultExport).toBe(true) + }) + + it('detects re-exports', () => { + const info = analyze("export { a, b } from './mod'") + expect(info.type).toBe('esm') + expect(info.namedExports).toEqual(['a', 'b']) + }) + + it('detects star exports', () => { + const info = analyze("export * from './mod'") + expect(info.type).toBe('esm') + // star exports don't add specific names + expect(info.namedExports).toEqual([]) + }) + + it('detects export { default } from as hasDefaultExport', () => { + const info = analyze("export { default } from './mod'") + expect(info.type).toBe('esm') + expect(info.hasDefaultExport).toBe(true) + }) + + it('detects export { foo as default }', () => { + const info = analyze("export { foo as default } from './mod'") + expect(info.type).toBe('esm') + expect(info.hasDefaultExport).toBe(true) + }) + + it('detects import statement', () => { + const info = analyze("import foo from './mod'") + expect(info.type).toBe('esm') + }) + + it('detects named imports', () => { + const info = analyze("import { a, b } from './mod'") + expect(info.type).toBe('esm') + }) + + it('detects import.meta.url', () => { + const info = analyze('console.log(import.meta.url)') + expect(info.type).toBe('esm') + }) + + it('detects multiple exports', () => { + const info = analyze(` + export const foo = 1; + export const bar = 2; + export function baz() {} + `) + expect(info.type).toBe('esm') + expect(info.namedExports).toEqual(['foo', 'bar', 'baz']) + }) + + it('detects export function', () => { + const info = analyze('export function foo() {}') + 
expect(info.type).toBe('esm') + expect(info.namedExports).toEqual(['foo']) + }) + + it('detects export class', () => { + const info = analyze('export class Foo {}') + expect(info.type).toBe('esm') + expect(info.namedExports).toEqual(['Foo']) + }) + + it('does not count dynamic import alone as ESM', () => { + const info = analyze("import('./dynamic')") + expect(info.type).toBe('unknown') + }) + }) + + describe('CJS detection', () => { + it('detects exports.foo assignment', () => { + const info = analyze('exports.foo = 1') + expect(info.type).toBe('cjs') + expect(info.namedExports).toEqual(['foo']) + }) + + it('detects module.exports.bar assignment', () => { + const info = analyze('module.exports.bar = 2') + expect(info.type).toBe('cjs') + expect(info.namedExports).toEqual(['bar']) + }) + + it('detects module.exports = { a, b }', () => { + const info = analyze('module.exports = { a, b }') + expect(info.type).toBe('cjs') + expect(info.namedExports).toEqual(['a', 'b']) + }) + + it('detects module.exports = { a: 1, b: 2 }', () => { + const info = analyze('module.exports = { a: 1, b: 2 }') + expect(info.type).toBe('cjs') + expect(info.namedExports).toEqual(['a', 'b']) + }) + + it('detects Object.defineProperty(exports, "x", ...)', () => { + const info = analyze('Object.defineProperty(exports, "x", { value: 1 })') + expect(info.type).toBe('cjs') + expect(info.namedExports).toEqual(['x']) + }) + + it('detects Object.defineProperty(module.exports, "y", ...)', () => { + const info = analyze('Object.defineProperty(module.exports, "y", { get: () => 1 })') + expect(info.type).toBe('cjs') + expect(info.namedExports).toEqual(['y']) + }) + + it('collects multiple CJS exports', () => { + const info = analyze(` + exports.foo = 1; + exports.bar = 2; + module.exports.baz = 3; + `) + expect(info.type).toBe('cjs') + expect(info.namedExports).toEqual(['foo', 'bar', 'baz']) + }) + + it('detects module.exports = require(...) 
re-export', () => { + const info = analyze("module.exports = require('./other')") + expect(info.type).toBe('cjs') + // can't know exports statically from re-export + expect(info.namedExports).toEqual([]) + }) + + it('detects conditional require re-export (React pattern)', () => { + const info = analyze(` + 'use strict'; + if (process.env.NODE_ENV === 'production') { + module.exports = require('./cjs/react.production.js'); + } else { + module.exports = require('./cjs/react.development.js'); + } + `) + expect(info.type).toBe('cjs') + }) + + it('detects conditional require with block statements', () => { + const info = analyze(` + if (condition) { + module.exports = require('./a'); + } else { + module.exports = require('./b'); + } + `) + expect(info.type).toBe('cjs') + }) + + it('detects conditional require without braces', () => { + const info = analyze(` + if (condition) + module.exports = require('./a'); + else + module.exports = require('./b'); + `) + expect(info.type).toBe('cjs') + }) + }) + + describe('unknown detection', () => { + it('returns unknown for empty file', () => { + const info = analyze('') + expect(info.type).toBe('unknown') + expect(info.namedExports).toEqual([]) + expect(info.hasDefaultExport).toBe(false) + }) + + it('returns unknown for side-effects only', () => { + const info = analyze("console.log('side effect')") + expect(info.type).toBe('unknown') + }) + + it('returns unknown for only dynamic imports', () => { + const info = analyze("const mod = import('./dynamic')") + expect(info.type).toBe('unknown') + }) + + it('returns unknown for iife', () => { + const info = analyze('(function() { console.log("hi"); })()') + expect(info.type).toBe('unknown') + }) + }) + + describe('mixed scenarios', () => { + it('ESM takes precedence over CJS patterns', () => { + // some files might have both patterns (e.g., dual builds) + const info = analyze(` + export const foo = 1; + exports.bar = 2; + `) + expect(info.type).toBe('esm') + // ESM export is captured, 
CJS is ignored when ESM detected first + expect(info.namedExports).toContain('foo') + }) + }) +}) diff --git a/app/bundler/lib/module-type.ts b/app/bundler/lib/module-type.ts new file mode 100644 index 000000000..483116390 --- /dev/null +++ b/app/bundler/lib/module-type.ts @@ -0,0 +1,395 @@ +import type { Expression, Program, Statement, StaticMemberExpression } from '@oxc-project/types' + +// #region types + +export type ModuleType = 'esm' | 'cjs' | 'unknown' + +/** + * information about a module's format and exports. + */ +export interface ModuleInfo { + /** detected module format */ + type: ModuleType + /** whether the module has a default export */ + hasDefaultExport: boolean + /** + * detected named exports. + * for ESM: export names from export statements. + * for CJS: static property assignments to exports/module.exports. + */ + namedExports: string[] +} + +// #endregion + +// #region helpers + +/** + * checks if a node is a string literal with type "Literal". + */ +function isStringLiteral(node: unknown): node is { type: 'Literal'; value: string } { + return ( + typeof node === 'object' && + node !== null && + (node as { type: string }).type === 'Literal' && + typeof (node as { value: unknown }).value === 'string' + ) +} + +/** + * checks if an expression is an identifier with the given name. + */ +function isIdentifier(node: Expression | null | undefined, name: string): boolean { + if (!node) { + return false + } + // IdentifierReference and IdentifierName both have type "Identifier" and name property + return node.type === 'Identifier' && (node as { name: string }).name === name +} + +/** + * checks if an expression is `exports` or `module.exports`. 
+ */ +function isExportsObject(node: Expression): boolean { + if (isIdentifier(node, 'exports')) { + return true + } + + // module.exports + if (node.type === 'MemberExpression') { + const memberExpr = node as StaticMemberExpression + if (!memberExpr.computed) { + const obj = memberExpr.object + const prop = memberExpr.property + return isIdentifier(obj, 'module') && prop.type === 'Identifier' && prop.name === 'exports' + } + } + + return false +} + +/** + * gets the property name from a static member expression. + */ +function getStaticPropertyName(node: StaticMemberExpression): string | null { + if (node.computed) { + // computed property like exports["foo"] + const prop = node.property as unknown as Expression + if (isStringLiteral(prop)) { + return prop.value + } + return null + } + + // non-computed like exports.foo + const prop = node.property + if (prop.type === 'Identifier') { + return prop.name + } + + return null +} + +/** + * extracts property names from an object expression (for `module.exports = { a, b }`). + */ +function extractObjectPropertyNames(node: Expression): string[] { + if (node.type !== 'ObjectExpression') { + return [] + } + + const names: string[] = [] + for (const prop of node.properties) { + if (prop.type === 'SpreadElement') { + continue + } + + if (prop.type === 'Property') { + const key = prop.key + if (key.type === 'Identifier') { + names.push((key as { name: string }).name) + } else if (isStringLiteral(key)) { + names.push(key.value) + } + } + } + + return names +} + +// #endregion + +// #region detection + +/** + * checks if an expression is a require() call. + */ +function isRequireCall(expr: Expression): boolean { + return expr.type === 'CallExpression' && isIdentifier(expr.callee as Expression, 'require') +} + +/** + * checks an expression for CJS export patterns. + * returns the export names found, or null if not a CJS pattern. 
 */
function checkCjsExpression(expr: Expression): string[] | null {
  // assignment expressions: exports.foo = ... or module.exports = ...
  if (expr.type === 'AssignmentExpression' && expr.operator === '=') {
    const left = expr.left

    // exports.foo = ... or module.exports.foo = ...
    if (left.type === 'MemberExpression') {
      const memberExpr = left as unknown as StaticMemberExpression
      const obj = memberExpr.object

      // direct assignment to exports.propertyName (the object of the member
      // access is `exports` or `module.exports`); computed string keys like
      // exports["foo"] are also captured via getStaticPropertyName
      if (isExportsObject(obj)) {
        const propName = getStaticPropertyName(memberExpr)
        if (propName !== null) {
          return [propName]
        }
        // exports[dynamic] = ... - a CJS write, but name unknown
        return []
      }

      // whole-object replacement: `module.exports = ...`
      // (here the entire left side, not its object, is the exports object)
      if (isExportsObject(left as unknown as Expression)) {
        // module.exports = require('...') - CJS re-export
        if (isRequireCall(expr.right)) {
          // re-export, we can't know the exports statically
          return []
        }
        // module.exports = { a, b }
        return extractObjectPropertyNames(expr.right)
      }
    }
  }

  // Object.defineProperty(exports, 'name', ...) or Object.defineProperty(module.exports, 'name', ...)
  if (expr.type === 'CallExpression') {
    const callee = expr.callee

    if (callee.type === 'MemberExpression') {
      const memberCallee = callee as StaticMemberExpression
      if (!memberCallee.computed && isIdentifier(memberCallee.object, 'Object')) {
        const prop = memberCallee.property
        if (prop.type === 'Identifier' && prop.name === 'defineProperty') {
          const args = expr.arguments
          if (args.length >= 2) {
            const target = args[0]!
            const propArg = args[1]!

            // SpreadElement checks narrow the argument union so the
            // Expression-typed helpers may be applied
            if (
              target.type !== 'SpreadElement' &&
              isExportsObject(target) &&
              propArg.type !== 'SpreadElement' &&
              isStringLiteral(propArg)
            ) {
              return [propArg.value]
            }
          }
        }
      }
    }
  }

  // not a recognized CJS export expression
  return null
}

/**
 * checks a statement for CJS patterns and extracts export info.
 * returns the export names found, or null if not a CJS pattern.
+ */ +function checkCjsStatement(stmt: Statement): string[] | null { + // handle expression statements + if (stmt.type === 'ExpressionStatement') { + return checkCjsExpression(stmt.expression) + } + + // handle if statements - check both branches for CJS patterns + // e.g., if (process.env.NODE_ENV === 'production') module.exports = require('./prod') + if (stmt.type === 'IfStatement') { + let result: string[] | null = null + + // check consequent + if (stmt.consequent.type === 'ExpressionStatement') { + result = checkCjsExpression(stmt.consequent.expression) + } else if (stmt.consequent.type === 'BlockStatement') { + for (const s of stmt.consequent.body) { + const r = checkCjsStatement(s) + if (r !== null) { + result = result ? [...result, ...r] : r + } + } + } + + // check alternate + if (stmt.alternate) { + if (stmt.alternate.type === 'ExpressionStatement') { + const r = checkCjsExpression(stmt.alternate.expression) + if (r !== null) { + result = result ? [...result, ...r] : r + } + } else if (stmt.alternate.type === 'BlockStatement') { + for (const s of stmt.alternate.body) { + const r = checkCjsStatement(s) + if (r !== null) { + result = result ? [...result, ...r] : r + } + } + } else if (stmt.alternate.type === 'IfStatement') { + const r = checkCjsStatement(stmt.alternate) + if (r !== null) { + result = result ? [...result, ...r] : r + } + } + } + + return result + } + + return null +} + +/** + * analyzes an Oxc AST to determine the module format and exports. 
+ * + * @param ast the parsed program AST + * @returns module info with type, default export flag, and named exports + */ +export function analyzeModule(ast: Program): ModuleInfo { + let type: ModuleType = 'unknown' + let hasDefaultExport = false + const namedExports: string[] = [] + + for (const node of ast.body) { + // ESM: import declarations + if (node.type === 'ImportDeclaration') { + type = 'esm' + continue + } + + // ESM: export default + if (node.type === 'ExportDefaultDeclaration') { + type = 'esm' + hasDefaultExport = true + continue + } + + // ESM: export all (export * from '...') + if (node.type === 'ExportAllDeclaration') { + type = 'esm' + // star exports don't add to namedExports since we can't know them statically + continue + } + + // ESM: named exports + if (node.type === 'ExportNamedDeclaration') { + type = 'esm' + + // export { a, b } or export { a } from '...' + for (const spec of node.specifiers) { + const exported = spec.exported + let name: string + + if (exported.type === 'Identifier') { + name = (exported as { name: string }).name + } else if (isStringLiteral(exported)) { + name = exported.value + } else { + continue + } + + if (name === 'default') { + hasDefaultExport = true + } else { + namedExports.push(name) + } + } + + // export const foo = ... 
or export function bar() {} + if (node.declaration) { + const decl = node.declaration + + if (decl.type === 'VariableDeclaration') { + for (const declarator of decl.declarations) { + if (declarator.id.type === 'Identifier') { + namedExports.push((declarator.id as { name: string }).name) + } + } + } else if (decl.type === 'FunctionDeclaration' || decl.type === 'ClassDeclaration') { + if (decl.id) { + namedExports.push((decl.id as { name: string }).name) + } + } + } + + continue + } + + // ESM: import.meta usage + if (node.type === 'ExpressionStatement') { + if (containsImportMeta(node.expression)) { + type = 'esm' + continue + } + } + + // CJS detection (only if not already ESM) + if (type !== 'esm') { + const cjsExports = checkCjsStatement(node) + if (cjsExports !== null) { + type = 'cjs' + namedExports.push(...cjsExports) + } + } + } + + return { type, hasDefaultExport, namedExports } +} + +/** + * recursively checks if an expression contains import.meta. + */ +function containsImportMeta(expr: Expression): boolean { + if (expr.type === 'MetaProperty') { + const meta = expr.meta + const prop = expr.property + return meta.name === 'import' && prop.name === 'meta' + } + + if (expr.type === 'MemberExpression') { + const memberExpr = expr as StaticMemberExpression + return containsImportMeta(memberExpr.object) + } + + if (expr.type === 'CallExpression') { + // check callee and arguments + if (containsImportMeta(expr.callee as Expression)) { + return true + } + for (const arg of expr.arguments) { + if (arg.type !== 'SpreadElement' && containsImportMeta(arg)) { + return true + } + } + } + + if (expr.type === 'BinaryExpression' || expr.type === 'LogicalExpression') { + return containsImportMeta(expr.left as Expression) || containsImportMeta(expr.right) + } + + if (expr.type === 'UnaryExpression') { + return containsImportMeta(expr.argument) + } + + if (expr.type === 'ConditionalExpression') { + return ( + containsImportMeta(expr.test) || + 
containsImportMeta(expr.consequent) || + containsImportMeta(expr.alternate) + ) + } + + return false +} + +// #endregion diff --git a/app/bundler/lib/registry.ts b/app/bundler/lib/registry.ts new file mode 100644 index 000000000..87a0e3df5 --- /dev/null +++ b/app/bundler/lib/registry.ts @@ -0,0 +1,103 @@ +import * as v from 'valibot' + +import { FetchError, InvalidSpecifierError, PackageNotFoundError } from './errors' +import { abbreviatedPackumentSchema, type AbbreviatedPackument, type Registry } from './types' + +const NPM_REGISTRY = 'https://registry.npmjs.org' +const JSR_REGISTRY = 'https://npm.jsr.io' + +/** + * cache for packuments to avoid refetching during resolution. + * key format: "registry:name" (e.g., "npm:react" or "jsr:@luca/flag") + */ +const packumentCache = new Map() + +/** + * transforms a JSR package name to the npm-compatible format. + * `@scope/name` becomes `@jsr/scope__name` + * + * @param name the JSR package name (must be scoped) + * @returns the transformed npm-compatible name + */ +export function transformJsrName(name: string): string { + if (!name.startsWith('@')) { + throw new InvalidSpecifierError(name, 'JSR packages must be scoped') + } + // @scope/name -> @jsr/scope__name + const withoutAt = name.slice(1) // "scope/name" + const transformed = withoutAt.replace('/', '__') // "scope__name" + return `@jsr/${transformed}` +} + +/** + * reverses the JSR npm-compatible name back to the canonical format. 
+ * `@jsr/scope__name` becomes `@scope/name` + * + * @param name the npm-compatible JSR package name + * @returns the canonical JSR package name + */ +export function reverseJsrName(name: string): string { + if (!name.startsWith('@jsr/')) { + throw new InvalidSpecifierError(name, 'not a JSR npm-compatible name') + } + // @jsr/scope__name -> @scope/name + const withoutPrefix = name.slice(5) // "scope__name" + const restored = withoutPrefix.replace('__', '/') // "scope/name" + return `@${restored}` +} + +/** + * fetches the abbreviated packument from a registry. + * the abbreviated format contains only installation-relevant metadata. + * + * @param name the package name (can be scoped like @scope/pkg) + * @param registry which registry to fetch from (defaults to 'npm') + * @returns the abbreviated packument with all versions + * @throws if the package doesn't exist, network fails, or response is invalid + */ +export async function fetchPackument( + name: string, + registry: Registry = 'npm', +): Promise { + const cacheKey = `${registry}:${name}` + const cached = packumentCache.get(cacheKey) + if (cached) { + return cached + } + + let registryUrl: string + let fetchName: string + + if (registry === 'jsr') { + registryUrl = JSR_REGISTRY + fetchName = transformJsrName(name) + } else { + registryUrl = NPM_REGISTRY + fetchName = name + } + + const encodedName = fetchName.startsWith('@') + ? 
`@${encodeURIComponent(fetchName.slice(1))}` + : encodeURIComponent(fetchName) + + const url = `${registryUrl}/${encodedName}` + + const response = await fetch(url, { + headers: { + // request abbreviated format (corgi) for smaller payloads + Accept: 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8', + }, + }) + + if (!response.ok) { + if (response.status === 404) { + throw new PackageNotFoundError(name, registry) + } + throw new FetchError(url, response.status, response.statusText) + } + + const json = await response.json() + const packument = v.parse(abbreviatedPackumentSchema, json) + packumentCache.set(cacheKey, packument) + return packument +} diff --git a/app/bundler/lib/resolve.test.ts b/app/bundler/lib/resolve.test.ts new file mode 100644 index 000000000..dfd072507 --- /dev/null +++ b/app/bundler/lib/resolve.test.ts @@ -0,0 +1,496 @@ +import { describe, expect, it } from 'vitest' + +import { reverseJsrName, transformJsrName } from './registry' +import { parseSpecifier, pickVersion, resolve } from './resolve' +import type { AbbreviatedManifest } from './types' + +// #region test helpers + +/** creates a mock manifest for testing */ +function manifest(version: string, opts: { deprecated?: string } = {}): AbbreviatedManifest { + return { + name: 'test', + version, + deprecated: opts.deprecated, + dist: { tarball: `https://example.com/test-${version}.tgz`, shasum: 'abc' }, + } +} + +// #endregion + +// #region parseSpecifier + +describe('parseSpecifier', () => { + describe('npm packages', () => { + it('parses bare package name', () => { + expect(parseSpecifier('react')).toEqual({ + name: 'react', + range: 'latest', + registry: 'npm', + }) + }) + + it('parses package with exact version', () => { + expect(parseSpecifier('react@18.2.0')).toEqual({ + name: 'react', + range: '18.2.0', + registry: 'npm', + }) + }) + + it('parses package with semver range', () => { + expect(parseSpecifier('react@^18.0.0')).toEqual({ + name: 'react', + range: 
'^18.0.0', + registry: 'npm', + }) + }) + + it('parses npm: prefix as noop', () => { + expect(parseSpecifier('npm:react')).toEqual({ + name: 'react', + range: 'latest', + registry: 'npm', + }) + }) + + it('parses npm: prefix with version', () => { + expect(parseSpecifier('npm:react@18.2.0')).toEqual({ + name: 'react', + range: '18.2.0', + registry: 'npm', + }) + }) + }) + + describe('scoped packages', () => { + it('parses scoped package without version', () => { + expect(parseSpecifier('@babel/core')).toEqual({ + name: '@babel/core', + range: 'latest', + registry: 'npm', + }) + }) + + it('parses scoped package with exact version', () => { + expect(parseSpecifier('@babel/core@7.23.0')).toEqual({ + name: '@babel/core', + range: '7.23.0', + registry: 'npm', + }) + }) + + it('parses scoped package with semver range', () => { + expect(parseSpecifier('@types/node@^20.0.0')).toEqual({ + name: '@types/node', + range: '^20.0.0', + registry: 'npm', + }) + }) + + it('parses npm: prefix with scoped package', () => { + expect(parseSpecifier('npm:@babel/core@^7.0.0')).toEqual({ + name: '@babel/core', + range: '^7.0.0', + registry: 'npm', + }) + }) + }) + + describe('JSR packages', () => { + it('parses jsr package without version', () => { + expect(parseSpecifier('jsr:@luca/flag')).toEqual({ + name: '@luca/flag', + range: 'latest', + registry: 'jsr', + }) + }) + + it('parses jsr package with exact version', () => { + expect(parseSpecifier('jsr:@luca/flag@1.0.0')).toEqual({ + name: '@luca/flag', + range: '1.0.0', + registry: 'jsr', + }) + }) + + it('parses jsr package with semver range', () => { + expect(parseSpecifier('jsr:@std/path@^1.0.0')).toEqual({ + name: '@std/path', + range: '^1.0.0', + registry: 'jsr', + }) + }) + + it('throws for unscoped jsr package', () => { + expect(() => parseSpecifier('jsr:flag')).toThrow('JSR packages must be scoped') + }) + }) +}) + +// #endregion + +// #region JSR name utilities + +describe('JSR name utilities', () => { + 
describe('transformJsrName', () => { + it('transforms scoped JSR name to npm-compatible format', () => { + expect(transformJsrName('@luca/flag')).toBe('@jsr/luca__flag') + }) + + it('transforms @std packages', () => { + expect(transformJsrName('@std/path')).toBe('@jsr/std__path') + }) + + it('throws for unscoped package', () => { + expect(() => transformJsrName('flag')).toThrow('JSR packages must be scoped') + }) + }) + + describe('reverseJsrName', () => { + it('reverses npm-compatible JSR name to canonical format', () => { + expect(reverseJsrName('@jsr/luca__flag')).toBe('@luca/flag') + }) + + it('reverses @std packages', () => { + expect(reverseJsrName('@jsr/std__internal')).toBe('@std/internal') + }) + + it('throws for non-JSR name', () => { + expect(() => reverseJsrName('@babel/core')).toThrow('not a JSR npm-compatible name') + }) + }) +}) + +// #endregion + +// #region pickVersion + +describe('pickVersion', () => { + describe('dist-tags', () => { + const versions = { + '1.0.0': manifest('1.0.0'), + '2.0.0': manifest('2.0.0'), + '3.0.0-beta.1': manifest('3.0.0-beta.1'), + } + const distTags = { latest: '2.0.0', next: '3.0.0-beta.1' } + + it('resolves "latest" tag', () => { + expect(pickVersion(versions, distTags, 'latest')?.version).toBe('2.0.0') + }) + + it('resolves "next" tag', () => { + expect(pickVersion(versions, distTags, 'next')?.version).toBe('3.0.0-beta.1') + }) + + it('returns null for unknown tag', () => { + expect(pickVersion(versions, distTags, 'nonexistent')).toBeNull() + }) + + it('returns null when tag points to missing version', () => { + expect(pickVersion(versions, { latest: '9.9.9' }, 'latest')).toBeNull() + }) + + it('treats empty string as latest', () => { + expect(pickVersion(versions, distTags, '')?.version).toBe('2.0.0') + }) + }) + + describe('exact versions', () => { + const versions = { + '1.0.0': manifest('1.0.0'), + '2.0.0': manifest('2.0.0'), + } + const distTags = { latest: '2.0.0' } + + it('resolves exact version', () => { + 
expect(pickVersion(versions, distTags, '1.0.0')?.version).toBe('1.0.0') + }) + + it('handles v-prefixed version', () => { + expect(pickVersion(versions, distTags, 'v1.0.0')?.version).toBe('1.0.0') + }) + + it('handles = prefixed version', () => { + expect(pickVersion(versions, distTags, '= 1.0.0')?.version).toBe('1.0.0') + }) + + it('returns null for non-existent version', () => { + expect(pickVersion(versions, distTags, '9.9.9')).toBeNull() + }) + }) + + describe('semver ranges', () => { + const versions = { + '1.0.0': manifest('1.0.0'), + '1.5.0': manifest('1.5.0'), + '2.0.0': manifest('2.0.0'), + '2.5.0': manifest('2.5.0'), + '3.0.0': manifest('3.0.0'), + } + const distTags = { latest: '3.0.0' } + + it('resolves caret range (^)', () => { + expect(pickVersion(versions, distTags, '^1.0.0')?.version).toBe('1.5.0') + }) + + it('resolves tilde range (~)', () => { + expect(pickVersion(versions, distTags, '~1.0.0')?.version).toBe('1.0.0') + }) + + it('resolves >= range', () => { + expect(pickVersion(versions, distTags, '>=2.0.0')?.version).toBe('3.0.0') + }) + + it('resolves > range', () => { + expect(pickVersion(versions, distTags, '>2.0.0')?.version).toBe('3.0.0') + }) + + it('resolves < range', () => { + expect(pickVersion(versions, distTags, '<2.0.0')?.version).toBe('1.5.0') + }) + + it('resolves <= range', () => { + expect(pickVersion(versions, distTags, '<=2.0.0')?.version).toBe('2.0.0') + }) + + it('resolves compound range (>=x { + expect(pickVersion(versions, distTags, '>=1.0.0 <2.0.0')?.version).toBe('1.5.0') + }) + + it('resolves hyphen range (x - y)', () => { + expect(pickVersion(versions, distTags, '1.0.0 - 2.0.0')?.version).toBe('2.0.0') + }) + + it('resolves OR range (||)', () => { + expect(pickVersion(versions, distTags, '^1.0.0 || ^3.0.0')?.version).toBe('3.0.0') + }) + + it('resolves x-range (1.x)', () => { + expect(pickVersion(versions, distTags, '1.x')?.version).toBe('1.5.0') + }) + + it('returns null for unsatisfied range', () => { + 
expect(pickVersion(versions, distTags, '^9.0.0')).toBeNull() + }) + + it('returns null for empty versions object', () => { + expect(pickVersion({}, distTags, '^1.0.0')).toBeNull() + }) + }) + + describe('latest tag preference (pnpm behavior)', () => { + // pnpm prefers the version tagged as 'latest' even when newer versions exist, + // because publishers tag 'latest' intentionally (e.g., LTS versions) + + const versions = { + '18.0.0': manifest('18.0.0'), + '20.0.0': manifest('20.0.0'), + '21.0.0': manifest('21.0.0'), + } + + it('prefers latest over newer versions when range is satisfied', () => { + const distTags = { latest: '20.0.0' } + // 21.0.0 exists and satisfies >=18.0.0, but latest (20.0.0) should win + expect(pickVersion(versions, distTags, '>=18.0.0')?.version).toBe('20.0.0') + }) + + it('picks newer version when latest does not satisfy range', () => { + const distTags = { latest: '18.0.0' } + // latest (18.0.0) doesn't satisfy >=20.0.0, so pick highest matching (21.0.0) + expect(pickVersion(versions, distTags, '>=20.0.0')?.version).toBe('21.0.0') + }) + + it('prefers latest in OR ranges when satisfied', () => { + const distTags = { latest: '20.0.0' } + expect(pickVersion(versions, distTags, '^18.0.0 || ^20.0.0')?.version).toBe('20.0.0') + }) + }) + + describe('deprecated version handling', () => { + // pnpm/npm avoid deprecated versions when non-deprecated alternatives exist + + const versions = { + '1.0.0': manifest('1.0.0'), + '2.0.0': manifest('2.0.0', { deprecated: 'security issue' }), + '3.0.0': manifest('3.0.0'), + '3.1.0': manifest('3.1.0', { deprecated: 'use 3.0.0 instead' }), + } + const distTags = { latest: '3.0.0' } + + it('picks non-deprecated over deprecated when both satisfy', () => { + // ^3.0.0 matches 3.0.0 and 3.1.0, but 3.1.0 is deprecated + expect(pickVersion(versions, distTags, '^3.0.0')?.version).toBe('3.0.0') + }) + + it('prefers non-deprecated even if deprecated is higher', () => { + expect(pickVersion(versions, distTags, 
'>=3.0.0')?.version).toBe('3.0.0') + }) + + it('uses deprecated when no non-deprecated version satisfies', () => { + // ^2.0.0 only matches 2.0.0 which is deprecated + expect(pickVersion(versions, distTags, '^2.0.0')?.version).toBe('2.0.0') + }) + + it('picks highest deprecated when all are deprecated', () => { + const allDeprecated = { + '1.0.0': manifest('1.0.0', { deprecated: 'old' }), + '1.1.0': manifest('1.1.0', { deprecated: 'old' }), + } + expect(pickVersion(allDeprecated, { latest: '1.1.0' }, '^1.0.0')?.version).toBe('1.1.0') + }) + }) + + describe('prerelease handling', () => { + it('does not match prereleases with standard ranges', () => { + const versions = { + '1.0.0': manifest('1.0.0'), + '2.0.0-beta.1': manifest('2.0.0-beta.1'), + } + // ^1.0.0 should NOT match 2.0.0-beta.1 + expect(pickVersion(versions, { latest: '1.0.0' }, '^1.0.0')?.version).toBe('1.0.0') + }) + + it('prefers stable over prerelease when both satisfy', () => { + const versions = { + '1.0.0-alpha.1': manifest('1.0.0-alpha.1'), + '1.0.0': manifest('1.0.0'), + } + expect(pickVersion(versions, { latest: '1.0.0' }, '>=1.0.0-alpha.1')?.version).toBe('1.0.0') + }) + + it('matches explicit prerelease version', () => { + const versions = { + '1.0.0-beta.1': manifest('1.0.0-beta.1'), + '1.0.0-beta.2': manifest('1.0.0-beta.2'), + } + expect(pickVersion(versions, { latest: '1.0.0-beta.2' }, '1.0.0-beta.1')?.version).toBe( + '1.0.0-beta.1', + ) + }) + + it('matches prerelease range correctly', () => { + const versions = { + '1.0.0-beta.1': manifest('1.0.0-beta.1'), + '1.0.0-beta.2': manifest('1.0.0-beta.2'), + } + // ^1.0.0-beta.1 should match other 1.0.0 betas + expect(pickVersion(versions, { latest: '1.0.0-beta.2' }, '^1.0.0-beta.1')?.version).toBe( + '1.0.0-beta.2', + ) + }) + + it('sorts prereleases correctly (alpha < beta < rc)', () => { + const versions = { + '1.0.0-alpha.1': manifest('1.0.0-alpha.1'), + '1.0.0-beta.1': manifest('1.0.0-beta.1'), + '1.0.0-rc.1': manifest('1.0.0-rc.1'), + } 
+ expect(pickVersion(versions, { latest: '1.0.0-rc.1' }, '>=1.0.0-alpha.1')?.version).toBe( + '1.0.0-rc.1', + ) + }) + + it('uses latest tag with wildcard when all versions are prerelease', () => { + const versions = { + '1.0.0-alpha.1': manifest('1.0.0-alpha.1'), + '1.0.0-beta.1': manifest('1.0.0-beta.1'), + } + // * with all prereleases should respect latest tag (pnpm/npm behavior) + expect(pickVersion(versions, { latest: '1.0.0-alpha.1' }, '*')?.version).toBe('1.0.0-alpha.1') + }) + + it('picks prerelease with wildcard when only prereleases exist', () => { + const versions = { + '1.0.0-alpha.1': manifest('1.0.0-alpha.1'), + } + expect(pickVersion(versions, { latest: '1.0.0-alpha.1' }, '*')?.version).toBe('1.0.0-alpha.1') + }) + }) +}) + +// #endregion + +// #region resolve (integration tests) + +describe('resolve', () => { + describe('basic resolution', () => { + it('resolves a single package with dependencies', async () => { + const result = await resolve(['is-odd@3.0.1']) + + expect(result.roots).toHaveLength(1) + expect(result.roots[0]!.name).toBe('is-odd') + expect(result.roots[0]!.version).toBe('3.0.1') + expect(result.roots[0]!.dependencies.has('is-number')).toBe(true) + }) + + it('resolves multiple packages', async () => { + const result = await resolve(['is-odd@3.0.1', 'is-even@1.0.0']) + + expect(result.roots).toHaveLength(2) + expect(result.roots[0]!.name).toBe('is-odd') + expect(result.roots[1]!.name).toBe('is-even') + }) + + it('deduplicates shared dependencies', async () => { + const result = await resolve(['is-odd@3.0.1', 'is-even@1.0.0']) + + const isNumberVersions = new Set() + for (const pkg of result.packages.values()) { + if (pkg.name === 'is-number') { + isNumberVersions.add(pkg.version) + } + } + expect(isNumberVersions.size).toBeGreaterThan(0) + }) + }) + + describe('JSR packages', () => { + it('resolves a JSR package', async () => { + const result = await resolve(['jsr:@luca/flag@1.0.1']) + + expect(result.roots).toHaveLength(1) + 
expect(result.roots[0]!.name).toBe('@luca/flag') + expect(result.roots[0]!.version).toBe('1.0.1') + expect(result.roots[0]!.tarball).toContain('npm.jsr.io') + }) + + it('resolves JSR package with JSR dependencies', async () => { + const result = await resolve(['jsr:@std/path@1.1.4']) + + expect(result.roots[0]!.name).toBe('@std/path') + // dependency stored under npm-compatible name, resolved to canonical + expect(result.roots[0]!.dependencies.has('@jsr/std__internal')).toBe(true) + const internal = result.roots[0]!.dependencies.get('@jsr/std__internal')! + expect(internal.name).toBe('@std/internal') + expect(internal.tarball).toContain('npm.jsr.io') + }) + }) + + describe('peer dependencies', () => { + it('auto-installs required peer dependencies', async () => { + const result = await resolve(['use-sync-external-store@1.2.0']) + + const mainPkg = result.roots[0]! + expect(mainPkg.dependencies.has('react')).toBe(true) + expect(Array.from(result.packages.values()).some(p => p.name === 'react')).toBe(true) + }) + + it('skips optional peer dependencies', async () => { + const result = await resolve(['use-sync-external-store@1.2.0']) + + // react is required, should be present + const mainPkg = result.roots[0]! 
+ expect(mainPkg.dependencies.has('react')).toBe(true) + }) + + it('respects installPeers: false option', async () => { + const result = await resolve(['use-sync-external-store@1.2.0'], { installPeers: false }) + + expect(result.roots).toHaveLength(1) + expect(result.roots[0]!.name).toBe('use-sync-external-store') + }) + }) +}) + +// #endregion diff --git a/app/bundler/lib/resolve.ts b/app/bundler/lib/resolve.ts new file mode 100644 index 000000000..3ef215a1c --- /dev/null +++ b/app/bundler/lib/resolve.ts @@ -0,0 +1,308 @@ +import * as semver from 'semver' + +import { progress } from '../events' + +import { InvalidSpecifierError, NoMatchingVersionError } from './errors' +import { fetchPackument, reverseJsrName } from './registry' +import type { + AbbreviatedManifest, + PackageSpecifier, + Registry, + ResolvedPackage, + ResolutionResult, +} from './types' + +/** + * parses a package specifier string into name, range, and registry. + * handles scoped packages, JSR packages, and various formats: + * - "foo" -> { name: "foo", range: "latest", registry: "npm" } + * - "foo@^1.0.0" -> { name: "foo", range: "^1.0.0", registry: "npm" } + * - "@scope/foo@~2.0.0" -> { name: "@scope/foo", range: "~2.0.0", registry: "npm" } + * - "npm:foo@^1.0.0" -> { name: "foo", range: "^1.0.0", registry: "npm" } + * - "jsr:@luca/flag" -> { name: "@luca/flag", range: "latest", registry: "jsr" } + * - "jsr:@luca/flag@^1.0.0" -> { name: "@luca/flag", range: "^1.0.0", registry: "jsr" } + * + * @param spec the package specifier string + * @returns parsed specifier with name, range, and registry + */ +export function parseSpecifier(spec: string): PackageSpecifier { + let registry: Registry = 'npm' + let rest = spec + + // check for registry prefixes + if (spec.startsWith('jsr:')) { + registry = 'jsr' + rest = spec.slice(4) // remove "jsr:" + } else if (spec.startsWith('npm:')) { + rest = spec.slice(4) // remove "npm:", registry already 'npm' + } + + // handle scoped packages: @scope/name or 
@scope/name@version + if (rest.startsWith('@')) { + const slashIdx = rest.indexOf('/') + if (slashIdx === -1) { + throw new InvalidSpecifierError(spec, 'scoped package missing slash') + } + const atIdx = rest.indexOf('@', slashIdx) + if (atIdx === -1) { + return { name: rest, range: 'latest', registry } + } + return { name: rest.slice(0, atIdx), range: rest.slice(atIdx + 1), registry } + } + + // JSR packages must be scoped + if (registry === 'jsr') { + throw new InvalidSpecifierError(spec, 'JSR packages must be scoped') + } + + // handle regular packages: name or name@version + const atIdx = rest.indexOf('@') + if (atIdx === -1) { + return { name: rest, range: 'latest', registry } + } + return { name: rest.slice(0, atIdx), range: rest.slice(atIdx + 1), registry } +} + +/** + * picks the best version from a packument that satisfies a range. + * follows npm/pnpm's algorithm: + * 1. if range is a dist-tag, use that version + * 2. if range is empty, treat as 'latest' + * 3. if range is a specific version (possibly with v prefix), use that + * 4. if 'latest' tag satisfies the range, prefer it over newer versions + * 5. otherwise, find highest non-deprecated version that satisfies the range + * 6. fall back to deprecated version if no non-deprecated match + * + * @param versions available versions (version string -> manifest) + * @param distTags dist-tags mapping (e.g., { latest: "1.2.3" }) + * @param range the version range to satisfy + * @returns the best matching manifest, or null if none match + */ +export function pickVersion( + versions: Record, + distTags: Record, + range: string, +): AbbreviatedManifest | null { + // empty range means latest + if (range === '') { + return versions[distTags.latest!] ?? null + } + + // check if range is a dist-tag + if (range in distTags) { + const taggedVersion = distTags[range]! + return versions[taggedVersion] ?? 
null + } + + // normalize loose version formats (v1.0.0, = 1.0.0) + const cleanedRange = semver.validRange(range, { loose: true }) ?? range + + // check if range is an exact version + if (versions[range]) { + return versions[range] + } + + // check cleaned version (handles v1.0.0 -> 1.0.0) + const cleanedVersion = semver.clean(range, { loose: true }) + if (cleanedVersion && versions[cleanedVersion]) { + return versions[cleanedVersion] + } + + // for wildcard ranges, use loose mode to include prereleases + const isWildcard = range === '*' || range === 'x' || range === '' + const satisfiesOptions = { loose: true, includePrerelease: isWildcard } + + // prefer 'latest' tag if it satisfies the range (pnpm behavior) + // publishers tag 'latest' intentionally, so respect that choice + const latestVersion = distTags.latest + if (latestVersion && versions[latestVersion]) { + if (semver.satisfies(latestVersion, cleanedRange, satisfiesOptions)) { + return versions[latestVersion] + } + } + + // find all versions satisfying the range + const validVersions = Object.keys(versions) + .filter(v => semver.satisfies(v, cleanedRange, satisfiesOptions)) + // oxlint-disable-next-line unicorn/no-array-sort + .sort(semver.rcompare) + + if (validVersions.length === 0) { + return null + } + + // prefer non-deprecated versions (pnpm behavior) + const nonDeprecated = validVersions.find(v => !versions[v]!.deprecated) + if (nonDeprecated !== undefined) { + return versions[nonDeprecated]! + } + + // fall back to deprecated if no alternatives + return versions[validVersions[0]!]! +} + +/** + * options for dependency resolution. + */ +export interface ResolveOptions { + /** + * whether to auto-install peer dependencies. + * when true, required (non-optional) peer dependencies are resolved automatically. + * @default true + */ + installPeers?: boolean +} + +/** + * context for tracking resolution state across recursive calls. 
+ */ +interface ResolutionContext { + /** all resolved packages by "registry:name@version" key for deduping */ + resolved: Map + /** packages currently being resolved (for cycle detection) */ + resolving: Set + /** resolution options */ + options: Required +} + +/** + * resolves a single package and its dependencies recursively. + * + * @param name package name + * @param range version range to satisfy + * @param registry which registry to fetch from + * @param ctx resolution context for deduping and cycle detection + * @returns the resolved package tree + */ +async function resolvePackage( + name: string, + range: string, + registry: Registry, + ctx: ResolutionContext, +): Promise { + const packument = await fetchPackument(name, registry) + const manifest = pickVersion(packument.versions, packument['dist-tags'], range) + + if (!manifest) { + throw new NoMatchingVersionError(name, range) + } + + progress.trigger({ type: 'progress', kind: 'resolve', name, version: manifest.version }) + + const key = `${registry}:${name}@${manifest.version}` + + // check if already resolved (deduplication) + const existing = ctx.resolved.get(key) + if (existing) { + return existing + } + + // cycle detection - if we're already resolving this, return a placeholder + // the actual dependencies will be filled in by the original resolution + if (ctx.resolving.has(key)) { + // create a minimal resolved package for the cycle + const cyclic: ResolvedPackage = { + name, + version: manifest.version, + tarball: manifest.dist.tarball, + integrity: manifest.dist.integrity, + dependencies: new Map(), + } + return cyclic + } + + ctx.resolving.add(key) + + // create the resolved package + const resolved: ResolvedPackage = { + name, + version: manifest.version, + tarball: manifest.dist.tarball, + integrity: manifest.dist.integrity, + unpackedSize: manifest.dist.unpackedSize, + dependencies: new Map(), + } + + // register early so cycles can find it + ctx.resolved.set(key, resolved) + + // collect 
all dependencies to resolve (regular deps + peer deps) + const depsToResolve: Array<[string, string]> = [] + + // add regular dependencies + const deps = manifest.dependencies ?? {} + for (const [depName, depRange] of Object.entries(deps)) { + depsToResolve.push([depName, depRange]) + } + + // add peer dependencies as regular dependencies of this package + // this ensures they get hoisted correctly - placed at root if no conflict, + // or nested under this package if there's a version conflict + if (ctx.options.installPeers && manifest.peerDependencies) { + const peerMeta = manifest.peerDependenciesMeta ?? {} + for (const [peerName, peerRange] of Object.entries(manifest.peerDependencies)) { + const isOptional = peerMeta[peerName]?.optional === true + if (!isOptional) { + // only add if not already in regular deps (regular deps take precedence) + if (!(peerName in deps)) { + depsToResolve.push([peerName, peerRange]) + } + } + } + } + + // resolve all dependencies in parallel + const resolvedDeps = await Promise.all( + depsToResolve.map(async ([depName, depRange]) => { + // when a JSR package depends on @jsr/*, reverse to canonical name and fetch from JSR + // otherwise use npm (even for @jsr/* from npm packages - that's what the author intended) + let resolvedName = depName + let depRegistry: Registry = 'npm' + if (registry === 'jsr' && depName.startsWith('@jsr/')) { + resolvedName = reverseJsrName(depName) + depRegistry = 'jsr' + } + const dep = await resolvePackage(resolvedName, depRange, depRegistry, ctx) + return [depName, dep] as const + }), + ) + + for (const [depName, dep] of resolvedDeps) { + resolved.dependencies.set(depName, dep) + } + + ctx.resolving.delete(key) + return resolved +} + +/** + * resolves one or more packages and all their dependencies. + * this is the main entry point for dependency resolution. 
+ * + * @param specifiers package specifiers to resolve (e.g., ["react@^18.0.0", "jsr:@luca/flag"]) + * @param options resolution options + * @returns the full resolution result with all packages + */ +export async function resolve( + specifiers: string[], + options: ResolveOptions = {}, +): Promise { + const ctx: ResolutionContext = { + resolved: new Map(), + resolving: new Set(), + options: { + installPeers: options.installPeers ?? true, + }, + } + + const parsedSpecs = specifiers.map(parseSpecifier) + + const roots = await Promise.all( + parsedSpecs.map(({ name, range, registry }) => resolvePackage(name, range, registry, ctx)), + ) + + return { + roots, + packages: ctx.resolved, + } +} diff --git a/app/bundler/lib/subpaths.ts b/app/bundler/lib/subpaths.ts new file mode 100644 index 000000000..5c56fd0a9 --- /dev/null +++ b/app/bundler/lib/subpaths.ts @@ -0,0 +1,289 @@ +import type { Volume } from 'memfs' + +import type { DiscoveredSubpaths, Subpath } from '../types' + +import type { PackageExports, PackageJson } from './types' + +export type { DiscoveredSubpaths, Subpath } + +// #region condition resolution + +/** + * condition priority for ESM browser bundling. + * higher index = higher priority. + */ +const CONDITION_PRIORITY = ['default', 'module', 'import', 'browser'] as const + +/** + * resolves a conditional export to a file path. + * handles nested conditions and returns the best match for ESM browser. 
+ */ +function resolveCondition(value: PackageExports): string | null { + if (value === null) { + return null + } + + if (typeof value === 'string') { + return value + } + + if (Array.isArray(value)) { + // array means "try in order", take first + for (const item of value) { + const resolved = resolveCondition(item) + if (resolved) { + return resolved + } + } + return null + } + + if (typeof value === 'object') { + // check if this is a conditions object or a subpath object + const keys = Object.keys(value) + + // if any key starts with '.', this is a subpath object, not conditions + if (keys.some(k => k.startsWith('.'))) { + return null + } + + // this is a conditions object, find best match + let bestMatch: string | null = null + let bestPriority = -1 + + for (const [condition, target] of Object.entries(value)) { + const priority = CONDITION_PRIORITY.indexOf(condition as (typeof CONDITION_PRIORITY)[number]) + + if (priority > bestPriority) { + const resolved = resolveCondition(target as PackageExports) + if (resolved) { + bestMatch = resolved + bestPriority = priority + } + } + } + + return bestMatch + } + + return null +} + +// #endregion + +// #region wildcard expansion + +/** + * recursively lists all files in a directory. + */ +function listFilesRecursive(volume: Volume, dir: string): string[] { + const files: string[] = [] + + try { + const entries = volume.readdirSync(dir, { withFileTypes: true }) + for (const entry of entries) { + const fullPath = `${dir}/${entry.name}` + if (entry.isDirectory()) { + files.push(...listFilesRecursive(volume, fullPath)) + } else if (entry.isFile()) { + files.push(fullPath) + } + } + } catch { + // directory doesn't exist or can't be read + } + + return files +} + +/** + * expands a wildcard pattern against the volume files. 
+ * + * @param subpath the subpath pattern with wildcard (e.g., "./*") + * @param target the target pattern (e.g., "./*.js") + * @param packagePath the package path in volume (e.g., "/node_modules/pkg") + * @param volume the volume to search in + * @returns expanded subpath entries + */ +function expandWildcard( + subpath: string, + target: string, + packagePath: string, + volume: Volume, +): Subpath[] { + const entries: Subpath[] = [] + + // extract the parts before and after the wildcard + const targetParts = target.split('*') + if (targetParts.length !== 2) { + // invalid pattern, skip + return entries + } + + const prefix = targetParts[0]! + const suffix = targetParts[1]! + const subpathParts = subpath.split('*') + if (subpathParts.length !== 2) { + return entries + } + + const subpathPrefix = subpathParts[0]! + const subpathSuffix = subpathParts[1]! + + // normalize the prefix to match volume paths + // target like "./src/*.js" becomes "/node_modules/pkg/src" + const searchDir = `${packagePath}/${prefix.replace(/^\.\//, '').replace(/\/$/, '')}` + + // list all files in the search directory + const allFiles = listFilesRecursive(volume, searchDir) + + for (const filePath of allFiles) { + // check if file matches the pattern + const relativePath = filePath.slice(searchDir.length + 1) + + if (suffix && !filePath.endsWith(suffix)) { + continue + } + + // extract the wildcard match + const match = suffix ? relativePath.slice(0, relativePath.length - suffix.length) : relativePath + + // construct the subpath + const expandedSubpath = `${subpathPrefix}${match}${subpathSuffix}` + + // construct the relative target + const expandedTarget = `./${prefix.replace(/^\.\//, '')}${match}${suffix}` + + entries.push({ + subpath: expandedSubpath, + target: expandedTarget, + isWildcard: true, + }) + } + + return entries +} + +// #endregion + +// #region main discovery + +/** + * discovers all available subpaths from a package's exports field. 
+ * + * @param packageJson the package.json content + * @param volume the volume containing package files + * @returns discovered subpaths with default selection + */ +export function discoverSubpaths(packageJson: PackageJson, volume: Volume): DiscoveredSubpaths { + const entries: Subpath[] = [] + const packagePath = `/node_modules/${packageJson.name}` + + // check for exports field first (takes precedence) + if (packageJson.exports !== undefined) { + const exportsField = packageJson.exports + + if (typeof exportsField === 'string') { + // simple string export: "exports": "./index.js" + entries.push({ + subpath: '.', + target: exportsField, + isWildcard: false, + }) + } else if (Array.isArray(exportsField)) { + // array export: "exports": ["./index.js", "./index.cjs"] + const resolved = resolveCondition(exportsField) + if (resolved) { + entries.push({ + subpath: '.', + target: resolved, + isWildcard: false, + }) + } + } else if (typeof exportsField === 'object' && exportsField !== null) { + // object export - could be conditions or subpaths + const keys = Object.keys(exportsField) + const hasSubpaths = keys.some(k => k.startsWith('.')) + + if (hasSubpaths) { + // subpath exports + for (const [subpath, value] of Object.entries(exportsField)) { + if (!subpath.startsWith('.')) { + continue + } + + if (subpath.includes('*')) { + // wildcard pattern + const target = resolveCondition(value as PackageExports) + if (target && target.includes('*')) { + const expanded = expandWildcard(subpath, target, packagePath, volume) + entries.push(...expanded) + } + } else { + // regular subpath + const target = resolveCondition(value as PackageExports) + if (target) { + entries.push({ + subpath, + target, + isWildcard: false, + }) + } + } + } + } else { + // top-level conditions (no subpaths means this is conditions for ".") + const target = resolveCondition(exportsField) + if (target) { + entries.push({ + subpath: '.', + target, + isWildcard: false, + }) + } + } + } + } else { + // 
fallback to legacy fields + // priority: module > main > index.js + let legacyMain = packageJson.module || packageJson.main + + if (!legacyMain) { + // check if index.js exists + try { + volume.statSync(`${packagePath}/index.js`) + legacyMain = './index.js' + } catch { + // no index.js + } + } + + if (legacyMain) { + entries.push({ + subpath: '.', + target: legacyMain.startsWith('.') ? legacyMain : `./${legacyMain}`, + isWildcard: false, + }) + } + } + + // determine default subpath + let defaultSubpath: string | null = null + + // prefer "." if it exists + const mainEntry = entries.find(e => e.subpath === '.') + if (mainEntry) { + defaultSubpath = '.' + } else if (entries.length > 0) { + // otherwise, pick first alphabetically + entries.sort((a, b) => a.subpath.localeCompare(b.subpath)) + defaultSubpath = entries[0]!.subpath + } + + return { + subpaths: entries, + defaultSubpath, + } +} + +// #endregion diff --git a/app/bundler/lib/types.ts b/app/bundler/lib/types.ts new file mode 100644 index 000000000..cb4e50e75 --- /dev/null +++ b/app/bundler/lib/types.ts @@ -0,0 +1,202 @@ +import * as v from 'valibot' + +// #region package.json schema + +/** + * package exports field - can be a string, array, object, or nested conditions. + * @see https://nodejs.org/api/packages.html#exports + */ +const packageExportsSchema: v.GenericSchema = v.union([ + v.null(), + v.string(), + v.array(v.string()), + v.record( + v.string(), + v.lazy(() => packageExportsSchema), + ), +]) + +export type PackageExports = string | string[] | { [key: string]: PackageExports } | null + +/** + * base package.json schema with all standard fields. + * other schemas pick from this to ensure consistency. 
+ * @see https://docs.npmjs.com/cli/v10/configuring-npm/package-json + */ +export const packageJsonSchema = v.object({ + name: v.string(), + version: v.string(), + description: v.optional(v.string()), + keywords: v.optional(v.array(v.string())), + homepage: v.optional(v.string()), + license: v.optional(v.string()), + main: v.optional(v.string()), + module: v.optional(v.string()), + browser: v.optional( + v.union([v.string(), v.record(v.string(), v.union([v.string(), v.literal(false)]))]), + ), + types: v.optional(v.string()), + typings: v.optional(v.string()), + exports: v.optional(packageExportsSchema), + type: v.optional(v.picklist(['module', 'commonjs'])), + bin: v.optional(v.union([v.string(), v.record(v.string(), v.string())])), + directories: v.optional(v.record(v.string(), v.string())), + dependencies: v.optional(v.record(v.string(), v.string())), + devDependencies: v.optional(v.record(v.string(), v.string())), + peerDependencies: v.optional(v.record(v.string(), v.string())), + peerDependenciesMeta: v.optional( + v.record(v.string(), v.object({ optional: v.optional(v.boolean()) })), + ), + bundleDependencies: v.optional(v.union([v.boolean(), v.array(v.string())])), + optionalDependencies: v.optional(v.record(v.string(), v.string())), + engines: v.optional(v.record(v.string(), v.string())), + os: v.optional(v.array(v.string())), + cpu: v.optional(v.array(v.string())), + deprecated: v.optional(v.union([v.string(), v.boolean()])), + sideEffects: v.optional(v.union([v.boolean(), v.array(v.string())])), +}) + +export type PackageJson = v.InferOutput + +// #endregion + +// #region abbreviated packument schemas + +/** + * distribution metadata for a package version. 
+ */ +const distSchema = v.object({ + tarball: v.string(), + shasum: v.string(), + integrity: v.optional(v.string()), + fileCount: v.optional(v.number()), + unpackedSize: v.optional(v.number()), + signatures: v.optional( + v.array( + v.object({ + keyid: v.string(), + sig: v.string(), + }), + ), + ), +}) + +/** + * abbreviated manifest for a specific version. + * picks installation-relevant fields from package.json and adds registry metadata. + * @see https://github.com/npm/registry/blob/main/docs/responses/package-metadata.md#abbreviated-metadata-format + */ +export const abbreviatedManifestSchema = v.object({ + // pick installation-relevant fields from package.json + ...v.pick(packageJsonSchema, [ + 'name', + 'version', + 'deprecated', + 'dependencies', + 'devDependencies', + 'optionalDependencies', + 'bundleDependencies', + 'peerDependencies', + 'peerDependenciesMeta', + 'bin', + 'directories', + 'engines', + 'cpu', + 'os', + ]).entries, + // registry-specific fields + dist: distSchema, + hasInstallScript: v.optional(v.boolean()), + _hasShrinkwrap: v.optional(v.boolean()), +}) + +export type AbbreviatedManifest = v.InferOutput + +/** + * abbreviated packument - minimal metadata for package resolution. + * returned when requesting with Accept: application/vnd.npm.install-v1+json + * @see https://github.com/npm/registry/blob/main/docs/responses/package-metadata.md#abbreviated-metadata-format + */ +export const abbreviatedPackumentSchema = v.object({ + 'name': v.string(), + // optional because some registries (e.g., JSR's npm mirror) may not include it + 'modified': v.optional(v.string()), + 'dist-tags': v.pipe( + v.record(v.string(), v.string()), + v.check(tags => 'latest' in tags, 'dist-tags must include "latest"'), + ), + 'versions': v.record(v.string(), abbreviatedManifestSchema), +}) + +export type AbbreviatedPackument = v.InferOutput + +// #endregion + +/** + * a resolved package with its dependencies. 
+ * this is the output of the resolution step before hoisting. + */ +export interface ResolvedPackage { + name: string + version: string + /** the tarball URL for fetching */ + tarball: string + /** SRI integrity hash if available */ + integrity?: string + /** unpacked size in bytes (from registry) */ + unpackedSize?: number + /** resolved dependencies (name -> ResolvedPackage) */ + dependencies: Map +} + +/** + * supported package registries. + */ +export type Registry = 'npm' | 'jsr' + +/** + * the input to the resolver - a package specifier. + * can be just a name (uses latest) or name@version/range. + */ +export interface PackageSpecifier { + name: string + /** version, range, or dist-tag. defaults to 'latest' */ + range: string + /** which registry to fetch from. defaults to 'npm' */ + registry: Registry +} + +/** + * the full resolution result - a tree of resolved packages. + */ +export interface ResolutionResult { + /** the root package(s) that were requested */ + roots: ResolvedPackage[] + /** all unique packages in the resolution (for deduping) */ + packages: Map +} + +/** + * a node in the hoisted node_modules structure. + * represents what should be written to node_modules/{name} + */ +export interface HoistedNode { + name: string + version: string + tarball: string + integrity?: string + /** unpacked size in bytes (from registry) */ + unpackedSize?: number + /** number of direct dependencies */ + dependencyCount: number + /** nested node_modules for this package (when hoisting fails) */ + nested: Map +} + +/** + * the result of hoisting - a flat(ish) node_modules structure. + */ +export interface HoistedResult { + /** top-level node_modules entries */ + root: Map +} diff --git a/app/bundler/lib/utils.ts b/app/bundler/lib/utils.ts new file mode 100644 index 000000000..cecdcc43f --- /dev/null +++ b/app/bundler/lib/utils.ts @@ -0,0 +1,13 @@ +// matches ANSI escape sequences (colors, cursor movement, etc.) 
+// oxlint-disable-next-line no-control-regex +const ANSI_REGEX = /\x1b\[[0-9;]*[a-zA-Z]/g + +/** + * strips ANSI escape codes from a string. + * + * @param input string potentially containing ANSI codes + * @returns string with ANSI codes removed + */ +export function stripAnsi(input: string): string { + return input.replace(ANSI_REGEX, '') +} diff --git a/app/bundler/lib/worker-entry.ts b/app/bundler/lib/worker-entry.ts new file mode 100644 index 000000000..031803da5 --- /dev/null +++ b/app/bundler/lib/worker-entry.ts @@ -0,0 +1,200 @@ +import * as zstd from '@bokuweb/zstd-wasm' +import { memfs } from '@rolldown/browser/experimental' +import * as v from 'valibot' + +import { progress } from '../events' +import { + workerRequestSchema, + type BundleOptions, + type InitOptions, + type InitResult, + type WorkerResponse, +} from '../types' + +import { bundlePackage } from './bundler' +import { fetchPackagesToVolume } from './fetch' +import { hoist } from './hoist' +import { buildInstalledPackages } from './installed-packages' +import { resolve } from './resolve' +import { discoverSubpaths } from './subpaths' +import type { PackageJson } from './types' +import { stripAnsi } from './utils' + +const { volume } = memfs! + +// forward progress events to main thread +progress.on(msg => { + self.postMessage(msg) +}) + +// #region state + +let packageName: string | null = null +let initResult: InitResult | null = null + +let bundleInProgress = false +let pendingBundleRequest: { + id: number + subpath: string + selectedExports: string[] | null + options: BundleOptions +} | null = null + +// #endregion + +// #region handlers + +async function handleInit( + id: number, + packageSpec: string, + options: InitOptions = {}, +): Promise { + try { + volume.reset() + + const resolution = await resolve([packageSpec], options.resolve) + const hoisted = hoist(resolution.roots) + + await fetchPackagesToVolume(hoisted, volume, options.fetch) + + const mainPackage = resolution.roots[0]! 
+ const pkgJsonPath = `/node_modules/${mainPackage.name}/package.json` + const pkgJsonContent = volume.readFileSync(pkgJsonPath, 'utf8') as string + const manifest = JSON.parse(pkgJsonContent) as PackageJson + + packageName = mainPackage.name + + const subpaths = discoverSubpaths(manifest, volume) + + // get peer dependency names from manifest + const peerDependencies = Object.keys(manifest.peerDependencies ?? {}) + const peerDepNames = new Set(peerDependencies) + + const packages = buildInstalledPackages(mainPackage!, peerDepNames) + const installSize = packages.reduce((sum, pkg) => sum + pkg.size, 0) + + initResult = { + name: mainPackage.name, + version: mainPackage.version, + subpaths, + installSize, + packages, + peerDependencies, + } + + const event = { + id, + type: 'init', + result: initResult, + } satisfies WorkerResponse + + self.postMessage(event) + } catch (error) { + const event = { + id, + type: 'error', + error: stripAnsi(String(error)), + } satisfies WorkerResponse + + self.postMessage(event) + } +} + +async function handleBundle( + id: number, + subpath: string, + selectedExports: string[] | null, + options: BundleOptions = {}, +): Promise { + if (!packageName) { + const event = { + id, + type: 'error', + error: 'not initialized - call init() first', + } satisfies WorkerResponse + + self.postMessage(event) + return + } + + // if a bundle is in progress, queue this one (replacing any previous pending) + if (bundleInProgress) { + // reject the previous pending request if any + if (pendingBundleRequest) { + const event = { + id: pendingBundleRequest.id, + type: 'error', + error: 'Superseded by newer request', + } satisfies WorkerResponse + + self.postMessage(event) + } + pendingBundleRequest = { id, subpath, selectedExports, options } + return + } + + await processBundleRequest(id, subpath, selectedExports, options) +} + +async function processBundleRequest( + id: number, + subpath: string, + selectedExports: string[] | null, + options: BundleOptions, 
+): Promise { + bundleInProgress = true + + try { + const result = await bundlePackage(packageName!, subpath, selectedExports, options) + self.postMessage({ id, type: 'bundle', result } satisfies WorkerResponse) + } catch (error) { + self.postMessage( + { id, type: 'error', error: stripAnsi(String(error)) } satisfies WorkerResponse, + '*', + ) + } finally { + bundleInProgress = false + + // process pending request if any + if (pendingBundleRequest) { + const pending = pendingBundleRequest + pendingBundleRequest = null + await processBundleRequest( + pending.id, + pending.subpath, + pending.selectedExports, + pending.options, + ) + } + } +} + +// #endregion + +// #region message handler + +self.addEventListener('message', (event: MessageEvent) => { + const parsed = v.safeParse(workerRequestSchema, event.data) + if (!parsed.success) { + return + } + + const request = parsed.output + + switch (request.type) { + case 'init': + handleInit(request.id, request.packageSpec, request.options) + break + case 'bundle': + handleBundle(request.id, request.subpath, request.selectedExports, request.options) + break + } +}) + +// init zstd wasm before signaling ready +await zstd.init() + +// signal to main thread that we're ready +self.postMessage({ type: 'ready' } satisfies WorkerResponse) + +// #endregion diff --git a/app/bundler/types.ts b/app/bundler/types.ts new file mode 100644 index 000000000..609682a27 --- /dev/null +++ b/app/bundler/types.ts @@ -0,0 +1,193 @@ +import * as v from 'valibot' + +// #region option schemas + +const resolveOptionsSchema = v.object({ + installPeers: v.optional(v.boolean()), +}) + +const fetchOptionsSchema = v.object({ + concurrency: v.optional(v.number()), + exclude: v.optional(v.array(v.instance(RegExp))), +}) + +const initOptionsSchema = v.object({ + resolve: v.optional(resolveOptionsSchema), + fetch: v.optional(fetchOptionsSchema), +}) + +export type InitOptions = v.InferOutput + +const bundleOptionsSchema = v.object({ + rolldown: v.optional( + 
v.object({ + external: v.optional(v.array(v.string())), + minify: v.optional(v.boolean()), + }), + ), +}) + +export type BundleOptions = v.InferOutput + +// #endregion + +// #region result schemas + +const subpathSchema = v.object({ + subpath: v.string(), + target: v.string(), + isWildcard: v.boolean(), +}) + +export type Subpath = v.InferOutput + +const discoveredSubpathsSchema = v.object({ + subpaths: v.array(subpathSchema), + defaultSubpath: v.nullable(v.string()), +}) + +export type DiscoveredSubpaths = v.InferOutput + +const packageRefSchema = v.object({ + name: v.string(), + version: v.string(), + isPeer: v.boolean(), +}) + +export type PackageRef = v.InferOutput + +const installedPackageSchema = v.object({ + name: v.string(), + version: v.string(), + size: v.number(), + path: v.string(), + level: v.number(), + dependents: v.array(packageRefSchema), + dependencies: v.array(packageRefSchema), + isPeer: v.boolean(), +}) + +export type InstalledPackage = v.InferOutput + +const initResultSchema = v.object({ + name: v.string(), + version: v.string(), + subpaths: discoveredSubpathsSchema, + installSize: v.number(), + packages: v.array(installedPackageSchema), + peerDependencies: v.array(v.string()), +}) + +export type InitResult = v.InferOutput + +const bundleChunkSchema = v.object({ + fileName: v.string(), + code: v.string(), + size: v.number(), + gzipSize: v.number(), + brotliSize: v.optional(v.number()), + zstdSize: v.number(), + isEntry: v.boolean(), + exports: v.array(v.string()), +}) + +export type BundleChunk = v.InferOutput + +const bundleResultSchema = v.object({ + chunks: v.array(bundleChunkSchema), + size: v.number(), + gzipSize: v.number(), + brotliSize: v.optional(v.number()), + zstdSize: v.number(), + exports: v.array(v.string()), + isCjs: v.boolean(), +}) + +export type BundleResult = v.InferOutput + +// #endregion + +// #region request schemas (worker parses these) + +const initRequestSchema = v.object({ + id: v.number(), + type: v.literal('init'), 
+ packageSpec: v.string(), + options: v.optional(initOptionsSchema), +}) + +const bundleRequestSchema = v.object({ + id: v.number(), + type: v.literal('bundle'), + subpath: v.string(), + selectedExports: v.nullable(v.array(v.string())), + options: v.optional(bundleOptionsSchema), +}) + +export const workerRequestSchema = v.variant('type', [initRequestSchema, bundleRequestSchema]) + +export type WorkerRequest = v.InferOutput + +// #endregion + +// #region response schemas (main thread parses these) + +const readyResponseSchema = v.object({ + type: v.literal('ready'), +}) + +const initResponseSchema = v.object({ + id: v.number(), + type: v.literal('init'), + result: initResultSchema, +}) + +const bundleResponseSchema = v.object({ + id: v.number(), + type: v.literal('bundle'), + result: bundleResultSchema, +}) + +const errorResponseSchema = v.object({ + id: v.number(), + type: v.literal('error'), + error: v.string(), +}) + +const progressResponseSchema = v.variant('kind', [ + v.object({ + type: v.literal('progress'), + kind: v.literal('resolve'), + name: v.string(), + version: v.string(), + }), + v.object({ + type: v.literal('progress'), + kind: v.literal('fetch'), + current: v.number(), + total: v.number(), + name: v.string(), + }), + v.object({ + type: v.literal('progress'), + kind: v.literal('bundle'), + }), + v.object({ + type: v.literal('progress'), + kind: v.literal('compress'), + }), +]) + +export type ProgressMessage = v.InferOutput + +export const workerResponseSchema = v.variant('type', [ + readyResponseSchema, + initResponseSchema, + bundleResponseSchema, + errorResponseSchema, + progressResponseSchema, +]) + +export type WorkerResponse = v.InferOutput + +// #endregion diff --git a/app/bundler/worker-client.ts b/app/bundler/worker-client.ts new file mode 100644 index 000000000..534547d10 --- /dev/null +++ b/app/bundler/worker-client.ts @@ -0,0 +1,136 @@ +import * as v from 'valibot' +import { toRaw } from 'vue' + +import { progress } from './events' +import 
{ + workerResponseSchema, + type BundleOptions, + type BundleResult, + type InitOptions, + type InitResult, + type WorkerRequest, +} from './types' + +export type { InitResult } + +/** + * a session for working with a package. + * holds the worker and initialization result. + */ +export interface PackageSession extends InitResult { + /** the worker instance for this session */ + worker: BundlerWorker +} + +/** + * client for communicating with a bundler worker. + * each instance spawns a new worker, intended for one package. + */ +export class BundlerWorker { + private worker: Worker + private nextId = 0 + private pending = new Map>() + private ready: Promise + private resolveReady!: () => void + + constructor() { + this.ready = new Promise(resolve => { + this.resolveReady = resolve + }) + + this.worker = new Worker(new URL('./lib/worker-entry.ts', import.meta.url), { type: 'module' }) + this.worker.addEventListener('message', this.handleMessage.bind(this)) + this.worker.addEventListener('error', this.handleError.bind(this)) + } + + private handleMessage(event: MessageEvent): void { + const parsed = v.safeParse(workerResponseSchema, event.data) + if (!parsed.success) { + return + } + + const response = parsed.output + + if (response.type === 'ready') { + this.resolveReady() + return + } + + // forward progress messages to global emitter + if (response.type === 'progress') { + progress.trigger(response) + return + } + + const deferred = this.pending.get(response.id) + if (!deferred) { + // response for a request we no longer care about (e.g., superseded bundle) + return + } + + this.pending.delete(response.id) + + if (response.type === 'error') { + deferred.reject(new Error(response.error)) + } else { + deferred.resolve(response.result) + } + } + + private handleError(_event: ErrorEvent): void { + // reject all pending requests + for (const deferred of this.pending.values()) { + deferred.reject(new Error('Worker error')) + } + this.pending.clear() + } + + private 
async send(message: WorkerRequest): Promise { + // wait for worker to be ready before sending + await this.ready + + const deferred = Promise.withResolvers() + this.pending.set(message.id, deferred as PromiseWithResolvers) + this.worker.postMessage(message) + return deferred.promise + } + + /** + * initializes the worker with a package. + * only the first call does work; subsequent calls return cached result. + */ + init(packageSpec: string, options?: InitOptions): Promise { + return this.send({ id: this.nextId++, type: 'init', packageSpec, options }) + } + + /** + * bundles a subpath from the initialized package. + * uses "latest wins" - if called while a bundle is in progress, + * the previous pending request is superseded. + */ + bundle( + subpath: string, + selectedExports: string[] | null, + options?: BundleOptions, + ): Promise { + return this.send({ + id: this.nextId++, + type: 'bundle', + subpath, + // unwrap Vue reactive proxy - postMessage can't clone proxies + selectedExports: selectedExports ? toRaw(selectedExports) : null, + options, + }) + } + + /** + * terminates the worker. 
+ */ + terminate(): void { + this.worker.terminate() + for (const deferred of this.pending.values()) { + deferred.reject(new DOMException('Worker terminated', 'AbortError')) + } + this.pending.clear() + } +} diff --git a/app/components/ImpactAnalyzer.vue b/app/components/ImpactAnalyzer.vue new file mode 100644 index 000000000..e0a7037f8 --- /dev/null +++ b/app/components/ImpactAnalyzer.vue @@ -0,0 +1,141 @@ + + + diff --git a/app/components/ImpactBundle.vue b/app/components/ImpactBundle.vue new file mode 100644 index 000000000..a8e8c860d --- /dev/null +++ b/app/components/ImpactBundle.vue @@ -0,0 +1,273 @@ + + + diff --git a/app/components/ImpactDependencyBar.vue b/app/components/ImpactDependencyBar.vue new file mode 100644 index 000000000..a44976ba7 --- /dev/null +++ b/app/components/ImpactDependencyBar.vue @@ -0,0 +1,208 @@ + + + diff --git a/app/components/ImpactInstall.vue b/app/components/ImpactInstall.vue new file mode 100644 index 000000000..f2c030e9d --- /dev/null +++ b/app/components/ImpactInstall.vue @@ -0,0 +1,172 @@ + + + diff --git a/app/pages/[...package].vue b/app/pages/[...package].vue index 17de5aceb..afff2147a 100644 --- a/app/pages/[...package].vue +++ b/app/pages/[...package].vue @@ -235,6 +235,16 @@ const docsLink = computed(() => { } }) +// Impact URL: bundle size analysis +const impactLink = computed(() => { + if (!displayVersion.value) return null + + return { + name: 'impact' as const, + params: { path: [...pkg.value!.name.split('/'), 'v', displayVersion.value.version] }, + } +}) + const fundingUrl = computed(() => { let funding = displayVersion.value?.funding if (Array.isArray(funding)) funding = funding[0] @@ -348,6 +358,12 @@ onKeyStroke('d', () => { } }) +onKeyStroke('i', () => { + if (impactLink.value) { + router.push(impactLink.value) + } +}) + defineOgImageComponent('Package', { name: () => pkg.value?.name ?? 'Package', version: () => displayVersion.value?.version ?? 
'', @@ -465,7 +481,7 @@ function handleClick(event: MouseEvent) { - + @@ -611,7 +642,7 @@ function handleClick(event: MouseEvent) { {{ $t('package.links.fund') }} - +
  • +
  • + + +
  • diff --git a/app/pages/impact/[...path].vue b/app/pages/impact/[...path].vue new file mode 100644 index 000000000..d6f50c414 --- /dev/null +++ b/app/pages/impact/[...path].vue @@ -0,0 +1,143 @@ + + + diff --git a/i18n/locales/en.json b/i18n/locales/en.json index 00d69908d..b175f332a 100644 --- a/i18n/locales/en.json +++ b/i18n/locales/en.json @@ -150,6 +150,7 @@ "jsr": "jsr", "code": "code", "docs": "docs", + "impact": "impact", "fund": "fund" }, "docs": { @@ -565,6 +566,49 @@ "code": "code" } }, + "impact": { + "title": "Bundle Impact", + "analyzing": "Analyzing bundle...", + "version_required": "Version is required to analyze bundle impact", + "go_to_package": "Go to package", + "install_size": "Install Size", + "total_unpacked": "total from {count} packages", + "subpath": "Entry Point", + "bundle_size": "Bundle Size", + "size": { + "minified": "Minified", + "gzip": "Gzip", + "brotli": "Brotli", + "zstd": "Zstd" + }, + "exports_cjs_notice": "CommonJS modules cannot be tree-shaken", + "exports": "Exports", + "select_all": "All", + "select_none": "None", + "rebundling": "Rebundling...", + "dependencies": "Dependencies by Size", + "peer_dependencies": "Peer Dependencies", + "and_more": "and {count} more...", + "progress": { + "resolve": "Resolving {name}...", + "fetch": "Downloading ({current}/{total})...", + "bundle": "Bundling...", + "compress": "Compressing..." 
+ }, + "error": "Failed to analyze bundle", + "filter_placeholder": "Filter packages...", + "level": "Level {level}", + "installed_by": "Installed by", + "dependencies_count": "Dependencies", + "no_packages_match": "No packages match your filter", + "sort": { + "level": "Dependency level", + "size": "Package size", + "installedBy": "Installed by count", + "dependencies": "Dependencies count", + "name": "Name" + } + }, "badges": { "provenance": { "verified": "verified", diff --git a/nuxt.config.ts b/nuxt.config.ts index 702481509..90830a140 100644 --- a/nuxt.config.ts +++ b/nuxt.config.ts @@ -99,6 +99,12 @@ export default defineNuxtConfig({ '/about': { prerender: true }, '/settings': { prerender: true }, '/oauth-client-metadata.json': { prerender: true }, + '/impact/**': { + headers: { + 'Cross-Origin-Opener-Policy': 'same-origin', + 'Cross-Origin-Embedder-Policy': 'require-corp', + }, + }, // proxy for insights '/_v/script.js': { proxy: 'https://npmx.dev/_vercel/insights/script.js' }, '/_v/view': { proxy: 'https://npmx.dev/_vercel/insights/view' }, @@ -229,6 +235,20 @@ export default defineNuxtConfig({ 'semver', 'validate-npm-package-name', ], + // Exclude rolldown from optimization - it has WASM that needs special handling + exclude: ['@rolldown/browser'], + }, + worker: { + format: 'es', + }, + server: { + headers: { + // Required for cross-origin isolation (SharedArrayBuffer) + 'Cross-Origin-Opener-Policy': 'same-origin', + 'Cross-Origin-Embedder-Policy': 'require-corp', + // Allow same-origin resources to be loaded under COEP + 'Cross-Origin-Resource-Policy': 'same-origin', + }, }, }, diff --git a/package.json b/package.json index 97a3ea02e..a646659da 100644 --- a/package.json +++ b/package.json @@ -36,21 +36,25 @@ "start:playwright:webserver": "NODE_ENV=test pnpm build && pnpm preview --port 5678" }, "dependencies": { + "@atcute/uint8array": "1.0.6", "@atproto/api": "^0.18.17", "@atproto/lex": "0.0.13", "@atproto/oauth-client-node": "^0.3.15", + 
"@bokuweb/zstd-wasm": "0.0.27", "@deno/doc": "jsr:^0.189.1", "@iconify-json/simple-icons": "1.2.68", "@iconify-json/vscode-icons": "1.2.40", "@intlify/core-base": "11.2.8", "@intlify/shared": "11.2.8", "@lunariajs/core": "https://pkg.pr.new/lunariajs/lunaria/@lunariajs/core@f07e1a3", + "@mary/tar": "jsr:^0.3.2", "@nuxt/a11y": "1.0.0-alpha.1", "@nuxt/fonts": "0.13.0", "@nuxt/scripts": "0.13.2", "@nuxtjs/color-mode": "4.0.0", "@nuxtjs/html-validator": "2.1.0", "@nuxtjs/i18n": "10.2.1", + "@rolldown/browser": "1.0.0-rc.1", "@shikijs/langs": "3.21.0", "@shikijs/themes": "3.21.0", "@upstash/redis": "1.36.1", @@ -80,6 +84,7 @@ "@iconify-json/svg-spinners": "1.2.4", "@npm/types": "2.1.0", "@nuxt/test-utils": "https://pkg.pr.new/@nuxt/test-utils@1499a48", + "@oxc-project/types": "0.110.0", "@playwright/test": "1.58.0", "@types/node": "24.10.9", "@types/sanitize-html": "2.16.0", @@ -97,6 +102,7 @@ "knip": "5.82.1", "lint-staged": "16.2.7", "marked": "17.0.1", + "memfs": "4.56.10", "playwright-core": "1.58.0", "simple-git-hooks": "2.13.1", "spdx-license-list": "6.11.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e3e228957..a92b2b452 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -20,6 +20,9 @@ importers: .: dependencies: + '@atcute/uint8array': + specifier: 1.0.6 + version: 1.0.6 '@atproto/api': specifier: ^0.18.17 version: 0.18.20 @@ -29,6 +32,9 @@ importers: '@atproto/oauth-client-node': specifier: ^0.3.15 version: 0.3.16 + '@bokuweb/zstd-wasm': + specifier: 0.0.27 + version: 0.0.27 '@deno/doc': specifier: jsr:^0.189.1 version: '@jsr/deno__doc@0.189.1(patch_hash=24f326e123c822a07976329a5afe91a8713e82d53134b5586625b72431c87832)' @@ -47,6 +53,9 @@ importers: '@lunariajs/core': specifier: https://pkg.pr.new/lunariajs/lunaria/@lunariajs/core@f07e1a3 version: https://pkg.pr.new/lunariajs/lunaria/@lunariajs/core@f07e1a3 + '@mary/tar': + specifier: jsr:^0.3.2 + version: '@jsr/mary__tar@0.3.2' '@nuxt/a11y': specifier: 1.0.0-alpha.1 version: 
1.0.0-alpha.1(magicast@0.5.1)(vite@7.3.1(@types/node@24.10.9)(jiti@2.6.1)(lightningcss@1.31.1)(terser@5.46.0)(yaml@2.8.2)) @@ -65,6 +74,9 @@ importers: '@nuxtjs/i18n': specifier: 10.2.1 version: 10.2.1(@upstash/redis@1.36.1)(@vercel/kv@3.0.0)(@vue/compiler-dom@3.5.27)(db0@0.3.4(better-sqlite3@12.6.2))(eslint@9.39.2(jiti@2.6.1))(ioredis@5.9.2)(magicast@0.5.1)(rollup@4.57.0)(vue@3.5.27(typescript@5.9.3)) + '@rolldown/browser': + specifier: 1.0.0-rc.1 + version: 1.0.0-rc.1 '@shikijs/langs': specifier: 3.21.0 version: 3.21.0 @@ -147,6 +159,9 @@ importers: '@nuxt/test-utils': specifier: https://pkg.pr.new/@nuxt/test-utils@1499a48 version: https://pkg.pr.new/@nuxt/test-utils@1499a48(@playwright/test@1.58.0)(@voidzero-dev/vite-plus-test@0.0.0-833c515fa25cef20905a7f9affb156dfa6f151ab(@types/node@24.10.9)(esbuild@0.27.2)(happy-dom@20.4.0)(jiti@2.6.1)(terser@5.46.0)(typescript@5.9.3)(yaml@2.8.2))(@vue/test-utils@2.4.6)(happy-dom@20.4.0)(magicast@0.5.1)(playwright-core@1.58.0)(typescript@5.9.3) + '@oxc-project/types': + specifier: 0.110.0 + version: 0.110.0 '@playwright/test': specifier: 1.58.0 version: 1.58.0 @@ -198,6 +213,9 @@ importers: marked: specifier: 17.0.1 version: 17.0.1 + memfs: + specifier: 4.56.10 + version: 4.56.10(tslib@2.8.1) playwright-core: specifier: 1.58.0 version: 1.58.0 @@ -306,6 +324,9 @@ packages: resolution: {integrity: sha512-60vepv88RwcJtSHrD6MjIL6Ta3SOYbgfnkHb+ppAVK+o9mXprRtulx7VlRl3lN3bbvysAfCS7WMVfhUYemB0IQ==} engines: {node: '>= 16'} + '@atcute/uint8array@1.0.6': + resolution: {integrity: sha512-ucfRBQc7BFT8n9eCyGOzDHEMKF/nZwhS2pPao4Xtab1ML3HdFYcX2DM1tadCzas85QTGxHe5urnUAAcNKGRi9A==} + '@atproto-labs/did-resolver@0.2.6': resolution: {integrity: sha512-2K1bC04nI2fmgNcvof+yA28IhGlpWn2JKYlPa7To9JTKI45FINCGkQSGiL2nyXlyzDJJ34fZ1aq6/IRFIOIiqg==} @@ -971,6 +992,9 @@ packages: resolution: {integrity: sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==} engines: {node: '>=18'} + '@bokuweb/zstd-wasm@0.0.27': + 
resolution: {integrity: sha512-GDm2uOTK3ESjnYmSeLQifJnBsRCWajKLvN32D2ZcQaaCIJI/Hse9s74f7APXjHit95S10UImsRGkTsbwHmrtmg==} + '@bomb.sh/tab@0.0.11': resolution: {integrity: sha512-RSqyreeicYBALcMaNxIUJTBknftXsyW45VRq5gKDNwKroh0Re5SDoWwXZaphb+OTEzVdpm/BA8Uq6y0P+AtVYw==} hasBin: true @@ -1730,6 +1754,126 @@ packages: '@jsdevtools/ono@7.1.3': resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==} + '@jsonjoy.com/base64@1.1.2': + resolution: {integrity: sha512-q6XAnWQDIMA3+FTiOYajoYqySkO+JSat0ytXGSuRdq9uXE7o92gzuQwQM14xaCRlBLGq3v5miDGC4vkVTn54xA==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/base64@17.65.0': + resolution: {integrity: sha512-Xrh7Fm/M0QAYpekSgmskdZYnFdSGnsxJ/tHaolA4bNwWdG9i65S8m83Meh7FOxyJyQAdo4d4J97NOomBLEfkDQ==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/buffers@1.2.1': + resolution: {integrity: sha512-12cdlDwX4RUM3QxmUbVJWqZ/mrK6dFQH4Zxq6+r1YXKXYBNgZXndx2qbCJwh3+WWkCSn67IjnlG3XYTvmvYtgA==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/buffers@17.65.0': + resolution: {integrity: sha512-eBrIXd0/Ld3p9lpDDlMaMn6IEfWqtHMD+z61u0JrIiPzsV1r7m6xDZFRxJyvIFTEO+SWdYF9EiQbXZGd8BzPfA==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/codegen@1.0.0': + resolution: {integrity: sha512-E8Oy+08cmCf0EK/NMxpaJZmOxPqM+6iSe2S4nlSBrPZOORoDJILxtbSUEDKQyTamm/BVAhIGllOBNU79/dwf0g==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/codegen@17.65.0': + resolution: {integrity: sha512-7MXcRYe7n3BG+fo3jicvjB0+6ypl2Y/bQp79Sp7KeSiiCgLqw4Oled6chVv07/xLVTdo3qa1CD0VCCnPaw+RGA==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-core@4.56.10': + resolution: {integrity: sha512-PyAEA/3cnHhsGcdY+AmIU+ZPqTuZkDhCXQ2wkXypdLitSpd6d5Ivxhnq4wa2ETRWFVJGabYynBWxIijOswSmOw==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + 
'@jsonjoy.com/fs-fsa@4.56.10': + resolution: {integrity: sha512-/FVK63ysNzTPOnCCcPoPHt77TOmachdMS422txM4KhxddLdbW1fIbFMYH0AM0ow/YchCyS5gqEjKLNyv71j/5Q==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-node-builtins@4.56.10': + resolution: {integrity: sha512-uUnKz8R0YJyKq5jXpZtkGV9U0pJDt8hmYcLRrPjROheIfjMXsz82kXMgAA/qNg0wrZ1Kv+hrg7azqEZx6XZCVw==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-node-to-fsa@4.56.10': + resolution: {integrity: sha512-oH+O6Y4lhn9NyG6aEoFwIBNKZeYy66toP5LJcDOMBgL99BKQMUf/zWJspdRhMdn/3hbzQsZ8EHHsuekbFLGUWw==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-node-utils@4.56.10': + resolution: {integrity: sha512-8EuPBgVI2aDPwFdaNQeNpHsyqPi3rr+85tMNG/lHvQLiVjzoZsvxA//Xd8aB567LUhy4QS03ptT+unkD/DIsNg==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-node@4.56.10': + resolution: {integrity: sha512-7R4Gv3tkUdW3dXfXiOkqxkElxKNVdd8BDOWC0/dbERd0pXpPY+s2s1Mino+aTvkGrFPiY+mmVxA7zhskm4Ue4Q==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-print@4.56.10': + resolution: {integrity: sha512-JW4fp5mAYepzFsSGrQ48ep8FXxpg4niFWHdF78wDrFGof7F3tKDJln72QFDEn/27M1yHd4v7sKHHVPh78aWcEw==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-snapshot@4.56.10': + resolution: {integrity: sha512-DkR6l5fj7+qj0+fVKm/OOXMGfDFCGXLfyHkORH3DF8hxkpDgIHbhf/DwncBMs2igu/ST7OEkexn1gIqoU6Y+9g==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/json-pack@1.21.0': + resolution: {integrity: sha512-+AKG+R2cfZMShzrF2uQw34v3zbeDYUqnQ+jg7ORic3BGtfw9p/+N6RJbq/kkV8JmYZaINknaEQ2m0/f693ZPpg==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/json-pack@17.65.0': + resolution: {integrity: sha512-e0SG/6qUCnVhHa0rjDJHgnXnbsacooHVqQHxspjvlYQSkHm+66wkHw6Gql+3u/WxI/b1VsOdUi0M+fOtkgKGdQ==} + engines: {node: '>=10.0'} + peerDependencies: + 
tslib: '2' + + '@jsonjoy.com/json-pointer@1.0.2': + resolution: {integrity: sha512-Fsn6wM2zlDzY1U+v4Nc8bo3bVqgfNTGcn6dMgs6FjrEnt4ZCe60o6ByKRjOGlI2gow0aE/Q41QOigdTqkyK5fg==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/json-pointer@17.65.0': + resolution: {integrity: sha512-uhTe+XhlIZpWOxgPcnO+iSCDgKKBpwkDVTyYiXX9VayGV8HSFVJM67M6pUE71zdnXF1W0Da21AvnhlmdwYPpow==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/util@1.9.0': + resolution: {integrity: sha512-pLuQo+VPRnN8hfPqUTLTHk126wuYdXVxE6aDmjSeV4NCAgyxWbiOIeNJVtID3h1Vzpoi9m4jXezf73I6LgabgQ==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/util@17.65.0': + resolution: {integrity: sha512-cWiEHZccQORf96q2y6zU3wDeIVPeidmGqd9cNKJRYoVHTV0S1eHPy5JTbHpMnGfDvtvujQwQozOqgO9ABu6h0w==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + '@jsr/deno__cache-dir@0.25.0': resolution: {integrity: sha512-J5/kPR8sc+dsRyE8rnLPhqay7B4XgBmkv7QaeIV41sGibszQjIb8b599LssN1YWBbqPLakPRMVobg5u6LheWfg==, tarball: https://npm.jsr.io/~/11/@jsr/deno__cache-dir/0.25.0.tgz} @@ -1742,6 +1886,9 @@ packages: '@jsr/deno__graph@0.86.9': resolution: {integrity: sha512-+qrrma5/bL+hcG20mfaEeC8SLopqoyd1RjcKFMRu++3SAXyrTKuvuIjBJCn/NyN7X+kV+QrJG67BCHX38Rzw+g==, tarball: https://npm.jsr.io/~/11/@jsr/deno__graph/0.86.9.tgz} + '@jsr/mary__tar@0.3.2': + resolution: {integrity: sha512-tGVmuWdOvAmUEWH76m3C+rnXp1VSvZw9WXpNpaJnHabVGpqLAzg9pcRCsN7OBJwZylACEyUiUNQjiHeEu7SD8w==, tarball: https://npm.jsr.io/~/11/@jsr/mary__tar/0.3.2.tgz} + '@jsr/std__bytes@1.0.6': resolution: {integrity: sha512-St6yKggjFGhxS52IFLJWvkchRFbAKg2Xh8UxA4S1EGz7GJ2Ui+ssDDldj/w2c8vCxvl6qgR0HaYbKeFJNqujmA==, tarball: https://npm.jsr.io/~/11/@jsr/std__bytes/1.0.6.tgz} @@ -3181,6 +3328,10 @@ packages: cpu: [x64] os: [win32] + '@rolldown/browser@1.0.0-rc.1': + resolution: {integrity: sha512-n/h6Oo2Udn5IhtLqYPqtZor09BPu+ml35ccbMv9XsJOpBTExjp39sqhGB3qogqowu5xwb9JHp2pR2JfPJIta4w==} + hasBin: true + 
'@rolldown/pluginutils@1.0.0-beta.53': resolution: {integrity: sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ==} @@ -5844,6 +5995,12 @@ packages: resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} engines: {node: '>=10.13.0'} + glob-to-regex.js@1.2.0: + resolution: {integrity: sha512-QMwlOQKU/IzqMUOAZWubUOT8Qft+Y0KQWnX9nK3ch0CJg0tTp4TvGZsTfudYKv2NzoQSyPcnA6TYeIQ3jGichQ==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + glob-to-regexp@0.4.1: resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} @@ -6063,6 +6220,10 @@ packages: resolution: {integrity: sha512-eKCa6bwnJhvxj14kZk5NCPc6Hb6BdsU9DZcOnmQKSnO1VKrfV0zCvtttPZUsBvjmNDn8rpcJfpwSYnHBjc95MQ==} engines: {node: '>=18.18.0'} + hyperdyperid@1.2.0: + resolution: {integrity: sha512-Y93lCzHYgGWdrJ66yIktxiaGULYc6oGiABxhcO5AufBeOyoIdZF7bIfLaOrbM0iGIOXQQgxxRrFEnb+Y6w1n4A==} + engines: {node: '>=10.18'} + ico-endec@0.1.6: resolution: {integrity: sha512-ZdLU38ZoED3g1j3iEyzcQj+wAkY2xfWNkymszfJPoxucIUhK7NayQ+/C4Kv0nDFMIsbtbEHldv3V8PU494/ueQ==} @@ -6861,6 +7022,11 @@ packages: resolution: {integrity: sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==} engines: {node: '>= 0.8'} + memfs@4.56.10: + resolution: {integrity: sha512-eLvzyrwqLHnLYalJP7YZ3wBe79MXktMdfQbvMrVD80K+NhrIukCVBvgP30zTJYEEDh9hZ/ep9z0KOdD7FSHo7w==} + peerDependencies: + tslib: '2' + merge-descriptors@2.0.0: resolution: {integrity: sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==} engines: {node: '>=18'} @@ -8513,6 +8679,12 @@ packages: text-decoder@1.2.3: resolution: {integrity: sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==} + thingies@2.5.0: + resolution: {integrity: 
sha512-s+2Bwztg6PhWUD7XMfeYm5qliDdSiZm7M7n8KjTkIsm3l/2lgVRc2/Gx/v+ZX8lT4FMA+i8aQvhcWylldc+ZNw==} + engines: {node: '>=10.18'} + peerDependencies: + tslib: ^2 + thread-stream@2.7.0: resolution: {integrity: sha512-qQiRWsU/wvNolI6tbbCKd9iKaTnCXsTwVxhhKM6nctPdujTyztjlbUkUTUymidWcMnZ5pWR0ej4a0tjsW021vw==} @@ -8574,6 +8746,12 @@ packages: tr46@1.0.1: resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} + tree-dump@1.1.0: + resolution: {integrity: sha512-rMuvhU4MCDbcbnleZTFezWsaZXRFemSqAM+7jPnzUl1fo9w3YEKOxAeui0fz3OI4EU4hf23iyA7uQRVko+UaBA==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + tree-kill@1.2.2: resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} hasBin: true @@ -9490,6 +9668,8 @@ snapshots: '@types/json-schema': 7.0.15 js-yaml: 4.1.1 + '@atcute/uint8array@1.0.6': {} + '@atproto-labs/did-resolver@0.2.6': dependencies: '@atproto-labs/fetch': 0.2.3 @@ -10465,6 +10645,8 @@ snapshots: '@bcoe/v8-coverage@1.0.2': {} + '@bokuweb/zstd-wasm@0.0.27': {} + '@bomb.sh/tab@0.0.11(cac@6.7.14)(citty@0.1.6)': optionalDependencies: cac: 6.7.14 @@ -10531,17 +10713,14 @@ snapshots: dependencies: '@emnapi/wasi-threads': 1.1.0 tslib: 2.8.1 - optional: true '@emnapi/runtime@1.8.1': dependencies: tslib: 2.8.1 - optional: true '@emnapi/wasi-threads@1.1.0': dependencies: tslib: 2.8.1 - optional: true '@esbuild/aix-ppc64@0.25.12': optional: true @@ -11057,6 +11236,133 @@ snapshots: '@jsdevtools/ono@7.1.3': {} + '@jsonjoy.com/base64@1.1.2(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/base64@17.65.0(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/buffers@1.2.1(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/buffers@17.65.0(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/codegen@1.0.0(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + 
'@jsonjoy.com/codegen@17.65.0(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/fs-core@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + thingies: 2.5.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-fsa@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-core': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + thingies: 2.5.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-node-builtins@4.56.10(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/fs-node-to-fsa@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-fsa': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-node-utils@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-node@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-core': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-print': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-snapshot': 4.56.10(tslib@2.8.1) + glob-to-regex.js: 1.2.0(tslib@2.8.1) + thingies: 2.5.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-print@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + tree-dump: 1.1.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-snapshot@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/buffers': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/json-pack': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/util': 17.65.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/json-pack@1.21.0(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/base64': 1.1.2(tslib@2.8.1) + 
'@jsonjoy.com/buffers': 1.2.1(tslib@2.8.1) + '@jsonjoy.com/codegen': 1.0.0(tslib@2.8.1) + '@jsonjoy.com/json-pointer': 1.0.2(tslib@2.8.1) + '@jsonjoy.com/util': 1.9.0(tslib@2.8.1) + hyperdyperid: 1.2.0 + thingies: 2.5.0(tslib@2.8.1) + tree-dump: 1.1.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/json-pack@17.65.0(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/base64': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/buffers': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/codegen': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/json-pointer': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/util': 17.65.0(tslib@2.8.1) + hyperdyperid: 1.2.0 + thingies: 2.5.0(tslib@2.8.1) + tree-dump: 1.1.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/json-pointer@1.0.2(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/codegen': 1.0.0(tslib@2.8.1) + '@jsonjoy.com/util': 1.9.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/json-pointer@17.65.0(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/util': 17.65.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/util@1.9.0(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/buffers': 1.2.1(tslib@2.8.1) + '@jsonjoy.com/codegen': 1.0.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/util@17.65.0(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/buffers': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/codegen': 17.65.0(tslib@2.8.1) + tslib: 2.8.1 + '@jsr/deno__cache-dir@0.25.0': dependencies: '@jsr/deno__graph': 0.86.9 @@ -11074,6 +11380,8 @@ snapshots: '@jsr/deno__graph@0.86.9': {} + '@jsr/mary__tar@0.3.2': {} + '@jsr/std__bytes@1.0.6': {} '@jsr/std__fmt@1.0.9': {} @@ -11163,7 +11471,6 @@ snapshots: '@emnapi/core': 1.8.1 '@emnapi/runtime': 1.8.1 '@tybys/wasm-util': 0.10.1 - optional: true '@noble/curves@1.9.7': dependencies: @@ -12732,6 +13039,10 @@ snapshots: '@rolldown/binding-win32-x64-msvc@1.0.0-rc.1': optional: true + '@rolldown/browser@1.0.0-rc.1': + dependencies: + '@napi-rs/wasm-runtime': 1.1.1 + '@rolldown/pluginutils@1.0.0-beta.53': {} '@rolldown/pluginutils@1.0.0-rc.1': {} @@ -13334,7 +13645,6 @@ 
snapshots: '@tybys/wasm-util@0.10.1': dependencies: tslib: 2.8.1 - optional: true '@types/chai@5.2.3': dependencies: @@ -15769,6 +16079,10 @@ snapshots: dependencies: is-glob: 4.0.3 + glob-to-regex.js@1.2.0(tslib@2.8.1): + dependencies: + tslib: 2.8.1 + glob-to-regexp@0.4.1: {} glob@10.5.0: @@ -16082,6 +16396,8 @@ snapshots: human-signals@8.0.1: {} + hyperdyperid@1.2.0: {} + ico-endec@0.1.6: {} iconv-lite@0.4.24: @@ -16943,6 +17259,23 @@ snapshots: media-typer@1.1.0: {} + memfs@4.56.10(tslib@2.8.1): + dependencies: + '@jsonjoy.com/fs-core': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-fsa': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-to-fsa': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-print': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-snapshot': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/json-pack': 1.21.0(tslib@2.8.1) + '@jsonjoy.com/util': 1.9.0(tslib@2.8.1) + glob-to-regex.js: 1.2.0(tslib@2.8.1) + thingies: 2.5.0(tslib@2.8.1) + tree-dump: 1.1.0(tslib@2.8.1) + tslib: 2.8.1 + merge-descriptors@2.0.0: {} merge-stream@2.0.0: {} @@ -19317,6 +19650,10 @@ snapshots: transitivePeerDependencies: - react-native-b4a + thingies@2.5.0(tslib@2.8.1): + dependencies: + tslib: 2.8.1 + thread-stream@2.7.0: dependencies: real-require: 0.2.0 @@ -19364,6 +19701,10 @@ snapshots: dependencies: punycode: 2.3.1 + tree-dump@1.1.0(tslib@2.8.1): + dependencies: + tslib: 2.8.1 + tree-kill@1.2.2: {} trim-lines@3.0.1: {} From 5033ec9d9223365ef07c6c4468f206473f49d890 Mon Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Sat, 31 Jan 2026 12:45:01 +0000 Subject: [PATCH 2/2] [autofix.ci] apply automated fixes --- lunaria/files/en-US.json | 44 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/lunaria/files/en-US.json b/lunaria/files/en-US.json index 00d69908d..b175f332a 
100644 --- a/lunaria/files/en-US.json +++ b/lunaria/files/en-US.json @@ -150,6 +150,7 @@ "jsr": "jsr", "code": "code", "docs": "docs", + "impact": "impact", "fund": "fund" }, "docs": { @@ -565,6 +566,49 @@ "code": "code" } }, + "impact": { + "title": "Bundle Impact", + "analyzing": "Analyzing bundle...", + "version_required": "Version is required to analyze bundle impact", + "go_to_package": "Go to package", + "install_size": "Install Size", + "total_unpacked": "total from {count} packages", + "subpath": "Entry Point", + "bundle_size": "Bundle Size", + "size": { + "minified": "Minified", + "gzip": "Gzip", + "brotli": "Brotli", + "zstd": "Zstd" + }, + "exports_cjs_notice": "CommonJS modules cannot be tree-shaken", + "exports": "Exports", + "select_all": "All", + "select_none": "None", + "rebundling": "Rebundling...", + "dependencies": "Dependencies by Size", + "peer_dependencies": "Peer Dependencies", + "and_more": "and {count} more...", + "progress": { + "resolve": "Resolving {name}...", + "fetch": "Downloading ({current}/{total})...", + "bundle": "Bundling...", + "compress": "Compressing..." + }, + "error": "Failed to analyze bundle", + "filter_placeholder": "Filter packages...", + "level": "Level {level}", + "installed_by": "Installed by", + "dependencies_count": "Dependencies", + "no_packages_match": "No packages match your filter", + "sort": { + "level": "Dependency level", + "size": "Package size", + "installedBy": "Installed by count", + "dependencies": "Dependencies count", + "name": "Name" + } + }, "badges": { "provenance": { "verified": "verified",