From 7483ab806da871cc318a86ace9a228590a34db4c Mon Sep 17 00:00:00 2001 From: Phil Pluckthun Date: Tue, 5 May 2026 17:00:42 +0100 Subject: [PATCH 01/26] feat(cli,metro-file-map): Add `@expo/metro-file-map` fork (#45373) # Why Extracted from #44567, created as a new base to cleanly apply final state of all patches and changes from upstream PRs. The on-demand fallback filesystem will be applied separately too from scratch and the linked draft PR. **Note:** This doesn't yet include any of the individual changes we'll be making. # How - Add `metro-file-map` fork ported to TypeScript - Add `metro-file-map` tests (without integration/index tests) ported to memfs - Add ESM unwrapping (won't function without this) - Alter cache target filename in `DiskCacheManager` (add `-expo-` prefix) - Add `__expo` branding to `FileMap` class - Add forking to `@expo/cli`'s `instantiateMetro` and `exportEmbedAsync` - Create `createFileMap` fork omitting `unstable_fileMapCacheManagerFactory` (due to altered `DiskCacheManager` target path and format in future PRs) - Fork via `replaceMetroFileMap` which alters the CJS exports (Dangerous, but there's no API to swap out `metro-file-map` yet) # Test Plan - Added unit tests - CI E2E covers fork itself **Note:** We need to do a publish on `main` once all patches have landed. This can only be done when all other PRs that'll base off of this are approved and by then merging them all in series and publishing right after. # Checklist - [x] I added a `changelog.md` entry and rebuilt the package sources according to [this short guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting) - [ ] This diff will work correctly for `npx expo prebuild` & EAS Build (eg: updated a module plugin). 
- [ ] Conforms with the [Documentation Writing Style Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md) --- packages/@expo/cli/CHANGELOG.md | 1 + packages/@expo/cli/package.json | 1 + .../cli/src/export/embed/exportEmbedAsync.ts | 13 +- .../start/server/metro/createFileMap-fork.ts | 146 ++ .../start/server/metro/instantiateMetro.ts | 31 +- packages/@expo/metro-file-map/.eslintrc.js | 2 + packages/@expo/metro-file-map/CHANGELOG.md | 11 + packages/@expo/metro-file-map/README.md | 1 + packages/@expo/metro-file-map/babel.config.js | 2 + .../@expo/metro-file-map/build/Watcher.d.ts | 52 + .../@expo/metro-file-map/build/Watcher.js | 249 ++++ .../build/cache/DiskCacheManager.d.ts | 25 + .../build/cache/DiskCacheManager.js | 110 ++ .../@expo/metro-file-map/build/constants.d.ts | 23 + .../@expo/metro-file-map/build/constants.js | 27 + .../crawlers/node/hasNativeFindSupport.d.ts | 7 + .../crawlers/node/hasNativeFindSupport.js | 28 + .../build/crawlers/node/index.d.ts | 10 + .../build/crawlers/node/index.js | 190 +++ .../build/crawlers/watchman/index.d.ts | 8 + .../build/crawlers/watchman/index.js | 285 ++++ .../build/crawlers/watchman/planQuery.d.ts | 31 + .../build/crawlers/watchman/planQuery.js | 99 ++ .../@expo/metro-file-map/build/index.d.ts | 137 ++ packages/@expo/metro-file-map/build/index.js | 806 +++++++++++ .../build/lib/FileProcessor.d.ts | 42 + .../metro-file-map/build/lib/FileProcessor.js | 180 +++ .../build/lib/FileSystemChangeAggregator.d.ts | 18 + .../build/lib/FileSystemChangeAggregator.js | 114 ++ .../build/lib/RootPathUtils.d.ts | 21 + .../metro-file-map/build/lib/RootPathUtils.js | 259 ++++ .../metro-file-map/build/lib/TreeFS.d.ts | 154 +++ .../@expo/metro-file-map/build/lib/TreeFS.js | 838 ++++++++++++ .../build/lib/checkWatchmanCapabilities.d.ts | 9 + .../build/lib/checkWatchmanCapabilities.js | 50 + .../lib/normalizePathSeparatorsToPosix.d.ts | 8 + .../lib/normalizePathSeparatorsToPosix.js | 20 + 
.../lib/normalizePathSeparatorsToSystem.d.ts | 8 + .../lib/normalizePathSeparatorsToSystem.js | 20 + .../build/lib/rootRelativeCacheKeys.d.ts | 11 + .../build/lib/rootRelativeCacheKeys.js | 55 + .../metro-file-map/build/lib/sorting.d.ts | 8 + .../@expo/metro-file-map/build/lib/sorting.js | 31 + .../build/plugins/DependencyPlugin.d.ts | 23 + .../build/plugins/DependencyPlugin.js | 61 + .../build/plugins/FileDataPlugin.d.ts | 28 + .../build/plugins/FileDataPlugin.js | 46 + .../build/plugins/HastePlugin.d.ts | 32 + .../build/plugins/HastePlugin.js | 358 +++++ .../build/plugins/MockPlugin.d.ts | 27 + .../build/plugins/MockPlugin.js | 157 +++ .../dependencies/dependencyExtractor.d.ts | 7 + .../dependencies/dependencyExtractor.js | 66 + .../build/plugins/dependencies/worker.d.ts | 18 + .../build/plugins/dependencies/worker.js | 30 + .../haste/DuplicateHasteCandidatesError.d.ts | 14 + .../haste/DuplicateHasteCandidatesError.js | 51 + .../plugins/haste/HasteConflictsError.d.ts | 12 + .../plugins/haste/HasteConflictsError.js | 49 + .../build/plugins/haste/computeConflicts.d.ts | 19 + .../build/plugins/haste/computeConflicts.js | 74 + .../plugins/haste/getPlatformExtension.d.ts | 7 + .../plugins/haste/getPlatformExtension.js | 19 + .../build/plugins/haste/worker.d.ts | 16 + .../build/plugins/haste/worker.js | 48 + .../build/plugins/mocks/getMockName.d.ts | 8 + .../build/plugins/mocks/getMockName.js | 17 + .../build/ts-declarations/fb-watchman.d.ts | 20 + .../build/ts-declarations/fb-watchman.js | 2 + .../@expo/metro-file-map/build/types.d.ts | 395 ++++++ packages/@expo/metro-file-map/build/types.js | 8 + .../build/watchers/AbstractWatcher.d.ts | 30 + .../build/watchers/AbstractWatcher.js | 97 ++ .../build/watchers/FallbackWatcher.d.ts | 18 + .../build/watchers/FallbackWatcher.js | 411 ++++++ .../build/watchers/NativeWatcher.d.ts | 39 + .../build/watchers/NativeWatcher.js | 156 +++ .../build/watchers/RecrawlWarning.d.ts | 18 + .../build/watchers/RecrawlWarning.js | 59 + 
.../build/watchers/WatchmanWatcher.d.ts | 22 + .../build/watchers/WatchmanWatcher.js | 263 ++++ .../metro-file-map/build/watchers/common.d.ts | 39 + .../metro-file-map/build/watchers/common.js | 60 + .../@expo/metro-file-map/build/worker.d.ts | 24 + packages/@expo/metro-file-map/build/worker.js | 73 + .../build/workerExclusionList.d.ts | 8 + .../build/workerExclusionList.js | 57 + packages/@expo/metro-file-map/jest.config.js | 13 + packages/@expo/metro-file-map/jest.setup.ts | 15 + packages/@expo/metro-file-map/package.json | 56 + packages/@expo/metro-file-map/src/Watcher.ts | 341 +++++ .../@expo/metro-file-map/src/__mocks__/fs.ts | 2 + .../src/__mocks__/fs/promises.ts | 2 + .../src/__mocks__/graceful-fs.ts | 2 + .../src/cache/DiskCacheManager.ts | 153 +++ .../cache/__tests__/DiskCacheManager.test.ts | 225 +++ .../@expo/metro-file-map/src/constants.ts | 56 + .../src/crawlers/node/__tests__/index.test.ts | 365 +++++ .../src/crawlers/node/hasNativeFindSupport.ts | 26 + .../metro-file-map/src/crawlers/node/index.ts | 208 +++ .../src/crawlers/watchman/index.ts | 335 +++++ .../src/crawlers/watchman/planQuery.ts | 131 ++ packages/@expo/metro-file-map/src/index.ts | 1050 ++++++++++++++ .../metro-file-map/src/lib/FileProcessor.ts | 270 ++++ .../src/lib/FileSystemChangeAggregator.ts | 133 ++ .../metro-file-map/src/lib/RootPathUtils.ts | 301 ++++ .../@expo/metro-file-map/src/lib/TreeFS.ts | 1214 +++++++++++++++++ .../src/lib/__tests__/FileProcessor.test.ts | 343 +++++ .../FileSystemChangeAggregator.test.ts | 89 ++ .../src/lib/__tests__/RootPathUtils.test.ts | 143 ++ .../src/lib/__tests__/TreeFS.test.ts | 1077 +++++++++++++++ .../checkWatchmanCapabilities.test.ts | 77 ++ .../normalizePathSeparatorsToSystem.test.ts | 22 + .../__tests__/rootRelativeCacheKeys.test.ts | 109 ++ .../src/lib/checkWatchmanCapabilities.ts | 60 + .../src/lib/normalizePathSeparatorsToPosix.ts | 17 + .../lib/normalizePathSeparatorsToSystem.ts | 17 + .../src/lib/rootRelativeCacheKeys.ts | 62 + 
.../@expo/metro-file-map/src/lib/sorting.ts | 32 + .../src/plugins/DependencyPlugin.ts | 68 + .../src/plugins/FileDataPlugin.ts | 69 + .../metro-file-map/src/plugins/HastePlugin.ts | 464 +++++++ .../metro-file-map/src/plugins/MockPlugin.ts | 198 +++ .../__tests__/dependencyExtractor.test.ts | 238 ++++ .../dependencies/dependencyExtractor.ts | 86 ++ .../src/plugins/dependencies/worker.ts | 42 + .../haste/DuplicateHasteCandidatesError.ts | 60 + .../src/plugins/haste/HasteConflictsError.ts | 58 + .../__tests__/getPlatformExtension.test.ts | 22 + .../src/plugins/haste/computeConflicts.ts | 93 ++ .../src/plugins/haste/getPlatformExtension.ts | 20 + .../src/plugins/haste/worker.ts | 48 + .../mocks/__tests__/getMockName.test.ts | 20 + .../src/plugins/mocks/getMockName.ts | 15 + .../src/ts-declarations/fb-watchman.ts | 22 + packages/@expo/metro-file-map/src/types.ts | 539 ++++++++ .../src/watchers/AbstractWatcher.ts | 80 ++ .../src/watchers/FallbackWatcher.ts | 451 ++++++ .../src/watchers/NativeWatcher.ts | 142 ++ .../src/watchers/RecrawlWarning.ts | 62 + .../src/watchers/WatchmanWatcher.ts | 358 +++++ .../metro-file-map/src/watchers/common.ts | 75 + packages/@expo/metro-file-map/src/worker.ts | 82 ++ .../metro-file-map/src/workerExclusionList.ts | 62 + packages/@expo/metro-file-map/tsconfig.json | 11 + pnpm-lock.yaml | 64 +- 146 files changed, 17503 insertions(+), 19 deletions(-) create mode 100644 packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts create mode 100644 packages/@expo/metro-file-map/.eslintrc.js create mode 100644 packages/@expo/metro-file-map/CHANGELOG.md create mode 100644 packages/@expo/metro-file-map/README.md create mode 100644 packages/@expo/metro-file-map/babel.config.js create mode 100644 packages/@expo/metro-file-map/build/Watcher.d.ts create mode 100644 packages/@expo/metro-file-map/build/Watcher.js create mode 100644 packages/@expo/metro-file-map/build/cache/DiskCacheManager.d.ts create mode 100644 
packages/@expo/metro-file-map/build/cache/DiskCacheManager.js create mode 100644 packages/@expo/metro-file-map/build/constants.d.ts create mode 100644 packages/@expo/metro-file-map/build/constants.js create mode 100644 packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.d.ts create mode 100644 packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.js create mode 100644 packages/@expo/metro-file-map/build/crawlers/node/index.d.ts create mode 100644 packages/@expo/metro-file-map/build/crawlers/node/index.js create mode 100644 packages/@expo/metro-file-map/build/crawlers/watchman/index.d.ts create mode 100644 packages/@expo/metro-file-map/build/crawlers/watchman/index.js create mode 100644 packages/@expo/metro-file-map/build/crawlers/watchman/planQuery.d.ts create mode 100644 packages/@expo/metro-file-map/build/crawlers/watchman/planQuery.js create mode 100644 packages/@expo/metro-file-map/build/index.d.ts create mode 100644 packages/@expo/metro-file-map/build/index.js create mode 100644 packages/@expo/metro-file-map/build/lib/FileProcessor.d.ts create mode 100644 packages/@expo/metro-file-map/build/lib/FileProcessor.js create mode 100644 packages/@expo/metro-file-map/build/lib/FileSystemChangeAggregator.d.ts create mode 100644 packages/@expo/metro-file-map/build/lib/FileSystemChangeAggregator.js create mode 100644 packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts create mode 100644 packages/@expo/metro-file-map/build/lib/RootPathUtils.js create mode 100644 packages/@expo/metro-file-map/build/lib/TreeFS.d.ts create mode 100644 packages/@expo/metro-file-map/build/lib/TreeFS.js create mode 100644 packages/@expo/metro-file-map/build/lib/checkWatchmanCapabilities.d.ts create mode 100644 packages/@expo/metro-file-map/build/lib/checkWatchmanCapabilities.js create mode 100644 packages/@expo/metro-file-map/build/lib/normalizePathSeparatorsToPosix.d.ts create mode 100644 
packages/@expo/metro-file-map/build/lib/normalizePathSeparatorsToPosix.js create mode 100644 packages/@expo/metro-file-map/build/lib/normalizePathSeparatorsToSystem.d.ts create mode 100644 packages/@expo/metro-file-map/build/lib/normalizePathSeparatorsToSystem.js create mode 100644 packages/@expo/metro-file-map/build/lib/rootRelativeCacheKeys.d.ts create mode 100644 packages/@expo/metro-file-map/build/lib/rootRelativeCacheKeys.js create mode 100644 packages/@expo/metro-file-map/build/lib/sorting.d.ts create mode 100644 packages/@expo/metro-file-map/build/lib/sorting.js create mode 100644 packages/@expo/metro-file-map/build/plugins/DependencyPlugin.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/DependencyPlugin.js create mode 100644 packages/@expo/metro-file-map/build/plugins/FileDataPlugin.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/FileDataPlugin.js create mode 100644 packages/@expo/metro-file-map/build/plugins/HastePlugin.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/HastePlugin.js create mode 100644 packages/@expo/metro-file-map/build/plugins/MockPlugin.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/MockPlugin.js create mode 100644 packages/@expo/metro-file-map/build/plugins/dependencies/dependencyExtractor.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/dependencies/dependencyExtractor.js create mode 100644 packages/@expo/metro-file-map/build/plugins/dependencies/worker.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/dependencies/worker.js create mode 100644 packages/@expo/metro-file-map/build/plugins/haste/DuplicateHasteCandidatesError.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/haste/DuplicateHasteCandidatesError.js create mode 100644 packages/@expo/metro-file-map/build/plugins/haste/HasteConflictsError.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/haste/HasteConflictsError.js create mode 
100644 packages/@expo/metro-file-map/build/plugins/haste/computeConflicts.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/haste/computeConflicts.js create mode 100644 packages/@expo/metro-file-map/build/plugins/haste/getPlatformExtension.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/haste/getPlatformExtension.js create mode 100644 packages/@expo/metro-file-map/build/plugins/haste/worker.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/haste/worker.js create mode 100644 packages/@expo/metro-file-map/build/plugins/mocks/getMockName.d.ts create mode 100644 packages/@expo/metro-file-map/build/plugins/mocks/getMockName.js create mode 100644 packages/@expo/metro-file-map/build/ts-declarations/fb-watchman.d.ts create mode 100644 packages/@expo/metro-file-map/build/ts-declarations/fb-watchman.js create mode 100644 packages/@expo/metro-file-map/build/types.d.ts create mode 100644 packages/@expo/metro-file-map/build/types.js create mode 100644 packages/@expo/metro-file-map/build/watchers/AbstractWatcher.d.ts create mode 100644 packages/@expo/metro-file-map/build/watchers/AbstractWatcher.js create mode 100644 packages/@expo/metro-file-map/build/watchers/FallbackWatcher.d.ts create mode 100644 packages/@expo/metro-file-map/build/watchers/FallbackWatcher.js create mode 100644 packages/@expo/metro-file-map/build/watchers/NativeWatcher.d.ts create mode 100644 packages/@expo/metro-file-map/build/watchers/NativeWatcher.js create mode 100644 packages/@expo/metro-file-map/build/watchers/RecrawlWarning.d.ts create mode 100644 packages/@expo/metro-file-map/build/watchers/RecrawlWarning.js create mode 100644 packages/@expo/metro-file-map/build/watchers/WatchmanWatcher.d.ts create mode 100644 packages/@expo/metro-file-map/build/watchers/WatchmanWatcher.js create mode 100644 packages/@expo/metro-file-map/build/watchers/common.d.ts create mode 100644 packages/@expo/metro-file-map/build/watchers/common.js create mode 100644 
packages/@expo/metro-file-map/build/worker.d.ts create mode 100644 packages/@expo/metro-file-map/build/worker.js create mode 100644 packages/@expo/metro-file-map/build/workerExclusionList.d.ts create mode 100644 packages/@expo/metro-file-map/build/workerExclusionList.js create mode 100644 packages/@expo/metro-file-map/jest.config.js create mode 100644 packages/@expo/metro-file-map/jest.setup.ts create mode 100644 packages/@expo/metro-file-map/package.json create mode 100644 packages/@expo/metro-file-map/src/Watcher.ts create mode 100644 packages/@expo/metro-file-map/src/__mocks__/fs.ts create mode 100644 packages/@expo/metro-file-map/src/__mocks__/fs/promises.ts create mode 100644 packages/@expo/metro-file-map/src/__mocks__/graceful-fs.ts create mode 100644 packages/@expo/metro-file-map/src/cache/DiskCacheManager.ts create mode 100644 packages/@expo/metro-file-map/src/cache/__tests__/DiskCacheManager.test.ts create mode 100644 packages/@expo/metro-file-map/src/constants.ts create mode 100644 packages/@expo/metro-file-map/src/crawlers/node/__tests__/index.test.ts create mode 100644 packages/@expo/metro-file-map/src/crawlers/node/hasNativeFindSupport.ts create mode 100644 packages/@expo/metro-file-map/src/crawlers/node/index.ts create mode 100644 packages/@expo/metro-file-map/src/crawlers/watchman/index.ts create mode 100644 packages/@expo/metro-file-map/src/crawlers/watchman/planQuery.ts create mode 100644 packages/@expo/metro-file-map/src/index.ts create mode 100644 packages/@expo/metro-file-map/src/lib/FileProcessor.ts create mode 100644 packages/@expo/metro-file-map/src/lib/FileSystemChangeAggregator.ts create mode 100644 packages/@expo/metro-file-map/src/lib/RootPathUtils.ts create mode 100644 packages/@expo/metro-file-map/src/lib/TreeFS.ts create mode 100644 packages/@expo/metro-file-map/src/lib/__tests__/FileProcessor.test.ts create mode 100644 packages/@expo/metro-file-map/src/lib/__tests__/FileSystemChangeAggregator.test.ts create mode 100644 
packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts create mode 100644 packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts create mode 100644 packages/@expo/metro-file-map/src/lib/__tests__/checkWatchmanCapabilities.test.ts create mode 100644 packages/@expo/metro-file-map/src/lib/__tests__/normalizePathSeparatorsToSystem.test.ts create mode 100644 packages/@expo/metro-file-map/src/lib/__tests__/rootRelativeCacheKeys.test.ts create mode 100644 packages/@expo/metro-file-map/src/lib/checkWatchmanCapabilities.ts create mode 100644 packages/@expo/metro-file-map/src/lib/normalizePathSeparatorsToPosix.ts create mode 100644 packages/@expo/metro-file-map/src/lib/normalizePathSeparatorsToSystem.ts create mode 100644 packages/@expo/metro-file-map/src/lib/rootRelativeCacheKeys.ts create mode 100644 packages/@expo/metro-file-map/src/lib/sorting.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/DependencyPlugin.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/FileDataPlugin.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/HastePlugin.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/MockPlugin.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/dependencies/__tests__/dependencyExtractor.test.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/dependencies/dependencyExtractor.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/dependencies/worker.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/haste/DuplicateHasteCandidatesError.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/haste/HasteConflictsError.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/haste/__tests__/getPlatformExtension.test.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/haste/computeConflicts.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/haste/getPlatformExtension.ts create mode 100644 
packages/@expo/metro-file-map/src/plugins/haste/worker.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/mocks/__tests__/getMockName.test.ts create mode 100644 packages/@expo/metro-file-map/src/plugins/mocks/getMockName.ts create mode 100644 packages/@expo/metro-file-map/src/ts-declarations/fb-watchman.ts create mode 100644 packages/@expo/metro-file-map/src/types.ts create mode 100644 packages/@expo/metro-file-map/src/watchers/AbstractWatcher.ts create mode 100644 packages/@expo/metro-file-map/src/watchers/FallbackWatcher.ts create mode 100644 packages/@expo/metro-file-map/src/watchers/NativeWatcher.ts create mode 100644 packages/@expo/metro-file-map/src/watchers/RecrawlWarning.ts create mode 100644 packages/@expo/metro-file-map/src/watchers/WatchmanWatcher.ts create mode 100644 packages/@expo/metro-file-map/src/watchers/common.ts create mode 100644 packages/@expo/metro-file-map/src/worker.ts create mode 100644 packages/@expo/metro-file-map/src/workerExclusionList.ts create mode 100644 packages/@expo/metro-file-map/tsconfig.json diff --git a/packages/@expo/cli/CHANGELOG.md b/packages/@expo/cli/CHANGELOG.md index 45a9a02b4081c9..caaa315981a9da 100644 --- a/packages/@expo/cli/CHANGELOG.md +++ b/packages/@expo/cli/CHANGELOG.md @@ -64,6 +64,7 @@ - Replace `TransformStream`-based HTML injection with `ServerDocumentContext` for SSR metadata and assets ([#44827](https://github.com/expo/expo/pull/44827) by [@hassankhan](https://github.com/hassankhan)) - Implement freestanding, faster TypeScript resolver ([#45227](https://github.com/expo/expo/pull/45227) by [@kitten](https://github.com/kitten)) - Provide Babel config path hint to Expo Metro transformer ([#45260](https://github.com/expo/expo/pull/45260) by [@kitten](https://github.com/kitten)) +- Add `@expo/metro-file-map` fork ([#45373](https://github.com/expo/expo/pull/45373) by [@kitten](https://github.com/kitten)) ## 55.0.12 — 2026-02-25 diff --git a/packages/@expo/cli/package.json 
b/packages/@expo/cli/package.json index 881d613d688699..599ef6ad3150e2 100644 --- a/packages/@expo/cli/package.json +++ b/packages/@expo/cli/package.json @@ -60,6 +60,7 @@ "@expo/log-box": "workspace:55.0.7", "@expo/metro": "56.0.0-rc.2", "@expo/metro-config": "workspace:~55.0.9", + "@expo/metro-file-map": "workspace:55.0.0-0", "@expo/osascript": "workspace:^2.4.2", "@expo/package-manager": "workspace:^1.10.3", "@expo/plist": "workspace:^0.5.2", diff --git a/packages/@expo/cli/src/export/embed/exportEmbedAsync.ts b/packages/@expo/cli/src/export/embed/exportEmbedAsync.ts index 90f8ecab8cbe5c..eee780f53612bc 100644 --- a/packages/@expo/cli/src/export/embed/exportEmbedAsync.ts +++ b/packages/@expo/cli/src/export/embed/exportEmbedAsync.ts @@ -22,6 +22,7 @@ import { isExecutingFromXcodebuild, logMetroErrorInXcode } from './xcodeCompiler import { Log } from '../../log'; import { DevServerManager } from '../../start/server/DevServerManager'; import { MetroBundlerDevServer } from '../../start/server/metro/MetroBundlerDevServer'; +import { replaceMetroFileMap } from '../../start/server/metro/createFileMap-fork'; import { loadMetroConfigAsync } from '../../start/server/metro/instantiateMetro'; import { DOM_COMPONENTS_BUNDLE_DIR } from '../../start/server/middleware/DomComponentsMiddleware'; import { getMetroDirectBundleOptionsForExpoConfig } from '../../start/server/middleware/metroOptions'; @@ -335,7 +336,7 @@ export async function createMetroServerAndBundleRequestAsync( exp, isExporting: true, getMetroBundler() { - return server.getBundler().getBundler(); + return metro.getBundler().getBundler(); }, } ); @@ -376,11 +377,13 @@ export async function createMetroServerAndBundleRequestAsync( (isHermes ? 
'hermes-stable' : 'default')) as BundleOptions['unstable_transformProfile'], }; - const server = new Server(config, { - watch: false, - }); + const { metro } = await replaceMetroFileMap(() => ({ + metro: new Server(config, { + watch: false, + }), + })); - return { server, bundleRequest }; + return { server: metro, bundleRequest }; } export async function exportEmbedAssetsAsync( diff --git a/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts b/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts new file mode 100644 index 00000000000000..0ee10cd4b2a720 --- /dev/null +++ b/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts @@ -0,0 +1,146 @@ +// Copyright © 2024 650 Industries. +// Copyright (c) Meta Platforms, Inc. and affiliates. +// +// Forks https://github.com/facebook/metro/blob/01b4ad6/packages/metro/src/node-haste/DependencyGraph/createFileMap.js +// and redirects to `@expo/metro-file-map` + +import type MetroServer from '@expo/metro/metro/Server'; +import type { ConfigT } from '@expo/metro/metro-config'; +import FileMap, { DependencyPlugin, DiskCacheManager, HastePlugin } from '@expo/metro-file-map'; +import ciInfo from 'ci-info'; + +function getIgnorePattern(config: ConfigT): RegExp { + const { blockList, blacklistRE } = config.resolver; + const ignorePattern = blacklistRE || blockList; + if (!ignorePattern) { + return / ^/; + } + const combine = (regexes: RegExp[]) => + new RegExp( + regexes + .map((regex, index) => { + if (regex.flags !== regexes[0]!.flags) { + throw new Error( + 'Cannot combine blockList patterns, because they have different flags:\n' + + ' - Pattern 0: ' + + regexes[0]!.toString() + + '\n' + + ` - Pattern ${index}: ` + + regexes[index]!.toString() + ); + } + return '(' + regex.source + ')'; + }) + .join('|'), + regexes[0]?.flags ?? 
'' + ); + if (Array.isArray(ignorePattern)) { + return combine(ignorePattern); + } + return ignorePattern; +} + +interface CreateFileMapOptions { + extractDependencies?: boolean; + throwOnModuleCollision?: boolean; + watch?: boolean; + cacheFilePrefix?: string; +} + +/** + * Creates a `FileMap` using `@expo/metro-file-map`, matching the same config + * interpretation as Metro's original `createFileMap`. + */ +export default function createFileMap(config: ConfigT, options?: CreateFileMapOptions) { + const watch = options?.watch == null ? !ciInfo.isCI : options.watch; + + const { enabled: autoSaveEnabled, ...autoSaveOpts } = config.watcher.unstable_autoSaveCache ?? {}; + const autoSave = watch && autoSaveEnabled ? autoSaveOpts : false; + + const plugins = [...(config.unstable_fileMapPlugins ?? [])]; + let dependencyPlugin: DependencyPlugin | null = null; + + if (config.resolver.dependencyExtractor != null && options?.extractDependencies !== false) { + dependencyPlugin = new DependencyPlugin({ + dependencyExtractor: config.resolver.dependencyExtractor, + computeDependencies: true, + }); + plugins.push(dependencyPlugin); + } + + const hasteMap = new HastePlugin({ + platforms: new Set([...config.resolver.platforms, FileMap.H.NATIVE_PLATFORM]), + hasteImplModulePath: config.resolver.hasteImplModulePath ?? null, + enableHastePackages: config.resolver.enableGlobalPackages, + rootDir: config.projectRoot, + failValidationOnConflicts: options?.throwOnModuleCollision ?? true, + }); + plugins.push(hasteMap); + + const fileMap = new FileMap({ + // NOTE(@kitten): Dropped `config.unstable_fileMapCacheManagerFactory` + cacheManagerFactory: (factoryParams: any) => { + return new DiskCacheManager(factoryParams, { + cacheDirectory: config.fileMapCacheDirectory ?? 
config.hasteMapCacheDirectory, + cacheFilePrefix: options?.cacheFilePrefix, + autoSave, + }); + }, + perfLoggerFactory: config.unstable_perfLoggerFactory, + computeSha1: !config.watcher.unstable_lazySha1, + enableSymlinks: true, + extensions: Array.from( + new Set([ + ...config.resolver.sourceExts, + ...config.resolver.assetExts, + ...config.watcher.additionalExts, + ]) + ), + forceNodeFilesystemAPI: !config.resolver.useWatchman, + healthCheck: config.watcher.healthCheck, + ignorePattern: getIgnorePattern(config), + maxWorkers: config.maxWorkers, + plugins, + retainAllFiles: true, + resetCache: config.resetCache, + rootDir: config.projectRoot, + roots: config.watchFolders, + useWatchman: config.resolver.useWatchman, + watch, + watchmanDeferStates: config.watcher.watchman.deferStates, + }); + + return { + fileMap, + hasteMap, + dependencyPlugin, + }; +} + +function assertMetroFileMapPatched(metro: { getBundler(): any }): void { + const depGraph = metro.getBundler().getBundler()?._depGraph; + const fileMap = depGraph?._haste; + if (!fileMap || !fileMap.__expo) { + throw new Error( + '@expo/metro-file-map was not used by Metro. ' + + "The DependencyGraph's file map does not have the __expo flag, " + + 'which means the createFileMap module export was not replaced before ' + + 'Metro instantiated. Ensure replaceMetroFileMap() is called before runServer().' 
+ ); + } +} + +export async function replaceMetroFileMap( + immediate: () => T | PromiseLike +): Promise { + const createFileMapModule = require('@expo/metro/metro/node-haste/DependencyGraph/createFileMap'); + Object.defineProperty(createFileMapModule, 'default', { + enumerable: true, + configurable: false, + writable: false, + value: createFileMap, + }); + const result = await immediate(); + assertMetroFileMapPatched(result.metro); + return result; +} diff --git a/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts b/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts index 6b57c92152208e..a56713c9f13ea3 100644 --- a/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts +++ b/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts @@ -21,6 +21,7 @@ import path from 'path'; import { createDevToolsPluginWebsocketEndpoint } from './DevToolsPluginWebsocketEndpoint'; import type { MetroBundlerDevServer } from './MetroBundlerDevServer'; import { MetroTerminalReporter } from './MetroTerminalReporter'; +import { replaceMetroFileMap } from './createFileMap-fork'; import { attachAtlasAsync } from './debugging/attachAtlas'; import { createDebugMiddleware } from './debugging/createDebugMiddleware'; import { createMetroMiddleware } from './dev-server/createMetroMiddleware'; @@ -381,19 +382,23 @@ export async function instantiateMetroAsync( } : undefined; - const { address, server, hmrServer, metro } = await runServer( - metroBundler, - metroConfig, - { - host: options.host, - websocketEndpoints, - watch: !isExporting && isWatchEnabled(), - secureServerOptions, - }, - { - mockServer: isExporting, - } - ); + const watch = !isExporting && isWatchEnabled(); + + const { address, server, hmrServer, metro } = await replaceMetroFileMap(() => { + return runServer( + metroBundler, + metroConfig, + { + host: options.host, + websocketEndpoints, + watch, + secureServerOptions, + }, + { + mockServer: isExporting, + } + ); + }); event('instantiate', { atlas: 
env.EXPO_ATLAS, diff --git a/packages/@expo/metro-file-map/.eslintrc.js b/packages/@expo/metro-file-map/.eslintrc.js new file mode 100644 index 00000000000000..2720197860feb8 --- /dev/null +++ b/packages/@expo/metro-file-map/.eslintrc.js @@ -0,0 +1,2 @@ +// @generated by expo-module-scripts +module.exports = require('expo-module-scripts/eslintrc.base.js'); diff --git a/packages/@expo/metro-file-map/CHANGELOG.md b/packages/@expo/metro-file-map/CHANGELOG.md new file mode 100644 index 00000000000000..a2ba70da60600e --- /dev/null +++ b/packages/@expo/metro-file-map/CHANGELOG.md @@ -0,0 +1,11 @@ +# Changelog + +## Unpublished + +### 🛠 Breaking changes + +### 🎉 New features + +### 🐛 Bug fixes + +### 💡 Others diff --git a/packages/@expo/metro-file-map/README.md b/packages/@expo/metro-file-map/README.md new file mode 100644 index 00000000000000..f3028721d52658 --- /dev/null +++ b/packages/@expo/metro-file-map/README.md @@ -0,0 +1 @@ +# `@expo/metro-file-map` diff --git a/packages/@expo/metro-file-map/babel.config.js b/packages/@expo/metro-file-map/babel.config.js new file mode 100644 index 00000000000000..e30780ee3ce12d --- /dev/null +++ b/packages/@expo/metro-file-map/babel.config.js @@ -0,0 +1,2 @@ +// @generated by expo-module-scripts +module.exports = require('expo-module-scripts/babel.config.cli'); diff --git a/packages/@expo/metro-file-map/build/Watcher.d.ts b/packages/@expo/metro-file-map/build/Watcher.d.ts new file mode 100644 index 00000000000000..78a366ae0a2f4e --- /dev/null +++ b/packages/@expo/metro-file-map/build/Watcher.d.ts @@ -0,0 +1,52 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ +import EventEmitter from 'events'; +import type { Console, CrawlerOptions, CrawlResult, PerfLogger, WatcherBackendChangeEvent } from './types'; +interface WatcherOptions { + abortSignal: AbortSignal; + computeSha1: boolean; + console: Console; + enableSymlinks: boolean; + extensions: readonly string[]; + forceNodeFilesystemAPI: boolean; + healthCheckFilePrefix: string; + ignoreForCrawl: (filePath: string) => boolean; + ignorePatternForWatch: RegExp; + previousState: CrawlerOptions['previousState']; + perfLogger: PerfLogger | undefined | null; + roots: readonly string[]; + rootDir: string; + useWatchman: boolean; + watch: boolean; + watchmanDeferStates: readonly string[]; +} +export type HealthCheckResult = { + type: 'error'; + timeout: number; + error: Error; + watcher: string | undefined | null; +} | { + type: 'success'; + timeout: number; + timeElapsed: number; + watcher: string | undefined | null; +} | { + type: 'timeout'; + timeout: number; + watcher: string | undefined | null; + pauseReason: string | undefined | null; +}; +export declare class Watcher extends EventEmitter { + #private; + constructor(options: WatcherOptions); + crawl(): Promise; + recrawl(subpath: string, currentFileSystem: CrawlerOptions['previousState']['fileSystem']): Promise; + watch(onChange: (change: WatcherBackendChangeEvent) => void): Promise; + close(): Promise; + checkHealth(timeout: number): Promise; +} +export {}; diff --git a/packages/@expo/metro-file-map/build/Watcher.js b/packages/@expo/metro-file-map/build/Watcher.js new file mode 100644 index 00000000000000..894f6708ca53a2 --- /dev/null +++ b/packages/@expo/metro-file-map/build/Watcher.js @@ -0,0 +1,249 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Watcher = void 0; +const events_1 = __importDefault(require("events")); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const perf_hooks_1 = require("perf_hooks"); +const node_1 = __importDefault(require("./crawlers/node")); +const watchman_1 = __importDefault(require("./crawlers/watchman")); +const FallbackWatcher_1 = __importDefault(require("./watchers/FallbackWatcher")); +const NativeWatcher_1 = __importDefault(require("./watchers/NativeWatcher")); +const WatchmanWatcher_1 = __importDefault(require("./watchers/WatchmanWatcher")); +const common_1 = require("./watchers/common"); +const debug = require('debug')('Metro:Watcher'); +const MAX_WAIT_TIME = 240000; +let nextInstanceId = 0; +class Watcher extends events_1.default { + #activeWatcher; + #backends = []; + #instanceId; + #nextHealthCheckId = 0; + #options; + #pendingHealthChecks = new Map(); + constructor(options) { + super(); + this.#options = options; + this.#instanceId = nextInstanceId++; + } + async crawl() { + this.#options.perfLogger?.point('crawl_start'); + const options = this.#options; + const result = await this.#crawl({ + previousState: options.previousState, + roots: options.roots, + useWatchman: options.useWatchman, + }); + this.#options.perfLogger?.point('crawl_end'); + return result; + } + async recrawl(subpath, currentFileSystem) { + return this.#crawl({ + previousState: { + clocks: new Map(), + fileSystem: currentFileSystem, + }, + roots: [path_1.default.join(this.#options.rootDir, subpath)], + subpath, + useWatchman: false, + }); + } + async #crawl(crawlOptions) { + const options = this.#options; + const { useWatchman, subpath } = crawlOptions; + const ignoreForCrawl = (filePath) => options.ignoreForCrawl(filePath) || + path_1.default.basename(filePath).startsWith(this.#options.healthCheckFilePrefix); + const crawl = useWatchman ? 
watchman_1.default : node_1.default; + let crawler = crawl === watchman_1.default ? 'watchman' : 'node'; + options.abortSignal.throwIfAborted(); + const crawlerOptions = { + abortSignal: options.abortSignal, + computeSha1: options.computeSha1, + console: options.console, + includeSymlinks: options.enableSymlinks, + extensions: options.extensions, + forceNodeFilesystemAPI: options.forceNodeFilesystemAPI, + ignore: ignoreForCrawl, + onStatus: (status) => { + this.emit('status', status); + }, + perfLogger: options.perfLogger, + previousState: crawlOptions.previousState, + rootDir: options.rootDir, + roots: crawlOptions.roots, + subpath, + }; + debug('Crawling roots: %s with %s crawler.', crawlOptions.roots, crawler); + let delta; + try { + delta = await crawl(crawlerOptions); + } + catch (firstError) { + if (crawl !== watchman_1.default) { + throw firstError; + } + crawler = 'node'; + options.console.warn('metro-file-map: Watchman crawl failed. Retrying once with node ' + + 'crawler.\n' + + " Usually this happens when watchman isn't running. Create an " + + "empty `.watchmanconfig` file in your project's root folder or " + + 'initialize a git or hg repository in your project.\n' + + ' ' + + firstError.toString()); + try { + delta = await (0, node_1.default)(crawlerOptions); + } + catch (retryError) { + throw new Error('Crawler retry failed:\n' + + ` Original error: ${firstError.message}\n` + + ` Retry error: ${retryError.message}\n`); + } + } + debug('Crawler "%s" returned %d added/modified, %d removed, %d clock(s).', crawler, delta.changedFiles.size, delta.removedFiles.size, 'clocks' in delta ? (delta.clocks?.size ?? 0) : 0); + return delta; + } + async watch(onChange) { + const { extensions, ignorePatternForWatch, useWatchman } = this.#options; + // WatchmanWatcher > NativeWatcher > FallbackWatcher + const WatcherImpl = (useWatchman + ? WatchmanWatcher_1.default + : NativeWatcher_1.default.isSupported() + ? 
NativeWatcher_1.default + : FallbackWatcher_1.default); + let watcher = 'fallback'; + if (WatcherImpl === WatchmanWatcher_1.default) { + watcher = 'watchman'; + } + else if (WatcherImpl === NativeWatcher_1.default) { + watcher = 'native'; + } + debug(`Using watcher: ${watcher}`); + this.#options.perfLogger?.annotate({ string: { watcher } }); + this.#activeWatcher = watcher; + const createWatcherBackend = (root) => { + const watcherOptions = { + dot: true, + globs: [ + // Ensure we always include package.json files, which are crucial for + /// module resolution. + '**/package.json', + // Ensure we always watch any health check files + '**/' + this.#options.healthCheckFilePrefix + '*', + ...extensions.map((extension) => '**/*.' + extension), + ], + ignored: ignorePatternForWatch, + watchmanDeferStates: this.#options.watchmanDeferStates, + }; + const watcher = new WatcherImpl(root, watcherOptions); + return new Promise(async (resolve, reject) => { + const rejectTimeout = setTimeout(() => reject(new Error('Failed to start watch mode.')), MAX_WAIT_TIME); + watcher.onFileEvent((change) => { + const basename = path_1.default.basename(change.relativePath); + if (basename.startsWith(this.#options.healthCheckFilePrefix)) { + if (change.event === common_1.TOUCH_EVENT) { + debug('Observed possible health check cookie: %s in %s', change.relativePath, root); + this.#handleHealthCheckObservation(basename); + } + return; + } + // Watchman handles recrawls internally - receiving a recrawl event + // when using Watchman would indicate a bug. Log an error and ignore. + if (change.event === 'recrawl' && useWatchman) { + this.#options.console.error('metro-file-map: Received unexpected recrawl event while using ' + + 'Watchman. 
Watchman recrawls are not implemented.'); + return; + } + onChange(change); + }); + await watcher.startWatching(); + clearTimeout(rejectTimeout); + resolve(watcher); + }); + }; + this.#backends = await Promise.all(this.#options.roots.map(createWatcherBackend)); + } + #handleHealthCheckObservation(basename) { + const resolveHealthCheck = this.#pendingHealthChecks.get(basename); + if (!resolveHealthCheck) { + return; + } + resolveHealthCheck(); + } + async close() { + await Promise.all(this.#backends.map((watcher) => watcher.stopWatching())); + this.#activeWatcher = null; + } + async checkHealth(timeout) { + const healthCheckId = this.#nextHealthCheckId++; + if (healthCheckId === Number.MAX_SAFE_INTEGER) { + this.#nextHealthCheckId = 0; + } + const watcher = this.#activeWatcher; + const basename = this.#options.healthCheckFilePrefix + + '-' + + process.pid + + '-' + + this.#instanceId + + '-' + + healthCheckId; + const healthCheckPath = path_1.default.join(this.#options.rootDir, basename); + let result; + const timeoutPromise = new Promise((resolve) => setTimeout(resolve, timeout)).then(() => { + if (!result) { + result = { + type: 'timeout', + pauseReason: this.#backends[0]?.getPauseReason(), + timeout, + watcher, + }; + } + }); + const startTime = perf_hooks_1.performance.now(); + debug('Creating health check cookie: %s', healthCheckPath); + const creationPromise = fs_1.default.promises + .writeFile(healthCheckPath, String(startTime)) + .catch((error) => { + if (!result) { + result = { + type: 'error', + error, + timeout, + watcher, + }; + } + }); + const observationPromise = new Promise((resolve) => { + this.#pendingHealthChecks.set(basename, resolve); + }).then(() => { + if (!result) { + result = { + type: 'success', + timeElapsed: perf_hooks_1.performance.now() - startTime, + timeout, + watcher, + }; + } + }); + await Promise.race([timeoutPromise, creationPromise.then(() => observationPromise)]); + this.#pendingHealthChecks.delete(basename); + // Chain a 
deletion to the creation promise (which may not have even settled yet!), + // don't await it, and swallow errors. This is just best-effort cleanup. + // eslint-disable-next-line @typescript-eslint/no-floating-promises + creationPromise.then(() => fs_1.default.promises.unlink(healthCheckPath).catch(() => { })); + debug('Health check result: %o', result); + if (result == null) { + throw new Error('health check result was not set by any promise branch'); + } + return result; + } +} +exports.Watcher = Watcher; diff --git a/packages/@expo/metro-file-map/build/cache/DiskCacheManager.d.ts b/packages/@expo/metro-file-map/build/cache/DiskCacheManager.d.ts new file mode 100644 index 00000000000000..138475f14646c5 --- /dev/null +++ b/packages/@expo/metro-file-map/build/cache/DiskCacheManager.d.ts @@ -0,0 +1,25 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ +import type { BuildParameters, CacheData, CacheManager, CacheManagerFactoryOptions, CacheManagerWriteOptions } from '../types'; +interface AutoSaveOptions { + readonly debounceMs: number; +} +interface DiskCacheConfig { + readonly autoSave?: Partial | boolean | undefined; + readonly cacheFilePrefix?: string | undefined | null; + readonly cacheDirectory?: string | undefined | null; +} +export declare class DiskCacheManager implements CacheManager { + #private; + constructor({ buildParameters }: CacheManagerFactoryOptions, { autoSave, cacheDirectory, cacheFilePrefix }: DiskCacheConfig); + static getCacheFilePath(buildParameters: BuildParameters, cacheFilePrefix?: string | null, cacheDirectory?: string | null): string; + getCacheFilePath(): string; + read(): Promise; + write(getSnapshot: () => CacheData, { changedSinceCacheRead, eventSource, onWriteError }: CacheManagerWriteOptions): Promise; + end(): Promise; +} +export {}; diff --git a/packages/@expo/metro-file-map/build/cache/DiskCacheManager.js b/packages/@expo/metro-file-map/build/cache/DiskCacheManager.js new file mode 100644 index 00000000000000..f3b0cc58a18fe0 --- /dev/null +++ b/packages/@expo/metro-file-map/build/cache/DiskCacheManager.js @@ -0,0 +1,110 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiskCacheManager = void 0; +const fs_1 = require("fs"); +const os_1 = require("os"); +const path_1 = __importDefault(require("path")); +const timers_1 = require("timers"); +const v8_1 = require("v8"); +const rootRelativeCacheKeys_1 = __importDefault(require("../lib/rootRelativeCacheKeys")); +const debug = require('debug')('Metro:FileMapCache'); +const DEFAULT_PREFIX = 'metro-file-map'; +const DEFAULT_DIRECTORY = (0, os_1.tmpdir)(); +const DEFAULT_AUTO_SAVE_DEBOUNCE_MS = 5000; +// NOTE(@kitten): We're incompatible with Metro, so need our own naming +const FIXED_PREFIX = 'expo'; +class DiskCacheManager { + #autoSaveOpts; + #cachePath; + #debounceTimeout = null; + #writePromise = Promise.resolve(); + #hasUnwrittenChanges = false; + #tryWrite; + #stopListening; + constructor({ buildParameters }, { autoSave = {}, cacheDirectory, cacheFilePrefix }) { + this.#cachePath = DiskCacheManager.getCacheFilePath(buildParameters, cacheFilePrefix, cacheDirectory); + // Normalise auto-save options. + if (autoSave) { + const { debounceMs = DEFAULT_AUTO_SAVE_DEBOUNCE_MS } = autoSave === true ? {} : autoSave; + this.#autoSaveOpts = { debounceMs }; + } + } + static getCacheFilePath(buildParameters, cacheFilePrefix, cacheDirectory) { + const { rootDirHash, relativeConfigHash } = (0, rootRelativeCacheKeys_1.default)(buildParameters); + return path_1.default.join(cacheDirectory ?? DEFAULT_DIRECTORY, `${cacheFilePrefix ?? DEFAULT_PREFIX}-${FIXED_PREFIX}-${rootDirHash}-${relativeConfigHash}`); + } + getCacheFilePath() { + return this.#cachePath; + } + async read() { + try { + return (0, v8_1.deserialize)(await fs_1.promises.readFile(this.#cachePath)); + } + catch (e) { + if (e?.code === 'ENOENT') { + // Cache file not found - not considered an error. + return null; + } + // Rethrow anything else. 
+ throw e; + } + } + async write(getSnapshot, { changedSinceCacheRead, eventSource, onWriteError }) { + // Initialise a writer function using a promise queue to ensure writes are + // sequenced. + // eslint-disable-next-line no-multi-assign + const tryWrite = (this.#tryWrite = () => { + this.#writePromise = this.#writePromise + .then(async () => { + if (!this.#hasUnwrittenChanges) { + return; + } + const data = getSnapshot(); + this.#hasUnwrittenChanges = false; + await fs_1.promises.writeFile(this.#cachePath, (0, v8_1.serialize)(data)); + debug('Written cache to %s', this.#cachePath); + }) + .catch(onWriteError); + return this.#writePromise; + }); + // Set up auto-save on changes, if enabled. + if (this.#autoSaveOpts) { + const autoSave = this.#autoSaveOpts; + this.#stopListening?.(); + this.#stopListening = eventSource.onChange(() => { + this.#hasUnwrittenChanges = true; + if (this.#debounceTimeout) { + this.#debounceTimeout.refresh(); + } + else { + this.#debounceTimeout = (0, timers_1.setTimeout)(() => tryWrite(), autoSave.debounceMs).unref(); + } + }); + } + // Write immediately if state has changed since the cache was read. + if (changedSinceCacheRead) { + this.#hasUnwrittenChanges = true; + await tryWrite(); + } + } + async end() { + // Clear any timers + if (this.#debounceTimeout) { + (0, timers_1.clearTimeout)(this.#debounceTimeout); + } + // Remove event listeners + this.#stopListening?.(); + // Flush unwritten changes to disk (no-op if no changes) + await this.#tryWrite?.(); + } +} +exports.DiskCacheManager = DiskCacheManager; diff --git a/packages/@expo/metro-file-map/build/constants.d.ts b/packages/@expo/metro-file-map/build/constants.d.ts new file mode 100644 index 00000000000000..9b7796fce6a6b5 --- /dev/null +++ b/packages/@expo/metro-file-map/build/constants.d.ts @@ -0,0 +1,23 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +export interface HType { + readonly MTIME: 0; + readonly SIZE: 1; + readonly VISITED: 2; + readonly SHA1: 3; + readonly SYMLINK: 4; + readonly PLUGINDATA: 5; + readonly PATH: 0; + readonly TYPE: 1; + readonly MODULE: 0; + readonly PACKAGE: 1; + readonly GENERIC_PLATFORM: 'g'; + readonly NATIVE_PLATFORM: 'native'; +} +export type HTypeValue = 0 | 1 | 2 | 3 | 4 | 5 | 'g' | 'native'; +declare const H: HType; +export default H; diff --git a/packages/@expo/metro-file-map/build/constants.js b/packages/@expo/metro-file-map/build/constants.js new file mode 100644 index 00000000000000..149101282460d9 --- /dev/null +++ b/packages/@expo/metro-file-map/build/constants.js @@ -0,0 +1,27 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +const H = { + /* file map attributes */ + MTIME: 0, + SIZE: 1, + VISITED: 2, + SHA1: 3, + SYMLINK: 4, + PLUGINDATA: 5, + /* module map attributes */ + PATH: 0, + TYPE: 1, + /* module types */ + MODULE: 0, + PACKAGE: 1, + /* platforms */ + GENERIC_PLATFORM: 'g', + NATIVE_PLATFORM: 'native', +}; +exports.default = H; diff --git a/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.d.ts b/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.d.ts new file mode 100644 index 00000000000000..88de38315757e8 --- /dev/null +++ b/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.d.ts @@ -0,0 +1,7 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ +export default function hasNativeFindSupport(): Promise; diff --git a/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.js b/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.js new file mode 100644 index 00000000000000..2466394e6a7538 --- /dev/null +++ b/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.js @@ -0,0 +1,28 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = hasNativeFindSupport; +const child_process_1 = require("child_process"); +async function hasNativeFindSupport() { + try { + return await new Promise((resolve) => { + // Check the find binary supports the non-POSIX -iname parameter wrapped in parens. + const args = ['.', '-type', 'f', '(', '-iname', '*.ts', '-o', '-iname', '*.js', ')']; + const child = (0, child_process_1.spawn)('find', args, { cwd: __dirname }); + child.on('error', () => { + resolve(false); + }); + child.on('exit', (code) => { + resolve(code === 0); + }); + }); + } + catch { + return false; + } +} diff --git a/packages/@expo/metro-file-map/build/crawlers/node/index.d.ts b/packages/@expo/metro-file-map/build/crawlers/node/index.d.ts new file mode 100644 index 00000000000000..c82d6a1a1e133f --- /dev/null +++ b/packages/@expo/metro-file-map/build/crawlers/node/index.d.ts @@ -0,0 +1,10 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @format + */ +import type { CrawlerOptions, CrawlResult } from '../../types'; +export default function nodeCrawl(options: CrawlerOptions): Promise; diff --git a/packages/@expo/metro-file-map/build/crawlers/node/index.js b/packages/@expo/metro-file-map/build/crawlers/node/index.js new file mode 100644 index 00000000000000..1ab4585093ce96 --- /dev/null +++ b/packages/@expo/metro-file-map/build/crawlers/node/index.js @@ -0,0 +1,190 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = nodeCrawl; +const child_process_1 = require("child_process"); +const fs = __importStar(require("graceful-fs")); +const os_1 = require("os"); +const path = __importStar(require("path")); +const hasNativeFindSupport_1 = __importDefault(require("./hasNativeFindSupport")); +const RootPathUtils_1 = require("../../lib/RootPathUtils"); +const debug = require('debug')('Metro:NodeCrawler'); +function find(roots, extensions, ignore, includeSymlinks, rootDir, console, callback) { + const result = new Map(); + let activeCalls = 0; + const pathUtils = new RootPathUtils_1.RootPathUtils(rootDir); + function search(directory) { + activeCalls++; + fs.readdir(directory, { withFileTypes: true }, (err, entries) => { + activeCalls--; + if (err) { + console.warn(`Error "${err.code ?? err.message}" reading contents of "${directory}", skipping. 
Add this directory to your ignore list to exclude it.`); + } + else { + entries.forEach((entry) => { + const file = path.join(directory, entry.name.toString()); + if (ignore(file)) { + return; + } + if (entry.isSymbolicLink() && !includeSymlinks) { + return; + } + if (entry.isDirectory()) { + search(file); + return; + } + activeCalls++; + fs.lstat(file, (err, stat) => { + activeCalls--; + if (!err && stat) { + const ext = path.extname(file).substr(1); + if (stat.isSymbolicLink() || extensions.includes(ext)) { + result.set(pathUtils.absoluteToNormal(file), [ + stat.mtime.getTime(), + stat.size, + 0, + null, + stat.isSymbolicLink() ? 1 : 0, + null, + ]); + } + } + if (activeCalls === 0) { + callback(result); + } + }); + }); + } + if (activeCalls === 0) { + callback(result); + } + }); + } + if (roots.length > 0) { + roots.forEach(search); + } + else { + callback(result); + } +} +function findNative(roots, extensions, ignore, includeSymlinks, rootDir, console, callback) { + // Examples: + // ( ( -type f ( -iname *.js ) ) ) + // ( ( -type f ( -iname *.js -o -iname *.ts ) ) ) + // ( ( -type f ( -iname *.js ) ) -o -type l ) + // ( ( -type f ) -o -type l ) + const extensionClause = extensions.length + ? `( ${extensions.map((ext) => `-iname *.${ext}`).join(' -o ')} )` + : ''; // Empty inner expressions eg "( )" are not allowed + const expression = `( ( -type f ${extensionClause} ) ${includeSymlinks ? '-o -type l ' : ''})`; + const pathUtils = new RootPathUtils_1.RootPathUtils(rootDir); + const child = (0, child_process_1.spawn)('find', [...roots, ...expression.split(' ')]); + let stdout = ''; + if (child.stdout == null) { + throw new Error('stdout is null - this should never happen. 
Please open up an issue at https://github.com/facebook/metro'); + } + child.stdout.setEncoding('utf-8'); + child.stdout.on('data', (data) => (stdout += data)); + child.stdout.on('close', () => { + const lines = stdout + .trim() + .split('\n') + .filter((x) => !ignore(x)); + const result = new Map(); + let count = lines.length; + if (!count) { + callback(new Map()); + } + else { + lines.forEach((filePath) => { + fs.lstat(filePath, (err, stat) => { + if (!err && stat) { + result.set(pathUtils.absoluteToNormal(filePath), [ + stat.mtime.getTime(), + stat.size, + 0, + null, + stat.isSymbolicLink() ? 1 : 0, + null, + ]); + } + if (--count === 0) { + callback(result); + } + }); + }); + } + }); +} +async function nodeCrawl(options) { + const { console, previousState, extensions, forceNodeFilesystemAPI, ignore, rootDir, includeSymlinks, perfLogger, roots, abortSignal, subpath, } = options; + abortSignal?.throwIfAborted(); + perfLogger?.point('nodeCrawl_start'); + const useNativeFind = !forceNodeFilesystemAPI && (0, os_1.platform)() !== 'win32' && (await (0, hasNativeFindSupport_1.default)()); + debug('Using system find: %s', useNativeFind); + return new Promise((resolve, reject) => { + const callback = (fileData) => { + const difference = previousState.fileSystem.getDifference(fileData, { + subpath, + }); + perfLogger?.point('nodeCrawl_end'); + try { + // TODO: Use AbortSignal.reason directly when Flow supports it + abortSignal?.throwIfAborted(); + } + catch (e) { + reject(e); + } + resolve(difference); + }; + if (useNativeFind) { + findNative(roots, extensions, ignore, includeSymlinks, rootDir, console, callback); + } + else { + find(roots, extensions, ignore, includeSymlinks, rootDir, console, callback); + } + }); +} diff --git a/packages/@expo/metro-file-map/build/crawlers/watchman/index.d.ts b/packages/@expo/metro-file-map/build/crawlers/watchman/index.d.ts new file mode 100644 index 00000000000000..879ee7c44b851d --- /dev/null +++ 
b/packages/@expo/metro-file-map/build/crawlers/watchman/index.d.ts @@ -0,0 +1,8 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import type { CrawlerOptions, CrawlResult } from '../../types'; +export default function watchmanCrawl({ abortSignal, computeSha1, extensions, ignore, includeSymlinks, onStatus, perfLogger, previousState, rootDir, roots, }: CrawlerOptions): Promise; diff --git a/packages/@expo/metro-file-map/build/crawlers/watchman/index.js b/packages/@expo/metro-file-map/build/crawlers/watchman/index.js new file mode 100644 index 00000000000000..dac9241b02f15d --- /dev/null +++ b/packages/@expo/metro-file-map/build/crawlers/watchman/index.js @@ -0,0 +1,285 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = watchmanCrawl; +const fb_watchman_1 = __importDefault(require("fb-watchman")); +const invariant_1 = __importDefault(require("invariant")); +const path = __importStar(require("path")); +const perf_hooks_1 = require("perf_hooks"); +const planQuery_1 = require("./planQuery"); +const RootPathUtils_1 = require("../../lib/RootPathUtils"); +const normalizePathSeparatorsToPosix_1 = __importDefault(require("../../lib/normalizePathSeparatorsToPosix")); +const normalizePathSeparatorsToSystem_1 = __importDefault(require("../../lib/normalizePathSeparatorsToSystem")); +const WATCHMAN_WARNING_INITIAL_DELAY_MILLISECONDS = 10000; +const WATCHMAN_WARNING_INTERVAL_MILLISECONDS = 20000; +const watchmanURL = 'https://facebook.github.io/watchman/docs/troubleshooting'; +function makeWatchmanError(error) { + error.message = + `Watchman error: ${error.message.trim()}. Make sure watchman ` + + `is running for this project. 
See ${watchmanURL}.`; + return error; +} +async function watchmanCrawl({ abortSignal, computeSha1, extensions, ignore, includeSymlinks, onStatus, perfLogger, previousState, rootDir, roots, }) { + abortSignal?.throwIfAborted(); + const client = new fb_watchman_1.default.Client(); + const pathUtils = new RootPathUtils_1.RootPathUtils(rootDir); + abortSignal?.addEventListener('abort', () => client.end()); + perfLogger?.point('watchmanCrawl_start'); + const newClocks = new Map(); + let clientError; + client.on('error', (error) => { + clientError = makeWatchmanError(error); + }); + // TODO: Fix to use fb-watchman types + const cmd = async (command, ...args) => { + let didLogWatchmanWaitMessage = false; + const startTime = perf_hooks_1.performance.now(); + const logWatchmanWaitMessage = () => { + didLogWatchmanWaitMessage = true; + onStatus({ + type: 'watchman_slow_command', + timeElapsed: perf_hooks_1.performance.now() - startTime, + command, + }); + }; + let intervalOrTimeoutId = setTimeout(() => { + logWatchmanWaitMessage(); + intervalOrTimeoutId = setInterval(logWatchmanWaitMessage, WATCHMAN_WARNING_INTERVAL_MILLISECONDS); + }, WATCHMAN_WARNING_INITIAL_DELAY_MILLISECONDS); + try { + const response = await new Promise((resolve, reject) => { + // NOTE: dynamic call of command + return client.command([command, ...args], (error, result) => error ? 
reject(makeWatchmanError(error)) : resolve(result)); + }); + if ('warning' in response) { + onStatus({ + type: 'watchman_warning', + warning: response.warning, + command, + }); + } + return response; + } + finally { + // NOTE: clearInterval / clearTimeout are interchangeable + clearInterval(intervalOrTimeoutId); + if (didLogWatchmanWaitMessage) { + onStatus({ + type: 'watchman_slow_command_complete', + timeElapsed: perf_hooks_1.performance.now() - startTime, + command, + }); + } + } + }; + async function getWatchmanRoots(roots) { + perfLogger?.point('watchmanCrawl/getWatchmanRoots_start'); + const watchmanRoots = new Map(); + await Promise.all(roots.map(async (root, index) => { + perfLogger?.point(`watchmanCrawl/watchProject_${index}_start`); + const response = await cmd('watch-project', root); + perfLogger?.point(`watchmanCrawl/watchProject_${index}_end`); + const existing = watchmanRoots.get(response.watch); + // A root can only be filtered if it was never seen with a + // relative_path before. + const canBeFiltered = !existing || existing.directoryFilters.length > 0; + if (canBeFiltered) { + if (response.relative_path) { + watchmanRoots.set(response.watch, { + watcher: response.watcher, + directoryFilters: (existing?.directoryFilters || []).concat(response.relative_path), + }); + } + else { + // Make the filter directories an empty array to signal that this + // root was already seen and needs to be watched for all files or + // directories. 
+ watchmanRoots.set(response.watch, { + watcher: response.watcher, + directoryFilters: [], + }); + } + } + })); + perfLogger?.point('watchmanCrawl/getWatchmanRoots_end'); + return watchmanRoots; + } + async function queryWatchmanForDirs(rootProjectDirMappings) { + perfLogger?.point('watchmanCrawl/queryWatchmanForDirs_start'); + const results = new Map(); + let isFresh = false; + await Promise.all(Array.from(rootProjectDirMappings).map(async ([posixSeparatedRoot, { directoryFilters, watcher }], index) => { + // Jest is only going to store one type of clock; a string that + // represents a local clock. However, the Watchman crawler supports + // a second type of clock that can be written by automation outside of + // Jest, called an "scm query", which fetches changed files based on + // source control mergebases. The reason this is necessary is because + // local clocks are not portable across systems, but scm queries are. + // By using scm queries, we can create the haste map on a different + // system and import it, transforming the clock into a local clock. + const since = previousState.clocks.get((0, normalizePathSeparatorsToPosix_1.default)(pathUtils.absoluteToNormal((0, normalizePathSeparatorsToSystem_1.default)(posixSeparatedRoot)))); + perfLogger?.annotate({ + bool: { + [`watchmanCrawl/query_${index}_has_clock`]: since != null, + }, + }); + const { query, queryGenerator } = (0, planQuery_1.planQuery)({ + since, + extensions, + directoryFilters, + includeSha1: computeSha1, + includeSymlinks, + }); + perfLogger?.annotate({ + string: { + [`watchmanCrawl/query_${index}_watcher`]: watcher ?? 
'unknown', + [`watchmanCrawl/query_${index}_generator`]: queryGenerator, + }, + }); + perfLogger?.point(`watchmanCrawl/query_${index}_start`); + const response = await cmd('query', posixSeparatedRoot, query); + perfLogger?.point(`watchmanCrawl/query_${index}_end`); + // When a source-control query is used, we ignore the "is fresh" + // response from Watchman because it will be true despite the query + // being incremental. + const isSourceControlQuery = typeof since !== 'string' && since?.scm?.['mergebase-with'] != null; + if (!isSourceControlQuery) { + isFresh = isFresh || response.is_fresh_instance; + } + results.set(posixSeparatedRoot, response); + })); + perfLogger?.point('watchmanCrawl/queryWatchmanForDirs_end'); + return { + isFresh, + results, + }; + } + let removedFiles = new Set(); + let changedFiles = new Map(); + let results; + let isFresh = false; + let queryError; + try { + const watchmanRoots = await getWatchmanRoots(roots); + const watchmanFileResults = await queryWatchmanForDirs(watchmanRoots); + results = watchmanFileResults.results; + isFresh = watchmanFileResults.isFresh; + } + catch (e) { + queryError = e; + } + client.end(); + if (results == null) { + if (clientError) { + perfLogger?.annotate({ + string: { + 'watchmanCrawl/client_error': clientError.message ?? '[message missing]', + }, + }); + } + if (queryError) { + perfLogger?.annotate({ + string: { + 'watchmanCrawl/query_error': queryError.message ?? '[message missing]', + }, + }); + } + perfLogger?.point('watchmanCrawl_end'); + abortSignal?.throwIfAborted(); + throw queryError ?? clientError ?? 
new Error('Watchman file results missing'); + } + perfLogger?.point('watchmanCrawl/processResults_start'); + const freshFileData = new Map(); + for (const [watchRoot, response] of results) { + const fsRoot = (0, normalizePathSeparatorsToSystem_1.default)(watchRoot); + const relativeFsRoot = pathUtils.absoluteToNormal(fsRoot); + newClocks.set((0, normalizePathSeparatorsToPosix_1.default)(relativeFsRoot), + // Ensure we persist only the local clock. + typeof response.clock === 'string' ? response.clock : response.clock.clock); + for (const fileData of response.files) { + const filePath = fsRoot + path.sep + (0, normalizePathSeparatorsToSystem_1.default)(fileData.name); + const relativeFilePath = pathUtils.absoluteToNormal(filePath); + if (!fileData.exists) { + if (!isFresh) { + removedFiles.add(relativeFilePath); + } + // Whether watchman can return exists: false in a fresh instance + // response is unknown, but there's nothing we need to do in that case. + } + else if (!ignore(filePath)) { + const { mtime_ms, size } = fileData; + (0, invariant_1.default)(mtime_ms != null && size != null, 'missing file data in watchman response'); + const mtime = typeof mtime_ms === 'number' ? mtime_ms : mtime_ms.toNumber(); + let sha1hex = fileData['content.sha1hex']; + if (typeof sha1hex !== 'string' || sha1hex.length !== 40) { + sha1hex = undefined; + } + let symlinkInfo = 0; + if (fileData.type === 'l') { + symlinkInfo = fileData['symlink_target'] ?? 1; + } + const nextData = [mtime, size, 0, sha1hex ?? null, symlinkInfo, null]; + // If watchman is fresh, the removed files map starts with all files + // and we remove them as we verify they still exist. 
+ if (isFresh) { + freshFileData.set(relativeFilePath, nextData); + } + else { + changedFiles.set(relativeFilePath, nextData); + } + } + } + } + if (isFresh) { + ({ changedFiles, removedFiles } = previousState.fileSystem.getDifference(freshFileData)); + } + perfLogger?.point('watchmanCrawl/processResults_end'); + perfLogger?.point('watchmanCrawl_end'); + abortSignal?.throwIfAborted(); + return { + changedFiles, + removedFiles, + clocks: newClocks, + }; +} diff --git a/packages/@expo/metro-file-map/build/crawlers/watchman/planQuery.d.ts b/packages/@expo/metro-file-map/build/crawlers/watchman/planQuery.d.ts new file mode 100644 index 00000000000000..f6c95c9e235a90 --- /dev/null +++ b/packages/@expo/metro-file-map/build/crawlers/watchman/planQuery.d.ts @@ -0,0 +1,31 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +type WatchmanExpression = readonly [string, ...any[]]; +type WatchmanQuerySince = string | Readonly<{ + scm: Readonly<{ + 'mergebase-with': string; + }>; +}>; +interface WatchmanQuery { + fields?: string[]; + expression?: WatchmanExpression; + since?: WatchmanQuerySince; + glob?: string[]; + glob_includedotfiles?: boolean; + suffix?: readonly string[]; +} +export declare function planQuery({ since, directoryFilters, extensions, includeSha1, includeSymlinks, }: { + readonly since: WatchmanQuerySince | null | undefined; + readonly directoryFilters: readonly string[]; + readonly extensions: readonly string[]; + readonly includeSha1: boolean; + readonly includeSymlinks: boolean; +}): { + query: WatchmanQuery; + queryGenerator: string; +}; +export {}; diff --git a/packages/@expo/metro-file-map/build/crawlers/watchman/planQuery.js b/packages/@expo/metro-file-map/build/crawlers/watchman/planQuery.js new file mode 100644 index 00000000000000..0f2aed4caf96c0 --- /dev/null +++ 
b/packages/@expo/metro-file-map/build/crawlers/watchman/planQuery.js @@ -0,0 +1,99 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.planQuery = planQuery; +function planQuery({ since, directoryFilters, extensions, includeSha1, includeSymlinks, }) { + const fields = ['name', 'exists', 'mtime_ms', 'size']; + if (includeSha1) { + fields.push('content.sha1hex'); + } + /** + * Note on symlink_target: + * + * Watchman supports requesting the symlink_target field, which is + * *potentially* more efficient if targets can be read from metadata without + * reading/materialising files. However, at the time of writing, Watchman has + * issues reporting symlink_target on some backends[1]. Additionally, though + * the Eden watcher is known to work, it reads links serially[2] on demand[3] + * - less efficiently than we can do ourselves. + * [1] https://github.com/facebook/watchman/issues/1084 + * [2] https://github.com/facebook/watchman/blob/v2023.01.02.00/watchman/watcher/eden.cpp#L476-L485 + * [3] https://github.com/facebook/watchman/blob/v2023.01.02.00/watchman/watcher/eden.cpp#L433-L434 + */ + if (includeSymlinks) { + fields.push('type'); + } + const allOfTerms = includeSymlinks + ? [['anyof', ['allof', ['type', 'f'], ['suffix', extensions]], ['type', 'l']]] + : [['type', 'f']]; + const query = { fields }; + /** + * Watchman "query planner". + * + * Watchman file queries consist of 1 or more generators that feed + * files through the expression evaluator. + * + * Strategy: + * 1. Select the narrowest possible generator so that the expression + * evaluator has fewer candidates to process. + * 2. Evaluate expressions from narrowest to broadest. + * 3. Don't use an expression to recheck a condition that the + * generator already guarantees. 
+ * 4. Compose expressions to avoid combinatorial explosions in the + * number of terms. + * + * The ordering of generators/filters, from narrow to broad, is: + * - since = O(changes) + * - glob / dirname = O(files in a subtree of the repo) + * - suffix = O(files in the repo) + * + * We assume that file extensions are ~uniformly distributed in the + * repo but Haste map projects are focused on a handful of + * directories. Therefore `glob` < `suffix`. + */ + let queryGenerator; + if (since != null) { + // Prefer the since generator whenever we have a clock + query.since = since; + queryGenerator = 'since'; + // Filter on directories using an anyof expression + if (directoryFilters.length > 0) { + allOfTerms.push([ + 'anyof', + ...directoryFilters.map((dir) => ['dirname', dir]), + ]); + } + } + else if (directoryFilters.length > 0) { + // Use the `glob` generator and filter only by extension. + query.glob = directoryFilters.map((directory) => `${directory}/**`); + query.glob_includedotfiles = true; + queryGenerator = 'glob'; + } + else if (!includeSymlinks) { + // Use the `suffix` generator with no path/extension filtering, as long + // as we don't need (suffixless) directory symlinks. + query.suffix = extensions; + queryGenerator = 'suffix'; + } + else { + // Fall back to `all` if we need symlinks and don't have a clock or + // directory filters. + queryGenerator = 'all'; + } + // `includeSymlinks` implies we need (suffixless) directory links. In the + // case of the `suffix` generator, a suffix expression would be redundant. + if (!includeSymlinks && queryGenerator !== 'suffix') { + allOfTerms.push(['suffix', extensions]); + } + // If we only have one "all of" expression we can use it directly, otherwise + // wrap in ['allof', ...expressions]. By construction we should never have + // length 0. + query.expression = allOfTerms.length === 1 ? 
allOfTerms[0] : ['allof', ...allOfTerms]; + return { query, queryGenerator }; +} diff --git a/packages/@expo/metro-file-map/build/index.d.ts b/packages/@expo/metro-file-map/build/index.d.ts new file mode 100644 index 00000000000000..1f8edd42dc6825 --- /dev/null +++ b/packages/@expo/metro-file-map/build/index.d.ts @@ -0,0 +1,137 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import EventEmitter from 'events'; +import type { BuildParameters, BuildResult, CacheData, CacheManagerFactory, ChangeEventMetadata, Console, FileData, FileSystem, HasteMapData, HasteMapItem, HType, InputFileMapPlugin, PerfLoggerFactory } from './types'; +export type { BuildParameters, BuildResult, CacheData, ChangeEventMetadata, FileData, FileMap, FileSystem, HasteMapData, HasteMapItem, InputFileMapPlugin, }; +export interface InputOptions { + readonly computeSha1?: boolean | undefined | null; + readonly enableSymlinks?: boolean | undefined | null; + readonly extensions: readonly string[]; + readonly forceNodeFilesystemAPI?: boolean | undefined | null; + readonly ignorePattern?: RegExp | undefined | null; + readonly plugins?: readonly InputFileMapPlugin[] | undefined; + readonly retainAllFiles: boolean; + readonly rootDir: string; + readonly roots: readonly string[]; + readonly cacheManagerFactory?: CacheManagerFactory | undefined | null; + readonly console?: Console; + readonly healthCheck: HealthCheckOptions; + readonly maxFilesPerWorker?: number | undefined | null; + readonly maxWorkers: number; + readonly perfLoggerFactory?: PerfLoggerFactory | undefined | null; + readonly resetCache?: boolean | undefined | null; + readonly useWatchman?: boolean | undefined | null; + readonly watch?: boolean | undefined | null; + readonly watchmanDeferStates?: readonly string[]; +} +interface HealthCheckOptions { + readonly enabled: boolean; + readonly 
interval: number; + readonly timeout: number; + readonly filePrefix: string; +} +export { DiskCacheManager } from './cache/DiskCacheManager'; +export { default as DependencyPlugin } from './plugins/DependencyPlugin'; +export type { DependencyPluginOptions } from './plugins/DependencyPlugin'; +export { DuplicateHasteCandidatesError } from './plugins/haste/DuplicateHasteCandidatesError'; +export { HasteConflictsError } from './plugins/haste/HasteConflictsError'; +export { default as HastePlugin } from './plugins/HastePlugin'; +export type { HasteMap } from './types'; +export type { HealthCheckResult } from './Watcher'; +export type { CacheManager, CacheManagerFactory, CacheManagerFactoryOptions, CacheManagerWriteOptions, ChangeEvent, DependencyExtractor, WatcherStatus, } from './types'; +/** + * FileMap includes a JavaScript implementation of Facebook's haste module system. + * + * This implementation is inspired by https://github.com/facebook/node-haste + * and was built for high-performance in large code repositories with + * hundreds of thousands of files. This implementation is scalable and provides + * predictable performance. + * + * Because the file map creation and synchronization is critical to startup + * performance and most tasks are blocked by I/O this class makes heavy use of + * synchronous operations. It uses worker processes for parallelizing file + * access and metadata extraction. + * + * The data structures created by `metro-file-map` can be used directly from the + * cache without further processing. The metadata objects in the `files` and + * `map` objects contain cross-references: a metadata object from one can look + * up the corresponding metadata object in the other map. Note that in most + * projects, the number of files will be greater than the number of haste + * modules; one module can refer to many files based on platform extensions.
+ * + * type CacheData = { + * clocks: WatchmanClocks, + * files: {[filepath: string]: FileMetadata}, + * map: {[id: string]: HasteMapItem}, + * mocks: {[id: string]: string}, + * } + * + * // Watchman clocks are used for query synchronization and file system deltas. + * type WatchmanClocks = {[filepath: string]: string}; + * + * type FileMetadata = { + * id: ?string, // used to look up module metadata objects in `map`. + * mtime: number, // check for outdated files. + * size: number, // size of the file in bytes. + * visited: boolean, // whether the file has been parsed or not. + * dependencies: Array, // all relative dependencies of this file. + * sha1: ?string, // SHA-1 of the file, if requested via options. + * symlink: ?(1 | 0 | string), // Truthy if symlink, string is target + * }; + * + * // Modules can be targeted to a specific platform based on the file name. + * // Example: platform.ios.js and Platform.android.js will both map to the same + * // `Platform` module. The platform should be specified during resolution. + * type HasteMapItem = {[platform: string]: ModuleMetadata}; + * + * // + * type ModuleMetadata = { + * path: string, // the path to look up the file object in `files`. + * type: string, // the module type (either `package` or `module`). + * }; + * + * Note that the data structures described above are conceptual only. The actual + * implementation uses arrays and constant keys for metadata storage. Instead of + * `{id: 'flatMap', mtime: 3421, size: 42, visited: true, dependencies: []}` the real + * representation is similar to `['flatMap', 3421, 42, 1, []]` to save storage space + * and reduce parse and write time of a big JSON blob. + * + * The FileMap is created as follows: + * 1. read data from the cache or create an empty structure. + * + * 2. crawl the file system. + * * empty cache: crawl the entire file system. + * * cache available: + * * if watchman is available: get file system delta changes. 
+ * if watchman is unavailable: crawl the entire file system. + * build metadata objects for every file. This builds the `files` part of + * the `FileMap`. + * + * 3. visit and extract metadata from changed files, including sha1, + * dependencies, and any plugins. + * * this is done in parallel over worker processes to improve performance. + * * the worst case is to visit all files. + * * the best case is no file system access and retrieving all data from + * the cache. + * * the average case is a small number of changed files. + * + * 4. serialize the new `FileMap` in a cache file. + * + */ +export default class FileMap extends EventEmitter { + #private; + readonly __expo = true; + static create(options: InputOptions): FileMap; + constructor(options: InputOptions); + build(): Promise; + /** + * 1. read data from the cache or create an empty structure. + */ + read(): Promise; + end(): Promise; + static H: HType; +} diff --git a/packages/@expo/metro-file-map/build/index.js b/packages/@expo/metro-file-map/build/index.js new file mode 100644 index 00000000000000..cc9b698f821f86 --- /dev/null +++ b/packages/@expo/metro-file-map/build/index.js @@ -0,0 +1,806 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ?
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HastePlugin = exports.HasteConflictsError = exports.DuplicateHasteCandidatesError = exports.DependencyPlugin = exports.DiskCacheManager = void 0; +const events_1 = __importDefault(require("events")); +const fs_1 = require("fs"); +const invariant_1 = __importDefault(require("invariant")); +const path = __importStar(require("path")); +const perf_hooks_1 = require("perf_hooks"); +const Watcher_1 = require("./Watcher"); +const DiskCacheManager_1 = require("./cache/DiskCacheManager"); +const constants_1 = __importDefault(require("./constants")); +const FileProcessor_1 = require("./lib/FileProcessor"); +const FileSystemChangeAggregator_1 = require("./lib/FileSystemChangeAggregator"); +const RootPathUtils_1 = require("./lib/RootPathUtils"); +const TreeFS_1 = __importDefault(require("./lib/TreeFS")); +const checkWatchmanCapabilities_1 = __importDefault(require("./lib/checkWatchmanCapabilities")); +const normalizePathSeparatorsToPosix_1 = __importDefault(require("./lib/normalizePathSeparatorsToPosix")); +const normalizePathSeparatorsToSystem_1 = 
__importDefault(require("./lib/normalizePathSeparatorsToSystem")); +const debug = require('debug')('Metro:FileMap'); +var DiskCacheManager_2 = require("./cache/DiskCacheManager"); +Object.defineProperty(exports, "DiskCacheManager", { enumerable: true, get: function () { return DiskCacheManager_2.DiskCacheManager; } }); +var DependencyPlugin_1 = require("./plugins/DependencyPlugin"); +Object.defineProperty(exports, "DependencyPlugin", { enumerable: true, get: function () { return __importDefault(DependencyPlugin_1).default; } }); +var DuplicateHasteCandidatesError_1 = require("./plugins/haste/DuplicateHasteCandidatesError"); +Object.defineProperty(exports, "DuplicateHasteCandidatesError", { enumerable: true, get: function () { return DuplicateHasteCandidatesError_1.DuplicateHasteCandidatesError; } }); +var HasteConflictsError_1 = require("./plugins/haste/HasteConflictsError"); +Object.defineProperty(exports, "HasteConflictsError", { enumerable: true, get: function () { return HasteConflictsError_1.HasteConflictsError; } }); +var HastePlugin_1 = require("./plugins/HastePlugin"); +Object.defineProperty(exports, "HastePlugin", { enumerable: true, get: function () { return __importDefault(HastePlugin_1).default; } }); +// This should be bumped whenever a code change to `metro-file-map` itself +// would cause a change to the cache data structure and/or content (for a given +// filesystem state and build parameters). +const CACHE_BREAKER = '11'; +const CHANGE_INTERVAL = 30; +const NODE_MODULES = path.sep + 'node_modules' + path.sep; +const VCS_DIRECTORIES = /[/\\]\.(git|hg)[/\\]/.source; +const WATCHMAN_REQUIRED_CAPABILITIES = [ + 'field-content.sha1hex', + 'relative_root', + 'suffix-set', + 'wildmatch', +]; +/** + * FileMap includes a JavaScript implementation of Facebook's haste module system. 
+ * + * This implementation is inspired by https://github.com/facebook/node-haste + * and was built for high-performance in large code repositories with + * hundreds of thousands of files. This implementation is scalable and provides + * predictable performance. + * + * Because the file map creation and synchronization is critical to startup + * performance and most tasks are blocked by I/O this class makes heavy use of + * synchronous operations. It uses worker processes for parallelizing file + * access and metadata extraction. + * + * The data structures created by `metro-file-map` can be used directly from the + * cache without further processing. The metadata objects in the `files` and + * `map` objects contain cross-references: a metadata object from one can look + * up the corresponding metadata object in the other map. Note that in most + * projects, the number of files will be greater than the number of haste + * modules; one module can refer to many files based on platform extensions. + * + * type CacheData = { + * clocks: WatchmanClocks, + * files: {[filepath: string]: FileMetadata}, + * map: {[id: string]: HasteMapItem}, + * mocks: {[id: string]: string}, + * } + * + * // Watchman clocks are used for query synchronization and file system deltas. + * type WatchmanClocks = {[filepath: string]: string}; + * + * type FileMetadata = { + * id: ?string, // used to look up module metadata objects in `map`. + * mtime: number, // check for outdated files. + * size: number, // size of the file in bytes. + * visited: boolean, // whether the file has been parsed or not. + * dependencies: Array, // all relative dependencies of this file. + * sha1: ?string, // SHA-1 of the file, if requested via options. + * symlink: ?(1 | 0 | string), // Truthy if symlink, string is target + * }; + * + * // Modules can be targeted to a specific platform based on the file name.
+ * // Example: platform.ios.js and Platform.android.js will both map to the same + * // `Platform` module. The platform should be specified during resolution. + * type HasteMapItem = {[platform: string]: ModuleMetadata}; + * + * // + * type ModuleMetadata = { + * path: string, // the path to look up the file object in `files`. + * type: string, // the module type (either `package` or `module`). + * }; + * + * Note that the data structures described above are conceptual only. The actual + * implementation uses arrays and constant keys for metadata storage. Instead of + * `{id: 'flatMap', mtime: 3421, size: 42, visited: true, dependencies: []}` the real + * representation is similar to `['flatMap', 3421, 42, 1, []]` to save storage space + * and reduce parse and write time of a big JSON blob. + * + * The FileMap is created as follows: + * 1. read data from the cache or create an empty structure. + * + * 2. crawl the file system. + * * empty cache: crawl the entire file system. + * * cache available: + * * if watchman is available: get file system delta changes. + * * if watchman is unavailable: crawl the entire file system. + * * build metadata objects for every file. This builds the `files` part of + * the `FileMap`. + * + * 3. visit and extract metadata from changed files, including sha1, + * dependencies, and any plugins. + * * this is done in parallel over worker processes to improve performance. + * * the worst case is to visit all files. + * * the best case is no file system access and retrieving all data from + * the cache. + * * the average case is a small number of changed files. + * + * 4. serialize the new `FileMap` in a cache file.
+ * + */ +class FileMap extends events_1.default { + // NOTE(@kitten): Expo brand to recognize patched `metro-file-map -> @expo/metro-file-map` + __expo = true; + #buildPromise; + #cacheManager; + #canUseWatchmanPromise; + #changeID; + #changeInterval; + #console; + #crawlerAbortController; + #fileProcessor; + #healthCheckInterval; + #options; + #pathUtils; + #plugins; + #startupPerfLogger; + #watcher; + static create(options) { + return new FileMap(options); + } + constructor(options) { + super(); + if (options.perfLoggerFactory) { + this.#startupPerfLogger = options.perfLoggerFactory?.('START_UP').subSpan('fileMap') ?? null; + this.#startupPerfLogger?.point('constructor_start'); + } + // Add VCS_DIRECTORIES to provided ignorePattern + let ignorePattern; + if (options.ignorePattern) { + const inputIgnorePattern = options.ignorePattern; + if (inputIgnorePattern instanceof RegExp) { + ignorePattern = new RegExp(inputIgnorePattern.source.concat('|' + VCS_DIRECTORIES), inputIgnorePattern.flags); + } + else { + throw new Error('metro-file-map: the `ignorePattern` option must be a RegExp'); + } + } + else { + ignorePattern = new RegExp(VCS_DIRECTORIES); + } + this.#console = options.console || globalThis.console; + let dataSlot = constants_1.default.PLUGINDATA; + const indexedPlugins = []; + const pluginWorkers = []; + const plugins = options.plugins ?? []; + for (const plugin of plugins) { + const maybeWorker = plugin.getWorker(); + indexedPlugins.push({ + plugin, + dataIdx: maybeWorker != null ? 
dataSlot++ : null, + }); + if (maybeWorker != null) { + pluginWorkers.push(maybeWorker); + } + } + this.#plugins = indexedPlugins; + const buildParameters = { + cacheBreaker: CACHE_BREAKER, + computeSha1: options.computeSha1 || false, + enableSymlinks: options.enableSymlinks || false, + extensions: options.extensions, + forceNodeFilesystemAPI: !!options.forceNodeFilesystemAPI, + ignorePattern, + plugins, + retainAllFiles: options.retainAllFiles, + rootDir: options.rootDir, + roots: Array.from(new Set(options.roots)), + }; + this.#options = { + ...buildParameters, + healthCheck: options.healthCheck, + perfLoggerFactory: options.perfLoggerFactory, + resetCache: options.resetCache, + useWatchman: options.useWatchman == null ? true : options.useWatchman, + watch: !!options.watch, + watchmanDeferStates: options.watchmanDeferStates ?? [], + }; + const cacheFactoryOptions = { + buildParameters, + }; + this.#cacheManager = options.cacheManagerFactory + ? options.cacheManagerFactory.call(null, cacheFactoryOptions) + : new DiskCacheManager_1.DiskCacheManager(cacheFactoryOptions, {}); + this.#fileProcessor = new FileProcessor_1.FileProcessor({ + maxFilesPerWorker: options.maxFilesPerWorker, + maxWorkers: options.maxWorkers, + perfLogger: this.#startupPerfLogger, + pluginWorkers, + rootDir: options.rootDir, + }); + this.#buildPromise = null; + this.#pathUtils = new RootPathUtils_1.RootPathUtils(options.rootDir); + this.#startupPerfLogger?.point('constructor_end'); + this.#crawlerAbortController = new AbortController(); + this.#changeID = 0; + } + build() { + this.#startupPerfLogger?.point('build_start'); + if (!this.#buildPromise) { + this.#buildPromise = (async () => { + let initialData; + if (this.#options.resetCache !== true) { + initialData = await this.read(); + } + if (!initialData) { + debug('Not using a cache'); + } + else { + debug('Cache loaded (%d clock(s))', initialData.clocks.size); + } + const rootDir = this.#options.rootDir; + 
this.#startupPerfLogger?.point('constructFileSystem_start'); + const processFile = (normalPath, metadata, opts) => { + const result = this.#fileProcessor.processRegularFile(normalPath, metadata, { + computeSha1: opts.computeSha1, + maybeReturnContent: true, + }); + debug('Lazily processed file: %s', normalPath); + // Emit an event to inform caches that there is new data to save. + this.emit('metadata'); + return result?.content; + }; + const fileSystem = initialData != null + ? TreeFS_1.default.fromDeserializedSnapshot({ + // Typed `mixed` because we've read this from an external + // source. It'd be too expensive to validate at runtime, so + // trust our cache manager that this is correct. + fileSystemData: initialData.fileSystemData, + processFile, + rootDir, + }) + : new TreeFS_1.default({ processFile, rootDir }); + this.#startupPerfLogger?.point('constructFileSystem_end'); + const plugins = this.#plugins; + // Initialize plugins from cached file system and plugin state while + // crawling to build a diff of current state vs cached. `fileSystem` + // is not mutated during either operation. + const [fileDelta] = await Promise.all([ + this.#buildFileDelta({ + clocks: initialData?.clocks ?? new Map(), + fileSystem, + }), + Promise.all(plugins.map(({ plugin, dataIdx }) => plugin.initialize({ + files: { + lookup: (mixedPath) => { + const result = fileSystem.lookup(mixedPath); + if (!result.exists) { + return { exists: false }; + } + if (result.type === 'd') { + return { exists: true, type: 'd' }; + } + return { + exists: true, + type: 'f', + pluginData: dataIdx != null ? result.metadata[dataIdx] : null, + }; + }, + fileIterator: (opts) => mapIterable(fileSystem.metadataIterator(opts), ({ baseName, canonicalPath, metadata }) => ({ + baseName, + canonicalPath, + pluginData: dataIdx != null ? metadata[dataIdx] : null, + })), + }, + pluginState: initialData?.plugins.get(plugin.name), + }))), + ]); + // Update `fileSystem` and plugins based on the file delta. 
+ const actualChanges = await this.#applyFileDelta(fileSystem, plugins, fileDelta); + const changeSize = actualChanges.getSize(); + // Validate plugins before persisting them. + plugins.forEach(({ plugin }) => plugin.assertValid()); + const watchmanClocks = new Map('clocks' in fileDelta ? fileDelta.clocks : []); + await this.#takeSnapshotAndPersist(fileSystem, watchmanClocks, plugins, changeSize > 0); + debug('Finished mapping files (%d changes).', changeSize); + await this.#watch(fileSystem, watchmanClocks, plugins); + return { fileSystem }; + })(); + } + return this.#buildPromise.then((result) => { + this.#startupPerfLogger?.point('build_end'); + return result; + }); + } + /** + * 1. read data from the cache or create an empty structure. + */ + async read() { + let data; + this.#startupPerfLogger?.point('read_start'); + try { + data = await this.#cacheManager.read(); + } + catch (e) { + this.#console.warn('Error while reading cache, falling back to a full crawl:\n', e); + this.#startupPerfLogger?.annotate({ + string: { cacheReadError: e.toString() }, + }); + } + this.#startupPerfLogger?.point('read_end'); + return data; + } + /** + * 2. crawl the file system. + */ + async #buildFileDelta(previousState) { + this.#startupPerfLogger?.point('buildFileDelta_start'); + const { computeSha1, enableSymlinks, extensions, forceNodeFilesystemAPI, ignorePattern, retainAllFiles, roots, rootDir, watch, watchmanDeferStates, } = this.#options; + this.#watcher = new Watcher_1.Watcher({ + abortSignal: this.#crawlerAbortController.signal, + computeSha1, + console: this.#console, + enableSymlinks, + extensions, + forceNodeFilesystemAPI, + healthCheckFilePrefix: this.#options.healthCheck.filePrefix, + // TODO: Refactor out the two different ignore strategies here. 
+ ignoreForCrawl: (filePath) => { + const ignoreMatched = ignorePattern.test(filePath); + return ignoreMatched || (!retainAllFiles && filePath.includes(NODE_MODULES)); + }, + ignorePatternForWatch: ignorePattern, + perfLogger: this.#startupPerfLogger, + previousState, + rootDir, + roots, + useWatchman: await this.#shouldUseWatchman(), + watch, + watchmanDeferStates, + }); + const watcher = this.#watcher; + watcher.on('status', (status) => this.emit('status', status)); + const result = await watcher.crawl(); + this.#startupPerfLogger?.point('buildFileDelta_end'); + return result; + } + #maybeReadLink(normalPath, fileMetadata) { + // If we only need to read a link, it's more efficient to do it in-band + // (with async file IO) than to have the overhead of worker IO. + if (fileMetadata[constants_1.default.SYMLINK] === 1) { + return fs_1.promises + .readlink(this.#pathUtils.normalToAbsolute(normalPath)) + .then((symlinkTarget) => { + fileMetadata[constants_1.default.VISITED] = 1; + fileMetadata[constants_1.default.SYMLINK] = symlinkTarget; + }); + } + return null; + } + async #applyFileDelta(fileSystem, plugins, delta) { + this.#startupPerfLogger?.point('applyFileDelta_start'); + const { changedFiles, removedFiles } = delta; + this.#startupPerfLogger?.point('applyFileDelta_preprocess_start'); + // Remove files first so that we don't mistake moved modules + // modules as duplicates. 
+ this.#startupPerfLogger?.point('applyFileDelta_remove_start'); + const changeAggregator = new FileSystemChangeAggregator_1.FileSystemChangeAggregator(); + for (const relativeFilePath of removedFiles) { + fileSystem.remove(relativeFilePath, changeAggregator); + } + this.#startupPerfLogger?.point('applyFileDelta_remove_end'); + const readLinkPromises = []; + const readLinkErrors = []; + const filesToProcess = []; + for (const [normalFilePath, fileData] of changedFiles) { + // A crawler may preserve the H.VISITED flag to indicate that the file + // contents are unchanged and it doesn't need visiting again. + if (fileData[constants_1.default.VISITED] === 1) { + continue; + } + if (fileData[constants_1.default.SYMLINK] === 0) { + filesToProcess.push([normalFilePath, fileData]); + } + else { + const maybeReadLink = this.#maybeReadLink(normalFilePath, fileData); + if (maybeReadLink) { + readLinkPromises.push(maybeReadLink.catch((error) => { + readLinkErrors.push({ normalFilePath, error }); + })); + } + } + } + this.#startupPerfLogger?.point('applyFileDelta_preprocess_end'); + debug('Found %d added/modified files and %d symlinks.', filesToProcess.length, readLinkPromises.length); + this.#startupPerfLogger?.point('applyFileDelta_process_start'); + const [batchResult] = await Promise.all([ + this.#fileProcessor.processBatch(filesToProcess, { + computeSha1: this.#options.computeSha1, + maybeReturnContent: false, + }), + Promise.all(readLinkPromises), + ]); + this.#startupPerfLogger?.point('applyFileDelta_process_end'); + // It's possible that a file could be deleted between being seen by the + // crawler and our attempt to process it. For our purposes, this is + // equivalent to the file being deleted before the crawl, being absent + // from `changedFiles`, and (if we loaded from cache, and the file + // existed previously) possibly being reported in `removedFiles`. + // + // Treat the file accordingly - don't add it to `FileSystem`, and remove + // it if it already exists.
We're not emitting events at this point in + // startup, so there's nothing more to do. + this.#startupPerfLogger?.point('applyFileDelta_missing_start'); + for (const { normalFilePath, error } of batchResult.errors.concat(readLinkErrors)) { + if (['ENOENT', 'EACCESS'].includes(error.code ?? '')) { + delta.changedFiles.delete(normalFilePath); + fileSystem.remove(normalFilePath, changeAggregator); + } + else { + // Anything else is fatal. + throw error; + } + } + this.#startupPerfLogger?.point('applyFileDelta_missing_end'); + this.#startupPerfLogger?.point('applyFileDelta_add_start'); + fileSystem.bulkAddOrModify(changedFiles, changeAggregator); + this.#startupPerfLogger?.point('applyFileDelta_add_end'); + this.#startupPerfLogger?.point('applyFileDelta_updatePlugins_start'); + this.#plugins.forEach(({ plugin, dataIdx }) => { + plugin.onChanged(changeAggregator.getMappedView(dataIdx != null ? (metadata) => metadata[dataIdx] : () => null)); + }); + this.#startupPerfLogger?.point('applyFileDelta_updatePlugins_end'); + this.#startupPerfLogger?.point('applyFileDelta_end'); + return changeAggregator; + } + /** + * 4. Serialize a snapshot of our raw data via the configured cache manager + */ + async #takeSnapshotAndPersist(fileSystem, clocks, plugins, changedSinceCacheRead) { + this.#startupPerfLogger?.point('persist_start'); + await this.#cacheManager.write(() => ({ + clocks: new Map(clocks), + fileSystemData: fileSystem.getSerializableSnapshot(), + plugins: new Map(plugins.map(({ plugin }) => [plugin.name, plugin.getSerializableSnapshot()])), + }), { + changedSinceCacheRead, + eventSource: { + onChange: (cb) => { + // Inform the cache about changes to internal state, including: + // - File system changes + this.on('change', cb); + // - Changes to stored metadata, e.g. on lazy processing. 
+ this.on('metadata', cb); + return () => { + this.removeListener('change', cb); + this.removeListener('metadata', cb); + }; + }, + }, + onWriteError: (error) => { + this.#console.warn('[metro-file-map] Cache write error\n:', error); + }, + }); + this.#startupPerfLogger?.point('persist_end'); + } + /** + * Watch mode + */ + async #watch(fileSystem, clocks, plugins) { + this.#startupPerfLogger?.point('watch_start'); + if (!this.#options.watch) { + this.#startupPerfLogger?.point('watch_end'); + return; + } + const hasWatchedExtension = (filePath) => this.#options.extensions.some((ext) => filePath.endsWith(ext)); + let nextEmit = null; + const emitChange = () => { + if (nextEmit == null) { + // Nothing to emit + return; + } + const { events, firstEventTimestamp, firstEnqueuedTimestamp } = nextEmit; + const changeAggregator = new FileSystemChangeAggregator_1.FileSystemChangeAggregator(); + // Process a sequence of events. Note that preserving ordering is + // important here - a file may be both removed and added in the same + // batch. + // `changeAggregator` flattens this over time into the net change from + // this sequence. + for (const event of events) { + const { relativeFilePath, clock } = event; + if (event.type === 'delete') { + fileSystem.remove(relativeFilePath, changeAggregator); + } + else { + fileSystem.addOrModify(relativeFilePath, event.metadata, changeAggregator); + } + this.#updateClock(clocks, clock); + } + const changeSize = changeAggregator.getSize(); + if (changeSize === 0) { + // We had events, but they've exactly cancelled each other out, reset + // so that timers are correct for the next change. + nextEmit = null; + return; + } + this.#plugins.forEach(({ plugin, dataIdx }) => { + plugin.onChanged(changeAggregator.getMappedView(dataIdx != null ? 
(metadata) => metadata[dataIdx] : () => null)); + }); + const toPublicMetadata = (metadata) => ({ + isSymlink: metadata[constants_1.default.SYMLINK] !== 0, + modifiedTime: metadata[constants_1.default.MTIME] ?? null, + }); + const changesWithMetadata = changeAggregator.getMappedView(toPublicMetadata); + const hmrPerfLogger = this.#options.perfLoggerFactory?.('HMR', { + key: this.#getNextChangeID(), + }); + if (hmrPerfLogger != null) { + hmrPerfLogger.start({ timestamp: firstEventTimestamp }); + hmrPerfLogger.point('waitingForChangeInterval_start', { + timestamp: firstEnqueuedTimestamp, + }); + hmrPerfLogger.point('waitingForChangeInterval_end'); + hmrPerfLogger.annotate({ int: { changeSize } }); + hmrPerfLogger.point('fileChange_start'); + } + const changeEvent = { + changes: changesWithMetadata, + logger: hmrPerfLogger, + rootDir: this.#options.rootDir, + }; + this.emit('change', changeEvent); + nextEmit = null; + }; + let changeQueue = Promise.resolve(); + const onChange = (change) => { + // Recrawl events bypass normal filtering - they trigger a full subdirectory scan + if (change.event !== 'recrawl' && + change.metadata && + // Ignore all directory events + (change.metadata.type === 'd' || + // Ignore regular files with unwatched extensions + (change.metadata.type === 'f' && !hasWatchedExtension(change.relativePath)) || + // Don't emit events relating to symlinks if enableSymlinks: false + (!this.#options.enableSymlinks && change.metadata?.type === 'l'))) { + return; + } + const absoluteFilePath = path.join(change.root, (0, normalizePathSeparatorsToSystem_1.default)(change.relativePath)); + // Ignore files (including symlinks) whose path matches ignorePattern + // (we don't ignore node_modules in watch mode) + if (this.#options.ignorePattern.test(absoluteFilePath)) { + return; + } + const relativeFilePath = this.#pathUtils.absoluteToNormal(absoluteFilePath); + const onChangeStartTime = perf_hooks_1.performance.timeOrigin + perf_hooks_1.performance.now(); + 
const enqueueEvent = (event) => { + nextEmit ??= { + events: [], + firstEnqueuedTimestamp: perf_hooks_1.performance.timeOrigin + perf_hooks_1.performance.now(), + firstEventTimestamp: onChangeStartTime, + }; + nextEmit.events.push(event); + }; + changeQueue = changeQueue + .then(async () => { + // If we get duplicate events for the same file, ignore them. + if (nextEmit != null && + nextEmit.events.find((event) => event.type === change.event && + event.relativeFilePath === relativeFilePath && + ((!('metadata' in event) && !change.metadata) || + ('metadata' in event && + change.metadata && + event.metadata[constants_1.default.MTIME] != null && + change.metadata.modifiedTime != null && + event.metadata[constants_1.default.MTIME] === change.metadata.modifiedTime)))) { + return null; + } + // If the file was added or modified, + // parse it and update the file map. + if (change.event === 'touch') { + (0, invariant_1.default)(change.metadata.size != null, 'since the file exists or changed, it should have known size'); + const fileMetadata = [ + change.metadata.modifiedTime ?? null, + change.metadata.size, + 0, + null, + change.metadata.type === 'l' ? 1 : 0, + null, + ]; + try { + if (change.metadata.type === 'l') { + await this.#maybeReadLink(relativeFilePath, fileMetadata); + } + else { + await this.#fileProcessor.processRegularFile(relativeFilePath, fileMetadata, { + computeSha1: this.#options.computeSha1, + maybeReturnContent: false, + }); + } + enqueueEvent({ + clock: change.clock, + relativeFilePath, + metadata: fileMetadata, + type: change.event, + }); + } + catch (e) { + if (!['ENOENT', 'EACCESS'].includes(e.code)) { + throw e; + } + // Swallow ENOENT/ACCESS errors silently. Safe because either: + // - We never knew about the file, so neither did any consumers. + // Or, + // - The watcher will soon (or has already) report a "delete" + // event for it, and we'll clean up in the usual way at that + // point. 
+ } + } + else if (change.event === 'delete') { + enqueueEvent({ + clock: change.clock, + relativeFilePath, + type: 'delete', + }); + } + else if (change.event === 'recrawl') { + // Recrawl event: flush pending changes and re-crawl the directory + emitChange(); + // The relativePath is relative to the watcher root (change.root), + // but we need a path relative to rootDir for the recrawl. + const absoluteDirPath = path.join(change.root, (0, normalizePathSeparatorsToSystem_1.default)(change.relativePath)); + const subpath = this.#pathUtils.absoluteToNormal(absoluteDirPath); + // Crawl the specific subdirectory + const watcher = this.#watcher; + (0, invariant_1.default)(watcher != null, 'Watcher must be initialized'); + const crawlResult = await watcher.recrawl(subpath, fileSystem); + // Skip if no changes + if (crawlResult.changedFiles.size === 0 && crawlResult.removedFiles.size === 0) { + return null; + } + // Reuse the same batch processing logic as build() + const recrawlChangeAggregator = await this.#applyFileDelta(fileSystem, this.#plugins, crawlResult); + // Update clock if provided + this.#updateClock(clocks, change.clock); + // Skip emit if no changes after processing + if (recrawlChangeAggregator.getSize() === 0) { + return null; + } + // Emit changes directly + const toPublicMetadata = (metadata) => ({ + isSymlink: metadata[constants_1.default.SYMLINK] !== 0, + modifiedTime: metadata[constants_1.default.MTIME] ?? 
null, + }); + const changesWithMetadata = recrawlChangeAggregator.getMappedView(toPublicMetadata); + const changeEvent = { + changes: changesWithMetadata, + logger: null, + rootDir: this.#options.rootDir, + }; + this.emit('change', changeEvent); + } + else { + throw new Error(`metro-file-map: Unrecognized event type from watcher: ${change.event}`); + } + return null; + }) + .catch((error) => { + this.#console.error(`metro-file-map: watch error:\n ${error.stack}\n`); + }); + }; + this.#changeInterval = setInterval(emitChange, CHANGE_INTERVAL); + (0, invariant_1.default)(this.#watcher != null, 'Expected #watcher to have been initialised by build()'); + await this.#watcher.watch(onChange); + if (this.#options.healthCheck.enabled) { + const performHealthCheck = () => { + if (!this.#watcher) { + return; + } + // eslint-disable-next-line @typescript-eslint/no-floating-promises + this.#watcher.checkHealth(this.#options.healthCheck.timeout).then((result) => { + this.emit('healthCheck', result); + }); + }; + performHealthCheck(); + this.#healthCheckInterval = setInterval(performHealthCheck, this.#options.healthCheck.interval); + } + this.#startupPerfLogger?.point('watch_end'); + } + async end() { + if (this.#changeInterval) { + clearInterval(this.#changeInterval); + } + if (this.#healthCheckInterval) { + clearInterval(this.#healthCheckInterval); + } + this.#crawlerAbortController.abort(); + await Promise.all([ + this.#fileProcessor.end(), + this.#watcher?.close(), + this.#cacheManager.end(), + ]); + } + async #shouldUseWatchman() { + if (!this.#options.useWatchman) { + return false; + } + if (!this.#canUseWatchmanPromise) { + this.#canUseWatchmanPromise = (0, checkWatchmanCapabilities_1.default)(WATCHMAN_REQUIRED_CAPABILITIES) + .then(({ version }) => { + this.#startupPerfLogger?.annotate({ + string: { + watchmanVersion: version, + }, + }); + return true; + }) + .catch((e) => { + // TODO: Advise people to either install Watchman or set + // `useWatchman: false` here? 
+ this.#startupPerfLogger?.annotate({ + string: { + watchmanFailedCapabilityCheck: e?.message ?? '[missing]', + }, + }); + return false; + }); + } + return this.#canUseWatchmanPromise; + } + #getNextChangeID() { + if (this.#changeID >= Number.MAX_SAFE_INTEGER) { + this.#changeID = 0; + } + return ++this.#changeID; + } + #updateClock(clocks, newClock) { + if (newClock == null) { + return; + } + const [absoluteWatchRoot, clockSpec] = newClock; + const relativeFsRoot = this.#pathUtils.absoluteToNormal(absoluteWatchRoot); + clocks.set((0, normalizePathSeparatorsToPosix_1.default)(relativeFsRoot), clockSpec); + } + static H = constants_1.default; +} +exports.default = FileMap; +// TODO: Replace with it.map() from Node 22+ +function mapIterable(it, fn) { + return (function* mapped() { + for (const item of it) { + yield fn(item); + } + })(); +} diff --git a/packages/@expo/metro-file-map/build/lib/FileProcessor.d.ts b/packages/@expo/metro-file-map/build/lib/FileProcessor.d.ts new file mode 100644 index 00000000000000..29a970f11a7a8b --- /dev/null +++ b/packages/@expo/metro-file-map/build/lib/FileProcessor.d.ts @@ -0,0 +1,42 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import type { FileMapPluginWorker, FileMetadata, PerfLogger } from '../types'; +interface ProcessFileRequest { + /** + * Populate metadata[H.SHA1] with the SHA1 of the file's contents. + */ + readonly computeSha1: boolean; + /** + * Only if processing has already required reading the file's contents, return + * the contents as a Buffer - null otherwise. Not supported for batches. 
+ */ + readonly maybeReturnContent: boolean; +} +interface MaybeCodedError extends Error { + code?: string; +} +export declare class FileProcessor { + #private; + constructor(opts: Readonly<{ + maxFilesPerWorker?: number | null; + maxWorkers: number; + pluginWorkers?: readonly FileMapPluginWorker[] | null; + perfLogger?: PerfLogger | null; + rootDir: string; + }>); + processBatch(files: readonly [relativePath: string, FileMetadata][], req: ProcessFileRequest): Promise<{ + errors: { + normalFilePath: string; + error: MaybeCodedError; + }[]; + }>; + processRegularFile(normalPath: string, fileMetadata: FileMetadata, req: ProcessFileRequest): { + content: Buffer | undefined | null; + } | null; + end(): Promise<void>; +} +export {}; diff --git a/packages/@expo/metro-file-map/build/lib/FileProcessor.js b/packages/@expo/metro-file-map/build/lib/FileProcessor.js new file mode 100644 index 00000000000000..b4b23c7d0a91b9 --- /dev/null +++ b/packages/@expo/metro-file-map/build/lib/FileProcessor.js @@ -0,0 +1,180 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ?
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FileProcessor = void 0; +const jest_worker_1 = require("jest-worker"); +const path_1 = require("path"); +const constants_1 = __importDefault(require("../constants")); +const worker_1 = require("../worker"); +const RootPathUtils_1 = require("./RootPathUtils"); +const debug = require('debug')('Metro:FileMap'); +const NODE_MODULES_SEP = 'node_modules' + path_1.sep; +const MAX_FILES_PER_WORKER = 100; +class FileProcessor { + #maxFilesPerWorker; + #maxWorkers; + #perfLogger; + #pluginWorkers; + #inBandWorker; + #rootPathUtils; + constructor(opts) { + this.#maxFilesPerWorker = opts.maxFilesPerWorker ?? MAX_FILES_PER_WORKER; + this.#maxWorkers = opts.maxWorkers; + this.#pluginWorkers = opts.pluginWorkers ?? []; + this.#inBandWorker = new worker_1.Worker({ + plugins: this.#pluginWorkers.map((plugin) => plugin.worker), + }); + this.#perfLogger = opts.perfLogger; + this.#rootPathUtils = new RootPathUtils_1.RootPathUtils(opts.rootDir); + } + async processBatch(files, req) { + const errors = []; + const workerJobs = files + .map(([normalFilePath, fileMetadata]) => { + const maybeWorkerInput = this.#getWorkerInput(normalFilePath, fileMetadata, req); + if (!maybeWorkerInput) { + return null; + } + return [maybeWorkerInput, fileMetadata]; + }) + .filter((x) => x != null); + const numWorkers = Math.min(this.#maxWorkers, Math.ceil(workerJobs.length / this.#maxFilesPerWorker)); + const batchWorker = this.#getBatchWorker(numWorkers); + if (req.maybeReturnContent) { + throw new Error('Batch processing does not support returning file contents'); + } + await Promise.all(workerJobs.map(([workerInput, fileMetadata]) => { + return batchWorker + .processFile(workerInput) + .then((reply) => processWorkerReply(reply, workerInput.pluginsToRun, fileMetadata)) + .catch((error) => errors.push({ + normalFilePath: this.#rootPathUtils.absoluteToNormal(workerInput.filePath), + error: 
normalizeWorkerError(error), + })); + })); + await batchWorker.end(); + return { errors }; + } + processRegularFile(normalPath, fileMetadata, req) { + const workerInput = this.#getWorkerInput(normalPath, fileMetadata, req); + return workerInput + ? { + content: processWorkerReply(this.#inBandWorker.processFile(workerInput), workerInput.pluginsToRun, fileMetadata), + } + : null; + } + #getWorkerInput(normalPath, fileMetadata, req) { + if (fileMetadata[constants_1.default.SYMLINK] !== 0) { + // Only process regular files + return null; + } + const computeSha1 = req.computeSha1 && fileMetadata[constants_1.default.SHA1] == null; + const { maybeReturnContent } = req; + const nodeModulesIdx = normalPath.indexOf(NODE_MODULES_SEP); + // Path may begin 'node_modules/' or contain '/node_modules/'. + const isNodeModules = nodeModulesIdx === 0 || (nodeModulesIdx > 0 && normalPath[nodeModulesIdx - 1] === path_1.sep); + // Indices of plugins with a passing filter + const pluginsToRun = this.#pluginWorkers?.reduce((prev, plugin, idx) => { + if (plugin.filter({ isNodeModules, normalPath })) { + prev.push(idx); + } + return prev; + }, []) ?? []; + if (!computeSha1 && pluginsToRun.length === 0) { + // Nothing to process + return null; + } + // Use a cheaper worker configuration for node_modules files, because + // they may never be Haste modules or packages. + // + // Note that we'd only expect node_modules files to reach this point if + // retainAllFiles is true, or they're touched during watch mode. + if (isNodeModules) { + if (computeSha1) { + return { + computeSha1: true, + filePath: this.#rootPathUtils.normalToAbsolute(normalPath), + maybeReturnContent, + pluginsToRun, + }; + } + return null; + } + return { + computeSha1, + filePath: this.#rootPathUtils.normalToAbsolute(normalPath), + maybeReturnContent, + pluginsToRun, + }; + } + /** + * Creates workers or parses files and extracts metadata in-process. 
+ */ + #getBatchWorker(numWorkers) { + if (numWorkers <= 1) { + // In-band worker with the same interface as a Jest worker farm + return { + processFile: async (message) => this.#inBandWorker.processFile(message), + end: async () => { }, + }; + } + const workerPath = require.resolve('../worker'); + debug('Creating worker farm of %d worker threads', numWorkers); + this.#perfLogger?.point('initWorkers_start'); + const jestWorker = new jest_worker_1.Worker(workerPath, { + exposedMethods: ['processFile'], + maxRetries: 3, + numWorkers, + enableWorkerThreads: true, + forkOptions: { + // Don't pass Node arguments down to workers. In particular, avoid + // unnecessarily registering Babel when we're running Metro from + // source (our worker is plain CommonJS). + execArgv: [], + }, + setupArgs: [ + { + plugins: this.#pluginWorkers.map((plugin) => plugin.worker), + }, + ], + }); + this.#perfLogger?.point('initWorkers_end'); + // Only log worker init once + this.#perfLogger = null; + return jestWorker; + } + async end() { } +} +exports.FileProcessor = FileProcessor; +function processWorkerReply(metadata, pluginsRun, fileMetadata) { + fileMetadata[constants_1.default.VISITED] = 1; + const pluginData = metadata.pluginData; + if (pluginData) { + for (const [i, pluginIdx] of pluginsRun.entries()) { + fileMetadata[constants_1.default.PLUGINDATA + pluginIdx] = pluginData[i]; + } + } + if (metadata.sha1 != null) { + fileMetadata[constants_1.default.SHA1] = metadata.sha1; + } + return metadata.content; +} +function normalizeWorkerError(mixedError) { + if (mixedError == null || + typeof mixedError !== 'object' || + mixedError.message == null || + mixedError.stack == null) { + const error = new Error(mixedError); + error.stack = ''; // Remove stack for stack-less errors. 
+ return error; + } + return mixedError; +} diff --git a/packages/@expo/metro-file-map/build/lib/FileSystemChangeAggregator.d.ts b/packages/@expo/metro-file-map/build/lib/FileSystemChangeAggregator.d.ts new file mode 100644 index 00000000000000..e5bc9e752e37b4 --- /dev/null +++ b/packages/@expo/metro-file-map/build/lib/FileSystemChangeAggregator.d.ts @@ -0,0 +1,18 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import type { CanonicalPath, FileMetadata, FileSystemListener, ReadonlyFileSystemChanges } from '../types'; +export declare class FileSystemChangeAggregator implements FileSystemListener { + #private; + directoryAdded(canonicalPath: CanonicalPath): void; + directoryRemoved(canonicalPath: CanonicalPath): void; + fileAdded(canonicalPath: CanonicalPath, data: FileMetadata): void; + fileModified(canonicalPath: CanonicalPath, oldData: FileMetadata, newData: FileMetadata): void; + fileRemoved(canonicalPath: CanonicalPath, data: FileMetadata): void; + getSize(): number; + getView(): ReadonlyFileSystemChanges; + getMappedView<T>(metadataMapFn: (metadata: FileMetadata) => T): ReadonlyFileSystemChanges<T>; +} diff --git a/packages/@expo/metro-file-map/build/lib/FileSystemChangeAggregator.js b/packages/@expo/metro-file-map/build/lib/FileSystemChangeAggregator.js new file mode 100644 index 00000000000000..c22883e3b0c31a --- /dev/null +++ b/packages/@expo/metro-file-map/build/lib/FileSystemChangeAggregator.js @@ -0,0 +1,114 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree.
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FileSystemChangeAggregator = void 0; +class FileSystemChangeAggregator { + // Mutually exclusive with removedDirectories + #addedDirectories = new Set(); + // Mutually exclusive with addedDirectories + #removedDirectories = new Set(); + // Mutually exclusive with modified and removed files + #addedFiles = new Map(); + // Mutually exclusive with added and removed files + #modifiedFiles = new Map(); + // Mutually exclusive with added and modified files + #removedFiles = new Map(); + // Removed files must be paired with the file's metadata the last time it was + // observable by consumers - ie, immediately *before* this batch. To report + // this accurately with minimal overhead, we'll note the current metadata of + // a file the first time it is modified or removed within a batch. If it is + // re-added, modified and removed again, we still have the initial metadata. + // This is particularly important if, say, a regular file is replaced by a + // symlink, or vice-versa. + #initialMetadata = new Map(); + directoryAdded(canonicalPath) { + // Only add to newDirectories if this directory wasn't previously removed + // (i.e., it's truly new). If it was removed and re-added, the net effect + // is no directory change. + if (!this.#removedDirectories.delete(canonicalPath)) { + this.#addedDirectories.add(canonicalPath); + } + } + directoryRemoved(canonicalPath) { + if (!this.#addedDirectories.delete(canonicalPath)) { + this.#removedDirectories.add(canonicalPath); + } + } + fileAdded(canonicalPath, data) { + if (this.#removedFiles.delete(canonicalPath)) { + // File was removed then re-added in the same batch - treat as modification + this.#modifiedFiles.set(canonicalPath, data); + } + else { + // New file + this.#addedFiles.set(canonicalPath, data); + } + } + fileModified(canonicalPath, oldData, newData) { + if (this.#addedFiles.has(canonicalPath)) { + // File did not exist before this batch. 
Further modification only + // updates metadata + this.#addedFiles.set(canonicalPath, newData); + } + else { + if (!this.#initialMetadata.has(canonicalPath)) { + this.#initialMetadata.set(canonicalPath, oldData); + } + this.#modifiedFiles.set(canonicalPath, newData); + } + } + fileRemoved(canonicalPath, data) { + // Check if this file was added in the same batch + if (!this.#addedFiles.delete(canonicalPath)) { + let initialData = this.#initialMetadata.get(canonicalPath); + if (!initialData) { + initialData = data; + this.#initialMetadata.set(canonicalPath, initialData); + } + // File was not added in this batch, so add to removed with last metadata + this.#modifiedFiles.delete(canonicalPath); + this.#removedFiles.set(canonicalPath, initialData); + } + // else: File was added then removed in the same batch - no net change + } + getSize() { + return (this.#addedDirectories.size + + this.#removedDirectories.size + + this.#addedFiles.size + + this.#modifiedFiles.size + + this.#removedFiles.size); + } + getView() { + return { + addedDirectories: this.#addedDirectories, + removedDirectories: this.#removedDirectories, + addedFiles: this.#addedFiles, + modifiedFiles: this.#modifiedFiles, + removedFiles: this.#removedFiles, + }; + } + getMappedView(metadataMapFn) { + return { + addedDirectories: this.#addedDirectories, + removedDirectories: this.#removedDirectories, + addedFiles: mapIterable(this.#addedFiles, metadataMapFn), + modifiedFiles: mapIterable(this.#modifiedFiles, metadataMapFn), + removedFiles: mapIterable(this.#removedFiles, metadataMapFn), + }; + } +} +exports.FileSystemChangeAggregator = FileSystemChangeAggregator; +function mapIterable(map, metadataMapFn) { + return { + *[Symbol.iterator]() { + for (const [path, metadata] of map) { + yield [path, metadataMapFn(metadata)]; + } + }, + }; +} diff --git a/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts b/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts new file mode 100644 index 
00000000000000..946f6475090fbb --- /dev/null +++ b/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts @@ -0,0 +1,21 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +export declare class RootPathUtils { + #private; + constructor(rootDir: string); + getBasenameOfNthAncestor(n: number): string; + getParts(): readonly string[]; + absoluteToNormal(absolutePath: string): string; + normalToAbsolute(normalPath: string): string; + relativeToNormal(relativePath: string): string; + getAncestorOfRootIdx(normalPath: string): number | null; + joinNormalToRelative(normalPath: string, relativePath: string): { + normalPath: string; + collapsedSegments: number; + }; + relative(from: string, to: string): string; +} diff --git a/packages/@expo/metro-file-map/build/lib/RootPathUtils.js b/packages/@expo/metro-file-map/build/lib/RootPathUtils.js new file mode 100644 index 00000000000000..dd96b48e80caf3 --- /dev/null +++ b/packages/@expo/metro-file-map/build/lib/RootPathUtils.js @@ -0,0 +1,259 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RootPathUtils = void 0; +const invariant_1 = __importDefault(require("invariant")); +const path_1 = __importDefault(require("path")); +/** + * This module provides path utility functions - similar to `node:path` - + * optimised for Metro's use case (many paths, few roots) under assumptions + * typically safe to make within Metro - namely: + * + * - All input path separators must be system-native. 
+ * - Double/redundant separators like '/foo//bar' are not supported. + * - All characters except separators are assumed to be valid in path segments. + * + * - A "well-formed" path is any path following the rules above. + * - A "normal" path is a root-relative well-formed path with no redundant + * indirections. Normal paths have no leading './`, and the normal path of + * the root is the empty string. + * + * Output and input paths are at least well-formed (normal where indicated by + * naming). + * + * Trailing path separators are preserved, except for fs roots in + * normalToAbsolute (fs roots always have a trailing separator), and the + * project root in absoluteToNormal and relativeToNormal (the project root is + * always the empty string, and is always a directory, so a trailing separator + * is redundant). + * + * As of Node 20, absoluteToNormal is ~8x faster than `path.relative` and + * `normalToAbsolute` is ~20x faster than `path.resolve`, benchmarked on the + * real inputs from building FB's product graph. Some well-formed inputs + * (e.g., /project/./foo/../bar), are handled but not optimised, and we fall + * back to `node:path` equivalents in those cases. + */ +const UP_FRAGMENT_SEP = '..' + path_1.default.sep; +const SEP_UP_FRAGMENT = path_1.default.sep + '..'; +const UP_FRAGMENT_SEP_LENGTH = UP_FRAGMENT_SEP.length; +const CURRENT_FRAGMENT = '.' + path_1.default.sep; +class RootPathUtils { + #rootDir; + #rootDirnames; + #rootParts; + #rootDepth; + constructor(rootDir) { + this.#rootDir = rootDir; + const rootDirnames = []; + for (let next = rootDir, previous = null; previous !== next; previous = next, next = path_1.default.dirname(next)) { + rootDirnames.push(next); + } + this.#rootDirnames = rootDirnames; + this.#rootDepth = rootDirnames.length - 1; + const rootParts = rootDir.split(path_1.default.sep); + // If rootDir is a filesystem root (C:\ or /), it will end in a separator and + // give a spurious empty entry at the end of rootParts. 
+ if (this.#rootDepth === 0) { + rootParts.pop(); + } + this.#rootParts = rootParts; + } + getBasenameOfNthAncestor(n) { + return this.#rootParts[this.#rootParts.length - 1 - n]; + } + getParts() { + return this.#rootParts; + } + // absolutePath may be any well-formed absolute path. + absoluteToNormal(absolutePath) { + let endOfMatchingPrefix = 0; + let lastMatchingPartIdx = 0; + for (let nextPart = this.#rootParts[0], nextLength = nextPart.length; nextPart != null && + // Check that absolutePath is equal to nextPart + '/' or ends with + // nextPart, starting from endOfMatchingPrefix. + absolutePath.startsWith(nextPart, endOfMatchingPrefix) && + (absolutePath.length === endOfMatchingPrefix + nextLength || + absolutePath[endOfMatchingPrefix + nextLength] === path_1.default.sep);) { + // Move our matching pointer forward and load the next part. + endOfMatchingPrefix += nextLength + 1; + nextPart = this.#rootParts[++lastMatchingPartIdx]; + nextLength = nextPart?.length ?? 0; + } + // If our root is /project/root and we're given /project/bar/foo.js, we + // have matched up to '/project', and will need to return a path + // beginning '../' (one prepended indirection, to go up from 'root'). + // + // If we're given /project/../project2/otherroot, we have one level of + // indirection up to prepend in the same way as above. There's another + // explicit indirection already present in the input - we'll account for + // that in tryCollapseIndirectionsInSuffix. + const upIndirectionsToPrepend = this.#rootParts.length - lastMatchingPartIdx; + return (this.#tryCollapseIndirectionsInSuffix(absolutePath, endOfMatchingPrefix, upIndirectionsToPrepend)?.collapsedPath ?? this.#slowAbsoluteToNormal(absolutePath)); + } + #slowAbsoluteToNormal(absolutePath) { + const endsWithSep = absolutePath.endsWith(path_1.default.sep); + const result = path_1.default.relative(this.#rootDir, absolutePath); + return endsWithSep && !result.endsWith(path_1.default.sep) ? 
result + path_1.default.sep : result; + } + // `normalPath` is assumed to be normal (root-relative, no redundant + // indirection), per the definition above. + normalToAbsolute(normalPath) { + let left = this.#rootDir; + let i = 0; + let pos = 0; + while (normalPath.startsWith(UP_FRAGMENT_SEP, pos) || + (normalPath.endsWith('..') && normalPath.length === 2 + pos)) { + left = this.#rootDirnames[i === this.#rootDepth ? this.#rootDepth : ++i]; + pos += UP_FRAGMENT_SEP_LENGTH; + } + const right = pos === 0 ? normalPath : normalPath.slice(pos); + if (right.length === 0) { + return left; + } + // left may already end in a path separator only if it is a filesystem root, + // '/' or 'X:\'. + if (i === this.#rootDepth) { + return left + right; + } + return left + path_1.default.sep + right; + } + relativeToNormal(relativePath) { + return (this.#tryCollapseIndirectionsInSuffix(relativePath, 0, 0)?.collapsedPath ?? + path_1.default.relative(this.#rootDir, path_1.default.join(this.#rootDir, relativePath))); + } + // If a path is a direct ancestor of the project root (or the root itself), + // return a number with the degrees of separation, e.g. root=0, parent=1,.. + // or null otherwise. + getAncestorOfRootIdx(normalPath) { + if (normalPath === '') { + return 0; + } + if (normalPath === '..') { + return 1; + } + // Otherwise a *normal* path is only a root ancestor if it is a sequence of + // '../' segments followed by '..', so the length tells us the number of + // up fragments. + if (normalPath.endsWith(SEP_UP_FRAGMENT)) { + return (normalPath.length + 1) / 3; + } + return null; + } + // Takes a normal and relative path, and joins them efficiently into a normal + // path, including collapsing trailing '..' in the first part with leading + // project root segments in the relative part. 
+ joinNormalToRelative(normalPath, relativePath) { + if (normalPath === '') { + return { collapsedSegments: 0, normalPath: relativePath }; + } + if (relativePath === '') { + return { collapsedSegments: 0, normalPath }; + } + const left = normalPath + path_1.default.sep; + const rawPath = left + relativePath; + if (normalPath === '..' || normalPath.endsWith(SEP_UP_FRAGMENT)) { + const collapsed = this.#tryCollapseIndirectionsInSuffix(rawPath, 0, 0); + (0, invariant_1.default)(collapsed != null, 'Failed to collapse'); + return { + collapsedSegments: collapsed.collapsedSegments, + normalPath: collapsed.collapsedPath, + }; + } + return { + collapsedSegments: 0, + normalPath: rawPath, + }; + } + relative(from, to) { + return path_1.default.relative(from, to); + } + // Internal: Tries to collapse sequences like `../root/foo` for root + // `/project/root` down to the normal 'foo'. + #tryCollapseIndirectionsInSuffix(fullPath, // A string ending with the relative path to process + startOfRelativePart, // Index of the start of part to process + implicitUpIndirections // 0=root-relative, 1=dirname(root)-relative... + ) { + let totalUpIndirections = implicitUpIndirections; + let collapsedSegments = 0; + // Allow any sequence of indirection fragments at the start of the + // unmatched suffix e.g /project/[../../foo], but bail out to Node's + // path.relative if we find a possible indirection after any later segment, + // or on any "./" that isn't a "../". + for (let pos = startOfRelativePart;; pos += UP_FRAGMENT_SEP_LENGTH) { + const nextIndirection = fullPath.indexOf(CURRENT_FRAGMENT, pos); + if (nextIndirection === -1) { + // If we have any indirections, they may "collapse" if a subsequent + // segment re-enters a directory we had previously exited, e.g: + // /project/root/../root/foo should collapse to /project/root/foo' and + // return foo, not ../root/foo. 
+ // + // We match each segment following redirections, in turn, against the + // part of the root path they may collapse into, and break on the first + // mismatch. + while (totalUpIndirections > 0) { + const segmentToMaybeCollapse = this.#rootParts[this.#rootParts.length - totalUpIndirections]; + if (fullPath.startsWith(segmentToMaybeCollapse, pos) && + // The following character should be either a separator or end of + // string + (fullPath.length === segmentToMaybeCollapse.length + pos || + fullPath[segmentToMaybeCollapse.length + pos] === path_1.default.sep)) { + pos += segmentToMaybeCollapse.length + 1; + collapsedSegments++; + totalUpIndirections--; + } + else { + break; + } + } + // After collapsing we may have no more segments remaining (following + // '..' indirections). Ensure that we don't drop or add a trailing + // separator in this case by taking .slice(pos-1). In any other case, + // we know that fullPath[pos] is a separator. + if (pos >= fullPath.length) { + return { + collapsedPath: totalUpIndirections > 0 + ? UP_FRAGMENT_SEP.repeat(totalUpIndirections - 1) + '..' + fullPath.slice(pos - 1) + : '', + collapsedSegments, + }; + } + const right = pos > 0 ? fullPath.slice(pos) : fullPath; + if (right === '..' && totalUpIndirections >= this.#rootParts.length - 1) { + // If we have no right side (or an indirection that would take us + // below the root), just ensure we don't include a trailing separtor. + return { + collapsedPath: UP_FRAGMENT_SEP.repeat(totalUpIndirections).slice(0, -1), + collapsedSegments, + }; + } + // Optimisation for the common case, saves a concatenation. + if (totalUpIndirections === 0) { + return { collapsedPath: right, collapsedSegments }; + } + return { + collapsedPath: UP_FRAGMENT_SEP.repeat(totalUpIndirections) + right, + collapsedSegments, + }; + } + // Cap the number of indirections at the total number of root segments. + // File systems treat '..' at the root as '.'. 
+ if (totalUpIndirections < this.#rootParts.length - 1) { + totalUpIndirections++; + } + if (nextIndirection !== pos + 1 || // Fallback when ./ later in the path, or leading + fullPath[pos] !== '.' // and for anything other than a leading ../ + ) { + return null; + } + } + } +} +exports.RootPathUtils = RootPathUtils; diff --git a/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts b/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts new file mode 100644 index 00000000000000..5929d40345072e --- /dev/null +++ b/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts @@ -0,0 +1,154 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import type { CacheData, FileData, FileMetadata, FileStats, FileSystemListener, LookupResult, MutableFileSystem, Path, ProcessFileFunction } from '../types'; +type DirectoryNode = Map; +type FileNode = FileMetadata; +type MixedNode = FileNode | DirectoryNode; +interface DeserializedSnapshotInput { + rootDir: string; + fileSystemData: DirectoryNode; + processFile: ProcessFileFunction; +} +interface TreeFSOptions { + rootDir: Path; + files?: FileData; + processFile: ProcessFileFunction; +} +interface MatchFilesOptions { + readonly filter?: RegExp | null; + readonly filterCompareAbsolute?: boolean; + readonly filterComparePosix?: boolean; + readonly follow?: boolean; + readonly recursive?: boolean; + readonly rootDir?: Path | null; +} +interface MetadataIteratorOptions { + readonly includeSymlinks: boolean; + readonly includeNodeModules: boolean; +} +/** + * OVERVIEW: + * + * TreeFS is Metro's in-memory representation of the file system. It is + * structured as a tree of non-empty maps and leaves (tuples), with the root + * node representing the given `rootDir`, typically Metro's _project root_ + * (not a filesystem root). 
Map keys are path segments, and branches outside + * the project root are accessed via `'..'`. + * + * EXAMPLE: + * + * For a root dir '/data/project', the file '/data/other/app/index.js' would + * have metadata at #rootNode.get('..').get('other').get('app').get('index.js') + * + * SERIALISATION: + * + * #rootNode is designed to be directly serialisable and directly portable (for + * a given project) between different root directories and operating systems. + * + * SYMLINKS: + * + * Symlinks are represented as nodes whose metadata contains their literal + * target. Literal targets are resolved to normal paths at runtime, and cached. + * If a symlink is encountered during traversal, we restart traversal at the + * root node targeting join(normal symlink target, remaining path suffix). + * + * NODE TYPES: + * + * - A directory (including a parent directory at '..') is represented by a + * `Map` of basenames to any other node type. + * - A file is represented by an `Array` (tuple) of metadata, of which: + * - A regular file has node[H.SYMLINK] === 0 + * - A symlink has node[H.SYMLINK] === 1 or + * typeof node[H.SYMLINK] === 'string', where a string is the literal + * content of the symlink (i.e. from readlink), if known. + * + * TERMINOLOGY: + * + * - mixedPath + * A root-relative or absolute path + * - relativePath + * A root-relative path + * - normalPath + * A root-relative, normalised path (no extraneous '.' 
or '..'), may have a + * single trailing slash + * - canonicalPath + * A root-relative, normalised, real path (no symlinks in dirname), never has + * a trailing slash + */ +export default class TreeFS implements MutableFileSystem { + #private; + constructor(opts: TreeFSOptions); + getSerializableSnapshot(): CacheData['fileSystemData']; + static fromDeserializedSnapshot(args: DeserializedSnapshotInput): TreeFS; + getSize(mixedPath: Path): number | null; + getDifference(files: FileData, options?: { + /** + * Only consider files under this normal subdirectory when computing + * removedFiles. If not provided, all files in the file system are + * considered. + */ + readonly subpath?: string; + }): { + changedFiles: FileData; + removedFiles: Set; + }; + getSha1(mixedPath: Path): string | null; + getOrComputeSha1(mixedPath: Path): Promise<{ + sha1: string; + content?: Buffer; + } | null | undefined>; + exists(mixedPath: Path): boolean; + lookup(mixedPath: Path): LookupResult; + getAllFiles(): Path[]; + linkStats(mixedPath: Path): FileStats | null; + /** + * Given a search context, return a list of file paths matching the query. 
+ * The query matches against normalized paths which start with `./`, + * for example: `a/b.js` -> `./a/b.js` + */ + matchFiles(opts: MatchFilesOptions): Generator; + addOrModify(mixedPath: Path, metadata: FileMetadata, changeListener?: FileSystemListener): void; + bulkAddOrModify(addedOrModifiedFiles: FileData, changeListener?: FileSystemListener): void; + remove(mixedPath: Path, changeListener?: FileSystemListener): void; + /** + * Given a start path (which need not exist), a subpath and type, and + * optionally a 'breakOnSegment', performs the following: + * + * X = mixedStartPath + * do + * if basename(X) === opts.breakOnSegment + * return null + * if X + subpath exists and has type opts.subpathType + * return { + * absolutePath: realpath(X + subpath) + * containerRelativePath: relative(mixedStartPath, X) + * } + * X = dirname(X) + * while X !== dirname(X) + * + * If opts.invalidatedBy is given, collects all absolute, real paths that if + * added or removed may invalidate this result. + * + * Useful for finding the closest package scope (subpath: package.json, + * type f, breakOnSegment: node_modules) or closest potential package root + * (subpath: node_modules/pkg, type: d) in Node.js resolution. + */ + hierarchicalLookup(mixedStartPath: string, subpath: string, opts: { + breakOnSegment: string | null | undefined; + invalidatedBy: Set | null | undefined; + subpathType: 'f' | 'd'; + }): { + absolutePath: string; + containerRelativePath: string; + } | null | undefined; + metadataIterator(opts: MetadataIteratorOptions): Generator<{ + baseName: string; + canonicalPath: string; + metadata: FileMetadata; + }>; +} +export {}; diff --git a/packages/@expo/metro-file-map/build/lib/TreeFS.js b/packages/@expo/metro-file-map/build/lib/TreeFS.js new file mode 100644 index 00000000000000..7605a51ce50df3 --- /dev/null +++ b/packages/@expo/metro-file-map/build/lib/TreeFS.js @@ -0,0 +1,838 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const invariant_1 = __importDefault(require("invariant")); +const path_1 = __importDefault(require("path")); +const constants_1 = __importDefault(require("../constants")); +const RootPathUtils_1 = require("./RootPathUtils"); +function isDirectory(node) { + return node instanceof Map; +} +function isRegularFile(node) { + return node[constants_1.default.SYMLINK] === 0; +} +/** + * OVERVIEW: + * + * TreeFS is Metro's in-memory representation of the file system. It is + * structured as a tree of non-empty maps and leaves (tuples), with the root + * node representing the given `rootDir`, typically Metro's _project root_ + * (not a filesystem root). Map keys are path segments, and branches outside + * the project root are accessed via `'..'`. + * + * EXAMPLE: + * + * For a root dir '/data/project', the file '/data/other/app/index.js' would + * have metadata at #rootNode.get('..').get('other').get('app').get('index.js') + * + * SERIALISATION: + * + * #rootNode is designed to be directly serialisable and directly portable (for + * a given project) between different root directories and operating systems. + * + * SYMLINKS: + * + * Symlinks are represented as nodes whose metadata contains their literal + * target. Literal targets are resolved to normal paths at runtime, and cached. + * If a symlink is encountered during traversal, we restart traversal at the + * root node targeting join(normal symlink target, remaining path suffix). + * + * NODE TYPES: + * + * - A directory (including a parent directory at '..') is represented by a + * `Map` of basenames to any other node type. 
+ * - A file is represented by an `Array` (tuple) of metadata, of which: + * - A regular file has node[H.SYMLINK] === 0 + * - A symlink has node[H.SYMLINK] === 1 or + * typeof node[H.SYMLINK] === 'string', where a string is the literal + * content of the symlink (i.e. from readlink), if known. + * + * TERMINOLOGY: + * + * - mixedPath + * A root-relative or absolute path + * - relativePath + * A root-relative path + * - normalPath + * A root-relative, normalised path (no extraneous '.' or '..'), may have a + * single trailing slash + * - canonicalPath + * A root-relative, normalised, real path (no symlinks in dirname), never has + * a trailing slash + */ +class TreeFS { + #cachedNormalSymlinkTargets = new WeakMap(); + #pathUtils; + #processFile; + #rootDir; + #rootNode = new Map(); + constructor(opts) { + const { rootDir, files, processFile } = opts; + this.#rootDir = rootDir; + this.#pathUtils = new RootPathUtils_1.RootPathUtils(rootDir); + this.#processFile = processFile; + if (files != null) { + this.bulkAddOrModify(files); + } + } + getSerializableSnapshot() { + return this.#cloneTree(this.#rootNode); + } + static fromDeserializedSnapshot(args) { + const { rootDir, fileSystemData, processFile } = args; + const tfs = new TreeFS({ processFile, rootDir }); + tfs.#rootNode = fileSystemData; + return tfs; + } + getSize(mixedPath) { + const fileMetadata = this.#getFileData(mixedPath); + return (fileMetadata && fileMetadata[constants_1.default.SIZE]) ?? 
null; + } + getDifference(files, options) { + const changedFiles = new Map(files); + const removedFiles = new Set(); + const subpath = options?.subpath; + // If a subpath is specified, start iteration from that node + let rootNode = this.#rootNode; + let prefix = ''; + if (subpath != null && subpath !== '') { + const lookupResult = this.#lookupByNormalPath(subpath, { + followLeaf: true, + }); + if (!lookupResult.exists || !isDirectory(lookupResult.node)) { + // Directory doesn't exist, nothing to compare - all files are new + return { changedFiles, removedFiles }; + } + rootNode = lookupResult.node; + prefix = lookupResult.canonicalPath; + } + for (const { canonicalPath, metadata } of this.#metadataIterator(rootNode, { + includeNodeModules: true, + includeSymlinks: true, + }, prefix)) { + const newMetadata = files.get(canonicalPath); + if (newMetadata) { + if (isRegularFile(newMetadata) !== isRegularFile(metadata)) { + // Types differ, file has changed + continue; + } + if (newMetadata[constants_1.default.MTIME] != null && + // TODO: Remove when mtime is null if not populated + newMetadata[constants_1.default.MTIME] !== 0 && + newMetadata[constants_1.default.MTIME] === metadata[constants_1.default.MTIME]) { + // Types and modified time match - not changed. 
+ changedFiles.delete(canonicalPath); + } + else if (newMetadata[constants_1.default.SHA1] != null && + newMetadata[constants_1.default.SHA1] === metadata[constants_1.default.SHA1] && + metadata[constants_1.default.VISITED] === 1) { + // Content matches - update modified time but don't revisit + const updatedMetadata = [...metadata]; + updatedMetadata[constants_1.default.MTIME] = newMetadata[constants_1.default.MTIME]; + changedFiles.set(canonicalPath, updatedMetadata); + } + } + else { + removedFiles.add(canonicalPath); + } + } + return { + changedFiles, + removedFiles, + }; + } + getSha1(mixedPath) { + const fileMetadata = this.#getFileData(mixedPath); + return (fileMetadata && fileMetadata[constants_1.default.SHA1]) ?? null; + } + async getOrComputeSha1(mixedPath) { + const normalPath = this.#normalizePath(mixedPath); + const result = this.#lookupByNormalPath(normalPath, { + followLeaf: true, + }); + if (!result.exists || isDirectory(result.node)) { + return null; + } + const { canonicalPath, node: fileMetadata } = result; + // Empty strings + const existing = fileMetadata[constants_1.default.SHA1]; + if (existing != null && existing.length > 0) { + return { sha1: existing }; + } + // Mutate the metadata we first retrieved. This may be orphaned or about + // to be overwritten if the file changes while we are processing it - + // by only mutating the original metadata, we don't risk caching a stale + // SHA-1 after a change event. + const maybeContent = await this.#processFile(canonicalPath, fileMetadata, { + computeSha1: true, + }); + const sha1 = fileMetadata[constants_1.default.SHA1]; + (0, invariant_1.default)(sha1 != null && sha1.length > 0, "File processing didn't populate a SHA-1 hash for %s", canonicalPath); + return maybeContent + ? 
{ + content: maybeContent, + sha1, + } + : { sha1 }; + } + exists(mixedPath) { + const result = this.#getFileData(mixedPath); + return result != null; + } + lookup(mixedPath) { + const normalPath = this.#normalizePath(mixedPath); + const links = new Set(); + const result = this.#lookupByNormalPath(normalPath, { + collectLinkPaths: links, + followLeaf: true, + }); + if (!result.exists) { + const { canonicalMissingPath } = result; + return { + exists: false, + links, + missing: this.#pathUtils.normalToAbsolute(canonicalMissingPath), + }; + } + const { canonicalPath, node } = result; + const realPath = this.#pathUtils.normalToAbsolute(canonicalPath); + if (isDirectory(node)) { + return { exists: true, links, realPath, type: 'd' }; + } + (0, invariant_1.default)(isRegularFile(node), 'lookup follows symlinks, so should never return one (%s -> %s)', mixedPath, canonicalPath); + return { exists: true, links, realPath, type: 'f', metadata: node }; + } + getAllFiles() { + return Array.from(this.metadataIterator({ + includeNodeModules: true, + includeSymlinks: false, + }), ({ canonicalPath }) => this.#pathUtils.normalToAbsolute(canonicalPath)); + } + linkStats(mixedPath) { + const fileMetadata = this.#getFileData(mixedPath, { followLeaf: false }); + if (fileMetadata == null) { + return null; + } + const fileType = isRegularFile(fileMetadata) ? 'f' : 'l'; + return { + fileType, + modifiedTime: fileMetadata[constants_1.default.MTIME], + size: fileMetadata[constants_1.default.SIZE], + }; + } + /** + * Given a search context, return a list of file paths matching the query. + * The query matches against normalized paths which start with `./`, + * for example: `a/b.js` -> `./a/b.js` + */ + *matchFiles(opts) { + const { filter = null, filterCompareAbsolute = false, filterComparePosix = false, follow = false, recursive = true, rootDir = null, } = opts; + const normalRoot = rootDir == null ? 
'' : this.#normalizePath(rootDir); + const contextRootResult = this.#lookupByNormalPath(normalRoot); + if (!contextRootResult.exists) { + return; + } + const { ancestorOfRootIdx, canonicalPath: rootRealPath, node: contextRoot, parentNode: contextRootParent, } = contextRootResult; + if (!isDirectory(contextRoot)) { + return; + } + const contextRootAbsolutePath = rootRealPath === '' ? this.#rootDir : path_1.default.join(this.#rootDir, rootRealPath); + const prefix = filterComparePosix ? './' : '.' + path_1.default.sep; + const contextRootAbsolutePathForComparison = filterComparePosix && path_1.default.sep !== '/' + ? contextRootAbsolutePath.replaceAll(path_1.default.sep, '/') + : contextRootAbsolutePath; + for (const relativePathForComparison of this.#pathIterator(contextRoot, contextRootParent, ancestorOfRootIdx, { + alwaysYieldPosix: filterComparePosix, + canonicalPathOfRoot: rootRealPath, + follow, + recursive, + subtreeOnly: rootDir != null, + })) { + if (filter == null || + filter.test( + // NOTE(EvanBacon): Ensure files start with `./` for matching purposes + // this ensures packages work across Metro and Webpack (ex: Storybook for React DOM / React Native). + // `a/b.js` -> `./a/b.js` + filterCompareAbsolute === true + ? path_1.default.join(contextRootAbsolutePathForComparison, relativePathForComparison) + : prefix + relativePathForComparison)) { + const relativePath = filterComparePosix === true && path_1.default.sep !== '/' + ? relativePathForComparison.replaceAll('/', path_1.default.sep) + : relativePathForComparison; + yield path_1.default.join(contextRootAbsolutePath, relativePath); + } + } + } + addOrModify(mixedPath, metadata, changeListener) { + const normalPath = this.#normalizePath(mixedPath); + // Walk the tree to find the *real* path of the parent node, creating + // directories as we need. 
+ const parentDirNode = this.#lookupByNormalPath(path_1.default.dirname(normalPath), { + changeListener, + makeDirectories: true, + }); + if (!parentDirNode.exists) { + throw new Error(`TreeFS: Failed to make parent directory entry for ${mixedPath}`); + } + // Normalize the resulting path to account for the parent node being root. + const canonicalPath = this.#normalizePath(parentDirNode.canonicalPath + path_1.default.sep + path_1.default.basename(normalPath)); + this.bulkAddOrModify(new Map([[canonicalPath, metadata]]), changeListener); + } + bulkAddOrModify(addedOrModifiedFiles, changeListener) { + // Optimisation: Bulk FileData are typically clustered by directory, so we + // optimise for that case by remembering the last directory we looked up. + // Experiments with large result sets show this to be significantly (~30%) + // faster than caching all lookups in a Map, and 70% faster than no cache. + let lastDir; + let directoryNode; + for (const [normalPath, metadata] of addedOrModifiedFiles) { + const lastSepIdx = normalPath.lastIndexOf(path_1.default.sep); + const dirname = lastSepIdx === -1 ? '' : normalPath.slice(0, lastSepIdx); + const basename = lastSepIdx === -1 ? normalPath : normalPath.slice(lastSepIdx + 1); + if (directoryNode == null || dirname !== lastDir) { + const lookup = this.#lookupByNormalPath(dirname, { + changeListener, + followLeaf: false, + makeDirectories: true, + }); + if (!lookup.exists) { + // This should only be possible if the input is non-real and + // lookup hits a broken symlink. + throw new Error(`TreeFS: Unexpected error adding ${normalPath}.\nMissing: ` + + lookup.canonicalMissingPath); + } + if (!isDirectory(lookup.node)) { + throw new Error(`TreeFS: Could not add directory ${dirname}, adding ${normalPath}. 
` + + `${dirname} already exists in the file map as a file.`); + } + lastDir = dirname; + directoryNode = lookup.node; + } + if (changeListener != null) { + const existingNode = directoryNode.get(basename); + if (existingNode != null) { + (0, invariant_1.default)(!isDirectory(existingNode), 'Detected addition or modification of file %s, but it is tracked as a non-empty directory', normalPath); + // File already exists - this is a modification + changeListener.fileModified(normalPath, existingNode, metadata); + } + else { + // New file + changeListener.fileAdded(normalPath, metadata); + } + } + directoryNode.set(basename, metadata); + } + } + remove(mixedPath, changeListener) { + const normalPath = this.#normalizePath(mixedPath); + const result = this.#lookupByNormalPath(normalPath, { followLeaf: false }); + if (!result.exists) { + return; + } + const { parentNode, canonicalPath, node } = result; + if (isDirectory(node) && node.size > 0) { + for (const basename of node.keys()) { + this.remove(canonicalPath + path_1.default.sep + basename, changeListener); + } + // Removing the last file will delete this directory + return; + } + if (parentNode != null) { + if (changeListener != null) { + if (isDirectory(node)) { + changeListener.directoryRemoved(canonicalPath); + } + else { + changeListener.fileRemoved(canonicalPath, node); + } + } + parentNode.delete(path_1.default.basename(canonicalPath)); + if (parentNode.size === 0 && parentNode !== this.#rootNode) { + // NB: This isn't the most efficient algorithm - in the case of + // removing the last file in a deep hierarchy it's O(depth^2), but + // that's not expected to be a case common enough to justify + // implementation complexity, or slowing down more common uses of + // _lookupByNormalPath. 
+ this.remove(path_1.default.dirname(canonicalPath), changeListener); + } + } + } + /** + * The core traversal algorithm of TreeFS - takes a normal path and traverses + * through a tree of maps keyed on path segments, returning the node, + * canonical path, and other metadata if successful, or the first missing + * segment otherwise. + * + * When a symlink is encountered, we set a new target of the symlink's + * normalised target path plus the remainder of the original target path. In + * this way, the eventual target path in a successful lookup has all symlinks + * resolved, and gives us the real path "for free". Similarly if a traversal + * fails, we automatically have the real path of the first non-existent node. + * + * Note that this code is extremely hot during resolution, being the most + * expensive part of a file existence check. Benchmark any modifications! + */ + #lookupByNormalPath(requestedNormalPath, opts = { followLeaf: true, makeDirectories: false }) { + // We'll update the target if we hit a symlink. + let targetNormalPath = requestedNormalPath; + // Lazy-initialised set of seen target paths, to detect symlink cycles. + let seen; + // Pointer to the first character of the current path segment in + // targetNormalPath. + let fromIdx = opts.start?.pathIdx ?? 0; + // The parent of the current segment. + let parentNode = opts.start?.node ?? this.#rootNode; + // If a returned node is (an ancestor of) the root, this is the number of + // levels below the root, i.e. '' is 0, '..' is 1, '../..' is 2, otherwise + // null. + let ancestorOfRootIdx = opts.start?.ancestorOfRootIdx ?? 0; + const { collectAncestors, changeListener } = opts; + // Used only when collecting ancestors, to avoid double-counting nodes and + // paths when traversing a symlink takes us back to rootNode and out again. + // This tracks the first character of the first segment not already + // collected. 
+ let unseenPathFromIdx = 0; + while (targetNormalPath.length > fromIdx) { + const nextSepIdx = targetNormalPath.indexOf(path_1.default.sep, fromIdx); + const isLastSegment = nextSepIdx === -1; + const segmentName = isLastSegment + ? targetNormalPath.slice(fromIdx) + : targetNormalPath.slice(fromIdx, nextSepIdx); + const isUnseen = fromIdx >= unseenPathFromIdx; + fromIdx = !isLastSegment ? nextSepIdx + 1 : targetNormalPath.length; + if (segmentName === '.') { + continue; + } + let segmentNode = parentNode.get(segmentName); + // In normal paths all indirections are at the prefix, so we are at the + // nth ancestor of the root iff the path so far is n '..' segments. + if (segmentName === '..' && ancestorOfRootIdx != null) { + ancestorOfRootIdx++; + } + else if (segmentNode != null) { + ancestorOfRootIdx = null; + } + if (segmentNode == null) { + if (opts.makeDirectories !== true && segmentName !== '..') { + return { + canonicalMissingPath: isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1), + exists: false, + missingSegmentName: segmentName, + }; + } + segmentNode = new Map(); + if (opts.makeDirectories === true) { + if (changeListener != null) { + const canonicalPath = isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1); + changeListener.directoryAdded(canonicalPath); + } + parentNode.set(segmentName, segmentNode); + } + } + // We are done if... + if ( + // ...at a directory node and the only subsequent character is `/`, or + (nextSepIdx === targetNormalPath.length - 1 && isDirectory(segmentNode)) || + // there are no subsequent `/`, and this node is anything but a symlink + // we're required to resolve due to followLeaf. + (isLastSegment && + (isDirectory(segmentNode) || isRegularFile(segmentNode) || opts.followLeaf === false))) { + return { + ancestorOfRootIdx, + canonicalPath: isLastSegment ? 
targetNormalPath : targetNormalPath.slice(0, -1), // remove trailing `/` + exists: true, + node: segmentNode, + parentNode, + }; + } + // If the next node is a directory, go into it + if (isDirectory(segmentNode)) { + parentNode = segmentNode; + if (collectAncestors && isUnseen) { + const currentPath = isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1); + collectAncestors.push({ + ancestorOfRootIdx, + node: segmentNode, + normalPath: currentPath, + segmentName, + }); + } + } + else { + const currentPath = isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1); + if (isRegularFile(segmentNode)) { + // Regular file in a directory path + return { + canonicalMissingPath: currentPath, + exists: false, + missingSegmentName: segmentName, + }; + } + // Symlink in a directory path + const normalSymlinkTarget = this.#resolveSymlinkTargetToNormalPath(segmentNode, currentPath); + if (opts.collectLinkPaths) { + opts.collectLinkPaths.add(this.#pathUtils.normalToAbsolute(currentPath)); + } + const remainingTargetPath = isLastSegment ? '' : targetNormalPath.slice(fromIdx); + // Append any subsequent path segments to the symlink target, and reset + // with our new target. + const joinedResult = this.#pathUtils.joinNormalToRelative(normalSymlinkTarget.normalPath, remainingTargetPath); + targetNormalPath = joinedResult.normalPath; + // Two special cases (covered by unit tests): + // + // If the symlink target is the root, the root should be a counted as + // an ancestor. We'd otherwise miss counting it because we normally + // push new ancestors only when entering a directory. + // + // If the symlink target is an ancestor of the root *and* joining it + // with the remaining path results in collapsing segments, e.g: + // '../..' + 'parentofroot/root/foo.js' = 'foo.js', then we must add + // parentofroot and root as ancestors. 
+ if (collectAncestors && + !isLastSegment && + // No-op optimisation to bail out the common case of nothing to do. + (normalSymlinkTarget.ancestorOfRootIdx === 0 || joinedResult.collapsedSegments > 0)) { + let node = this.#rootNode; + let collapsedPath = ''; + const reverseAncestors = []; + for (let i = 0; i <= joinedResult.collapsedSegments && isDirectory(node); i++) { + if ( + // Add the root only if the target is the root or we have + // collapsed segments. + i > 0 || + normalSymlinkTarget.ancestorOfRootIdx === 0 || + joinedResult.collapsedSegments > 0) { + reverseAncestors.push({ + ancestorOfRootIdx: i, + node, + normalPath: collapsedPath, + segmentName: this.#pathUtils.getBasenameOfNthAncestor(i), + }); + } + node = node.get('..') ?? new Map(); + collapsedPath = collapsedPath === '' ? '..' : collapsedPath + path_1.default.sep + '..'; + } + collectAncestors.push(...reverseAncestors.reverse()); + } + // For the purpose of collecting ancestors: Ignore the traversal to + // the symlink target, and start collecting ancestors only + // from the target itself (ie, the basename of the normal target path) + // onwards. 
+ unseenPathFromIdx = normalSymlinkTarget.startOfBasenameIdx; + if (seen == null) { + // Optimisation: set this lazily only when we've encountered a symlink + seen = new Set([requestedNormalPath]); + } + if (seen.has(targetNormalPath)) { + // TODO: Warn `Symlink cycle detected: ${[...seen, node].join(' -> ')}` + return { + canonicalMissingPath: targetNormalPath, + exists: false, + missingSegmentName: segmentName, + }; + } + seen.add(targetNormalPath); + fromIdx = 0; + parentNode = this.#rootNode; + ancestorOfRootIdx = 0; + } + } + (0, invariant_1.default)(parentNode === this.#rootNode, 'Unexpectedly escaped traversal'); + return { + ancestorOfRootIdx: 0, + canonicalPath: targetNormalPath, + exists: true, + node: this.#rootNode, + parentNode: null, + }; + } + /** + * Given a start path (which need not exist), a subpath and type, and + * optionally a 'breakOnSegment', performs the following: + * + * X = mixedStartPath + * do + * if basename(X) === opts.breakOnSegment + * return null + * if X + subpath exists and has type opts.subpathType + * return { + * absolutePath: realpath(X + subpath) + * containerRelativePath: relative(mixedStartPath, X) + * } + * X = dirname(X) + * while X !== dirname(X) + * + * If opts.invalidatedBy is given, collects all absolute, real paths that if + * added or removed may invalidate this result. + * + * Useful for finding the closest package scope (subpath: package.json, + * type f, breakOnSegment: node_modules) or closest potential package root + * (subpath: node_modules/pkg, type: d) in Node.js resolution. 
+ */ + hierarchicalLookup(mixedStartPath, subpath, opts) { + const ancestorsOfInput = []; + const normalPath = this.#normalizePath(mixedStartPath); + const invalidatedBy = opts.invalidatedBy; + const closestLookup = this.#lookupByNormalPath(normalPath, { + collectAncestors: ancestorsOfInput, + collectLinkPaths: invalidatedBy, + }); + if (closestLookup.exists && isDirectory(closestLookup.node)) { + const maybeAbsolutePathMatch = this.#checkCandidateHasSubpath(closestLookup.canonicalPath, subpath, opts.subpathType, invalidatedBy, null); + if (maybeAbsolutePathMatch != null) { + return { + absolutePath: maybeAbsolutePathMatch, + containerRelativePath: '', + }; + } + } + else { + if (invalidatedBy && (!closestLookup.exists || !isDirectory(closestLookup.node))) { + invalidatedBy.add(this.#pathUtils.normalToAbsolute(closestLookup.exists ? closestLookup.canonicalPath : closestLookup.canonicalMissingPath)); + } + if (opts.breakOnSegment != null && + !closestLookup.exists && + closestLookup.missingSegmentName === opts.breakOnSegment) { + return null; + } + } + // Let the "common root" be the nearest common ancestor of this.rootDir + // and the input path. We'll look for a match in two stages: + // 1. Every collected ancestor of the input path, from nearest to furthest, + // that is a descendent of the common root + // 2. The common root, and its ancestors. + let commonRoot = this.#rootNode; + let commonRootDepth = 0; + // Collected ancestors do not include the lookup result itself, so go one + // further if the input path is itself a root ancestor. + if (closestLookup.exists && closestLookup.ancestorOfRootIdx != null) { + commonRootDepth = closestLookup.ancestorOfRootIdx; + (0, invariant_1.default)(isDirectory(closestLookup.node), 'ancestors of the root must be directories'); + commonRoot = closestLookup.node; + } + else { + // Establish the common root by counting the '..' segments at the start + // of the collected ancestors. 
+ for (const ancestor of ancestorsOfInput) { + if (ancestor.ancestorOfRootIdx == null) { + break; + } + commonRootDepth = ancestor.ancestorOfRootIdx; + commonRoot = ancestor.node; + } + } + // Phase 1: Consider descendenants of the common root, from deepest to + // shallowest. + for (let candidateIdx = ancestorsOfInput.length - 1; candidateIdx >= commonRootDepth; --candidateIdx) { + const candidate = ancestorsOfInput[candidateIdx]; + if (candidate.segmentName === opts.breakOnSegment) { + return null; + } + const maybeAbsolutePathMatch = this.#checkCandidateHasSubpath(candidate.normalPath, subpath, opts.subpathType, invalidatedBy, { + ancestorOfRootIdx: candidate.ancestorOfRootIdx, + node: candidate.node, + pathIdx: candidate.normalPath.length > 0 ? candidate.normalPath.length + 1 : 0, + }); + if (maybeAbsolutePathMatch != null) { + // Determine the input path relative to the current candidate. Note + // that the candidate path will always be canonical (real), whereas the + // input may contain symlinks, so the candidate is not necessarily a + // prefix of the input. Use the fact that each remaining candidate + // corresponds to a leading segment of the input normal path, and + // discard the first candidateIdx + 1 segments of the input path. + // + // The next 5 lines are equivalent to (but faster than) + // normalPath.split('/').slice(candidateIdx + 1).join('/'). + let prefixLength = commonRootDepth * 3; // Leading '../' + for (let i = commonRootDepth; i <= candidateIdx; i++) { + prefixLength = normalPath.indexOf(path_1.default.sep, prefixLength + 1); + } + const containerRelativePath = normalPath.slice(prefixLength + 1); + return { + absolutePath: maybeAbsolutePathMatch, + containerRelativePath, + }; + } + } + // Phase 2: Consider the common root and its ancestors + // This will be '', '..', '../..', etc. + let candidateNormalPath = commonRootDepth > 0 ? 
normalPath.slice(0, 3 * commonRootDepth - 1) : ''; + const remainingNormalPath = normalPath.slice(commonRootDepth * 3); + let nextNode = commonRoot; + let depthBelowCommonRoot = 0; + while (isDirectory(nextNode)) { + const maybeAbsolutePathMatch = this.#checkCandidateHasSubpath(candidateNormalPath, subpath, opts.subpathType, invalidatedBy, null); + if (maybeAbsolutePathMatch != null) { + const rootDirParts = this.#pathUtils.getParts(); + const relativeParts = depthBelowCommonRoot > 0 + ? rootDirParts.slice(-(depthBelowCommonRoot + commonRootDepth), commonRootDepth > 0 ? -commonRootDepth : undefined) + : []; + if (remainingNormalPath !== '') { + relativeParts.push(remainingNormalPath); + } + return { + absolutePath: maybeAbsolutePathMatch, + containerRelativePath: relativeParts.join(path_1.default.sep), + }; + } + depthBelowCommonRoot++; + candidateNormalPath = + candidateNormalPath === '' ? '..' : candidateNormalPath + path_1.default.sep + '..'; + nextNode = nextNode.get('..'); + } + return null; + } + #checkCandidateHasSubpath(normalCandidatePath, subpath, subpathType, invalidatedBy, start) { + const lookupResult = this.#lookupByNormalPath(this.#pathUtils.joinNormalToRelative(normalCandidatePath, subpath).normalPath, { + collectLinkPaths: invalidatedBy, + }); + if (lookupResult.exists && + // Should be a Map iff subpathType is directory + isDirectory(lookupResult.node) === (subpathType === 'd')) { + return this.#pathUtils.normalToAbsolute(lookupResult.canonicalPath); + } + else if (invalidatedBy) { + invalidatedBy.add(this.#pathUtils.normalToAbsolute(lookupResult.exists ? 
lookupResult.canonicalPath : lookupResult.canonicalMissingPath)); + } + return null; + } + *metadataIterator(opts) { + yield* this.#metadataIterator(this.#rootNode, opts); + } + *#metadataIterator(rootNode, opts, prefix = '') { + for (const [name, node] of rootNode) { + if (!opts.includeNodeModules && isDirectory(node) && name === 'node_modules') { + continue; + } + const prefixedName = prefix === '' ? name : prefix + path_1.default.sep + name; + if (isDirectory(node)) { + yield* this.#metadataIterator(node, opts, prefixedName); + } + else if (isRegularFile(node) || opts.includeSymlinks) { + yield { baseName: name, canonicalPath: prefixedName, metadata: node }; + } + } + } + #normalizePath(relativeOrAbsolutePath) { + return path_1.default.isAbsolute(relativeOrAbsolutePath) + ? this.#pathUtils.absoluteToNormal(relativeOrAbsolutePath) + : this.#pathUtils.relativeToNormal(relativeOrAbsolutePath); + } + *#directoryNodeIterator(node, parent, ancestorOfRootIdx) { + if (ancestorOfRootIdx != null && ancestorOfRootIdx > 0 && parent) { + yield [this.#pathUtils.getBasenameOfNthAncestor(ancestorOfRootIdx - 1), parent]; + } + yield* node.entries(); + } + /** + * Enumerate paths under a given node, including symlinks and through + * symlinks (if `follow` is enabled). + */ + *#pathIterator(iterationRootNode, iterationRootParentNode, ancestorOfRootIdx, opts, pathPrefix = '', followedLinks = new Set()) { + const pathSep = opts.alwaysYieldPosix ? '/' : path_1.default.sep; + const prefixWithSep = pathPrefix === '' ? pathPrefix : pathPrefix + pathSep; + for (const [name, node] of this.#directoryNodeIterator(iterationRootNode, iterationRootParentNode, ancestorOfRootIdx)) { + if (opts.subtreeOnly && name === '..') { + continue; + } + const nodePath = prefixWithSep + name; + if (!isDirectory(node)) { + if (isRegularFile(node)) { + // regular file + yield nodePath; + } + else { + // symlink + const nodePathWithSystemSeparators = pathSep === path_1.default.sep ? 
nodePath : nodePath.replaceAll(pathSep, path_1.default.sep); + // Although both paths are normal, the node path may begin '..' so we + // can't simply concatenate. + const normalPathOfSymlink = path_1.default.join(opts.canonicalPathOfRoot, nodePathWithSystemSeparators); + // We can't resolve the symlink directly here because we only have + // its normal path, and we need a canonical path for resolution + // (imagine our normal path contains a symlink 'bar' -> '.', and we + // are at /foo/bar/baz where baz -> '..' - that should resolve to + // /foo, not /foo/bar). We *can* use _lookupByNormalPath to walk to + // the canonical symlink, and then to its target. + const resolved = this.#lookupByNormalPath(normalPathOfSymlink, { + followLeaf: true, + }); + if (!resolved.exists) { + // Symlink goes nowhere, nothing to report. + continue; + } + const target = resolved.node; + if (!isDirectory(target)) { + // Symlink points to a file, just yield the path of the symlink. + yield nodePath; + } + else if (opts.recursive && opts.follow && !followedLinks.has(node)) { + // Symlink points to a directory - iterate over its contents using + // the path where we found the symlink as a prefix. + yield* this.#pathIterator(target, resolved.parentNode, resolved.ancestorOfRootIdx, opts, nodePath, new Set([...followedLinks, node])); + } + } + } + else if (opts.recursive) { + yield* this.#pathIterator(node, iterationRootParentNode, ancestorOfRootIdx != null && ancestorOfRootIdx > 0 ? 
ancestorOfRootIdx - 1 : null, opts, nodePath, followedLinks); + } + } + } + #resolveSymlinkTargetToNormalPath(symlinkNode, canonicalPathOfSymlink) { + const cachedResult = this.#cachedNormalSymlinkTargets.get(symlinkNode); + if (cachedResult != null) { + return cachedResult; + } + const literalSymlinkTarget = symlinkNode[constants_1.default.SYMLINK]; + (0, invariant_1.default)(typeof literalSymlinkTarget === 'string', 'Expected symlink target to be populated.'); + const absoluteSymlinkTarget = path_1.default.resolve(this.#rootDir, canonicalPathOfSymlink, '..', // Symlink target is relative to its containing directory. + literalSymlinkTarget // May be absolute, in which case the above are ignored + ); + const normalSymlinkTarget = path_1.default.relative(this.#rootDir, absoluteSymlinkTarget); + const result = { + ancestorOfRootIdx: this.#pathUtils.getAncestorOfRootIdx(normalSymlinkTarget), + normalPath: normalSymlinkTarget, + startOfBasenameIdx: normalSymlinkTarget.lastIndexOf(path_1.default.sep) + 1, + }; + this.#cachedNormalSymlinkTargets.set(symlinkNode, result); + return result; + } + #getFileData(filePath, opts = { followLeaf: true }) { + const normalPath = this.#normalizePath(filePath); + const result = this.#lookupByNormalPath(normalPath, { + followLeaf: opts.followLeaf, + }); + if (!result.exists || isDirectory(result.node)) { + return null; + } + return result.node; + } + #cloneTree(root) { + const clone = new Map(); + for (const [name, node] of root) { + if (isDirectory(node)) { + clone.set(name, this.#cloneTree(node)); + } + else { + clone.set(name, [...node]); + } + } + return clone; + } +} +exports.default = TreeFS; diff --git a/packages/@expo/metro-file-map/build/lib/checkWatchmanCapabilities.d.ts b/packages/@expo/metro-file-map/build/lib/checkWatchmanCapabilities.d.ts new file mode 100644 index 00000000000000..7aeab002ce6bc4 --- /dev/null +++ b/packages/@expo/metro-file-map/build/lib/checkWatchmanCapabilities.d.ts @@ -0,0 +1,9 @@ +/** + * Copyright 
"use strict";
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = checkWatchmanCapabilities;
const child_process_1 = require("child_process");
const util_1 = require("util");
/**
 * Queries the local `watchman` client for its capability list and verifies
 * that every entry of `requiredCapabilities` is supported.
 *
 * @param requiredCapabilities Capability names that must all be present.
 * @returns The installed Watchman version on success.
 * @throws If Watchman is not on PATH, its output cannot be parsed, the
 *         response has an unexpected shape, or any capability is missing.
 */
async function checkWatchmanCapabilities(requiredCapabilities) {
    const run = (0, util_1.promisify)(child_process_1.execFile);
    let stdout;
    try {
        ({ stdout } = await run('watchman', [
            'list-capabilities',
            '--output-encoding=json',
            '--no-pretty',
            '--no-spawn', // The client can answer this, so don't spawn a server
        ]));
    }
    catch (error) {
        if (error?.code === 'ENOENT') {
            throw new Error('Watchman is not installed or not available on PATH');
        }
        throw error;
    }
    let response;
    try {
        response = JSON.parse(stdout);
    }
    catch {
        throw new Error('Failed to parse response from `watchman list-capabilities`');
    }
    // The response must be an object carrying a string version and an array of
    // capability names - anything else indicates an incompatible client.
    const hasExpectedShape = response != null &&
        typeof response === 'object' &&
        typeof response.version === 'string' &&
        Array.isArray(response.capabilities);
    if (!hasExpectedShape) {
        throw new Error('Unexpected response from `watchman list-capabilities`');
    }
    const { version } = response;
    const available = new Set(response.capabilities);
    const missing = requiredCapabilities.filter((capability) => !available.has(capability));
    if (missing.length > 0) {
        throw new Error(`The installed version of Watchman (${version}) is missing required capabilities: ${missing.join(', ')}`);
    }
    return { version };
}
"use strict";
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = __importDefault(require("path"));
// Converts system path separators to POSIX ('/') separators. On POSIX hosts
// this is the identity function; on Windows it rewrites every backslash. The
// branch is taken once at module load so the per-call cost stays minimal.
const normalizePathSeparatorsToPosix = path_1.default.sep === '/'
    ? (filePath) => filePath
    : (filePath) => filePath.replace(/\\/g, '/');
exports.default = normalizePathSeparatorsToPosix;
"use strict";
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = __importDefault(require("path"));
// Converts POSIX ('/') separators to the host system's separator. On POSIX
// hosts this is the identity function; on Windows it rewrites every forward
// slash. The branch is taken once at module load, not per call.
const normalizePathSeparatorsToSystem = path_1.default.sep === '/'
    ? (filePath) => filePath
    : (filePath) => filePath.replace(/\//g, path_1.default.sep);
exports.default = normalizePathSeparatorsToSystem;
"use strict";
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = rootRelativeCacheKeys;
const crypto_1 = require("crypto");
const RootPathUtils_1 = require("./RootPathUtils");
const normalizePathSeparatorsToPosix_1 = __importDefault(require("./normalizePathSeparatorsToPosix"));
// Small helper: hex md5 of a string (cache keying only, not security).
const md5Hex = (data) => (0, crypto_1.createHash)('md5').update(data).digest('hex');
/**
 * Derives two cache-key components from the build parameters:
 * - `rootDirHash`: hash of the POSIX-normalized project root path, and
 * - `relativeConfigHash`: hash of every other parameter (with `roots` made
 *   root-relative) plus each plugin's own cache key,
 * so that the on-disk cache is invalidated whenever configuration changes.
 *
 * @param buildParameters The file-map build parameters to fingerprint.
 * @returns An object with `rootDirHash` and `relativeConfigHash` hex digests.
 * @throws If an unrecognised parameter key is encountered.
 */
function rootRelativeCacheKeys(buildParameters) {
    const { rootDir, plugins, ...otherParameters } = buildParameters;
    const rootDirHash = md5Hex((0, normalizePathSeparatorsToPosix_1.default)(rootDir));
    const pathUtils = new RootPathUtils_1.RootPathUtils(rootDir);
    const cacheComponents = [];
    // Keys are sorted so the resulting hash is independent of object key order.
    for (const key of Object.keys(otherParameters).sort()) {
        switch (key) {
            case 'roots':
                // Store roots relative to rootDir so moving the whole project
                // does not unnecessarily invalidate the cache.
                cacheComponents.push(buildParameters[key].map((root) => (0, normalizePathSeparatorsToPosix_1.default)(pathUtils.absoluteToNormal(root))));
                break;
            case 'cacheBreaker':
            case 'extensions':
            case 'computeSha1':
            case 'enableSymlinks':
            case 'forceNodeFilesystemAPI':
            case 'retainAllFiles':
                cacheComponents.push(buildParameters[key] ?? null);
                break;
            case 'ignorePattern':
                // RegExp is not JSON-serializable; use its source string form.
                cacheComponents.push(buildParameters[key].toString());
                break;
            default:
                // Residue of a TypeScript exhaustiveness check (`key: never`).
                key;
                throw new Error('Unrecognised key in build parameters: ' + key);
        }
    }
    for (const plugin of plugins) {
        cacheComponents.push(plugin.getCacheKey());
    }
    // JSON.stringify is stable here because we only deal in (nested) arrays of
    // primitives. Use a different approach if this is expanded to include
    // objects/Sets/Maps, etc.
    const relativeConfigHash = md5Hex(JSON.stringify(cacheComponents));
    return {
        rootDirHash,
        relativeConfigHash,
    };
}
"use strict";
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.compareStrings = compareStrings;
exports.chainComparators = chainComparators;
// Utilities for working with Array.prototype.sort
/**
 * Comparator for possibly-null strings: null/undefined sorts before any
 * string, two null-ish values compare equal, otherwise locale comparison.
 */
function compareStrings(a, b) {
    if (a != null && b != null) {
        return a.localeCompare(b);
    }
    if (a == null) {
        return b == null ? 0 : -1;
    }
    // a is non-null, b is null-ish.
    return 1;
}
/**
 * Builds a single comparator from several: each comparator is consulted in
 * order, and the first non-zero result wins (0 when all tie).
 */
function chainComparators(...comparators) {
    return (a, b) => {
        let result = 0;
        // `some` stops at the first comparator producing a non-zero result.
        comparators.some((comparator) => (result = comparator(a, b)) !== 0);
        return result;
    };
}
"use strict";
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const workerExclusionList_1 = __importDefault(require("../workerExclusionList"));
const FileDataPlugin_1 = __importDefault(require("./FileDataPlugin"));
/**
 * FileMap plugin that records each file's dependency list (as extracted by
 * the worker, optionally via a custom extractor module) as per-file plugin
 * data, and exposes it through `getDependencies`.
 */
class DependencyPlugin extends FileDataPlugin_1.default {
    constructor(options) {
        const { dependencyExtractor, computeDependencies } = options;
        let cacheKey;
        if (dependencyExtractor != null) {
            const mod = require(dependencyExtractor);
            // Unwrap transpiled ESM default exports when looking up getCacheKey.
            const getCacheKey = mod?.getCacheKey ??
                (mod.__esModule === true && 'default' in mod ? mod.default : mod).getCacheKey;
            // Fall back to the extractor's path so changing the extractor still
            // invalidates the cache even if it exposes no getCacheKey.
            cacheKey = getCacheKey?.() ?? dependencyExtractor;
        }
        else {
            cacheKey = 'default-dependency-extractor';
        }
        super({
            name: 'dependencies',
            cacheKey,
            worker: {
                modulePath: require.resolve('./dependencies/worker'),
                setupArgs: {
                    dependencyExtractor: dependencyExtractor ?? null,
                },
            },
            filter: ({ normalPath, isNodeModules }) => {
                if (!computeDependencies) {
                    return false;
                }
                if (isNodeModules) {
                    return false;
                }
                // Extension including the leading dot. Was `substr` (deprecated);
                // `slice` is equivalent here, including when there is no dot
                // (lastIndexOf returns -1 and both yield the final character).
                const ext = normalPath.slice(normalPath.lastIndexOf('.'));
                return !workerExclusionList_1.default.has(ext);
            },
        });
    }
    /**
     * Get the list of dependencies for a given file.
     * @param mixedPath Absolute or project-relative path to the file
     * @returns Array of dependency module names, or null if the file doesn't exist
     */
    getDependencies(mixedPath) {
        const result = this.getFileSystem().lookup(mixedPath);
        if (result.exists && result.type === 'f') {
            return result.pluginData ?? [];
        }
        return null;
    }
}
exports.default = DependencyPlugin;
"use strict";
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Base class for FileMap plugins that store per-file data via a worker and
 * have no separate serializable state. Provides default no-op implementations
 * of lifecycle methods that subclasses can override as needed.
 */
class FileDataPlugin {
    name;
    #workerConfig;
    #cacheKey;
    #fileSystem;
    constructor({ name, worker, filter, cacheKey }) {
        this.name = name;
        this.#workerConfig = { worker, filter };
        this.#cacheKey = cacheKey;
    }
    // Captures the file system handle; must run before getFileSystem is used.
    async initialize(initOptions) {
        this.#fileSystem = initOptions.files;
    }
    getFileSystem() {
        if (this.#fileSystem == null) {
            throw new Error(`${this.name} plugin has not been initialized`);
        }
        return this.#fileSystem;
    }
    // Default no-op lifecycle hooks; subclasses override as needed.
    onChanged(_changes) { }
    assertValid() { }
    getSerializableSnapshot() {
        return null;
    }
    getCacheKey() {
        return this.#cacheKey;
    }
    getWorker() {
        return this.#workerConfig;
    }
}
exports.default = FileDataPlugin;
+ */ +import type { Console, FileMapPlugin, FileMapPluginInitOptions, FileMapPluginWorker, HasteConflict, HasteMap, HasteMapItemMetadata, HTypeValue, Path, PerfLogger, ReadonlyFileSystemChanges } from '../types'; +export interface HasteMapOptions { + readonly console?: Console | null; + readonly enableHastePackages: boolean; + readonly hasteImplModulePath: string | null; + readonly perfLogger?: PerfLogger | null; + readonly platforms: ReadonlySet; + readonly rootDir: Path; + readonly failValidationOnConflicts: boolean; +} +export default class HastePlugin implements HasteMap, FileMapPlugin { + #private; + readonly name: 'haste'; + constructor(options: HasteMapOptions); + initialize({ files }: FileMapPluginInitOptions): Promise; + getSerializableSnapshot(): null; + getModule(name: string, platform?: string | undefined | null, supportsNativePlatform?: boolean | undefined | null, type?: HTypeValue | undefined | null): Path | undefined | null; + getModuleNameByPath(mixedPath: Path): string | undefined | null; + getPackage(name: string, platform: string | undefined | null, _supportsNativePlatform?: boolean | undefined | null): Path | undefined | null; + onChanged(delta: ReadonlyFileSystemChanges): void; + setModule(id: string, module: HasteMapItemMetadata): void; + assertValid(): void; + computeConflicts(): HasteConflict[]; + getCacheKey(): string; + getWorker(): FileMapPluginWorker; +} diff --git a/packages/@expo/metro-file-map/build/plugins/HastePlugin.js b/packages/@expo/metro-file-map/build/plugins/HastePlugin.js new file mode 100644 index 00000000000000..41e145c98cd675 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/HastePlugin.js @@ -0,0 +1,358 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const path_1 = __importDefault(require("path")); +const constants_1 = __importDefault(require("../constants")); +const RootPathUtils_1 = require("../lib/RootPathUtils"); +const sorting_1 = require("../lib/sorting"); +const DuplicateHasteCandidatesError_1 = require("./haste/DuplicateHasteCandidatesError"); +const HasteConflictsError_1 = require("./haste/HasteConflictsError"); +const getPlatformExtension_1 = __importDefault(require("./haste/getPlatformExtension")); +const EMPTY_OBJ = {}; +const EMPTY_MAP = new Map(); +const PACKAGE_JSON = /(?:[/\\]|^)package\.json$/; +// Periodically yield to the event loop to allow parallel I/O, etc. +// Based on 200k files taking up to 800ms => max 40ms between yields. +const YIELD_EVERY_NUM_HASTE_FILES = 10000; +class HastePlugin { + name = 'haste'; + #console; + #duplicates = new Map(); + #enableHastePackages; + #failValidationOnConflicts; + #getModuleNameByPath; + #hasteImplModulePath; + #map = new Map(); + #pathUtils; + #perfLogger; + #platforms; + constructor(options) { + this.#console = options.console ?? globalThis.console; + this.#enableHastePackages = options.enableHastePackages; + this.#hasteImplModulePath = options.hasteImplModulePath; + this.#perfLogger = options.perfLogger; + this.#platforms = options.platforms; + this.#pathUtils = new RootPathUtils_1.RootPathUtils(options.rootDir); + this.#failValidationOnConflicts = options.failValidationOnConflicts; + } + async initialize({ files }) { + this.#perfLogger?.point('constructHasteMap_start'); + let hasteFiles = 0; + for (const { baseName, canonicalPath, pluginData: hasteId } of files.fileIterator({ + // Symlinks and node_modules are never Haste modules or packages. 
+ includeNodeModules: false, + includeSymlinks: false, + })) { + if (hasteId == null) { + continue; + } + this.setModule(hasteId, [ + canonicalPath, + this.#enableHastePackages && baseName === 'package.json' ? constants_1.default.PACKAGE : constants_1.default.MODULE, + ]); + if (++hasteFiles % YIELD_EVERY_NUM_HASTE_FILES === 0) { + await new Promise(setImmediate); + } + } + this.#getModuleNameByPath = (mixedPath) => { + const result = files.lookup(mixedPath); + return result.exists && result.type === 'f' && typeof result.pluginData === 'string' + ? result.pluginData + : null; + }; + this.#perfLogger?.point('constructHasteMap_end'); + this.#perfLogger?.annotate({ int: { hasteFiles } }); + } + getSerializableSnapshot() { + // Haste is not serialised, but built from traversing the file metadata + // on each run. This turns out to have comparable performance to + // serialisation, at least when Haste is dense, and makes for a much + // smaller cache. + return null; + } + getModule(name, platform, supportsNativePlatform, type) { + const module = this.#getModuleMetadata(name, platform, !!supportsNativePlatform); + if (module && module[constants_1.default.TYPE] === (type ?? constants_1.default.MODULE)) { + const modulePath = module[constants_1.default.PATH]; + return modulePath && this.#pathUtils.normalToAbsolute(modulePath); + } + return null; + } + getModuleNameByPath(mixedPath) { + if (this.#getModuleNameByPath == null) { + throw new Error('HastePlugin has not been initialized before getModuleNameByPath'); + } + return this.#getModuleNameByPath(mixedPath) ?? null; + } + getPackage(name, platform, _supportsNativePlatform) { + return this.getModule(name, platform, null, constants_1.default.PACKAGE); + } + /** + * When looking up a module's data, we walk through each eligible platform for + * the query. For each platform, we want to check if there are known + * duplicates for that name+platform pair. 
The duplication logic normally + * removes elements from the `map` object, but we want to check upfront to be + * extra sure. If metadata exists both in the `duplicates` object and the + * `map`, this would be a bug. + */ + #getModuleMetadata(name, platform, supportsNativePlatform) { + const map = this.#map.get(name) || EMPTY_OBJ; + const dupMap = this.#duplicates.get(name) || EMPTY_MAP; + if (platform != null) { + this.#assertNoDuplicates(name, platform, supportsNativePlatform, dupMap.get(platform)); + if (map[platform] != null) { + return map[platform]; + } + } + if (supportsNativePlatform) { + this.#assertNoDuplicates(name, constants_1.default.NATIVE_PLATFORM, supportsNativePlatform, dupMap.get(constants_1.default.NATIVE_PLATFORM)); + if (map[constants_1.default.NATIVE_PLATFORM]) { + return map[constants_1.default.NATIVE_PLATFORM]; + } + } + this.#assertNoDuplicates(name, constants_1.default.GENERIC_PLATFORM, supportsNativePlatform, dupMap.get(constants_1.default.GENERIC_PLATFORM)); + if (map[constants_1.default.GENERIC_PLATFORM]) { + return map[constants_1.default.GENERIC_PLATFORM]; + } + return null; + } + #assertNoDuplicates(name, platform, supportsNativePlatform, relativePathSet) { + if (relativePathSet == null) { + return; + } + const duplicates = new Map(); + for (const [relativePath, type] of relativePathSet) { + const duplicatePath = this.#pathUtils.normalToAbsolute(relativePath); + duplicates.set(duplicatePath, type); + } + throw new DuplicateHasteCandidatesError_1.DuplicateHasteCandidatesError(name, platform, supportsNativePlatform, duplicates); + } + onChanged(delta) { + // Process removals first so that moves aren't treated as duplicates. 
+ for (const [canonicalPath, maybeHasteId] of delta.removedFiles) { + this.#onRemovedFile(canonicalPath, maybeHasteId); + } + for (const [canonicalPath, maybeHasteId] of delta.addedFiles) { + this.#onNewFile(canonicalPath, maybeHasteId); + } + } + #onNewFile(canonicalPath, id) { + if (id == null) { + // Not a Haste module or package + return; + } + const module = [ + canonicalPath, + this.#enableHastePackages && path_1.default.basename(canonicalPath) === 'package.json' + ? constants_1.default.PACKAGE + : constants_1.default.MODULE, + ]; + this.setModule(id, module); + } + setModule(id, module) { + let hasteMapItem = this.#map.get(id); + if (!hasteMapItem) { + hasteMapItem = Object.create(null); + this.#map.set(id, hasteMapItem); + } + const platform = (0, getPlatformExtension_1.default)(module[constants_1.default.PATH], this.#platforms) || constants_1.default.GENERIC_PLATFORM; + const existingModule = hasteMapItem[platform]; + if (existingModule && existingModule[constants_1.default.PATH] !== module[constants_1.default.PATH]) { + if (this.#console) { + this.#console.warn([ + 'metro-file-map: Haste module naming collision: ' + id, + ' The following files share their name; please adjust your hasteImpl:', + ' * ' + path_1.default.sep + existingModule[constants_1.default.PATH], + ' * ' + path_1.default.sep + module[constants_1.default.PATH], + '', + ].join('\n')); + } + // We do NOT want consumers to use a module that is ambiguous. 
+ delete hasteMapItem[platform]; + if (Object.keys(hasteMapItem).length === 0) { + this.#map.delete(id); + } + let dupsByPlatform = this.#duplicates.get(id); + if (dupsByPlatform == null) { + dupsByPlatform = new Map(); + this.#duplicates.set(id, dupsByPlatform); + } + const dups = new Map([ + [module[constants_1.default.PATH], module[constants_1.default.TYPE]], + [existingModule[constants_1.default.PATH], existingModule[constants_1.default.TYPE]], + ]); + dupsByPlatform.set(platform, dups); + return; + } + const dupsByPlatform = this.#duplicates.get(id); + if (dupsByPlatform != null) { + const dups = dupsByPlatform.get(platform); + if (dups != null) { + dups.set(module[constants_1.default.PATH], module[constants_1.default.TYPE]); + } + return; + } + hasteMapItem[platform] = module; + } + #onRemovedFile(canonicalPath, moduleName) { + if (moduleName == null) { + // Not a Haste module or package + return; + } + const platform = (0, getPlatformExtension_1.default)(canonicalPath, this.#platforms) || constants_1.default.GENERIC_PLATFORM; + const hasteMapItem = this.#map.get(moduleName); + if (hasteMapItem != null) { + delete hasteMapItem[platform]; + if (Object.keys(hasteMapItem).length === 0) { + this.#map.delete(moduleName); + } + else { + this.#map.set(moduleName, hasteMapItem); + } + } + this.#recoverDuplicates(moduleName, canonicalPath); + } + assertValid() { + if (!this.#failValidationOnConflicts) { + return; + } + const conflicts = this.computeConflicts(); + if (conflicts.length > 0) { + throw new HasteConflictsError_1.HasteConflictsError(conflicts); + } + } + /** + * This function should be called when the file under `filePath` is removed + * or changed. When that happens, we want to figure out if that file was + * part of a group of files that had the same ID. If it was, we want to + * remove it from the group. 
Furthermore, if there is only one file + * remaining in the group, then we want to restore that single file as the + * correct resolution for its ID, and cleanup the duplicates index. + */ + #recoverDuplicates(moduleName, relativeFilePath) { + let dupsByPlatform = this.#duplicates.get(moduleName); + if (dupsByPlatform == null) { + return; + } + const platform = (0, getPlatformExtension_1.default)(relativeFilePath, this.#platforms) || constants_1.default.GENERIC_PLATFORM; + let dups = dupsByPlatform.get(platform); + if (dups == null) { + return; + } + dupsByPlatform = new Map(dupsByPlatform); + this.#duplicates.set(moduleName, dupsByPlatform); + dups = new Map(dups); + dupsByPlatform.set(platform, dups); + dups.delete(relativeFilePath); + if (dups.size !== 1) { + return; + } + const uniqueModule = dups.entries().next().value; + if (!uniqueModule) { + return; + } + let dedupMap = this.#map.get(moduleName); + if (dedupMap == null) { + dedupMap = Object.create(null); + this.#map.set(moduleName, dedupMap); + } + dedupMap[platform] = uniqueModule; + dupsByPlatform.delete(platform); + if (dupsByPlatform.size === 0) { + this.#duplicates.delete(moduleName); + } + } + computeConflicts() { + const conflicts = []; + // Add literal duplicates tracked in the #duplicates map + for (const [id, dupsByPlatform] of this.#duplicates.entries()) { + for (const [platform, conflictingModules] of dupsByPlatform) { + conflicts.push({ + absolutePaths: [...conflictingModules.keys()] + .map((modulePath) => this.#pathUtils.normalToAbsolute(modulePath)) + // Sort for ease of testing + .sort(), + id, + platform: platform === constants_1.default.GENERIC_PLATFORM ? null : platform, + type: 'duplicate', + }); + } + } + // Add cases of "shadowing at a distance": a module with a platform suffix and + // a module with a lower priority platform suffix (or no suffix), in different + // directories. 
+ for (const [id, data] of this.#map) { + const conflictPaths = new Set(); + const basePaths = []; + for (const basePlatform of [constants_1.default.NATIVE_PLATFORM, constants_1.default.GENERIC_PLATFORM]) { + if (data[basePlatform] == null) { + continue; + } + const basePath = data[basePlatform][0]; + basePaths.push(basePath); + const basePathDir = path_1.default.dirname(basePath); + // Find all platforms that can shadow basePlatform + // Given that X.(specific platform).js > x.native.js > X.js + // and basePlatform is either 'native' or generic (no platform). + for (const platform of Object.keys(data)) { + if (platform === basePlatform || platform === constants_1.default.GENERIC_PLATFORM /* lowest priority */) { + continue; + } + const platformPath = data[platform][0]; + if (path_1.default.dirname(platformPath) !== basePathDir) { + conflictPaths.add(platformPath); + } + } + } + if (conflictPaths.size) { + conflicts.push({ + absolutePaths: [...new Set([...conflictPaths, ...basePaths])] + .map((modulePath) => this.#pathUtils.normalToAbsolute(modulePath)) + // Sort for ease of testing + .sort(), + id, + platform: null, + type: 'shadowing', + }); + } + } + // Sort for ease of testing + conflicts.sort((0, sorting_1.chainComparators)((a, b) => (0, sorting_1.compareStrings)(a.type, b.type), (a, b) => (0, sorting_1.compareStrings)(a.id, b.id), (a, b) => (0, sorting_1.compareStrings)(a.platform, b.platform))); + return conflicts; + } + getCacheKey() { + return JSON.stringify([ + this.#enableHastePackages, + this.#hasteImplModulePath != null ? require(this.#hasteImplModulePath).getCacheKey() : null, + [...this.#platforms].sort(), + ]); + } + getWorker() { + return { + worker: { + modulePath: require.resolve('./haste/worker.js'), + setupArgs: { + hasteImplModulePath: this.#hasteImplModulePath ?? 
null, + }, + }, + filter: ({ isNodeModules, normalPath }) => { + if (isNodeModules) { + return false; + } + if (PACKAGE_JSON.test(normalPath)) { + return this.#enableHastePackages; + } + return this.#hasteImplModulePath != null; + }, + }; + } +} +exports.default = HastePlugin; diff --git a/packages/@expo/metro-file-map/build/plugins/MockPlugin.d.ts b/packages/@expo/metro-file-map/build/plugins/MockPlugin.d.ts new file mode 100644 index 00000000000000..d825d0e5c6fe59 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/MockPlugin.d.ts @@ -0,0 +1,27 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import type { Console, FileMapPlugin, FileMapPluginInitOptions, FileMapPluginWorker, MockMap as IMockMap, Path, RawMockMap, ReadonlyFileSystemChanges } from '../types'; +export declare const CACHE_VERSION = 2; +export interface MockMapOptions { + readonly console: Console; + readonly mocksPattern: RegExp; + readonly rawMockMap?: RawMockMap; + readonly rootDir: Path; + readonly throwOnModuleCollision: boolean; +} +export default class MockPlugin implements FileMapPlugin, IMockMap { + #private; + readonly name: 'mocks'; + constructor({ console, mocksPattern, rawMockMap, rootDir, throwOnModuleCollision, }: MockMapOptions); + initialize({ files, pluginState }: FileMapPluginInitOptions): Promise; + getMockModule(name: string): Path | undefined | null; + onChanged(delta: ReadonlyFileSystemChanges): void; + getSerializableSnapshot(): RawMockMap; + assertValid(): void; + getCacheKey(): string; + getWorker(): FileMapPluginWorker | undefined | null; +} diff --git a/packages/@expo/metro-file-map/build/plugins/MockPlugin.js b/packages/@expo/metro-file-map/build/plugins/MockPlugin.js new file mode 100644 index 00000000000000..8818a0205ce874 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/MockPlugin.js @@ -0,0 
+1,157 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CACHE_VERSION = void 0; +const path_1 = __importDefault(require("path")); +const RootPathUtils_1 = require("../lib/RootPathUtils"); +const normalizePathSeparatorsToPosix_1 = __importDefault(require("../lib/normalizePathSeparatorsToPosix")); +const normalizePathSeparatorsToSystem_1 = __importDefault(require("../lib/normalizePathSeparatorsToSystem")); +const getMockName_1 = __importDefault(require("./mocks/getMockName")); +exports.CACHE_VERSION = 2; +class MockPlugin { + name = 'mocks'; + #mocksPattern; + #raw; + #pathUtils; + #console; + #throwOnModuleCollision; + constructor({ console, mocksPattern, rawMockMap = { + duplicates: new Map(), + mocks: new Map(), + version: exports.CACHE_VERSION, + }, rootDir, throwOnModuleCollision, }) { + this.#mocksPattern = mocksPattern; + if (rawMockMap.version !== exports.CACHE_VERSION) { + throw new Error('Incompatible state passed to MockPlugin'); + } + this.#raw = rawMockMap; + this.#console = console; + this.#pathUtils = new RootPathUtils_1.RootPathUtils(rootDir); + this.#throwOnModuleCollision = throwOnModuleCollision; + } + async initialize({ files, pluginState }) { + if (pluginState != null && pluginState.version === this.#raw.version) { + // Use cached state directly if available + this.#raw = pluginState; + } + else { + // Otherwise, traverse all files to rebuild + for (const { canonicalPath } of files.fileIterator({ + includeNodeModules: false, + includeSymlinks: false, + })) { + this.#onFileAdded(canonicalPath); + } + } + } + getMockModule(name) { + const mockPosixRelativePath = 
this.#raw.mocks.get(name) || this.#raw.mocks.get(name + '/index'); + if (typeof mockPosixRelativePath !== 'string') { + return null; + } + return this.#pathUtils.normalToAbsolute((0, normalizePathSeparatorsToSystem_1.default)(mockPosixRelativePath)); + } + onChanged(delta) { + // Process removals first so that moves aren't treated as duplicates. + for (const [canonicalPath] of delta.removedFiles) { + this.#onFileRemoved(canonicalPath); + } + for (const [canonicalPath] of delta.addedFiles) { + this.#onFileAdded(canonicalPath); + } + } + #onFileAdded(canonicalPath) { + const absoluteFilePath = this.#pathUtils.normalToAbsolute(canonicalPath); + if (!this.#mocksPattern.test(absoluteFilePath)) { + return; + } + const mockName = (0, getMockName_1.default)(absoluteFilePath); + const posixRelativePath = (0, normalizePathSeparatorsToPosix_1.default)(canonicalPath); + const existingMockPosixPath = this.#raw.mocks.get(mockName); + if (existingMockPosixPath != null) { + if (existingMockPosixPath !== posixRelativePath) { + let duplicates = this.#raw.duplicates.get(mockName); + if (duplicates == null) { + duplicates = new Set([existingMockPosixPath, posixRelativePath]); + this.#raw.duplicates.set(mockName, duplicates); + } + else { + duplicates.add(posixRelativePath); + } + this.#console.warn(this.#getMessageForDuplicates(mockName, duplicates)); + } + } + // If there are duplicates and we don't throw, the latest mock wins. + // This is to preserve backwards compatibility, but it's unpredictable. 
+ this.#raw.mocks.set(mockName, posixRelativePath); + } + #onFileRemoved(canonicalPath) { + const absoluteFilePath = this.#pathUtils.normalToAbsolute(canonicalPath); + if (!this.#mocksPattern.test(absoluteFilePath)) { + return; + } + const mockName = (0, getMockName_1.default)(absoluteFilePath); + const duplicates = this.#raw.duplicates.get(mockName); + if (duplicates != null) { + const posixRelativePath = (0, normalizePathSeparatorsToPosix_1.default)(canonicalPath); + duplicates.delete(posixRelativePath); + if (duplicates.size === 1) { + this.#raw.duplicates.delete(mockName); + } + // Set the mock to a remaining duplicate. Should never be empty. + // Size was checked as 1 above, so this is always defined + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const remaining = duplicates.values().next().value; + this.#raw.mocks.set(mockName, remaining); + } + else { + this.#raw.mocks.delete(mockName); + } + } + getSerializableSnapshot() { + return { + duplicates: new Map([...this.#raw.duplicates].map(([k, v]) => [k, new Set(v)])), + mocks: new Map(this.#raw.mocks), + version: this.#raw.version, + }; + } + assertValid() { + if (!this.#throwOnModuleCollision) { + return; + } + // Throw an aggregate error for each duplicate. + const errors = []; + for (const [mockName, relativePosixPaths] of this.#raw.duplicates) { + errors.push(this.#getMessageForDuplicates(mockName, relativePosixPaths)); + } + if (errors.length > 0) { + throw new Error(`Mock map has ${errors.length} error${errors.length > 1 ? 
's' : ''}:\n${errors.join('\n')}`); + } + } + #getMessageForDuplicates(mockName, relativePosixPaths) { + return ('Duplicate manual mock found for `' + + mockName + + '`:\n' + + [...relativePosixPaths] + .map((relativePosixPath) => ' * ' + + path_1.default.sep + + this.#pathUtils.absoluteToNormal((0, normalizePathSeparatorsToSystem_1.default)(relativePosixPath)) + + '\n') + .join('')); + } + getCacheKey() { + return this.#mocksPattern.source.replaceAll('\\\\', '\\/') + ',' + this.#mocksPattern.flags; + } + getWorker() { + return null; + } +} +exports.default = MockPlugin; diff --git a/packages/@expo/metro-file-map/build/plugins/dependencies/dependencyExtractor.d.ts b/packages/@expo/metro-file-map/build/plugins/dependencies/dependencyExtractor.d.ts new file mode 100644 index 00000000000000..74120ef699f1e0 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/dependencies/dependencyExtractor.d.ts @@ -0,0 +1,7 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +export declare function extract(code: string): Set; diff --git a/packages/@expo/metro-file-map/build/plugins/dependencies/dependencyExtractor.js b/packages/@expo/metro-file-map/build/plugins/dependencies/dependencyExtractor.js new file mode 100644 index 00000000000000..4bbfdc368b21c9 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/dependencies/dependencyExtractor.js @@ -0,0 +1,66 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.extract = extract; +const NOT_A_DOT = '(? 
`([\`'"])([^'"\`]*?)(?:\\${pos})`; +const WORD_SEPARATOR = '\\b'; +const LEFT_PARENTHESIS = '\\('; +const RIGHT_PARENTHESIS = '\\)'; +const WHITESPACE = '\\s*'; +const OPTIONAL_COMMA = '(:?,\\s*)?'; +function createRegExp(parts, flags) { + return new RegExp(parts.join(''), flags); +} +function alternatives(...parts) { + return `(?:${parts.join('|')})`; +} +function functionCallStart(...names) { + return [ + NOT_A_DOT, + WORD_SEPARATOR, + alternatives(...names), + WHITESPACE, + LEFT_PARENTHESIS, + WHITESPACE, + ]; +} +const BLOCK_COMMENT_RE = /\/\*[^]*?\*\//g; +const LINE_COMMENT_RE = /\/\/.*/g; +const REQUIRE_OR_DYNAMIC_IMPORT_RE = createRegExp([ + ...functionCallStart('require', 'import'), + CAPTURE_STRING_LITERAL(1), + WHITESPACE, + OPTIONAL_COMMA, + RIGHT_PARENTHESIS, +], 'g'); +const IMPORT_OR_EXPORT_RE = createRegExp([ + '\\b(?:import|export)\\s+(?!type(?:of)?\\s+)(?:[^\'"]+\\s+from\\s+)?', + CAPTURE_STRING_LITERAL(1), +], 'g'); +const JEST_EXTENSIONS_RE = createRegExp([ + ...functionCallStart('jest\\s*\\.\\s*(?:requireActual|requireMock|genMockFromModule|createMockFromModule)'), + CAPTURE_STRING_LITERAL(1), + WHITESPACE, + OPTIONAL_COMMA, + RIGHT_PARENTHESIS, +], 'g'); +function extract(code) { + const dependencies = new Set(); + const addDependency = (match, _, dep) => { + dependencies.add(dep); + return match; + }; + code + .replace(BLOCK_COMMENT_RE, '') + .replace(LINE_COMMENT_RE, '') + .replace(IMPORT_OR_EXPORT_RE, addDependency) + .replace(REQUIRE_OR_DYNAMIC_IMPORT_RE, addDependency) + .replace(JEST_EXTENSIONS_RE, addDependency); + return dependencies; +} diff --git a/packages/@expo/metro-file-map/build/plugins/dependencies/worker.d.ts b/packages/@expo/metro-file-map/build/plugins/dependencies/worker.d.ts new file mode 100644 index 00000000000000..a983c644aa03e2 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/dependencies/worker.d.ts @@ -0,0 +1,18 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + */ +import type { MetadataWorker, V8Serializable, WorkerMessage } from '../../types'; +export default class DependencyExtractorWorker implements MetadataWorker { + #private; + constructor({ dependencyExtractor }: Readonly<{ + dependencyExtractor: string | null; + }>); + processFile(data: WorkerMessage, utils: Readonly<{ + getContent: () => Buffer; + }>): V8Serializable; +} diff --git a/packages/@expo/metro-file-map/build/plugins/dependencies/worker.js b/packages/@expo/metro-file-map/build/plugins/dependencies/worker.js new file mode 100644 index 00000000000000..67096ac6ef3899 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/dependencies/worker.js @@ -0,0 +1,30 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + */ +'use strict'; +Object.defineProperty(exports, "__esModule", { value: true }); +const dependencyExtractor_1 = require("./dependencyExtractor"); +class DependencyExtractorWorker { + #dependencyExtractor; + constructor({ dependencyExtractor }) { + if (dependencyExtractor != null) { + const mod = require(dependencyExtractor); + this.#dependencyExtractor = mod.__esModule === true && 'default' in mod ? mod.default : mod; + } + } + processFile(data, utils) { + const content = utils.getContent().toString(); + const { filePath } = data; + const dependencies = this.#dependencyExtractor != null + ? 
this.#dependencyExtractor.extract(content, filePath, dependencyExtractor_1.extract) + : (0, dependencyExtractor_1.extract)(content); + // Return as array (PerFileData type) + return Array.from(dependencies); + } +} +exports.default = DependencyExtractorWorker; diff --git a/packages/@expo/metro-file-map/build/plugins/haste/DuplicateHasteCandidatesError.d.ts b/packages/@expo/metro-file-map/build/plugins/haste/DuplicateHasteCandidatesError.d.ts new file mode 100644 index 00000000000000..737e69bb1e2ef7 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/haste/DuplicateHasteCandidatesError.d.ts @@ -0,0 +1,14 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import type { DuplicatesSet } from '../../types'; +export declare class DuplicateHasteCandidatesError extends Error { + hasteName: string; + platform: string | null; + supportsNativePlatform: boolean; + duplicatesSet: DuplicatesSet; + constructor(name: string, platform: string, supportsNativePlatform: boolean, duplicatesSet: DuplicatesSet); +} diff --git a/packages/@expo/metro-file-map/build/plugins/haste/DuplicateHasteCandidatesError.js b/packages/@expo/metro-file-map/build/plugins/haste/DuplicateHasteCandidatesError.js new file mode 100644 index 00000000000000..e88a661e52132d --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/haste/DuplicateHasteCandidatesError.js @@ -0,0 +1,51 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DuplicateHasteCandidatesError = void 0; +const constants_1 = __importDefault(require("../../constants")); +class DuplicateHasteCandidatesError extends Error { + hasteName; + platform; + supportsNativePlatform; + duplicatesSet; + constructor(name, platform, supportsNativePlatform, duplicatesSet) { + const platformMessage = getPlatformMessage(platform); + super(`The name \`${name}\` was looked up in the Haste module map. It ` + + 'cannot be resolved, because there exists several different ' + + 'files, or packages, that provide a module for ' + + `that particular name and platform. ${platformMessage} You must ` + + 'delete or exclude files until there remains only one of these:\n\n' + + Array.from(duplicatesSet) + .map(([dupFilePath, dupFileType]) => ` * \`${dupFilePath}\` (${getTypeMessage(dupFileType)})\n`) + .sort() + .join('')); + this.hasteName = name; + this.platform = platform; + this.supportsNativePlatform = supportsNativePlatform; + this.duplicatesSet = duplicatesSet; + } +} +exports.DuplicateHasteCandidatesError = DuplicateHasteCandidatesError; +function getPlatformMessage(platform) { + if (platform === constants_1.default.GENERIC_PLATFORM) { + return 'The platform is generic (no extension).'; + } + return `The platform extension is \`${platform}\`.`; +} +function getTypeMessage(type) { + switch (type) { + case constants_1.default.MODULE: + return 'module'; + case constants_1.default.PACKAGE: + return 'package'; + } + return 'unknown'; +} diff --git a/packages/@expo/metro-file-map/build/plugins/haste/HasteConflictsError.d.ts b/packages/@expo/metro-file-map/build/plugins/haste/HasteConflictsError.d.ts new file mode 100644 index 00000000000000..792a95290f04b5 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/haste/HasteConflictsError.d.ts @@ -0,0 +1,12 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import type { HasteConflict } from '../../types'; +export declare class HasteConflictsError extends Error { + #private; + constructor(conflicts: readonly HasteConflict[]); + getDetailedMessage(pathsRelativeToRoot: string | null): string; +} diff --git a/packages/@expo/metro-file-map/build/plugins/haste/HasteConflictsError.js b/packages/@expo/metro-file-map/build/plugins/haste/HasteConflictsError.js new file mode 100644 index 00000000000000..b9c4e75761d42e --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/haste/HasteConflictsError.js @@ -0,0 +1,49 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HasteConflictsError = void 0; +const path_1 = __importDefault(require("path")); +class HasteConflictsError extends Error { + #conflicts; + constructor(conflicts) { + super(`Found ${conflicts.length} Haste conflict(s). 
Haste module IDs must be globally unique in the codebase.`); + this.#conflicts = conflicts; + } + getDetailedMessage(pathsRelativeToRoot) { + const messages = []; + const conflicts = this.#conflicts; + if (conflicts.some((conflict) => conflict.type === 'duplicate')) { + messages.push('Advice: Resolve conflicts of type "duplicate" by renaming one or both of the conflicting modules, or by excluding conflicting paths from Haste.'); + } + if (conflicts.some((conflict) => conflict.type === 'shadowing')) { + messages.push('Advice: Resolve conflicts of type "shadowing" by moving the modules to the same folder, or by excluding conflicting paths from Haste.'); + } + let index = 0; + for (const conflict of conflicts) { + const itemHeader = index + 1 + '. '; + const indent = ' '.repeat(itemHeader.length + 2); + messages.push('\n' + + itemHeader + + conflict.id + + (conflict.platform != null ? `.${conflict.platform}` : '') + + ` (${conflict.type})`); + for (const modulePath of conflict.absolutePaths) { + messages.push(indent + + (pathsRelativeToRoot != null + ? path_1.default.relative(pathsRelativeToRoot, modulePath) + : modulePath)); + } + ++index; + } + return messages.join('\n'); + } +} +exports.HasteConflictsError = HasteConflictsError; diff --git a/packages/@expo/metro-file-map/build/plugins/haste/computeConflicts.d.ts b/packages/@expo/metro-file-map/build/plugins/haste/computeConflicts.d.ts new file mode 100644 index 00000000000000..f7c98947567340 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/haste/computeConflicts.d.ts @@ -0,0 +1,19 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ +import type { HasteMapItem } from '../../types'; +interface Conflict { + id: string; + platform: string | null; + absolutePaths: string[]; + type: 'duplicate' | 'shadowing'; +} +export declare function computeHasteConflicts(options: { + readonly duplicates: ReadonlyMap>>; + readonly map: ReadonlyMap; + readonly rootDir: string; +}): Conflict[]; +export {}; diff --git a/packages/@expo/metro-file-map/build/plugins/haste/computeConflicts.js b/packages/@expo/metro-file-map/build/plugins/haste/computeConflicts.js new file mode 100644 index 00000000000000..e443932e84f20e --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/haste/computeConflicts.js @@ -0,0 +1,74 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.computeHasteConflicts = computeHasteConflicts; +const path_1 = __importDefault(require("path")); +const constants_1 = __importDefault(require("../../constants")); +const sorting_1 = require("../../lib/sorting"); +function computeHasteConflicts(options) { + const { duplicates, map, rootDir } = options; + const conflicts = []; + // Add duplicates reported by metro-file-map + for (const [id, dupsByPlatform] of duplicates.entries()) { + for (const [platform, conflictingModules] of dupsByPlatform) { + conflicts.push({ + id, + platform: platform === constants_1.default.GENERIC_PLATFORM ? 
null : platform, + absolutePaths: [...conflictingModules.keys()] + .map((modulePath) => path_1.default.resolve(rootDir, modulePath)) + // Sort for ease of testing + .sort(), + type: 'duplicate', + }); + } + } + // Add cases of "shadowing at a distance": a module with a platform suffix and + // a module with a lower priority platform suffix (or no suffix), in different + // directories. + for (const [id, data] of map) { + const conflictPaths = new Set(); + const basePaths = []; + for (const basePlatform of [constants_1.default.NATIVE_PLATFORM, constants_1.default.GENERIC_PLATFORM]) { + if (data[basePlatform] == null) { + continue; + } + const basePath = data[basePlatform][0]; + basePaths.push(basePath); + const basePathDir = path_1.default.dirname(basePath); + // Find all platforms that can shadow basePlatform + // Given that X.(specific platform).js > x.native.js > X.js + // and basePlatform is either 'native' or generic (no platform). + for (const platform of Object.keys(data)) { + if (platform === basePlatform || platform === constants_1.default.GENERIC_PLATFORM /* lowest priority */) { + continue; + } + const platformPath = data[platform][0]; + if (path_1.default.dirname(platformPath) !== basePathDir) { + conflictPaths.add(platformPath); + } + } + } + if (conflictPaths.size) { + conflicts.push({ + id, + platform: null, + absolutePaths: [...new Set([...conflictPaths, ...basePaths])] + .map((modulePath) => path_1.default.resolve(rootDir, modulePath)) + // Sort for ease of testing + .sort(), + type: 'shadowing', + }); + } + } + // Sort for ease of testing + conflicts.sort((0, sorting_1.chainComparators)((a, b) => (0, sorting_1.compareStrings)(a.type, b.type), (a, b) => (0, sorting_1.compareStrings)(a.id, b.id), (a, b) => (0, sorting_1.compareStrings)(a.platform, b.platform))); + return conflicts; +} diff --git a/packages/@expo/metro-file-map/build/plugins/haste/getPlatformExtension.d.ts b/packages/@expo/metro-file-map/build/plugins/haste/getPlatformExtension.d.ts 
new file mode 100644 index 00000000000000..04783c61d3cb84 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/haste/getPlatformExtension.d.ts @@ -0,0 +1,7 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +export default function getPlatformExtension(file: string, platforms: ReadonlySet): string | null; diff --git a/packages/@expo/metro-file-map/build/plugins/haste/getPlatformExtension.js b/packages/@expo/metro-file-map/build/plugins/haste/getPlatformExtension.js new file mode 100644 index 00000000000000..e5ae861d7c2dae --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/haste/getPlatformExtension.js @@ -0,0 +1,19 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = getPlatformExtension; +// Extract platform extension: index.ios.js -> ios +function getPlatformExtension(file, platforms) { + const last = file.lastIndexOf('.'); + const secondToLast = file.lastIndexOf('.', last - 1); + if (secondToLast === -1) { + return null; + } + const platform = file.substring(secondToLast + 1, last); + return platforms.has(platform) ? platform : null; +} diff --git a/packages/@expo/metro-file-map/build/plugins/haste/worker.d.ts b/packages/@expo/metro-file-map/build/plugins/haste/worker.d.ts new file mode 100644 index 00000000000000..0f258f62bb727e --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/haste/worker.d.ts @@ -0,0 +1,16 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ +import type { MetadataWorker, V8Serializable, WorkerMessage } from '../../types'; +export default class Worker implements MetadataWorker { + #private; + constructor({ hasteImplModulePath }: Readonly<{ + hasteImplModulePath: string | null; + }>); + processFile(data: WorkerMessage, utils: Readonly<{ + getContent: () => Buffer; + }>): V8Serializable; +} diff --git a/packages/@expo/metro-file-map/build/plugins/haste/worker.js b/packages/@expo/metro-file-map/build/plugins/haste/worker.js new file mode 100644 index 00000000000000..99418e2f289c83 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/haste/worker.js @@ -0,0 +1,48 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const path_1 = __importDefault(require("path")); +const workerExclusionList_1 = __importDefault(require("../../workerExclusionList")); +const PACKAGE_JSON = path_1.default.sep + 'package.json'; +class Worker { + #hasteImpl = null; + constructor({ hasteImplModulePath }) { + if (hasteImplModulePath != null) { + const mod = require(hasteImplModulePath); + this.#hasteImpl = mod.__esModule === true && 'default' in mod ? mod.default : mod; + } + } + processFile(data, utils) { + let hasteName = null; + const { filePath } = data; + if (filePath.endsWith(PACKAGE_JSON)) { + // Process a package.json that is returned as a PACKAGE type with its name. 
+ try { + const fileData = JSON.parse(utils.getContent().toString()); + if (fileData.name) { + hasteName = fileData.name; + } + } + catch (err) { + throw new Error(`Cannot parse ${filePath} as JSON: ${err.message}`); + } + } + else if (!workerExclusionList_1.default.has(filePath.substr(filePath.lastIndexOf('.')))) { + if (!this.#hasteImpl) { + throw new Error('computeHaste is true but hasteImplModulePath not set'); + } + // Process a random file that is returned as a MODULE. + hasteName = this.#hasteImpl.getHasteName(filePath) || null; + } + return hasteName; + } +} +exports.default = Worker; diff --git a/packages/@expo/metro-file-map/build/plugins/mocks/getMockName.d.ts b/packages/@expo/metro-file-map/build/plugins/mocks/getMockName.d.ts new file mode 100644 index 00000000000000..e1b134cc9aee67 --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/mocks/getMockName.d.ts @@ -0,0 +1,8 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +declare const _default: (filePath: string) => string; +export default _default; diff --git a/packages/@expo/metro-file-map/build/plugins/mocks/getMockName.js b/packages/@expo/metro-file-map/build/plugins/mocks/getMockName.js new file mode 100644 index 00000000000000..5fc42e22d4b57f --- /dev/null +++ b/packages/@expo/metro-file-map/build/plugins/mocks/getMockName.js @@ -0,0 +1,17 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const path_1 = __importDefault(require("path")); +const MOCKS_PATTERN = path_1.default.sep + '__mocks__' + path_1.default.sep; +exports.default = (filePath) => { + const mockPath = filePath.split(MOCKS_PATTERN)[1]; + return mockPath.substring(0, mockPath.lastIndexOf(path_1.default.extname(mockPath))).replaceAll('\\', '/'); +}; diff --git a/packages/@expo/metro-file-map/build/ts-declarations/fb-watchman.d.ts b/packages/@expo/metro-file-map/build/ts-declarations/fb-watchman.d.ts new file mode 100644 index 00000000000000..5fcd0cfa5105d6 --- /dev/null +++ b/packages/@expo/metro-file-map/build/ts-declarations/fb-watchman.d.ts @@ -0,0 +1,20 @@ +declare module 'fb-watchman' { + /** Information about a changed file */ + interface FileChange { + dev?: number; + cclock?: string; + gid?: number; + ino?: number; + mode?: number; + mtime?: number; + mtime_us?: number; + mtime_ns?: number; + mtime_f?: number; + new?: boolean; + nlink?: number; + uid?: number; + 'content.sha1hex'?: string; + symlink_target?: string; + } +} +export {}; diff --git a/packages/@expo/metro-file-map/build/ts-declarations/fb-watchman.js b/packages/@expo/metro-file-map/build/ts-declarations/fb-watchman.js new file mode 100644 index 00000000000000..c8ad2e549bdc68 --- /dev/null +++ b/packages/@expo/metro-file-map/build/ts-declarations/fb-watchman.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/packages/@expo/metro-file-map/build/types.d.ts b/packages/@expo/metro-file-map/build/types.d.ts new file mode 100644 index 00000000000000..01ffed0cd62a55 --- /dev/null +++ b/packages/@expo/metro-file-map/build/types.d.ts @@ -0,0 +1,395 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ +import type { PerfLogger, RootPerfLogger } from '@expo/metro/metro-config'; +import type { HType, HTypeValue } from './constants'; +export type { HType, HTypeValue }; +export type { PerfLoggerFactory, PerfLogger } from '@expo/metro/metro-config'; +export interface BuildParameters { + readonly computeSha1: boolean; + readonly enableSymlinks: boolean; + readonly extensions: readonly string[]; + readonly forceNodeFilesystemAPI: boolean; + readonly ignorePattern: RegExp; + readonly plugins: readonly InputFileMapPlugin[]; + readonly retainAllFiles: boolean; + readonly rootDir: string; + readonly roots: readonly string[]; + readonly cacheBreaker: string; +} +export interface BuildResult { + fileSystem: FileSystem; +} +export interface CacheData { + readonly clocks: WatchmanClocks; + readonly fileSystemData: unknown; + readonly plugins: ReadonlyMap; +} +export interface CacheManager { + /** + * Called during startup to load initial state, if available. Provided to + * a crawler, which will return the delta between the initial state and the + * current file system state. + */ + read(): Promise; + /** + * Called when metro-file-map `build()` has applied changes returned by the + * crawler - i.e. internal state reflects the current file system state. + * + * getSnapshot may be retained and called at any time before end(), such as + * in response to eventSource 'change' events. + */ + write(getSnapshot: () => CacheData, opts: CacheManagerWriteOptions): Promise; + /** + * The last call that will be made to this CacheManager. Any handles should + * be closed by the time this settles. 
+ */ + end(): Promise; +} +export interface CacheManagerEventSource { + onChange(listener: () => void): () => void; +} +export type CacheManagerFactory = (options: CacheManagerFactoryOptions) => CacheManager; +export interface CacheManagerFactoryOptions { + readonly buildParameters: BuildParameters; +} +export interface CacheManagerWriteOptions { + readonly changedSinceCacheRead: boolean; + readonly eventSource: CacheManagerEventSource; + readonly onWriteError: (error: Error) => void; +} +export type CanonicalPath = string; +export interface ChangedFileMetadata { + readonly isSymlink: boolean; + readonly modifiedTime?: number | undefined | null; +} +export interface ChangeEvent { + readonly logger: RootPerfLogger | undefined | null; + readonly changes: ReadonlyFileSystemChanges; + readonly rootDir: string; +} +export interface ChangeEventMetadata { + modifiedTime: number | undefined | null; + size: number | undefined | null; + type: 'f' | 'd' | 'l'; +} +export type Console = typeof globalThis.console; +interface CrawlerPreviousState { + readonly clocks: ReadonlyMap; + readonly fileSystem: FileSystem; +} +export interface CrawlerOptions { + abortSignal: AbortSignal | undefined | null; + computeSha1: boolean; + console: Console; + extensions: readonly string[]; + forceNodeFilesystemAPI: boolean; + ignore: IgnoreMatcher; + includeSymlinks: boolean; + perfLogger?: PerfLogger | null | undefined; + previousState: CrawlerPreviousState; + rootDir: string; + roots: readonly string[]; + onStatus: (status: WatcherStatus) => void; + subpath?: string; +} +export type CrawlResult = { + changedFiles: FileData; + removedFiles: Set; + clocks: WatchmanClocks; +} | { + changedFiles: FileData; + removedFiles: Set; +}; +export type DependencyExtractor = { + extract: (content: string, absoluteFilePath: string, defaultExtractor?: DependencyExtractor['extract']) => Set; + getCacheKey: () => string; +}; +export type WatcherStatus = { + type: 'watchman_slow_command'; + timeElapsed: number; 
+ command: 'watch-project' | 'query'; +} | { + type: 'watchman_slow_command_complete'; + timeElapsed: number; + command: 'watch-project' | 'query'; +} | { + type: 'watchman_warning'; + warning: unknown; + command: 'watch-project' | 'query'; +}; +export type DuplicatesSet = Map; +export type DuplicatesIndex = Map>; +interface FileMapPluginInitOptionsFiles { + fileIterator(opts: Readonly<{ + includeNodeModules: boolean; + includeSymlinks: boolean; + }>): Iterable<{ + baseName: string; + canonicalPath: string; + readonly pluginData: PerFileData | null | undefined; + }>; + lookup(mixedPath: string): { + exists: false; + } | { + exists: true; + type: 'f'; + readonly pluginData: PerFileData; + } | { + exists: true; + type: 'd'; + }; +} +export interface FileMapPluginInitOptions { + readonly files: FileMapPluginInitOptionsFiles; + readonly pluginState: SerializableState | undefined | null; +} +interface FileMapPluginWorkerOptions { + readonly modulePath: string; + readonly setupArgs: JsonData; +} +export interface FileMapPluginWorker { + readonly worker: FileMapPluginWorkerOptions; + readonly filter: (input: { + normalPath: string; + isNodeModules: boolean; + }) => boolean; +} +export type V8Serializable = string | number | boolean | null | readonly V8Serializable[] | ReadonlySet | ReadonlyMap | Readonly<{ + [key: string]: V8Serializable; +}>; +export interface FileMapPlugin { + readonly name: string; + initialize(initOptions: FileMapPluginInitOptions): Promise; + assertValid(): void; + onChanged(changes: ReadonlyFileSystemChanges): void; + getSerializableSnapshot(): void | V8Serializable; + getCacheKey(): string; + getWorker(): FileMapPluginWorker | undefined | null; +} +export type InputFileMapPlugin = FileMapPlugin; +export interface MetadataWorkerParams { + getContent(): Buffer; +} +export interface MetadataWorker { + processFile(message: WorkerMessage, params: MetadataWorkerParams): V8Serializable; +} +export type IgnoreMatcher = (item: string) => boolean; +export 
type FileData = Map; +export type FileMetadata = [ + mtime: number | null, + size: number, + visited: 0 | 1, + sha1: string | null, + symlink: 0 | 1 | string, + ...any[] +]; +export interface FileStats { + readonly fileType: 'f' | 'l'; + readonly modifiedTime: number | undefined | null; + readonly size: number | undefined | null; +} +export interface FileSystem { + exists(file: Path): boolean; + getAllFiles(): Path[]; + /** + * Given a map of files, determine which of them are new or modified + * (changedFiles), and which of them are missing from the input + * (removedFiles), vs the current state of this instance of FileSystem. + */ + getDifference(files: FileData, options?: Readonly<{ + /** + * Only consider files under this subpath (which should be a directory) + * when computing removedFiles. If not provided, all files in the file + * system are considered. + */ + subpath?: string; + }>): { + changedFiles: FileData; + removedFiles: Set; + }; + getSerializableSnapshot(): CacheData['fileSystemData']; + getSha1(file: Path): string | undefined | null; + getOrComputeSha1(file: Path): Promise<{ + sha1: string; + content?: Buffer; + } | undefined | null>; + /** + * Given a start path (which need not exist), a subpath and type, and + * optionally a 'breakOnSegment', performs the following: + * + * X = mixedStartPath + * do + * if basename(X) === opts.breakOnSegment + * return null + * if X + subpath exists and has type opts.subpathType + * return { + * absolutePath: realpath(X + subpath) + * containerRelativePath: relative(mixedStartPath, X) + * } + * X = dirname(X) + * while X !== dirname(X) + * + * If opts.invalidatedBy is given, collects all absolute, real paths that if + * added or removed may invalidate this result. + * + * Useful for finding the closest package scope (subpath: package.json, + * type f, breakOnSegment: node_modules) or closest potential package root + * (subpath: node_modules/pkg, type: d) in Node.js resolution. 
+ */ + hierarchicalLookup(mixedStartPath: string, subpath: string, opts: { + breakOnSegment: string | undefined | null; + invalidatedBy: Set | undefined | null; + subpathType: 'f' | 'd'; + }): { + absolutePath: string; + containerRelativePath: string; + } | undefined | null; + /** + * Analogous to posix lstat. If the file at `file` is a symlink, return + * information about the symlink without following it. + */ + linkStats(file: Path): FileStats | undefined | null; + /** + * Return information about the given path, whether a directory or file. + * Always follow symlinks, and return a real path if it exists. + */ + lookup(mixedPath: Path): LookupResult; + matchFiles(opts: { + filter?: RegExp | null | undefined; + filterCompareAbsolute?: boolean | undefined; + filterComparePosix?: boolean | undefined; + follow?: boolean | undefined; + recursive?: boolean | undefined; + rootDir?: Path | null | undefined; + }): Iterable; +} +export type Glob = string; +export type JsonData = string | number | boolean | null | JsonData[] | { + [key: string]: JsonData; +}; +export type LookupResult = { + exists: false; + links: ReadonlySet; + missing: string; +} | { + exists: true; + links: ReadonlySet; + realPath: string; + type: 'd'; +} | { + exists: true; + links: ReadonlySet; + realPath: string; + type: 'f'; + metadata: FileMetadata; +}; +export interface MockMap { + getMockModule(name: string): Path | undefined | null; +} +export interface HasteConflict { + id: string; + platform: string | null; + absolutePaths: string[]; + type: 'duplicate' | 'shadowing'; +} +export interface HasteMap { + getModule(name: string, platform?: string | undefined | null, supportsNativePlatform?: boolean | undefined | null, type?: HTypeValue | undefined | null): Path | undefined | null; + getModuleNameByPath(file: Path): string | undefined | null; + getPackage(name: string, platform: string | undefined | null, _supportsNativePlatform: boolean | undefined | null): Path | undefined | null; + 
computeConflicts(): HasteConflict[]; +} +export type HasteMapData = Map; +export type HasteMapItem = { + [platform: string]: HasteMapItemMetadata; +}; +export type HasteMapItemMetadata = [/* path */ string, /* type */ number]; +export interface FileSystemListener { + directoryAdded(canonicalPath: CanonicalPath): void; + directoryRemoved(canonicalPath: CanonicalPath): void; + fileAdded(canonicalPath: CanonicalPath, data: FileMetadata): void; + fileModified(canonicalPath: CanonicalPath, oldData: FileMetadata, newData: FileMetadata): void; + fileRemoved(canonicalPath: CanonicalPath, data: FileMetadata): void; +} +export interface ReadonlyFileSystemChanges { + readonly addedDirectories: Iterable; + readonly removedDirectories: Iterable; + readonly addedFiles: Iterable>; + readonly modifiedFiles: Iterable>; + readonly removedFiles: Iterable>; +} +export interface MutableFileSystem extends FileSystem { + remove(filePath: Path, listener?: FileSystemListener | undefined): void; + addOrModify(filePath: Path, fileMetadata: FileMetadata, listener?: FileSystemListener | undefined): void; + bulkAddOrModify(addedOrModifiedFiles: FileData, listener?: FileSystemListener | undefined): void; +} +export type Path = string; +export type ProcessFileFunction = (normalPath: string, metadata: FileMetadata, request: Readonly<{ + computeSha1: boolean; +}>) => Buffer | undefined | null; +export type RawMockMap = { + /** posix-separated mock name to posix-separated project-relative paths */ + readonly duplicates: Map>; + /** posix-separated mock name to posix-separated project-relative path */ + readonly mocks: Map; + readonly version: number; +}; +export interface ReadOnlyRawMockMap { + readonly duplicates: ReadonlyMap>; + readonly mocks: ReadonlyMap; + readonly version: number; +} +export interface WatcherBackend { + getPauseReason(): string | undefined | null; + onError(listener: (error: Error) => void): () => void; + onFileEvent(listener: (event: WatcherBackendChangeEvent) => void): () => 
void; + startWatching(): Promise; + stopWatching(): Promise; +} +export type ChangeEventClock = [absoluteWatchRoot: string, opaqueClock: string]; +export type WatcherBackendChangeEvent = { + readonly event: 'touch'; + readonly clock?: ChangeEventClock | undefined; + readonly relativePath: string; + readonly root: string; + readonly metadata: ChangeEventMetadata; +} | { + readonly event: 'delete'; + readonly clock?: ChangeEventClock | undefined; + readonly relativePath: string; + readonly root: string; + readonly metadata?: undefined; +} | { + readonly event: 'recrawl'; + readonly clock?: ChangeEventClock | undefined; + readonly relativePath: string; + readonly root: string; +}; +export interface WatcherBackendOptions { + readonly ignored: RegExp | undefined | null; + readonly globs: readonly string[]; + readonly dot: boolean; +} +export type WatchmanClockSpec = string | { + readonly scm: { + readonly 'mergebase-with': string; + }; +}; +export type WatchmanClocks = Map; +export interface WorkerMessage { + readonly computeSha1: boolean; + readonly filePath: string; + readonly maybeReturnContent: boolean; + readonly pluginsToRun: readonly number[]; +} +export interface WorkerMetadata { + readonly sha1?: string | undefined | null; + readonly content?: Buffer | undefined | null; + readonly pluginData?: readonly V8Serializable[]; +} +export interface WorkerSetupArgs { + readonly plugins?: readonly FileMapPluginWorker['worker'][]; +} diff --git a/packages/@expo/metro-file-map/build/types.js b/packages/@expo/metro-file-map/build/types.js new file mode 100644 index 00000000000000..4e1ed35975a22b --- /dev/null +++ b/packages/@expo/metro-file-map/build/types.js @@ -0,0 +1,8 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/packages/@expo/metro-file-map/build/watchers/AbstractWatcher.d.ts b/packages/@expo/metro-file-map/build/watchers/AbstractWatcher.d.ts new file mode 100644 index 00000000000000..93e9f2f67d29f4 --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/AbstractWatcher.d.ts @@ -0,0 +1,30 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import type { WatcherBackend, WatcherBackendChangeEvent, WatcherBackendOptions } from '../types'; +type EachOmit = T extends any ? Omit : never; +export type WatcherBackendChangeEventWithoutRoot = EachOmit; +export interface Listeners { + onFileEvent(event: WatcherBackendChangeEvent): void; + onError(error: Error): void; +} +export declare class AbstractWatcher implements WatcherBackend { + #private; + readonly root: string; + readonly ignored: RegExp | undefined | null; + readonly globs: readonly string[]; + readonly dot: boolean; + readonly doIgnore: (path: string) => boolean; + constructor(dir: string, opts: WatcherBackendOptions); + onFileEvent(listener: (event: WatcherBackendChangeEvent) => void): () => void; + onError(listener: (error: Error) => void): () => void; + startWatching(): Promise; + stopWatching(): Promise; + emitFileEvent(event: WatcherBackendChangeEventWithoutRoot): void; + emitError(error: Error): void; + getPauseReason(): string | undefined | null; +} +export {}; diff --git a/packages/@expo/metro-file-map/build/watchers/AbstractWatcher.js b/packages/@expo/metro-file-map/build/watchers/AbstractWatcher.js new file mode 100644 index 00000000000000..71f2ad1eac218c --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/AbstractWatcher.js @@ -0,0 +1,97 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AbstractWatcher = void 0; +const events_1 = __importDefault(require("events")); +const path = __importStar(require("path")); +const common_1 = require("./common"); +class AbstractWatcher { + root; + ignored; + globs; + dot; + doIgnore; + #emitter = new events_1.default(); + constructor(dir, opts) { + const { ignored, globs, dot } = opts; + this.dot = dot || false; + this.ignored = ignored; + this.globs = globs; + this.doIgnore = ignored + ? (filePath) => (0, common_1.posixPathMatchesPattern)(ignored, filePath) + : () => false; + this.root = path.resolve(dir); + } + onFileEvent(listener) { + this.#emitter.on('fileevent', listener); + return () => { + this.#emitter.removeListener('fileevent', listener); + }; + } + onError(listener) { + this.#emitter.on('error', listener); + return () => { + this.#emitter.removeListener('error', listener); + }; + } + async startWatching() { + // Must be implemented by subclasses + } + async stopWatching() { + this.#emitter.removeAllListeners(); + } + emitFileEvent(event) { + this.#emitter.emit('fileevent', { + ...event, + root: this.root, + }); + } + emitError(error) { + this.#emitter.emit('error', error); + } + getPauseReason() { + return null; + } +} +exports.AbstractWatcher = AbstractWatcher; diff --git a/packages/@expo/metro-file-map/build/watchers/FallbackWatcher.d.ts b/packages/@expo/metro-file-map/build/watchers/FallbackWatcher.d.ts new file mode 100644 index 00000000000000..1806ea26816922 --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/FallbackWatcher.d.ts @@ -0,0 +1,18 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @format + */ +import { AbstractWatcher } from './AbstractWatcher'; +export default class FallbackWatcher extends AbstractWatcher { + #private; + startWatching(): Promise; + /** + * End watching. + */ + stopWatching(): Promise; + getPauseReason(): string | undefined | null; +} diff --git a/packages/@expo/metro-file-map/build/watchers/FallbackWatcher.js b/packages/@expo/metro-file-map/build/watchers/FallbackWatcher.js new file mode 100644 index 00000000000000..1bb69f11e18ade --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/FallbackWatcher.js @@ -0,0 +1,411 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs_1 = __importDefault(require("fs")); +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +const AbstractWatcher_1 = require("./AbstractWatcher"); +const common = __importStar(require("./common")); +// NOTE(@kitten): No typings +const walker = require('walker'); +const platform = os_1.default.platform(); +const fsPromises = fs_1.default.promises; +const TOUCH_EVENT = common.TOUCH_EVENT; +const DELETE_EVENT = common.DELETE_EVENT; +/** + * This setting delays all events. It suppresses 'change' events that + * immediately follow an 'add', and debounces successive 'change' events to + * only emit the latest. 
+ */ +const DEBOUNCE_MS = 100; +class FallbackWatcher extends AbstractWatcher_1.AbstractWatcher { + #changeTimers = new Map(); + #dirRegistry = Object.create(null); + #watched = Object.create(null); + async startWatching() { + this.#watchdir(this.root); + await new Promise((resolve) => { + recReaddir(this.root, (dir) => { + this.#watchdir(dir); + }, (filename) => { + this.#register(filename, 'f'); + }, (symlink) => { + this.#register(symlink, 'l'); + }, () => { + resolve(); + }, this.#checkedEmitError, this.ignored); + }); + } + /** + * Register files that matches our globs to know what to type of event to + * emit in the future. + * + * Registry looks like the following: + * + * dirRegister => Map { + * dirpath => Map { + * filename => true + * } + * } + * + * Return false if ignored or already registered. + */ + #register(filepath, type) { + const dir = path_1.default.dirname(filepath); + const filename = path_1.default.basename(filepath); + if (this.#dirRegistry[dir] && this.#dirRegistry[dir][filename]) { + return false; + } + const relativePath = path_1.default.relative(this.root, filepath); + if (this.doIgnore(relativePath) || + (type === 'f' && !common.includedByGlob('f', this.globs, this.dot, relativePath))) { + return false; + } + if (!this.#dirRegistry[dir]) { + this.#dirRegistry[dir] = Object.create(null); + } + this.#dirRegistry[dir][filename] = true; + return true; + } + /** + * Removes a file from the registry. + */ + #unregister(filepath) { + const dir = path_1.default.dirname(filepath); + if (this.#dirRegistry[dir]) { + const filename = path_1.default.basename(filepath); + delete this.#dirRegistry[dir][filename]; + } + } + /** + * Removes a dir from the registry, returning all files that were registered + * under it (recursively). 
+ */ + #unregisterDir(dirpath) { + const removedFiles = []; + // Find and remove all entries under this directory + for (const registeredDir of Object.keys(this.#dirRegistry)) { + if (registeredDir === dirpath || registeredDir.startsWith(dirpath + path_1.default.sep)) { + // Collect all files in this directory + for (const filename of Object.keys(this.#dirRegistry[registeredDir])) { + removedFiles.push(path_1.default.join(registeredDir, filename)); + } + delete this.#dirRegistry[registeredDir]; + } + } + return removedFiles; + } + /** + * Checks if a file or directory exists in the registry. + */ + #registered(fullpath) { + const dir = path_1.default.dirname(fullpath); + return !!(this.#dirRegistry[fullpath] || + (this.#dirRegistry[dir] && this.#dirRegistry[dir][path_1.default.basename(fullpath)])); + } + /** + * Emit "error" event if it's not an ignorable event + */ + #checkedEmitError = (error) => { + if (!isIgnorableFileError(error)) { + this.emitError(error); + } + }; + /** + * Watch a directory. + */ + #watchdir = (dir) => { + if (this.#watched[dir]) { + return false; + } + const watcher = fs_1.default.watch(dir, { persistent: true }, (event, filename) => this.#normalizeChange(dir, event, filename)); + this.#watched[dir] = watcher; + watcher.on('error', this.#checkedEmitError); + if (this.root !== dir) { + this.#register(dir, 'd'); + } + return true; + }; + /** + * Stop watching a directory. + */ + async #stopWatching(dir) { + const watcher = this.#watched[dir]; + if (watcher) { + await new Promise((resolve) => { + watcher.once('close', () => process.nextTick(resolve)); + watcher.close(); + delete this.#watched[dir]; + }); + } + } + /** + * End watching. 
+ */ + async stopWatching() { + await super.stopWatching(); + const promises = Object.keys(this.#watched).map((dir) => this.#stopWatching(dir)); + await Promise.all(promises); + } + /** + * On some platforms, as pointed out on the fs docs (most likely just win32) + * the file argument might be missing from the fs event. Try to detect what + * change by detecting if something was deleted or the most recent file change. + */ + #detectChangedFile(dir, event, callback) { + if (!this.#dirRegistry[dir]) { + return; + } + let found = false; + let closest = null; + let c = 0; + Object.keys(this.#dirRegistry[dir]).forEach((file, i, arr) => { + fs_1.default.lstat(path_1.default.join(dir, file), (error, stat) => { + if (found) { + return; + } + if (error) { + if (isIgnorableFileError(error)) { + found = true; + callback(file); + } + else { + this.emitError(error); + } + } + else { + if (closest == null || stat.mtime > closest.mtime) { + closest = { file, mtime: stat.mtime }; + } + if (arr.length === ++c) { + callback(closest.file); + } + } + }); + }); + } + /** + * Normalize fs events and pass it on to be processed. + */ + #normalizeChange(dir, event, file) { + if (!file) { + this.#detectChangedFile(dir, event, (actualFile) => { + if (actualFile) { + this.#processChange(dir, event, actualFile).catch((error) => { + this.emitError(error); + }); + } + }); + } + else { + this.#processChange(dir, event, path_1.default.normalize(file)).catch((error) => { + this.emitError(error); + }); + } + } + /** + * Process changes. + */ + async #processChange(dir, event, file) { + const fullPath = path_1.default.join(dir, file); + const relativePath = path_1.default.join(path_1.default.relative(this.root, dir), file); + const registered = this.#registered(fullPath); + try { + const stat = await fsPromises.lstat(fullPath); + if (stat.isDirectory()) { + // win32 emits usless change events on dirs. 
+ if (event === 'change') { + return; + } + if (this.doIgnore(relativePath) || + !common.includedByGlob('d', this.globs, this.dot, relativePath)) { + return; + } + recReaddir(path_1.default.resolve(this.root, relativePath), (dir, stats) => { + if (this.#watchdir(dir)) { + this.#emitEvent({ + event: TOUCH_EVENT, + relativePath: path_1.default.relative(this.root, dir), + metadata: { + modifiedTime: stats.mtime.getTime(), + size: stats.size, + type: 'd', + }, + }); + } + }, (file, stats) => { + if (this.#register(file, 'f')) { + this.#emitEvent({ + event: TOUCH_EVENT, + relativePath: path_1.default.relative(this.root, file), + metadata: { + modifiedTime: stats.mtime.getTime(), + size: stats.size, + type: 'f', + }, + }); + } + }, (symlink, stats) => { + if (this.#register(symlink, 'l')) { + this.emitFileEvent({ + event: TOUCH_EVENT, + relativePath: path_1.default.relative(this.root, symlink), + metadata: { + modifiedTime: stats.mtime.getTime(), + size: stats.size, + type: 'l', + }, + }); + } + }, function endCallback() { }, this.#checkedEmitError, this.ignored); + } + else { + const type = common.typeFromStat(stat); + if (type == null) { + return; + } + const metadata = { + modifiedTime: stat.mtime.getTime(), + size: stat.size, + type, + }; + if (registered) { + this.#emitEvent({ event: TOUCH_EVENT, relativePath, metadata }); + } + else { + if (this.#register(fullPath, type)) { + this.#emitEvent({ event: TOUCH_EVENT, relativePath, metadata }); + } + } + } + } + catch (error) { + if (!isIgnorableFileError(error)) { + this.emitError(error); + return; + } + this.#unregister(fullPath); + // When a directory is deleted, emit delete events for all files we + // knew about under that directory + const removedFiles = this.#unregisterDir(fullPath); + for (const removedFile of removedFiles) { + this.#emitEvent({ + event: DELETE_EVENT, + relativePath: path_1.default.relative(this.root, removedFile), + }); + } + if (registered) { + this.#emitEvent({ event: DELETE_EVENT, 
relativePath }); + } + await this.#stopWatching(fullPath); + } + } + /** + * Emits the given event after debouncing, to emit only the latest + * information when we receive several events in quick succession. E.g., + * Linux emits two events for every new file. + * + * See also note above for DEBOUNCE_MS. + */ + #emitEvent(change) { + const { event, relativePath } = change; + const key = event + '-' + relativePath; + const existingTimer = this.#changeTimers.get(key); + if (existingTimer) { + clearTimeout(existingTimer); + } + this.#changeTimers.set(key, setTimeout(() => { + this.#changeTimers.delete(key); + this.emitFileEvent(change); + }, DEBOUNCE_MS)); + } + getPauseReason() { + return null; + } +} +exports.default = FallbackWatcher; +/** + * Determine if a given FS error can be ignored + */ +function isIgnorableFileError(error) { + return (error.code === 'ENOENT' || + // Workaround Windows EPERM on watched folder deletion, and when + // reading locked files (pending further writes or pending deletion). + // In such cases, we'll receive a subsequent event when the file is + // deleted or ready to read. + // https://github.com/facebook/metro/issues/1001 + // https://github.com/nodejs/node-v0.x-archive/issues/4337 + (error.code === 'EPERM' && platform === 'win32')); +} +/** + * Traverse a directory recursively calling `callback` on every directory. 
+ */ +function recReaddir(dir, dirCallback, fileCallback, symlinkCallback, endCallback, errorCallback, ignored) { + const walk = walker(dir); + if (ignored) { + walk.filterDir((currentDir) => !common.posixPathMatchesPattern(ignored, currentDir)); + } + walk + .on('dir', normalizeProxy(dirCallback)) + .on('file', normalizeProxy(fileCallback)) + .on('symlink', normalizeProxy(symlinkCallback)) + .on('error', errorCallback) + .on('end', () => { + if (platform === 'win32') { + setTimeout(endCallback, 1000); + } + else { + endCallback(); + } + }); +} +/** + * Returns a callback that when called will normalize a path and call the + * original callback + */ +function normalizeProxy(callback) { + return (filepath, stats) => callback(path_1.default.normalize(filepath), stats); +} diff --git a/packages/@expo/metro-file-map/build/watchers/NativeWatcher.d.ts b/packages/@expo/metro-file-map/build/watchers/NativeWatcher.d.ts new file mode 100644 index 00000000000000..ca78895b5319e6 --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/NativeWatcher.d.ts @@ -0,0 +1,39 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import type { WatcherBackendOptions } from '../types'; +import { AbstractWatcher } from './AbstractWatcher'; +/** + * NativeWatcher uses Node's native fs.watch API with recursive: true. + * + * Supported on macOS (and potentially Windows), because both natively have a + * concept of recurisve watching, via FSEvents and ReadDirectoryChangesW + * respectively. Notably Linux lacks this capability at the OS level. 
+ * + * Node.js has at times supported the `recursive` option to fs.watch on Linux + * by walking the directory tree and creating a watcher on each directory, but + * this fits poorly with the synchronous `watch` API - either it must block for + * arbitrarily large IO, or it may drop changes after `watch` returns. See: + * https://github.com/nodejs/node/issues/48437 + * + * Therefore, we retain a fallback to our own application-level recursive + * FallbackWatcher for Linux, which has async `startWatching`. + * + * On Windows, this watcher could be used in principle, but needs work around + * some Windows-specific edge cases handled in FallbackWatcher, like + * deduping file change events, ignoring directory changes, and handling EPERM. + */ +export default class NativeWatcher extends AbstractWatcher { + #private; + static isSupported(): boolean; + constructor(dir: string, opts: WatcherBackendOptions); + startWatching(): Promise; + /** + * End watching. + */ + stopWatching(): Promise; + _handleEvent(event: string, relativePath: string | null): Promise; +} diff --git a/packages/@expo/metro-file-map/build/watchers/NativeWatcher.js b/packages/@expo/metro-file-map/build/watchers/NativeWatcher.js new file mode 100644 index 00000000000000..7f7c03c742f250 --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/NativeWatcher.js @@ -0,0 +1,156 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +const fs_1 = require("fs"); +const os_1 = require("os"); +const path = __importStar(require("path")); +const AbstractWatcher_1 = require("./AbstractWatcher"); +const common_1 = require("./common"); +const debug = require('debug')('Metro:NativeWatcher'); +const TOUCH_EVENT = 'touch'; +const DELETE_EVENT = 'delete'; +const RECRAWL_EVENT = 'recrawl'; +/** + * NativeWatcher uses Node's native fs.watch API with recursive: true. + * + * Supported on macOS (and potentially Windows), because both natively have a + * concept of recurisve watching, via FSEvents and ReadDirectoryChangesW + * respectively. Notably Linux lacks this capability at the OS level. 
+ * + * Node.js has at times supported the `recursive` option to fs.watch on Linux + * by walking the directory tree and creating a watcher on each directory, but + * this fits poorly with the synchronous `watch` API - either it must block for + * arbitrarily large IO, or it may drop changes after `watch` returns. See: + * https://github.com/nodejs/node/issues/48437 + * + * Therefore, we retain a fallback to our own application-level recursive + * FallbackWatcher for Linux, which has async `startWatching`. + * + * On Windows, this watcher could be used in principle, but needs work around + * some Windows-specific edge cases handled in FallbackWatcher, like + * deduping file change events, ignoring directory changes, and handling EPERM. + */ +class NativeWatcher extends AbstractWatcher_1.AbstractWatcher { + #fsWatcher; + static isSupported() { + return (0, os_1.platform)() === 'darwin'; + } + // eslint-disable-next-line @typescript-eslint/no-useless-constructor + constructor(dir, opts) { + // NOTE(@kitten): `!NativeWatcher.isSupported` was always truthy, so omitting check here + super(dir, opts); + } + async startWatching() { + this.#fsWatcher = (0, fs_1.watch)(this.root, { + // Don't hold the process open if we forget to close() + persistent: false, + // FSEvents or ReadDirectoryChangesW should mean this is cheap and + // ~instant on macOS or Windows. + recursive: true, + }, (event, relativePath) => { + this._handleEvent(event, relativePath).catch((error) => { + this.emitError(error); + }); + }); + debug('Watching %s', this.root); + } + /** + * End watching. 
+ */ + async stopWatching() { + await super.stopWatching(); + if (this.#fsWatcher) { + this.#fsWatcher.close(); + } + } + async _handleEvent(event, relativePath) { + if (relativePath == null) { + return; + } + const absolutePath = path.resolve(this.root, relativePath); + if (this.doIgnore(relativePath)) { + debug('Ignoring event "%s" on %s (root: %s)', event, relativePath, this.root); + return; + } + debug('Handling event "%s" on %s (root: %s)', event, relativePath, this.root); + try { + const stat = await fs_1.promises.lstat(absolutePath); + const type = (0, common_1.typeFromStat)(stat); + // Ignore files of an unrecognized type + if (!type) { + return; + } + if (!(0, common_1.includedByGlob)(type, this.globs, this.dot, relativePath)) { + return; + } + // For directory "rename" events, notify that we need a recrawl since we + // wont' receive events for unmodified files underneath a moved (or + // cloned) directory. Renames are fired by the OS on moves, clones, and + // creations. We ignore "change" events because they indiciate a change + // to directory metadata, rather than its path or existence. 
+ if (type === 'd' && event === 'rename') { + debug('Directory rename detected on %s, requesting recrawl', relativePath); + this.emitFileEvent({ + event: RECRAWL_EVENT, + relativePath, + }); + return; + } + this.emitFileEvent({ + event: TOUCH_EVENT, + relativePath, + metadata: { + type, + modifiedTime: stat.mtime.getTime(), + size: stat.size, + }, + }); + } + catch (error) { + if (error?.code !== 'ENOENT') { + this.emitError(error); + return; + } + this.emitFileEvent({ event: DELETE_EVENT, relativePath }); + } + } +} +exports.default = NativeWatcher; diff --git a/packages/@expo/metro-file-map/build/watchers/RecrawlWarning.d.ts b/packages/@expo/metro-file-map/build/watchers/RecrawlWarning.d.ts new file mode 100644 index 00000000000000..94cb3c0bad9121 --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/RecrawlWarning.d.ts @@ -0,0 +1,18 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * Originally vendored from + * https://github.com/amasad/sane/blob/64ff3a870c42e84f744086884bf55a4f9c22d376/src/utils/recrawl-warning-dedupe.js + */ +export default class RecrawlWarning { + static RECRAWL_WARNINGS: RecrawlWarning[]; + static REGEXP: RegExp; + root: string; + count: number; + constructor(root: string, count: number); + static findByRoot(root: string): RecrawlWarning | undefined; + static isRecrawlWarningDupe(warningMessage: unknown): boolean; +} diff --git a/packages/@expo/metro-file-map/build/watchers/RecrawlWarning.js b/packages/@expo/metro-file-map/build/watchers/RecrawlWarning.js new file mode 100644 index 00000000000000..f2da86f79968e9 --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/RecrawlWarning.js @@ -0,0 +1,59 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * Originally vendored from + * https://github.com/amasad/sane/blob/64ff3a870c42e84f744086884bf55a4f9c22d376/src/utils/recrawl-warning-dedupe.js + */ +Object.defineProperty(exports, "__esModule", { value: true }); +class RecrawlWarning { + static RECRAWL_WARNINGS = []; + static REGEXP = /Recrawled this watch (\d+) times?, most recently because:\n([^:]+)/; + root; + count; + constructor(root, count) { + this.root = root; + this.count = count; + } + static findByRoot(root) { + for (let i = 0; i < this.RECRAWL_WARNINGS.length; i++) { + const warning = this.RECRAWL_WARNINGS[i]; + if (warning.root === root) { + return warning; + } + } + return undefined; + } + static isRecrawlWarningDupe(warningMessage) { + if (typeof warningMessage !== 'string') { + return false; + } + const match = warningMessage.match(this.REGEXP); + if (!match) { + return false; + } + const count = Number(match[1]); + const root = match[2]; + const warning = this.findByRoot(root); + if (warning) { + // only keep the highest count, assume count to either stay the same or + // increase. + if (warning.count >= count) { + return true; + } + else { + // update the existing warning to the latest (highest) count + warning.count = count; + return false; + } + } + else { + this.RECRAWL_WARNINGS.push(new RecrawlWarning(root, count)); + return false; + } + } +} +exports.default = RecrawlWarning; diff --git a/packages/@expo/metro-file-map/build/watchers/WatchmanWatcher.d.ts b/packages/@expo/metro-file-map/build/watchers/WatchmanWatcher.d.ts new file mode 100644 index 00000000000000..0ca04db2ee7d8b --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/WatchmanWatcher.d.ts @@ -0,0 +1,22 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import { AbstractWatcher } from './AbstractWatcher'; +import type { WatcherOptions } from './common'; +/** + * Watches `dir`. + */ +export default class WatchmanWatcher extends AbstractWatcher { + #private; + readonly subscriptionName: string; + constructor(dir: string, opts: WatcherOptions); + startWatching(): Promise; + /** + * Closes the watcher. + */ + stopWatching(): Promise; + getPauseReason(): string | undefined | null; +} diff --git a/packages/@expo/metro-file-map/build/watchers/WatchmanWatcher.js b/packages/@expo/metro-file-map/build/watchers/WatchmanWatcher.js new file mode 100644 index 00000000000000..58ddb3cb87ea99 --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/WatchmanWatcher.js @@ -0,0 +1,263 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const assert_1 = __importDefault(require("assert")); +const crypto_1 = require("crypto"); +const fb_watchman_1 = __importDefault(require("fb-watchman")); +const invariant_1 = __importDefault(require("invariant")); +const AbstractWatcher_1 = require("./AbstractWatcher"); +const RecrawlWarning_1 = __importDefault(require("./RecrawlWarning")); +const common = __importStar(require("./common")); +const normalizePathSeparatorsToSystem_1 = __importDefault(require("../lib/normalizePathSeparatorsToSystem")); +const debug = require('debug')('Metro:WatchmanWatcher'); +const DELETE_EVENT = common.DELETE_EVENT; +const TOUCH_EVENT = common.TOUCH_EVENT; +const SUB_PREFIX = 'metro-file-map'; +/** + * Watches `dir`. 
+ */ +class WatchmanWatcher extends AbstractWatcher_1.AbstractWatcher { + #client; + subscriptionName; + #watchProjectInfo; + #watchmanDeferStates; + #deferringStates = null; + constructor(dir, opts) { + const { watchmanDeferStates, ...baseOpts } = opts; + super(dir, baseOpts); + this.#watchmanDeferStates = watchmanDeferStates; + // Use a unique subscription name per process per watched directory + const watchKey = (0, crypto_1.createHash)('md5').update(this.root).digest('hex'); + const readablePath = this.root + .replace(/[/\\]/g, '-') // \ and / to - + .replace(/[^\-\w]/g, ''); // Remove non-word/hyphen + this.subscriptionName = `${SUB_PREFIX}-${process.pid}-${readablePath}-${watchKey}`; + } + async startWatching() { + await new Promise((resolve, reject) => this.#init(resolve, reject)); + } + /** + * Run the watchman `watch` command on the root and subscribe to changes. + */ + #init(onReady, onError) { + if (this.#client) { + this.#client.removeAllListeners(); + } + const self = this; + this.#client = new fb_watchman_1.default.Client(); + this.#client.on('error', (error) => { + this.emitError(error); + }); + this.#client.on('subscription', (changeEvent) => this.#handleChangeEvent(changeEvent)); + this.#client.on('end', () => { + console.warn('[metro-file-map] Warning: Lost connection to Watchman, reconnecting..'); + self.#init(() => { }, (error) => self.emitError(error)); + }); + this.#watchProjectInfo = null; + function getWatchRoot() { + return self.#watchProjectInfo ? self.#watchProjectInfo.root : self.root; + } + function onWatchProject(error, resp) { + if (error) { + onError(error); + return; + } + debug('Received watch-project response: %s', resp.relative_path); + handleWarning(resp); + // NB: Watchman outputs posix-separated paths even on Windows, convert + // them to system-native separators. + self.#watchProjectInfo = { + relativePath: resp.relative_path ? 
(0, normalizePathSeparatorsToSystem_1.default)(resp.relative_path) : '', + root: (0, normalizePathSeparatorsToSystem_1.default)(resp.watch), + }; + self.#client.command(['clock', getWatchRoot()], onClock); + } + function onClock(error, resp) { + if (error) { + onError(error); + return; + } + debug('Received clock response: %s', resp.clock); + const watchProjectInfo = self.#watchProjectInfo; + (0, invariant_1.default)(watchProjectInfo != null, 'watch-project response should have been set before clock response'); + handleWarning(resp); + const options = { + fields: ['name', 'exists', 'new', 'type', 'size', 'mtime_ms'], + since: resp.clock, + defer: self.#watchmanDeferStates, + relative_root: watchProjectInfo.relativePath, + }; + // Make sure we honor the dot option if even we're not using globs. + if (self.globs.length === 0 && !self.dot) { + options.expression = [ + 'match', + '**', + 'wholename', + { + includedotfiles: false, + }, + ]; + } + self.#client.command(['subscribe', getWatchRoot(), self.subscriptionName, options], onSubscribe); + } + const onSubscribe = (error, resp) => { + if (error) { + onError(error); + return; + } + debug('Received subscribe response: %s', resp.subscribe); + handleWarning(resp); + if (resp['asserted-states'] != null) { + this.#deferringStates = new Set(resp['asserted-states']); + } + onReady(); + }; + self.#client.command(['watch-project', getWatchRoot()], onWatchProject); + } + /** + * Handles a change event coming from the subscription. 
+ */ + #handleChangeEvent(resp) { + debug('Received subscription response: %s (fresh: %s, files: %s, enter: %s, leave: %s, clock: %s)', resp.subscription, resp.is_fresh_instance, resp.files?.length, resp['state-enter'], resp['state-leave'], resp.clock); + assert_1.default.equal(resp.subscription, this.subscriptionName, 'Invalid subscription event.'); + if (Array.isArray(resp.files)) { + resp.files.forEach((change) => this.#handleFileChange(change, resp.clock)); + } + const { 'state-enter': stateEnter, 'state-leave': stateLeave } = resp; + if (stateEnter != null && (this.#watchmanDeferStates ?? []).includes(stateEnter)) { + this.#deferringStates?.add(stateEnter); + debug('Watchman reports "%s" just started. Filesystem notifications are paused.', stateEnter); + } + if (stateLeave != null && (this.#watchmanDeferStates ?? []).includes(stateLeave)) { + this.#deferringStates?.delete(stateLeave); + debug('Watchman reports "%s" ended. Filesystem notifications resumed.', stateLeave); + } + } + /** + * Handles a single change event record. + */ + #handleFileChange(changeDescriptor, rawClock) { + const self = this; + const watchProjectInfo = self.#watchProjectInfo; + (0, invariant_1.default)(watchProjectInfo != null, 'watch-project response should have been set before receiving subscription events'); + const { name: relativePosixPath, new: isNew = false, exists = false, type, mtime_ms, size, } = changeDescriptor; + // Watchman emits posix-separated paths on Windows, which is inconsistent + // with other watchers. Normalize to system-native separators. 
+ const relativePath = (0, normalizePathSeparatorsToSystem_1.default)(relativePosixPath); + debug('Handling change to: %s (new: %s, exists: %s, type: %s)', relativePath, isNew, exists, type); + // Ignore files of an unrecognized type + if (type != null && !(type === 'f' || type === 'd' || type === 'l')) { + return; + } + if (this.doIgnore(relativePath) || + !common.includedByGlob(type, this.globs, this.dot, relativePath)) { + return; + } + const clock = typeof rawClock === 'string' && this.#watchProjectInfo != null + ? [this.#watchProjectInfo.root, rawClock] + : undefined; + if (!exists) { + self.emitFileEvent({ event: DELETE_EVENT, clock, relativePath }); + } + else { + (0, invariant_1.default)(type != null && mtime_ms != null && size != null, 'Watchman file change event for "%s" missing some requested metadata. ' + + 'Got type: %s, mtime_ms: %s, size: %s', relativePath, type, mtime_ms, size); + if ( + // Change event on dirs are mostly useless. + !(type === 'd' && !isNew)) { + const mtime = Number(mtime_ms); + self.emitFileEvent({ + event: TOUCH_EVENT, + clock, + relativePath, + metadata: { + modifiedTime: mtime !== 0 ? mtime : null, + size, + type, + }, + }); + } + } + } + /** + * Closes the watcher. + */ + async stopWatching() { + await super.stopWatching(); + if (this.#client) { + this.#client.removeAllListeners(); + this.#client.end(); + } + this.#deferringStates = null; + } + getPauseReason() { + if (this.#deferringStates == null || this.#deferringStates.size === 0) { + return null; + } + const states = [...this.#deferringStates]; + if (states.length === 1) { + return `The watch is in the '${states[0]}' state.`; + } + return `The watch is in the ${states + .slice(0, -1) + .map((s) => `'${s}'`) + .join(', ')} and '${states[states.length - 1]}' states.`; + } +} +exports.default = WatchmanWatcher; +/** + * Handles a warning in the watchman resp object. 
+ */ +function handleWarning(resp) { + if ('warning' in resp) { + if (RecrawlWarning_1.default.isRecrawlWarningDupe(resp.warning)) { + return true; + } + console.warn(resp.warning); + return true; + } + else { + return false; + } +} diff --git a/packages/@expo/metro-file-map/build/watchers/common.d.ts b/packages/@expo/metro-file-map/build/watchers/common.d.ts new file mode 100644 index 00000000000000..6c3e9ffa3914d1 --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/common.d.ts @@ -0,0 +1,39 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * Originally vendored from + * https://github.com/amasad/sane/blob/64ff3a870c42e84f744086884bf55a4f9c22d376/src/common.js + */ +import type { Stats } from 'fs'; +import type { ChangeEventMetadata } from '../types'; +export declare const DELETE_EVENT = "delete"; +export declare const TOUCH_EVENT = "touch"; +export declare const RECRAWL_EVENT = "recrawl"; +export declare const ALL_EVENT = "all"; +export interface WatcherOptions { + readonly globs: readonly string[]; + readonly dot: boolean; + readonly ignored: RegExp | null | undefined; + readonly watchmanDeferStates: readonly string[]; + readonly watchman?: unknown; + readonly watchmanPath?: string; +} +/** + * Checks a file relative path against the globs array. + */ +export declare function includedByGlob(type: 'f' | 'l' | 'd' | null | undefined, globs: readonly string[], dot: boolean, relativePath: string): boolean; +/** + * Whether the given filePath matches the given RegExp, after converting + * (on Windows only) system separators to posix separators. + * + * Conversion to posix is for backwards compatibility with the previous + * anymatch matcher, which normlises all inputs[1]. This may not be consistent + * with other parts of metro-file-map. 
+ * + * [1]: https://github.com/micromatch/anymatch/blob/3.1.1/index.js#L50 + */ +export declare const posixPathMatchesPattern: (pattern: RegExp, filePath: string) => boolean; +export declare function typeFromStat(stat: Stats): ChangeEventMetadata['type'] | null; diff --git a/packages/@expo/metro-file-map/build/watchers/common.js b/packages/@expo/metro-file-map/build/watchers/common.js new file mode 100644 index 00000000000000..13cc2a4b052a50 --- /dev/null +++ b/packages/@expo/metro-file-map/build/watchers/common.js @@ -0,0 +1,60 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * Originally vendored from + * https://github.com/amasad/sane/blob/64ff3a870c42e84f744086884bf55a4f9c22d376/src/common.js + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.posixPathMatchesPattern = exports.ALL_EVENT = exports.RECRAWL_EVENT = exports.TOUCH_EVENT = exports.DELETE_EVENT = void 0; +exports.includedByGlob = includedByGlob; +exports.typeFromStat = typeFromStat; +const micromatch_1 = __importDefault(require("micromatch")); +const path_1 = __importDefault(require("path")); +exports.DELETE_EVENT = 'delete'; +exports.TOUCH_EVENT = 'touch'; +exports.RECRAWL_EVENT = 'recrawl'; +exports.ALL_EVENT = 'all'; +/** + * Checks a file relative path against the globs array. + */ +function includedByGlob(type, globs, dot, relativePath) { + // For non-regular files or if there are no glob matchers, just respect the + // `dot` option to filter dotfiles if dot === false. 
+ if (globs.length === 0 || type !== 'f') { + return dot || micromatch_1.default.some(relativePath, '**/*'); + } + return micromatch_1.default.some(relativePath, globs, { dot }); +} +/** + * Whether the given filePath matches the given RegExp, after converting + * (on Windows only) system separators to posix separators. + * + * Conversion to posix is for backwards compatibility with the previous + * anymatch matcher, which normalises all inputs[1]. This may not be consistent + * with other parts of metro-file-map. + * + * [1]: https://github.com/micromatch/anymatch/blob/3.1.1/index.js#L50 + */ +exports.posixPathMatchesPattern = path_1.default.sep === '/' + ? (pattern, filePath) => pattern.test(filePath) + : (pattern, filePath) => pattern.test(filePath.replaceAll(path_1.default.sep, '/')); +function typeFromStat(stat) { + // Note: These tests are not mutually exclusive - a symlink passes isFile + if (stat.isSymbolicLink()) { + return 'l'; + } + if (stat.isDirectory()) { + return 'd'; + } + if (stat.isFile()) { + return 'f'; // "Regular" file + } + return null; +} diff --git a/packages/@expo/metro-file-map/build/worker.d.ts b/packages/@expo/metro-file-map/build/worker.d.ts new file mode 100644 index 00000000000000..dce26c934a76c2 --- /dev/null +++ b/packages/@expo/metro-file-map/build/worker.d.ts @@ -0,0 +1,24 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import type { WorkerMessage, WorkerMetadata, WorkerSetupArgs } from './types'; +/** + * Exposed for use outside a jest-worker context, ie when processing in-band. + */ +export declare class Worker { + #private; + constructor({ plugins }: WorkerSetupArgs); + processFile(data: WorkerMessage): WorkerMetadata; +} +/** + * Called automatically by jest-worker before the first call to `worker` when + * this module is used as worker thread or child process.
+ */ +export declare function setup(args: WorkerSetupArgs): void; +/** + * Called by jest-worker with each workload + */ +export declare function processFile(data: WorkerMessage): WorkerMetadata; diff --git a/packages/@expo/metro-file-map/build/worker.js b/packages/@expo/metro-file-map/build/worker.js new file mode 100644 index 00000000000000..17548368f4f10c --- /dev/null +++ b/packages/@expo/metro-file-map/build/worker.js @@ -0,0 +1,73 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Worker = void 0; +exports.setup = setup; +exports.processFile = processFile; +const crypto_1 = require("crypto"); +const graceful_fs_1 = __importDefault(require("graceful-fs")); +function sha1hex(content) { + return (0, crypto_1.createHash)('sha1').update(content).digest('hex'); +} +/** + * Exposed for use outside a jest-worker context, ie when processing in-band. + */ +class Worker { + #plugins; + constructor({ plugins = [] }) { + this.#plugins = plugins.map(({ modulePath, setupArgs }) => { + const mod = require(modulePath); + const PluginWorker = mod.__esModule === true && 'default' in mod ? mod.default : mod; + return new PluginWorker(setupArgs); + }); + } + processFile(data) { + let content; + let sha1; + const { computeSha1, filePath, pluginsToRun } = data; + const getContent = () => { + if (content == null) { + content = graceful_fs_1.default.readFileSync(filePath); + } + return content; + }; + const workerUtils = { getContent }; + const pluginData = pluginsToRun.map((pluginIdx) => this.#plugins[pluginIdx].processFile(data, workerUtils)); + // If a SHA-1 is requested on update, compute it. 
+ if (computeSha1) { + sha1 = sha1hex(getContent()); + } + return content && data.maybeReturnContent + ? { content, pluginData, sha1 } + : { pluginData, sha1 }; + } +} +exports.Worker = Worker; +let singletonWorker; +/** + * Called automatically by jest-worker before the first call to `worker` when + * this module is used as worker thread or child process. + */ +function setup(args) { + if (singletonWorker) { + throw new Error('metro-file-map: setup() should only be called once'); + } + singletonWorker = new Worker(args); +} +/** + * Called by jest-worker with each workload + */ +function processFile(data) { + if (!singletonWorker) { + throw new Error('metro-file-map: setup() must be called before processFile()'); + } + return singletonWorker.processFile(data); +} diff --git a/packages/@expo/metro-file-map/build/workerExclusionList.d.ts b/packages/@expo/metro-file-map/build/workerExclusionList.d.ts new file mode 100644 index 00000000000000..da1d547f49ceae --- /dev/null +++ b/packages/@expo/metro-file-map/build/workerExclusionList.d.ts @@ -0,0 +1,8 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +declare const extensions: ReadonlySet; +export default extensions; diff --git a/packages/@expo/metro-file-map/build/workerExclusionList.js b/packages/@expo/metro-file-map/build/workerExclusionList.js new file mode 100644 index 00000000000000..95dc3ebf5cd957 --- /dev/null +++ b/packages/@expo/metro-file-map/build/workerExclusionList.js @@ -0,0 +1,57 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +// This list is compiled after the MDN list of the most common MIME types (see +// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/ +// Complete_list_of_MIME_types). +// +// Only MIME types starting with "image/", "video/", "audio/" and "font/" are +// reflected in the list. Adding "application/" is too risky since some text +// file formats (like ".js" and ".json") have an "application/" MIME type. +// +// Feel free to add any extensions that cannot be a Haste module. +const extensions = new Set([ + // JSONs are never haste modules, except for "package.json", which is handled. + '.json', + // Image extensions. + '.bmp', + '.gif', + '.ico', + '.jpeg', + '.jpg', + '.png', + '.svg', + '.tiff', + '.tif', + '.webp', + // Video extensions. + '.avi', + '.mp4', + '.mpeg', + '.mpg', + '.ogv', + '.webm', + '.3gp', + '.3g2', + // Audio extensions. + '.aac', + '.midi', + '.mid', + '.mp3', + '.oga', + '.wav', + '.3gp', + '.3g2', + // Font extensions. 
+ '.eot', + '.otf', + '.ttf', + '.woff', + '.woff2', +]); +exports.default = extensions; diff --git a/packages/@expo/metro-file-map/jest.config.js b/packages/@expo/metro-file-map/jest.config.js new file mode 100644 index 00000000000000..03e717ad01ae17 --- /dev/null +++ b/packages/@expo/metro-file-map/jest.config.js @@ -0,0 +1,13 @@ +/** @type {import('jest').Config} */ +module.exports = { + ...require('expo-module-scripts/jest-preset-cli'), + clearMocks: true, + displayName: require('./package').name, + setupFiles: ['/jest.setup.ts'], + rootDir: __dirname, + roots: ['src'], + fakeTimers: { + enableGlobally: true, + doNotFake: ['nextTick', 'setImmediate', 'queueMicrotask'], + }, +}; diff --git a/packages/@expo/metro-file-map/jest.setup.ts b/packages/@expo/metro-file-map/jest.setup.ts new file mode 100644 index 00000000000000..626a566fff3d1d --- /dev/null +++ b/packages/@expo/metro-file-map/jest.setup.ts @@ -0,0 +1,15 @@ +// Prevent real filesystem access in tests — use memfs via __mocks__/ +jest.mock('fs'); +jest.mock('fs/promises'); +jest.mock('graceful-fs'); + +// Redirect node: prefixed built-in modules to their mocked equivalents +jest.mock('node:fs', () => require('fs')); +jest.mock('node:fs/promises', () => require('fs/promises')); + +// The timers module is not automatically faked by jest.useFakeTimers(). +// Redirect it to globalThis so faked timers are used consistently. 
+jest.mock('timers', () => ({ + setTimeout: globalThis.setTimeout, + clearTimeout: globalThis.clearTimeout, +})); diff --git a/packages/@expo/metro-file-map/package.json b/packages/@expo/metro-file-map/package.json new file mode 100644 index 00000000000000..a29bc0f64c83a7 --- /dev/null +++ b/packages/@expo/metro-file-map/package.json @@ -0,0 +1,56 @@ +{ + "name": "@expo/metro-file-map", + "version": "55.0.0-0", + "description": "A metro-file-map fork for Expo used with the Metro bundler", + "main": "build/index.js", + "types": "build/index.d.ts", + "scripts": { + "build": "expo-module tsc", + "clean": "expo-module clean", + "lint": "expo-module lint", + "prepublishOnly": "pnpm run clean && pnpm run build", + "test": "expo-module test", + "typecheck": "expo-module typecheck", + "watch": "expo-module tsc --watch --preserveWatchOutput" + }, + "repository": { + "type": "git", + "url": "https://github.com/expo/expo.git", + "directory": "packages/@expo/metro-file-map" + }, + "keywords": [ + "expo", + "metro" + ], + "license": "MIT", + "bugs": { + "url": "https://github.com/expo/expo/issues" + }, + "homepage": "https://github.com/expo/expo/tree/main/packages/@expo/metro-file-map#readme", + "files": [ + "build" + ], + "dependencies": { + "debug": "^4.3.4", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.4", + "invariant": "^2.2.4", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "devDependencies": { + "@expo/metro": "56.0.0-rc.2", + "@types/debug": "^4.1.7", + "@types/fb-watchman": "^2.0.6", + "@types/graceful-fs": "^4.1.9", + "@types/invariant": "^2.2.37", + "@types/micromatch": "^4.0.10", + "@types/node": "^22.14.0", + "expo-module-scripts": "workspace:*", + "memfs": "^3.6.0" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/@expo/metro-file-map/src/Watcher.ts b/packages/@expo/metro-file-map/src/Watcher.ts new file mode 100644 index 00000000000000..3a85bd51571233 --- /dev/null +++ 
b/packages/@expo/metro-file-map/src/Watcher.ts @@ -0,0 +1,341 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import EventEmitter from 'events'; +import fs from 'fs'; +import path from 'path'; +import { performance } from 'perf_hooks'; + +import nodeCrawl from './crawlers/node'; +import watchmanCrawl from './crawlers/watchman'; +import type { + Console, + CrawlerOptions, + CrawlResult, + Path, + PerfLogger, + WatcherBackend, + WatcherBackendChangeEvent, +} from './types'; +import FallbackWatcher from './watchers/FallbackWatcher'; +import NativeWatcher from './watchers/NativeWatcher'; +import WatchmanWatcher from './watchers/WatchmanWatcher'; +import { TOUCH_EVENT } from './watchers/common'; +import type { WatcherOptions as WatcherBackendOptions } from './watchers/common'; + +const debug = require('debug')('Metro:Watcher'); + +const MAX_WAIT_TIME = 240000; + +interface InternalCrawlOptions { + readonly previousState: CrawlerOptions['previousState']; + readonly roots: readonly string[]; + readonly subpath?: string; + readonly useWatchman: boolean; +} + +interface WatcherOptions { + abortSignal: AbortSignal; + computeSha1: boolean; + console: Console; + enableSymlinks: boolean; + extensions: readonly string[]; + forceNodeFilesystemAPI: boolean; + healthCheckFilePrefix: string; + ignoreForCrawl: (filePath: string) => boolean; + ignorePatternForWatch: RegExp; + previousState: CrawlerOptions['previousState']; + perfLogger: PerfLogger | undefined | null; + roots: readonly string[]; + rootDir: string; + useWatchman: boolean; + watch: boolean; + watchmanDeferStates: readonly string[]; +} + +let nextInstanceId = 0; + +export type HealthCheckResult = + | { type: 'error'; timeout: number; error: Error; watcher: string | undefined | null } + | { + type: 'success'; + timeout: number; + timeElapsed: number; + watcher: string | 
undefined | null; + } + | { + type: 'timeout'; + timeout: number; + watcher: string | undefined | null; + pauseReason: string | undefined | null; + }; + +export class Watcher extends EventEmitter { + #activeWatcher: string | undefined | null; + #backends: readonly WatcherBackend[] = []; + readonly #instanceId: number; + #nextHealthCheckId: number = 0; + readonly #options: WatcherOptions; + readonly #pendingHealthChecks: Map void> = new Map(); + + constructor(options: WatcherOptions) { + super(); + this.#options = options; + this.#instanceId = nextInstanceId++; + } + + async crawl(): Promise { + this.#options.perfLogger?.point('crawl_start'); + const options = this.#options; + + const result = await this.#crawl({ + previousState: options.previousState, + roots: options.roots, + useWatchman: options.useWatchman, + }); + + this.#options.perfLogger?.point('crawl_end'); + return result; + } + + async recrawl( + subpath: string, + currentFileSystem: CrawlerOptions['previousState']['fileSystem'] + ): Promise { + return this.#crawl({ + previousState: { + clocks: new Map(), + fileSystem: currentFileSystem, + }, + roots: [path.join(this.#options.rootDir, subpath)], + subpath, + useWatchman: false, + }); + } + + async #crawl(crawlOptions: InternalCrawlOptions): Promise { + const options = this.#options; + const { useWatchman, subpath } = crawlOptions; + + const ignoreForCrawl = (filePath: string) => + options.ignoreForCrawl(filePath) || + path.basename(filePath).startsWith(this.#options.healthCheckFilePrefix); + const crawl = useWatchman ? watchmanCrawl : nodeCrawl; + let crawler = crawl === watchmanCrawl ? 
'watchman' : 'node'; + + options.abortSignal.throwIfAborted(); + + const crawlerOptions: CrawlerOptions = { + abortSignal: options.abortSignal, + computeSha1: options.computeSha1, + console: options.console, + includeSymlinks: options.enableSymlinks, + extensions: options.extensions, + forceNodeFilesystemAPI: options.forceNodeFilesystemAPI, + ignore: ignoreForCrawl, + onStatus: (status) => { + this.emit('status', status); + }, + perfLogger: options.perfLogger, + previousState: crawlOptions.previousState, + rootDir: options.rootDir, + roots: crawlOptions.roots, + subpath, + }; + + debug('Crawling roots: %s with %s crawler.', crawlOptions.roots, crawler); + + let delta: CrawlResult; + try { + delta = await crawl(crawlerOptions); + } catch (firstError: any) { + if (crawl !== watchmanCrawl) { + throw firstError; + } + crawler = 'node'; + options.console.warn( + 'metro-file-map: Watchman crawl failed. Retrying once with node ' + + 'crawler.\n' + + " Usually this happens when watchman isn't running. Create an " + + "empty `.watchmanconfig` file in your project's root folder or " + + 'initialize a git or hg repository in your project.\n' + + ' ' + + firstError.toString() + ); + try { + delta = await nodeCrawl(crawlerOptions); + } catch (retryError: any) { + throw new Error( + 'Crawler retry failed:\n' + + ` Original error: ${firstError.message}\n` + + ` Retry error: ${retryError.message}\n` + ); + } + } + + debug( + 'Crawler "%s" returned %d added/modified, %d removed, %d clock(s).', + crawler, + delta.changedFiles.size, + delta.removedFiles.size, + 'clocks' in delta ? (delta.clocks?.size ?? 0) : 0 + ); + return delta; + } + + async watch(onChange: (change: WatcherBackendChangeEvent) => void) { + const { extensions, ignorePatternForWatch, useWatchman } = this.#options; + + // WatchmanWatcher > NativeWatcher > FallbackWatcher + const WatcherImpl = (useWatchman + ? WatchmanWatcher + : NativeWatcher.isSupported() + ? 
NativeWatcher + : FallbackWatcher) as unknown as new ( + root: string, + opts: WatcherBackendOptions + ) => WatcherBackend; + + let watcher = 'fallback'; + if (WatcherImpl === (WatchmanWatcher as unknown)) { + watcher = 'watchman'; + } else if (WatcherImpl === (NativeWatcher as unknown)) { + watcher = 'native'; + } + debug(`Using watcher: ${watcher}`); + this.#options.perfLogger?.annotate({ string: { watcher } }); + this.#activeWatcher = watcher; + + const createWatcherBackend = (root: Path): Promise => { + const watcherOptions: WatcherBackendOptions = { + dot: true, + globs: [ + // Ensure we always include package.json files, which are crucial for + /// module resolution. + '**/package.json', + // Ensure we always watch any health check files + '**/' + this.#options.healthCheckFilePrefix + '*', + ...extensions.map((extension) => '**/*.' + extension), + ], + ignored: ignorePatternForWatch, + watchmanDeferStates: this.#options.watchmanDeferStates, + }; + const watcher: WatcherBackend = new WatcherImpl(root, watcherOptions); + + return new Promise(async (resolve, reject) => { + const rejectTimeout = setTimeout( + () => reject(new Error('Failed to start watch mode.')), + MAX_WAIT_TIME + ); + + watcher.onFileEvent((change) => { + const basename = path.basename(change.relativePath); + if (basename.startsWith(this.#options.healthCheckFilePrefix)) { + if (change.event === TOUCH_EVENT) { + debug('Observed possible health check cookie: %s in %s', change.relativePath, root); + this.#handleHealthCheckObservation(basename); + } + return; + } + // Watchman handles recrawls internally - receiving a recrawl event + // when using Watchman would indicate a bug. Log an error and ignore. + if (change.event === 'recrawl' && useWatchman) { + this.#options.console.error( + 'metro-file-map: Received unexpected recrawl event while using ' + + 'Watchman. Watchman recrawls are not implemented.' 
+ ); + return; + } + onChange(change); + }); + await watcher.startWatching(); + clearTimeout(rejectTimeout); + resolve(watcher); + }); + }; + + this.#backends = await Promise.all(this.#options.roots.map(createWatcherBackend)); + } + + #handleHealthCheckObservation(basename: string) { + const resolveHealthCheck = this.#pendingHealthChecks.get(basename); + if (!resolveHealthCheck) { + return; + } + resolveHealthCheck(); + } + + async close() { + await Promise.all(this.#backends.map((watcher) => watcher.stopWatching())); + this.#activeWatcher = null; + } + + async checkHealth(timeout: number): Promise { + const healthCheckId = this.#nextHealthCheckId++; + if (healthCheckId === Number.MAX_SAFE_INTEGER) { + this.#nextHealthCheckId = 0; + } + const watcher = this.#activeWatcher; + const basename = + this.#options.healthCheckFilePrefix + + '-' + + process.pid + + '-' + + this.#instanceId + + '-' + + healthCheckId; + const healthCheckPath = path.join(this.#options.rootDir, basename); + let result: HealthCheckResult | undefined | null; + const timeoutPromise = new Promise((resolve) => setTimeout(resolve, timeout)).then(() => { + if (!result) { + result = { + type: 'timeout', + pauseReason: this.#backends[0]?.getPauseReason(), + timeout, + watcher, + }; + } + }); + const startTime = performance.now(); + debug('Creating health check cookie: %s', healthCheckPath); + const creationPromise = fs.promises + .writeFile(healthCheckPath, String(startTime)) + .catch((error) => { + if (!result) { + result = { + type: 'error', + error, + timeout, + watcher, + }; + } + }); + const observationPromise = new Promise((resolve) => { + this.#pendingHealthChecks.set(basename, resolve); + }).then(() => { + if (!result) { + result = { + type: 'success', + timeElapsed: performance.now() - startTime, + timeout, + watcher, + }; + } + }); + await Promise.race([timeoutPromise, creationPromise.then(() => observationPromise)]); + this.#pendingHealthChecks.delete(basename); + // Chain a deletion to the 
creation promise (which may not have even settled yet!), + // don't await it, and swallow errors. This is just best-effort cleanup. + // eslint-disable-next-line @typescript-eslint/no-floating-promises + creationPromise.then(() => fs.promises.unlink(healthCheckPath).catch(() => {})); + debug('Health check result: %o', result); + if (result == null) { + throw new Error('health check result was not set by any promise branch'); + } + return result; + } +} diff --git a/packages/@expo/metro-file-map/src/__mocks__/fs.ts b/packages/@expo/metro-file-map/src/__mocks__/fs.ts new file mode 100644 index 00000000000000..c81a2b945be0b5 --- /dev/null +++ b/packages/@expo/metro-file-map/src/__mocks__/fs.ts @@ -0,0 +1,2 @@ +import { fs } from 'memfs'; +module.exports = fs; diff --git a/packages/@expo/metro-file-map/src/__mocks__/fs/promises.ts b/packages/@expo/metro-file-map/src/__mocks__/fs/promises.ts new file mode 100644 index 00000000000000..2ca5dd187e7676 --- /dev/null +++ b/packages/@expo/metro-file-map/src/__mocks__/fs/promises.ts @@ -0,0 +1,2 @@ +import { fs } from 'memfs'; +module.exports = fs.promises; diff --git a/packages/@expo/metro-file-map/src/__mocks__/graceful-fs.ts b/packages/@expo/metro-file-map/src/__mocks__/graceful-fs.ts new file mode 100644 index 00000000000000..c81a2b945be0b5 --- /dev/null +++ b/packages/@expo/metro-file-map/src/__mocks__/graceful-fs.ts @@ -0,0 +1,2 @@ +import { fs } from 'memfs'; +module.exports = fs; diff --git a/packages/@expo/metro-file-map/src/cache/DiskCacheManager.ts b/packages/@expo/metro-file-map/src/cache/DiskCacheManager.ts new file mode 100644 index 00000000000000..f314061aeace1f --- /dev/null +++ b/packages/@expo/metro-file-map/src/cache/DiskCacheManager.ts @@ -0,0 +1,153 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import { promises as fsPromises } from 'fs'; +import { tmpdir } from 'os'; +import path from 'path'; +import { clearTimeout, setTimeout } from 'timers'; +import { deserialize, serialize } from 'v8'; + +import rootRelativeCacheKeys from '../lib/rootRelativeCacheKeys'; +import type { + BuildParameters, + CacheData, + CacheManager, + CacheManagerFactoryOptions, + CacheManagerWriteOptions, +} from '../types'; + +const debug = require('debug')('Metro:FileMapCache'); + +interface AutoSaveOptions { + readonly debounceMs: number; +} + +interface DiskCacheConfig { + readonly autoSave?: Partial | boolean | undefined; + readonly cacheFilePrefix?: string | undefined | null; + readonly cacheDirectory?: string | undefined | null; +} + +const DEFAULT_PREFIX = 'metro-file-map'; +const DEFAULT_DIRECTORY = tmpdir(); +const DEFAULT_AUTO_SAVE_DEBOUNCE_MS = 5000; + +// NOTE(@kitten): We're incompatible with Metro, so need our own naming +const FIXED_PREFIX = 'expo'; + +export class DiskCacheManager implements CacheManager { + readonly #autoSaveOpts: AutoSaveOptions | undefined | null; + readonly #cachePath: string; + #debounceTimeout: ReturnType | null = null; + #writePromise: Promise = Promise.resolve(); + #hasUnwrittenChanges: boolean = false; + #tryWrite: (() => Promise) | undefined | null; + #stopListening: (() => void) | undefined | null; + + constructor( + { buildParameters }: CacheManagerFactoryOptions, + { autoSave = {}, cacheDirectory, cacheFilePrefix }: DiskCacheConfig + ) { + this.#cachePath = DiskCacheManager.getCacheFilePath( + buildParameters, + cacheFilePrefix, + cacheDirectory + ); + + // Normalise auto-save options. + if (autoSave) { + const { debounceMs = DEFAULT_AUTO_SAVE_DEBOUNCE_MS } = autoSave === true ? 
{} : autoSave; + this.#autoSaveOpts = { debounceMs }; + } + } + + static getCacheFilePath( + buildParameters: BuildParameters, + cacheFilePrefix?: string | null, + cacheDirectory?: string | null + ): string { + const { rootDirHash, relativeConfigHash } = rootRelativeCacheKeys(buildParameters); + + return path.join( + cacheDirectory ?? DEFAULT_DIRECTORY, + `${cacheFilePrefix ?? DEFAULT_PREFIX}-${FIXED_PREFIX}-${rootDirHash}-${relativeConfigHash}` + ); + } + + getCacheFilePath(): string { + return this.#cachePath; + } + + async read(): Promise { + try { + return deserialize(await fsPromises.readFile(this.#cachePath)); + } catch (e: any) { + if (e?.code === 'ENOENT') { + // Cache file not found - not considered an error. + return null; + } + // Rethrow anything else. + throw e; + } + } + + async write( + getSnapshot: () => CacheData, + { changedSinceCacheRead, eventSource, onWriteError }: CacheManagerWriteOptions + ): Promise { + // Initialise a writer function using a promise queue to ensure writes are + // sequenced. + // eslint-disable-next-line no-multi-assign + const tryWrite = (this.#tryWrite = () => { + this.#writePromise = this.#writePromise + .then(async () => { + if (!this.#hasUnwrittenChanges) { + return; + } + const data = getSnapshot(); + this.#hasUnwrittenChanges = false; + await fsPromises.writeFile(this.#cachePath, serialize(data)); + debug('Written cache to %s', this.#cachePath); + }) + .catch(onWriteError); + return this.#writePromise; + }); + + // Set up auto-save on changes, if enabled. + if (this.#autoSaveOpts) { + const autoSave = this.#autoSaveOpts; + this.#stopListening?.(); + this.#stopListening = eventSource.onChange(() => { + this.#hasUnwrittenChanges = true; + if (this.#debounceTimeout) { + this.#debounceTimeout.refresh(); + } else { + this.#debounceTimeout = setTimeout(() => tryWrite(), autoSave.debounceMs).unref(); + } + }); + } + + // Write immediately if state has changed since the cache was read. 
+ if (changedSinceCacheRead) { + this.#hasUnwrittenChanges = true; + await tryWrite(); + } + } + + async end(): Promise { + // Clear any timers + if (this.#debounceTimeout) { + clearTimeout(this.#debounceTimeout); + } + + // Remove event listeners + this.#stopListening?.(); + + // Flush unwritten changes to disk (no-op if no changes) + await this.#tryWrite?.(); + } +} diff --git a/packages/@expo/metro-file-map/src/cache/__tests__/DiskCacheManager.test.ts b/packages/@expo/metro-file-map/src/cache/__tests__/DiskCacheManager.test.ts new file mode 100644 index 00000000000000..b7dee7f842dd3b --- /dev/null +++ b/packages/@expo/metro-file-map/src/cache/__tests__/DiskCacheManager.test.ts @@ -0,0 +1,225 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import EventEmitter from 'events'; +import { vol } from 'memfs'; +import * as path from 'path'; +import { serialize } from 'v8'; + +import type { + BuildParameters, + CacheData, + CacheManagerEventSource, + FileMapPlugin, +} from '../../types'; +import { DiskCacheManager } from '../DiskCacheManager'; + +const flushPromises = () => new Promise((resolve) => process.nextTick(resolve)); + +const buildParameters: BuildParameters = { + cacheBreaker: '', + computeSha1: true, + enableSymlinks: false, + forceNodeFilesystemAPI: true, + ignorePattern: /ignored/, + retainAllFiles: false, + extensions: ['js', 'json'], + plugins: [], + rootDir: path.join('/', 'project'), + roots: [path.join('/', 'project', 'fruits'), path.join('/', 'project', 'vegetables')], +}; + +const defaultConfig = { + cacheFilePrefix: 'default-label', + cacheDirectory: '/tmp/cache', +}; + +describe('DiskCacheManager', () => { + beforeEach(() => { + vol.reset(); + vol.mkdirSync('/tmp/cache', { recursive: true }); + }); + + test('creates valid cache file paths', () => { + 
expect(DiskCacheManager.getCacheFilePath(buildParameters, 'file-prefix', '/')).toMatch( + /^\/file-prefix-.*$/ + ); + }); + + test('creates different cache file paths for different roots', () => { + const cm1 = new DiskCacheManager( + { buildParameters: { ...buildParameters, rootDir: '/root1' } }, + defaultConfig + ); + const cm2 = new DiskCacheManager( + { buildParameters: { ...buildParameters, rootDir: '/root2' } }, + defaultConfig + ); + expect(cm1.getCacheFilePath()).not.toBe(cm2.getCacheFilePath()); + }); + + test('creates different cache file paths for different plugins', () => { + let pluginCacheKey = 'foo'; + const plugin = { + name: 'foo', + onChanged() {}, + async initialize() {}, + assertValid() {}, + getSerializableSnapshot: () => ({}), + getWorker: () => null, + getCacheKey: () => pluginCacheKey, + } as unknown as FileMapPlugin; + + const path1 = new DiskCacheManager({ buildParameters }, defaultConfig).getCacheFilePath(); + const path2 = new DiskCacheManager( + { buildParameters: { ...buildParameters, plugins: [plugin] } }, + defaultConfig + ).getCacheFilePath(); + pluginCacheKey = 'bar'; + const path3 = new DiskCacheManager( + { buildParameters: { ...buildParameters, plugins: [plugin] } }, + defaultConfig + ).getCacheFilePath(); + expect(new Set([path1, path2, path3]).size).toBe(3); + }); + + test('creates different cache file paths for different projects', () => { + const cm1 = new DiskCacheManager( + { buildParameters }, + { ...defaultConfig, cacheFilePrefix: 'package-a' } + ); + const cm2 = new DiskCacheManager( + { buildParameters }, + { ...defaultConfig, cacheFilePrefix: 'package-b' } + ); + expect(cm1.getCacheFilePath()).not.toBe(cm2.getCacheFilePath()); + }); + + test('reads and deserialises a cache file', async () => { + const cm = new DiskCacheManager({ buildParameters }, defaultConfig); + const data = { + clocks: new Map([['foo', 'bar']]), + fileSystemData: new Map(), + plugins: new Map(), + }; + vol.writeFileSync(cm.getCacheFilePath(), 
Buffer.from(serialize(data))); + + const cache = await cm.read(); + expect(cache).not.toBeNull(); + expect(cache!.clocks.get('foo')).toBe('bar'); + }); + + test('returns null for a missing cache file', async () => { + const cm = new DiskCacheManager({ buildParameters }, defaultConfig); + const cache = await cm.read(); + expect(cache).toBeNull(); + }); + + test('serialises and writes a cache file', async () => { + const cm = new DiskCacheManager({ buildParameters }, defaultConfig); + const snapshot: CacheData = { + clocks: new Map([['foo', 'bar']]), + fileSystemData: new Map(), + plugins: new Map(), + }; + const getSnapshot = jest.fn(() => snapshot); + + await cm.write(getSnapshot, { + changedSinceCacheRead: true, + eventSource: { onChange: () => () => {} }, + onWriteError: () => {}, + }); + + expect(getSnapshot).toHaveBeenCalled(); + expect(vol.existsSync(cm.getCacheFilePath())).toBe(true); + + const written = vol.readFileSync(cm.getCacheFilePath()); + expect(serialize(snapshot)).toEqual(Buffer.from(written as Uint8Array)); + }); + + test('does not write when there have been no changes', async () => { + const cm = new DiskCacheManager({ buildParameters }, defaultConfig); + const getSnapshot = jest.fn(() => ({ + clocks: new Map(), + fileSystemData: new Map(), + plugins: new Map(), + })); + + await cm.write(getSnapshot, { + changedSinceCacheRead: false, + eventSource: { onChange: () => () => {} }, + onWriteError: () => {}, + }); + + expect(getSnapshot).not.toHaveBeenCalled(); + expect(vol.existsSync(cm.getCacheFilePath())).toBe(false); + }); + + describe('autoSave', () => { + let getSnapshot: jest.Mock; + let cm: DiskCacheManager; + let emitter: EventEmitter; + let eventSource: CacheManagerEventSource; + let writeFileSpy: jest.SpyInstance; + + beforeEach(async () => { + writeFileSpy = jest.spyOn(require('fs').promises, 'writeFile'); + getSnapshot = jest.fn(() => ({ + clocks: new Map(), + fileSystemData: new Map(), + plugins: new Map(), + })); + emitter = new 
EventEmitter(); + eventSource = { + onChange: jest.fn().mockImplementation((cb) => { + emitter.on('change', cb); + return () => emitter.removeListener('change', cb); + }), + }; + cm = new DiskCacheManager( + { buildParameters }, + { ...defaultConfig, autoSave: { debounceMs: 1000 } } + ); + await cm.write(getSnapshot, { + changedSinceCacheRead: false, + eventSource, + onWriteError: () => {}, + }); + }); + + afterEach(() => { + writeFileSpy.mockRestore(); + }); + + test('subscribes to change events during write(), even on empty delta', () => { + expect(eventSource.onChange).toHaveBeenCalledWith(expect.any(Function)); + expect(writeFileSpy).not.toHaveBeenCalled(); + }); + + test('saves after debounceMs', async () => { + emitter.emit('change'); + jest.advanceTimersByTime(999); + expect(getSnapshot).not.toHaveBeenCalled(); + jest.advanceTimersByTime(1); + await flushPromises(); + expect(getSnapshot).toHaveBeenCalled(); + expect(writeFileSpy).toHaveBeenCalledWith(cm.getCacheFilePath(), expect.any(Buffer)); + }); + + test('debounces successive changes within debounceMs', async () => { + emitter.emit('change'); + jest.advanceTimersByTime(500); + emitter.emit('change'); + jest.advanceTimersByTime(999); + expect(getSnapshot).not.toHaveBeenCalled(); + jest.advanceTimersByTime(1); + await flushPromises(); + expect(getSnapshot).toHaveBeenCalledTimes(1); + expect(writeFileSpy).toHaveBeenCalledTimes(1); + }); + }); +}); diff --git a/packages/@expo/metro-file-map/src/constants.ts b/packages/@expo/metro-file-map/src/constants.ts new file mode 100644 index 00000000000000..94cc6dd53088ce --- /dev/null +++ b/packages/@expo/metro-file-map/src/constants.ts @@ -0,0 +1,56 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/* + * This file exports a set of constants that are used for Jest's haste map + * serialization. 
On very large repositories, the haste map cache becomes very + * large to the point where it is the largest overhead in starting up Jest. + * + * This constant key map allows to keep the map smaller without having to build + * a custom serialization library. + */ + +export interface HType { + readonly MTIME: 0; + readonly SIZE: 1; + readonly VISITED: 2; + readonly SHA1: 3; + readonly SYMLINK: 4; + readonly PLUGINDATA: 5; + readonly PATH: 0; + readonly TYPE: 1; + readonly MODULE: 0; + readonly PACKAGE: 1; + readonly GENERIC_PLATFORM: 'g'; + readonly NATIVE_PLATFORM: 'native'; +} + +export type HTypeValue = 0 | 1 | 2 | 3 | 4 | 5 | 'g' | 'native'; + +const H: HType = { + /* file map attributes */ + MTIME: 0, + SIZE: 1, + VISITED: 2, + SHA1: 3, + SYMLINK: 4, + PLUGINDATA: 5, + + /* module map attributes */ + PATH: 0, + TYPE: 1, + + /* module types */ + MODULE: 0, + PACKAGE: 1, + + /* platforms */ + GENERIC_PLATFORM: 'g', + NATIVE_PLATFORM: 'native', +}; + +export default H; diff --git a/packages/@expo/metro-file-map/src/crawlers/node/__tests__/index.test.ts b/packages/@expo/metro-file-map/src/crawlers/node/__tests__/index.test.ts new file mode 100644 index 00000000000000..4bf377fe7e9609 --- /dev/null +++ b/packages/@expo/metro-file-map/src/crawlers/node/__tests__/index.test.ts @@ -0,0 +1,365 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import { spawn } from 'child_process'; +import { EventEmitter } from 'events'; +import { vol } from 'memfs'; + +import H from '../../../constants'; +import TreeFS from '../../../lib/TreeFS'; +import type { CrawlerOptions, FileData, FileMetadata, PerfLogger } from '../../../types'; +import hasNativeFindSupport from '../hasNativeFindSupport'; +import nodeCrawl from '../index'; + +jest.mock('../hasNativeFindSupport', () => ({ + __esModule: true, + default: jest.fn().mockResolvedValue(false), +})); +jest.mock('child_process', () => ({ + spawn: jest.fn(), +})); +jest.mock('os', () => ({ + ...jest.requireActual('os'), + platform: () => 'linux', +})); + +const rootDir = '/project'; +const processFile = () => null; +const mockedSpawn = jest.mocked(spawn); +const mockedHasNativeFindSupport = jest.mocked(hasNativeFindSupport); + +function makeTreeFS(files?: FileData): TreeFS { + return new TreeFS({ rootDir, files, processFile }); +} + +const emptyFS = makeTreeFS(); + +function crawl(overrides: Partial = {}) { + return nodeCrawl({ + abortSignal: null, + computeSha1: false, + console, + extensions: ['js'], + forceNodeFilesystemAPI: true, + ignore: () => false, + includeSymlinks: false, + onStatus: jest.fn(), + previousState: { + fileSystem: emptyFS, + clocks: new Map(), + }, + rootDir, + roots: ['/project/fruits'], + ...overrides, + }); +} + +function sorted(iter: IterableIterator): string[] { + return Array.from(iter).sort(); +} + +describe('node crawler', () => { + beforeEach(() => { + vol.reset(); + mockedHasNativeFindSupport.mockResolvedValue(false); + mockedSpawn.mockReset(); + }); + + test('discovers files by extension', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + '/project/fruits/banana.ts': 'b', + '/project/fruits/cherry.json': 'c', + }); + + const { changedFiles, removedFiles } = await crawl({ + extensions: ['js', 'json'], + }); + + expect(sorted(changedFiles.keys())).toEqual(['fruits/apple.js', 'fruits/cherry.json']); + 
expect(removedFiles).toEqual(new Set()); + }); + + test('recurses into subdirectories', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + '/project/fruits/tropical/mango.js': 'b', + '/project/fruits/tropical/deep/papaya.js': 'c', + }); + + const { changedFiles } = await crawl(); + + expect(sorted(changedFiles.keys())).toEqual([ + 'fruits/apple.js', + 'fruits/tropical/deep/papaya.js', + 'fruits/tropical/mango.js', + ]); + }); + + test('applies ignore filter', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + '/project/fruits/pear.js': 'b', + '/project/fruits/tomato.js': 'c', + }); + + const { changedFiles } = await crawl({ + ignore: (p: string) => /pear/.test(p), + }); + + expect(sorted(changedFiles.keys())).toEqual(['fruits/apple.js', 'fruits/tomato.js']); + }); + + test('crawls multiple roots', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + '/project/vegetables/carrot.js': 'b', + }); + + const { changedFiles } = await crawl({ + roots: ['/project/fruits', '/project/vegetables'], + }); + + expect(sorted(changedFiles.keys())).toEqual(['fruits/apple.js', 'vegetables/carrot.js']); + }); + + test('reports only changed files', async () => { + vol.fromJSON({ + '/project/fruits/strawberry.js': 'changed', + '/project/fruits/tomato.js': 'same', + }); + + // Get the mtime that memfs assigned to tomato so we can match it + const tomatoStat = vol.statSync('/project/fruits/tomato.js'); + + const previousFiles: FileData = new Map([ + // strawberry has a different mtime → will be reported as changed + ['fruits/strawberry.js', [0, 0, 1, null, 0, null] as FileMetadata], + // tomato has matching mtime → unchanged, excluded from changedFiles + [ + 'fruits/tomato.js', + [tomatoStat.mtime.getTime(), tomatoStat.size, 1, null, 0, null] as FileMetadata, + ], + ]); + + const { changedFiles, removedFiles } = await crawl({ + previousState: { + fileSystem: makeTreeFS(previousFiles), + clocks: new Map(), + }, + }); + + 
expect(Array.from(changedFiles.keys())).toEqual(['fruits/strawberry.js']); + expect(removedFiles).toEqual(new Set()); + }); + + test('reports removed files', async () => { + vol.fromJSON({ + '/project/fruits/strawberry.js': 'a', + '/project/fruits/tomato.js': 'b', + }); + + const previousFiles: FileData = new Map([ + ['fruits/previouslyExisted.js', [0, 0, 1, null, 0, null] as FileMetadata], + ['fruits/strawberry.js', [0, 0, 1, null, 0, null] as FileMetadata], + ['fruits/tomato.js', [0, 0, 1, null, 0, null] as FileMetadata], + ]); + + const { removedFiles } = await crawl({ + previousState: { + fileSystem: makeTreeFS(previousFiles), + clocks: new Map(), + }, + }); + + expect(removedFiles).toEqual(new Set(['fruits/previouslyExisted.js'])); + }); + + test('completes with empty roots', async () => { + const { changedFiles, removedFiles } = await crawl({ roots: [] }); + + expect(changedFiles).toEqual(new Map()); + expect(removedFiles).toEqual(new Set()); + }); + + test('warns on readdir errors', async () => { + // /nonexistent doesn't exist in the virtual FS + const mockConsole = { ...console, warn: jest.fn() }; + + const { changedFiles, removedFiles } = await crawl({ + console: mockConsole as typeof console, + roots: ['/nonexistent'], + }); + + expect(mockConsole.warn).toHaveBeenCalledWith( + expect.stringContaining('reading contents of "/nonexistent"') + ); + expect(changedFiles).toEqual(new Map()); + expect(removedFiles).toEqual(new Set()); + }); + + test('skips symlinks when includeSymlinks is false', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + '/project/fruits/target.js': 'target', + }); + vol.symlinkSync('/project/fruits/target.js', '/project/fruits/link.js'); + + const { changedFiles } = await crawl({ includeSymlinks: false }); + + expect(sorted(changedFiles.keys())).toEqual(['fruits/apple.js', 'fruits/target.js']); + }); + + test('includes symlinks when includeSymlinks is true', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 
'a', + '/project/fruits/target.js': 'target', + }); + vol.symlinkSync('/project/fruits/target.js', '/project/fruits/link.js'); + + const { changedFiles } = await crawl({ includeSymlinks: true }); + + const paths = sorted(changedFiles.keys()); + expect(paths).toContain('fruits/apple.js'); + expect(paths).toContain('fruits/link.js'); + expect(paths).toContain('fruits/target.js'); + + // Symlink should be marked in metadata + expect(changedFiles.get('fruits/link.js')![H.SYMLINK]).toBe(1); + // Regular file should not + expect(changedFiles.get('fruits/apple.js')![H.SYMLINK]).toBe(0); + }); + + test('populates file metadata correctly', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'hello', + }); + + const { changedFiles } = await crawl(); + const meta = changedFiles.get('fruits/apple.js')!; + + expect(meta).toBeDefined(); + expect(meta[H.MTIME]).toBeGreaterThan(0); + expect(meta[H.SIZE]).toBe(5); // 'hello'.length + expect(meta[H.VISITED]).toBe(0); + expect(meta[H.SHA1]).toBeNull(); + expect(meta[H.SYMLINK]).toBe(0); + }); + + describe('native find', () => { + function mockSpawnFind(filePaths: string[]) { + mockedSpawn.mockImplementation((() => { + const stdout = new EventEmitter() as EventEmitter & { + setEncoding: jest.Mock; + }; + stdout.setEncoding = jest.fn(); + process.nextTick(() => { + stdout.emit('data', filePaths.join('\n')); + process.nextTick(() => stdout.emit('close')); + }); + return { stdout, on: jest.fn() }; + }) as any); + } + + beforeEach(() => { + mockedHasNativeFindSupport.mockResolvedValue(true); + }); + + test('uses native find when available', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + '/project/fruits/pear.js': 'b', + '/project/fruits/tomato.js': 'c', + }); + + mockSpawnFind([ + '/project/fruits/apple.js', + '/project/fruits/pear.js', + '/project/fruits/tomato.js', + ]); + + const { changedFiles } = await crawl({ + forceNodeFilesystemAPI: false, + ignore: (p: string) => /pear/.test(p), + }); + + 
expect(mockedSpawn).toHaveBeenCalledWith('find', expect.arrayContaining(['/project/fruits'])); + + expect(sorted(changedFiles.keys())).toEqual(['fruits/apple.js', 'fruits/tomato.js']); + }); + + test('constructs correct find expression for extensions', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + }); + + mockSpawnFind(['/project/fruits/apple.js']); + + await crawl({ + forceNodeFilesystemAPI: false, + extensions: ['js', 'json'], + }); + + const spawnArgs = mockedSpawn.mock.calls[0]![1] as string[]; + expect(spawnArgs).toContain('-iname'); + expect(spawnArgs).toContain('*.js'); + expect(spawnArgs).toContain('*.json'); + }); + + test('falls back to node fs when forceNodeFilesystemAPI is true', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + }); + + const { changedFiles } = await crawl({ + forceNodeFilesystemAPI: true, + }); + + expect(mockedSpawn).not.toHaveBeenCalled(); + expect(sorted(changedFiles.keys())).toEqual(['fruits/apple.js']); + }); + }); + + describe('abort signal', () => { + test('aborts on pre-aborted signal', async () => { + const err = new Error('aborted for test'); + await expect( + crawl({ + abortSignal: AbortSignal.abort(err), + }) + ).rejects.toThrow(err); + }); + + test('aborts when signalled after start', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + }); + + const err = new Error('aborted for test'); + const abortController = new AbortController(); + + const fakePerfLogger: PerfLogger = { + point() { + abortController.abort(err); + }, + annotate() { + abortController.abort(err); + }, + subSpan() { + return fakePerfLogger; + }, + }; + + await expect( + crawl({ + perfLogger: fakePerfLogger, + abortSignal: abortController.signal, + }) + ).rejects.toThrow(err); + }); + }); +}); diff --git a/packages/@expo/metro-file-map/src/crawlers/node/hasNativeFindSupport.ts b/packages/@expo/metro-file-map/src/crawlers/node/hasNativeFindSupport.ts new file mode 100644 index 
00000000000000..13dce3ae9bb667 --- /dev/null +++ b/packages/@expo/metro-file-map/src/crawlers/node/hasNativeFindSupport.ts @@ -0,0 +1,26 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import { spawn } from 'child_process'; + +export default async function hasNativeFindSupport(): Promise { + try { + return await new Promise((resolve) => { + // Check the find binary supports the non-POSIX -iname parameter wrapped in parens. + const args = ['.', '-type', 'f', '(', '-iname', '*.ts', '-o', '-iname', '*.js', ')']; + const child = spawn('find', args, { cwd: __dirname }); + child.on('error', () => { + resolve(false); + }); + child.on('exit', (code) => { + resolve(code === 0); + }); + }); + } catch { + return false; + } +} diff --git a/packages/@expo/metro-file-map/src/crawlers/node/index.ts b/packages/@expo/metro-file-map/src/crawlers/node/index.ts new file mode 100644 index 00000000000000..fbb3b02c91d5f1 --- /dev/null +++ b/packages/@expo/metro-file-map/src/crawlers/node/index.ts @@ -0,0 +1,208 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @format + */ + +import { spawn } from 'child_process'; +import * as fs from 'graceful-fs'; +import { platform } from 'os'; +import * as path from 'path'; + +import hasNativeFindSupport from './hasNativeFindSupport'; +import { RootPathUtils } from '../../lib/RootPathUtils'; +import type { Console, CrawlerOptions, CrawlResult, FileData, IgnoreMatcher } from '../../types'; + +const debug = require('debug')('Metro:NodeCrawler'); + +type Callback = (result: FileData) => void; + +function find( + roots: readonly string[], + extensions: readonly string[], + ignore: IgnoreMatcher, + includeSymlinks: boolean, + rootDir: string, + console: Console, + callback: Callback +): void { + const result: FileData = new Map(); + let activeCalls = 0; + const pathUtils = new RootPathUtils(rootDir); + + function search(directory: string): void { + activeCalls++; + fs.readdir(directory, { withFileTypes: true }, (err, entries) => { + activeCalls--; + if (err) { + console.warn( + `Error "${(err as any).code ?? err.message}" reading contents of "${directory}", skipping. Add this directory to your ignore list to exclude it.` + ); + } else { + entries.forEach((entry: fs.Dirent) => { + const file = path.join(directory, entry.name.toString()); + + if (ignore(file)) { + return; + } + + if (entry.isSymbolicLink() && !includeSymlinks) { + return; + } + + if (entry.isDirectory()) { + search(file); + return; + } + + activeCalls++; + + fs.lstat(file, (err, stat) => { + activeCalls--; + + if (!err && stat) { + const ext = path.extname(file).substr(1); + if (stat.isSymbolicLink() || extensions.includes(ext)) { + result.set(pathUtils.absoluteToNormal(file), [ + stat.mtime.getTime(), + stat.size, + 0, + null, + stat.isSymbolicLink() ? 
1 : 0, + null, + ]); + } + } + + if (activeCalls === 0) { + callback(result); + } + }); + }); + } + + if (activeCalls === 0) { + callback(result); + } + }); + } + + if (roots.length > 0) { + roots.forEach(search); + } else { + callback(result); + } +} + +function findNative( + roots: readonly string[], + extensions: readonly string[], + ignore: IgnoreMatcher, + includeSymlinks: boolean, + rootDir: string, + console: Console, + callback: Callback +): void { + // Examples: + // ( ( -type f ( -iname *.js ) ) ) + // ( ( -type f ( -iname *.js -o -iname *.ts ) ) ) + // ( ( -type f ( -iname *.js ) ) -o -type l ) + // ( ( -type f ) -o -type l ) + const extensionClause = extensions.length + ? `( ${extensions.map((ext) => `-iname *.${ext}`).join(' -o ')} )` + : ''; // Empty inner expressions eg "( )" are not allowed + const expression = `( ( -type f ${extensionClause} ) ${includeSymlinks ? '-o -type l ' : ''})`; + + const pathUtils = new RootPathUtils(rootDir); + + const child = spawn('find', [...roots, ...expression.split(' ')]); + let stdout = ''; + if (child.stdout == null) { + throw new Error( + 'stdout is null - this should never happen. Please open up an issue at https://github.com/facebook/metro' + ); + } + child.stdout.setEncoding('utf-8'); + child.stdout.on('data', (data) => (stdout += data)); + + child.stdout.on('close', () => { + const lines = stdout + .trim() + .split('\n') + .filter((x) => !ignore(x)); + const result: FileData = new Map(); + let count = lines.length; + if (!count) { + callback(new Map()); + } else { + lines.forEach((filePath) => { + fs.lstat(filePath, (err, stat) => { + if (!err && stat) { + result.set(pathUtils.absoluteToNormal(filePath), [ + stat.mtime.getTime(), + stat.size, + 0, + null, + stat.isSymbolicLink() ? 
1 : 0, + null, + ]); + } + if (--count === 0) { + callback(result); + } + }); + }); + } + }); +} + +export default async function nodeCrawl(options: CrawlerOptions): Promise { + const { + console, + previousState, + extensions, + forceNodeFilesystemAPI, + ignore, + rootDir, + includeSymlinks, + perfLogger, + roots, + abortSignal, + subpath, + } = options; + + abortSignal?.throwIfAborted(); + + perfLogger?.point('nodeCrawl_start'); + const useNativeFind = + !forceNodeFilesystemAPI && platform() !== 'win32' && (await hasNativeFindSupport()); + + debug('Using system find: %s', useNativeFind); + + return new Promise((resolve, reject) => { + const callback: Callback = (fileData) => { + const difference = previousState.fileSystem.getDifference(fileData, { + subpath, + }); + + perfLogger?.point('nodeCrawl_end'); + + try { + // TODO: Use AbortSignal.reason directly when Flow supports it + abortSignal?.throwIfAborted(); + } catch (e) { + reject(e); + } + resolve(difference); + }; + + if (useNativeFind) { + findNative(roots, extensions, ignore, includeSymlinks, rootDir, console, callback); + } else { + find(roots, extensions, ignore, includeSymlinks, rootDir, console, callback); + } + }); +} diff --git a/packages/@expo/metro-file-map/src/crawlers/watchman/index.ts b/packages/@expo/metro-file-map/src/crawlers/watchman/index.ts new file mode 100644 index 00000000000000..ab1bb346ab81a1 --- /dev/null +++ b/packages/@expo/metro-file-map/src/crawlers/watchman/index.ts @@ -0,0 +1,335 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type { FileChange, WatchProjectResponse } from 'fb-watchman'; +import watchman from 'fb-watchman'; +import invariant from 'invariant'; +import * as path from 'path'; +import { performance } from 'perf_hooks'; + +import { planQuery } from './planQuery'; +import { RootPathUtils } from '../../lib/RootPathUtils'; +import normalizePathSeparatorsToPosix from '../../lib/normalizePathSeparatorsToPosix'; +import normalizePathSeparatorsToSystem from '../../lib/normalizePathSeparatorsToSystem'; +import type { + WatchmanClockSpec, + CanonicalPath, + CrawlerOptions, + CrawlResult, + FileData, + FileMetadata, + Path, +} from '../../types'; + +// NOTE(@kitten): Not exported by @types/fb-watchman +interface WatchmanWatchResponse extends WatchProjectResponse { + watcher: string; +} + +// NOTE(@kitten): Not exported by @types/fb-watchman +interface WatchmanQueryResponse { + clock: string | { clock: string }; + is_fresh_instance: boolean; + files: FileChange[]; + warning?: string; +} + +/** Posix-separated absolute path as key */ +type WatchmanRoots = Map; + +const WATCHMAN_WARNING_INITIAL_DELAY_MILLISECONDS = 10000; +const WATCHMAN_WARNING_INTERVAL_MILLISECONDS = 20000; + +const watchmanURL = 'https://facebook.github.io/watchman/docs/troubleshooting'; + +function makeWatchmanError(error: Error): Error { + error.message = + `Watchman error: ${error.message.trim()}. Make sure watchman ` + + `is running for this project. 
See ${watchmanURL}.`; + return error; +} + +export default async function watchmanCrawl({ + abortSignal, + computeSha1, + extensions, + ignore, + includeSymlinks, + onStatus, + perfLogger, + previousState, + rootDir, + roots, +}: CrawlerOptions): Promise { + abortSignal?.throwIfAborted(); + + const client = new watchman.Client(); + const pathUtils = new RootPathUtils(rootDir); + abortSignal?.addEventListener('abort', () => client.end()); + + perfLogger?.point('watchmanCrawl_start'); + + const newClocks = new Map(); + + let clientError: Error | undefined; + client.on('error', (error: Error) => { + clientError = makeWatchmanError(error); + }); + + // TODO: Fix to use fb-watchman types + const cmd = async (command: 'watch-project' | 'query', ...args: any[]): Promise => { + let didLogWatchmanWaitMessage = false; + const startTime = performance.now(); + const logWatchmanWaitMessage = () => { + didLogWatchmanWaitMessage = true; + onStatus({ + type: 'watchman_slow_command', + timeElapsed: performance.now() - startTime, + command, + }); + }; + let intervalOrTimeoutId: ReturnType | ReturnType = + setTimeout(() => { + logWatchmanWaitMessage(); + intervalOrTimeoutId = setInterval( + logWatchmanWaitMessage, + WATCHMAN_WARNING_INTERVAL_MILLISECONDS + ); + }, WATCHMAN_WARNING_INITIAL_DELAY_MILLISECONDS); + try { + const response = await new Promise((resolve, reject) => { + // NOTE: dynamic call of command + return (client.command as Function)( + [command, ...args], + (error: Error | null, result: WatchmanQueryResponse) => + error ? 
reject(makeWatchmanError(error)) : resolve(result) + ); + }); + if ('warning' in response) { + onStatus({ + type: 'watchman_warning', + warning: response.warning, + command, + }); + } + return response as unknown as T; + } finally { + // NOTE: clearInterval / clearTimeout are interchangeable + clearInterval(intervalOrTimeoutId); + if (didLogWatchmanWaitMessage) { + onStatus({ + type: 'watchman_slow_command_complete', + timeElapsed: performance.now() - startTime, + command, + }); + } + } + }; + + async function getWatchmanRoots(roots: readonly Path[]): Promise { + perfLogger?.point('watchmanCrawl/getWatchmanRoots_start'); + const watchmanRoots: WatchmanRoots = new Map(); + await Promise.all( + roots.map(async (root, index) => { + perfLogger?.point(`watchmanCrawl/watchProject_${index}_start`); + const response = await cmd('watch-project', root); + perfLogger?.point(`watchmanCrawl/watchProject_${index}_end`); + const existing = watchmanRoots.get(response.watch); + // A root can only be filtered if it was never seen with a + // relative_path before. + const canBeFiltered = !existing || existing.directoryFilters.length > 0; + + if (canBeFiltered) { + if (response.relative_path) { + watchmanRoots.set(response.watch, { + watcher: response.watcher, + directoryFilters: (existing?.directoryFilters || []).concat(response.relative_path), + }); + } else { + // Make the filter directories an empty array to signal that this + // root was already seen and needs to be watched for all files or + // directories. 
+ watchmanRoots.set(response.watch, { + watcher: response.watcher, + directoryFilters: [], + }); + } + } + }) + ); + perfLogger?.point('watchmanCrawl/getWatchmanRoots_end'); + return watchmanRoots; + } + + async function queryWatchmanForDirs(rootProjectDirMappings: WatchmanRoots) { + perfLogger?.point('watchmanCrawl/queryWatchmanForDirs_start'); + const results = new Map(); + let isFresh = false; + + await Promise.all( + Array.from(rootProjectDirMappings).map( + async ([posixSeparatedRoot, { directoryFilters, watcher }], index) => { + // Jest is only going to store one type of clock; a string that + // represents a local clock. However, the Watchman crawler supports + // a second type of clock that can be written by automation outside of + // Jest, called an "scm query", which fetches changed files based on + // source control mergebases. The reason this is necessary is because + // local clocks are not portable across systems, but scm queries are. + // By using scm queries, we can create the haste map on a different + // system and import it, transforming the clock into a local clock. + const since = previousState.clocks.get( + normalizePathSeparatorsToPosix( + pathUtils.absoluteToNormal(normalizePathSeparatorsToSystem(posixSeparatedRoot)) + ) + ); + + perfLogger?.annotate({ + bool: { + [`watchmanCrawl/query_${index}_has_clock`]: since != null, + }, + }); + + const { query, queryGenerator } = planQuery({ + since, + extensions, + directoryFilters, + includeSha1: computeSha1, + includeSymlinks, + }); + + perfLogger?.annotate({ + string: { + [`watchmanCrawl/query_${index}_watcher`]: watcher ?? 
'unknown', + [`watchmanCrawl/query_${index}_generator`]: queryGenerator, + }, + }); + + perfLogger?.point(`watchmanCrawl/query_${index}_start`); + const response = await cmd('query', posixSeparatedRoot, query); + perfLogger?.point(`watchmanCrawl/query_${index}_end`); + + // When a source-control query is used, we ignore the "is fresh" + // response from Watchman because it will be true despite the query + // being incremental. + const isSourceControlQuery = + typeof since !== 'string' && since?.scm?.['mergebase-with'] != null; + if (!isSourceControlQuery) { + isFresh = isFresh || response.is_fresh_instance; + } + + results.set(posixSeparatedRoot, response); + } + ) + ); + + perfLogger?.point('watchmanCrawl/queryWatchmanForDirs_end'); + + return { + isFresh, + results, + }; + } + + let removedFiles: Set = new Set(); + let changedFiles: FileData = new Map(); + let results: Map | undefined; + let isFresh = false; + let queryError: Error | undefined; + try { + const watchmanRoots = await getWatchmanRoots(roots); + const watchmanFileResults = await queryWatchmanForDirs(watchmanRoots); + results = watchmanFileResults.results; + isFresh = watchmanFileResults.isFresh; + } catch (e: any) { + queryError = e; + } + client.end(); + + if (results == null) { + if (clientError) { + perfLogger?.annotate({ + string: { + 'watchmanCrawl/client_error': clientError.message ?? '[message missing]', + }, + }); + } + if (queryError) { + perfLogger?.annotate({ + string: { + 'watchmanCrawl/query_error': queryError.message ?? '[message missing]', + }, + }); + } + perfLogger?.point('watchmanCrawl_end'); + abortSignal?.throwIfAborted(); + throw queryError ?? clientError ?? 
new Error('Watchman file results missing'); + } + + perfLogger?.point('watchmanCrawl/processResults_start'); + + const freshFileData: FileData = new Map(); + + for (const [watchRoot, response] of results) { + const fsRoot = normalizePathSeparatorsToSystem(watchRoot); + const relativeFsRoot = pathUtils.absoluteToNormal(fsRoot); + newClocks.set( + normalizePathSeparatorsToPosix(relativeFsRoot), + // Ensure we persist only the local clock. + typeof response.clock === 'string' ? response.clock : response.clock.clock + ); + + for (const fileData of response.files) { + const filePath = fsRoot + path.sep + normalizePathSeparatorsToSystem(fileData.name); + const relativeFilePath = pathUtils.absoluteToNormal(filePath); + + if (!fileData.exists) { + if (!isFresh) { + removedFiles.add(relativeFilePath); + } + // Whether watchman can return exists: false in a fresh instance + // response is unknown, but there's nothing we need to do in that case. + } else if (!ignore(filePath)) { + const { mtime_ms, size } = fileData; + invariant(mtime_ms != null && size != null, 'missing file data in watchman response'); + const mtime = typeof mtime_ms === 'number' ? mtime_ms : mtime_ms.toNumber(); + + let sha1hex: string | undefined = fileData['content.sha1hex']; + if (typeof sha1hex !== 'string' || sha1hex.length !== 40) { + sha1hex = undefined; + } + + let symlinkInfo: 0 | 1 | string = 0; + if (fileData.type === 'l') { + symlinkInfo = fileData['symlink_target'] ?? 1; + } + + const nextData: FileMetadata = [mtime, size, 0, sha1hex ?? null, symlinkInfo, null]; + + // If watchman is fresh, the removed files map starts with all files + // and we remove them as we verify they still exist. 
+ if (isFresh) { + freshFileData.set(relativeFilePath, nextData); + } else { + changedFiles.set(relativeFilePath, nextData); + } + } + } + } + + if (isFresh) { + ({ changedFiles, removedFiles } = previousState.fileSystem.getDifference(freshFileData)); + } + + perfLogger?.point('watchmanCrawl/processResults_end'); + perfLogger?.point('watchmanCrawl_end'); + abortSignal?.throwIfAborted(); + return { + changedFiles, + removedFiles, + clocks: newClocks, + }; +} diff --git a/packages/@expo/metro-file-map/src/crawlers/watchman/planQuery.ts b/packages/@expo/metro-file-map/src/crawlers/watchman/planQuery.ts new file mode 100644 index 00000000000000..44ced0c57f5530 --- /dev/null +++ b/packages/@expo/metro-file-map/src/crawlers/watchman/planQuery.ts @@ -0,0 +1,131 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// NOTE(@kitten): Local type aliases for Watchman query planning types. 
+type WatchmanExpression = readonly [string, ...any[]]; +type WatchmanDirnameExpression = readonly ['dirname', string]; +type WatchmanQuerySince = string | Readonly<{ scm: Readonly<{ 'mergebase-with': string }> }>; + +interface WatchmanQuery { + fields?: string[]; + expression?: WatchmanExpression; + since?: WatchmanQuerySince; + glob?: string[]; + glob_includedotfiles?: boolean; + suffix?: readonly string[]; +} + +export function planQuery({ + since, + directoryFilters, + extensions, + includeSha1, + includeSymlinks, +}: { + readonly since: WatchmanQuerySince | null | undefined; + readonly directoryFilters: readonly string[]; + readonly extensions: readonly string[]; + readonly includeSha1: boolean; + readonly includeSymlinks: boolean; +}): { + query: WatchmanQuery; + queryGenerator: string; +} { + const fields = ['name', 'exists', 'mtime_ms', 'size']; + if (includeSha1) { + fields.push('content.sha1hex'); + } + + /** + * Note on symlink_target: + * + * Watchman supports requesting the symlink_target field, which is + * *potentially* more efficient if targets can be read from metadata without + * reading/materialising files. However, at the time of writing, Watchman has + * issues reporting symlink_target on some backends[1]. Additionally, though + * the Eden watcher is known to work, it reads links serially[2] on demand[3] + * - less efficiently than we can do ourselves. + * [1] https://github.com/facebook/watchman/issues/1084 + * [2] https://github.com/facebook/watchman/blob/v2023.01.02.00/watchman/watcher/eden.cpp#L476-L485 + * [3] https://github.com/facebook/watchman/blob/v2023.01.02.00/watchman/watcher/eden.cpp#L433-L434 + */ + if (includeSymlinks) { + fields.push('type'); + } + + const allOfTerms: WatchmanExpression[] = includeSymlinks + ? [['anyof', ['allof', ['type', 'f'], ['suffix', extensions]], ['type', 'l']]] + : [['type', 'f']]; + + const query: WatchmanQuery = { fields }; + + /** + * Watchman "query planner". 
+ * + * Watchman file queries consist of 1 or more generators that feed + * files through the expression evaluator. + * + * Strategy: + * 1. Select the narrowest possible generator so that the expression + * evaluator has fewer candidates to process. + * 2. Evaluate expressions from narrowest to broadest. + * 3. Don't use an expression to recheck a condition that the + * generator already guarantees. + * 4. Compose expressions to avoid combinatorial explosions in the + * number of terms. + * + * The ordering of generators/filters, from narrow to broad, is: + * - since = O(changes) + * - glob / dirname = O(files in a subtree of the repo) + * - suffix = O(files in the repo) + * + * We assume that file extensions are ~uniformly distributed in the + * repo but Haste map projects are focused on a handful of + * directories. Therefore `glob` < `suffix`. + */ + let queryGenerator: string | undefined; + if (since != null) { + // Prefer the since generator whenever we have a clock + query.since = since; + queryGenerator = 'since'; + + // Filter on directories using an anyof expression + if (directoryFilters.length > 0) { + allOfTerms.push([ + 'anyof', + ...directoryFilters.map((dir): WatchmanDirnameExpression => ['dirname', dir]), + ]); + } + } else if (directoryFilters.length > 0) { + // Use the `glob` generator and filter only by extension. + query.glob = directoryFilters.map((directory) => `${directory}/**`); + query.glob_includedotfiles = true; + queryGenerator = 'glob'; + } else if (!includeSymlinks) { + // Use the `suffix` generator with no path/extension filtering, as long + // as we don't need (suffixless) directory symlinks. + query.suffix = extensions; + queryGenerator = 'suffix'; + } else { + // Fall back to `all` if we need symlinks and don't have a clock or + // directory filters. + queryGenerator = 'all'; + } + + // `includeSymlinks` implies we need (suffixless) directory links. 
In the + // case of the `suffix` generator, a suffix expression would be redundant. + if (!includeSymlinks && queryGenerator !== 'suffix') { + allOfTerms.push(['suffix', extensions]); + } + + // If we only have one "all of" expression we can use it directly, otherwise + // wrap in ['allof', ...expressions]. By construction we should never have + // length 0. + query.expression = allOfTerms.length === 1 ? allOfTerms[0] : ['allof', ...allOfTerms]; + + return { query, queryGenerator }; +} diff --git a/packages/@expo/metro-file-map/src/index.ts b/packages/@expo/metro-file-map/src/index.ts new file mode 100644 index 00000000000000..49cb57bbea3125 --- /dev/null +++ b/packages/@expo/metro-file-map/src/index.ts @@ -0,0 +1,1050 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import EventEmitter from 'events'; +import { promises as fsPromises } from 'fs'; +import invariant from 'invariant'; +import * as path from 'path'; +import { performance } from 'perf_hooks'; + +import { Watcher } from './Watcher'; +import { DiskCacheManager } from './cache/DiskCacheManager'; +import H from './constants'; +import { FileProcessor } from './lib/FileProcessor'; +import { FileSystemChangeAggregator } from './lib/FileSystemChangeAggregator'; +import { RootPathUtils } from './lib/RootPathUtils'; +import TreeFS from './lib/TreeFS'; +import checkWatchmanCapabilities from './lib/checkWatchmanCapabilities'; +import normalizePathSeparatorsToPosix from './lib/normalizePathSeparatorsToPosix'; +import normalizePathSeparatorsToSystem from './lib/normalizePathSeparatorsToSystem'; +import type { + BuildParameters, + BuildResult, + CacheData, + CacheManager, + CacheManagerFactory, + CacheManagerFactoryOptions, + CanonicalPath, + ChangedFileMetadata, + ChangeEvent, + ChangeEventClock, + ChangeEventMetadata, + Console, + CrawlerOptions, + CrawlResult, + 
FileData, + FileMapPlugin, + FileMapPluginWorker, + FileMetadata, + FileSystem, + HasteMapData, + HasteMapItem, + HType, + InputFileMapPlugin, + MutableFileSystem, + Path, + PerfLogger, + PerfLoggerFactory, + ProcessFileFunction, + WatcherBackendChangeEvent, + WatchmanClocks, +} from './types'; + +const debug = require('debug')('Metro:FileMap'); + +export type { + BuildParameters, + BuildResult, + CacheData, + ChangeEventMetadata, + FileData, + FileMap, + FileSystem, + HasteMapData, + HasteMapItem, + InputFileMapPlugin, +}; + +export interface InputOptions { + readonly computeSha1?: boolean | undefined | null; + readonly enableSymlinks?: boolean | undefined | null; + readonly extensions: readonly string[]; + readonly forceNodeFilesystemAPI?: boolean | undefined | null; + readonly ignorePattern?: RegExp | undefined | null; + readonly plugins?: readonly InputFileMapPlugin[] | undefined; + readonly retainAllFiles: boolean; + readonly rootDir: string; + readonly roots: readonly string[]; + + readonly cacheManagerFactory?: CacheManagerFactory | undefined | null; + readonly console?: Console; + readonly healthCheck: HealthCheckOptions; + readonly maxFilesPerWorker?: number | undefined | null; + readonly maxWorkers: number; + readonly perfLoggerFactory?: PerfLoggerFactory | undefined | null; + readonly resetCache?: boolean | undefined | null; + readonly useWatchman?: boolean | undefined | null; + readonly watch?: boolean | undefined | null; + readonly watchmanDeferStates?: readonly string[]; +} + +interface HealthCheckOptions { + readonly enabled: boolean; + readonly interval: number; + readonly timeout: number; + readonly filePrefix: string; +} + +interface InternalOptions extends BuildParameters { + readonly healthCheck: HealthCheckOptions; + readonly perfLoggerFactory: PerfLoggerFactory | undefined | null; + readonly resetCache: boolean | undefined | null; + readonly useWatchman: boolean; + readonly watch: boolean; + readonly watchmanDeferStates: readonly string[]; +} 
+ +interface IndexedPlugin { + readonly plugin: FileMapPlugin; + readonly dataIdx: number | undefined | null; +} + +type InternalEnqueuedEvent = + | { + readonly clock: ChangeEventClock | undefined | null; + readonly relativeFilePath: string; + readonly metadata: FileMetadata; + readonly type: 'touch'; + } + | { + readonly clock: ChangeEventClock | undefined | null; + readonly relativeFilePath: string; + readonly type: 'delete'; + }; + +export { DiskCacheManager } from './cache/DiskCacheManager'; +export { default as DependencyPlugin } from './plugins/DependencyPlugin'; +export type { DependencyPluginOptions } from './plugins/DependencyPlugin'; +export { DuplicateHasteCandidatesError } from './plugins/haste/DuplicateHasteCandidatesError'; +export { HasteConflictsError } from './plugins/haste/HasteConflictsError'; +export { default as HastePlugin } from './plugins/HastePlugin'; + +export type { HasteMap } from './types'; +export type { HealthCheckResult } from './Watcher'; +export type { + CacheManager, + CacheManagerFactory, + CacheManagerFactoryOptions, + CacheManagerWriteOptions, + ChangeEvent, + DependencyExtractor, + WatcherStatus, +} from './types'; + +// This should be bumped whenever a code change to `metro-file-map` itself +// would cause a change to the cache data structure and/or content (for a given +// filesystem state and build parameters). +const CACHE_BREAKER = '11'; + +const CHANGE_INTERVAL = 30; + +const NODE_MODULES = path.sep + 'node_modules' + path.sep; +const VCS_DIRECTORIES = /[/\\]\.(git|hg)[/\\]/.source; +const WATCHMAN_REQUIRED_CAPABILITIES = [ + 'field-content.sha1hex', + 'relative_root', + 'suffix-set', + 'wildmatch', +]; + +/** + * FileMap includes a JavaScript implementation of Facebook's haste module system. + * + * This implementation is inspired by https://github.com/facebook/node-haste + * and was built with for high-performance in large code repositories with + * hundreds of thousands of files. 
This implementation is scalable and provides + * predictable performance. + * + * Because the file map creation and synchronization is critical to startup + * performance and most tasks are blocked by I/O this class makes heavy use of + * synchronous operations. It uses worker processes for parallelizing file + * access and metadata extraction. + * + * The data structures created by `metro-file-map` can be used directly from the + * cache without further processing. The metadata objects in the `files` and + * `map` objects contain cross-references: a metadata object from one can look + * up the corresponding metadata object in the other map. Note that in most + * projects, the number of files will be greater than the number of haste + * modules one module can refer to many files based on platform extensions. + * + * type CacheData = { + * clocks: WatchmanClocks, + * files: {[filepath: string]: FileMetadata}, + * map: {[id: string]: HasteMapItem}, + * mocks: {[id: string]: string}, + * } + * + * // Watchman clocks are used for query synchronization and file system deltas. + * type WatchmanClocks = {[filepath: string]: string}; + * + * type FileMetadata = { + * id: ?string, // used to look up module metadata objects in `map`. + * mtime: number, // check for outdated files. + * size: number, // size of the file in bytes. + * visited: boolean, // whether the file has been parsed or not. + * dependencies: Array, // all relative dependencies of this file. + * sha1: ?string, // SHA-1 of the file, if requested via options. + * symlink: ?(1 | 0 | string), // Truthy if symlink, string is target + * }; + * + * // Modules can be targeted to a specific platform based on the file name. + * // Example: platform.ios.js and Platform.android.js will both map to the same + * // `Platform` module. The platform should be specified during resolution. 
+ * type HasteMapItem = {[platform: string]: ModuleMetadata}; + * + * // + * type ModuleMetadata = { + * path: string, // the path to look up the file object in `files`. + * type: string, // the module type (either `package` or `module`). + * }; + * + * Note that the data structures described above are conceptual only. The actual + * implementation uses arrays and constant keys for metadata storage. Instead of + * `{id: 'flatMap', mtime: 3421, size: 42, visited: true, dependencies: []}` the real + * representation is similar to `['flatMap', 3421, 42, 1, []]` to save storage space + * and reduce parse and write time of a big JSON blob. + * + * The FileMap is created as follows: + * 1. read data from the cache or create an empty structure. + * + * 2. crawl the file system. + * * empty cache: crawl the entire file system. + * * cache available: + * * if watchman is available: get file system delta changes. + * * if watchman is unavailable: crawl the entire file system. + * * build metadata objects for every file. This builds the `files` part of + * the `FileMap`. + * + * 3. visit and extract metadata from changed files, including sha1, + * depedendencies, and any plugins. + * * this is done in parallel over worker processes to improve performance. + * * the worst case is to visit all files. + * * the best case is no file system access and retrieving all data from + * the cache. + * * the average case is a small number of changed files. + * + * 4. serialize the new `FileMap` in a cache file. 
+ * + */ +export default class FileMap extends EventEmitter { + // NOTE(@kitten): Expo brand to recognize patched `metro-file-map -> @expo/metro-file-map` + readonly __expo = true; + + #buildPromise: Promise | undefined | null; + readonly #cacheManager: CacheManager; + #canUseWatchmanPromise: Promise | undefined; + #changeID: number; + #changeInterval: ReturnType | undefined | null; + readonly #console: Console; + readonly #crawlerAbortController: AbortController; + readonly #fileProcessor: FileProcessor; + #healthCheckInterval: ReturnType | undefined | null; + readonly #options: InternalOptions; + readonly #pathUtils: RootPathUtils; + readonly #plugins: readonly IndexedPlugin[]; + readonly #startupPerfLogger: PerfLogger | undefined | null; + #watcher: Watcher | undefined | null; + + static create(options: InputOptions): FileMap { + return new FileMap(options); + } + + constructor(options: InputOptions) { + super(); + + if (options.perfLoggerFactory) { + this.#startupPerfLogger = options.perfLoggerFactory?.('START_UP').subSpan('fileMap') ?? null; + this.#startupPerfLogger?.point('constructor_start'); + } + + // Add VCS_DIRECTORIES to provided ignorePattern + let ignorePattern; + if (options.ignorePattern) { + const inputIgnorePattern = options.ignorePattern; + if (inputIgnorePattern instanceof RegExp) { + ignorePattern = new RegExp( + inputIgnorePattern.source.concat('|' + VCS_DIRECTORIES), + inputIgnorePattern.flags + ); + } else { + throw new Error('metro-file-map: the `ignorePattern` option must be a RegExp'); + } + } else { + ignorePattern = new RegExp(VCS_DIRECTORIES); + } + + this.#console = options.console || globalThis.console; + + let dataSlot: number = H.PLUGINDATA; + + const indexedPlugins: IndexedPlugin[] = []; + const pluginWorkers: FileMapPluginWorker[] = []; + const plugins = options.plugins ?? []; + for (const plugin of plugins) { + const maybeWorker = plugin.getWorker(); + indexedPlugins.push({ + plugin, + dataIdx: maybeWorker != null ? 
dataSlot++ : null, + }); + if (maybeWorker != null) { + pluginWorkers.push(maybeWorker); + } + } + this.#plugins = indexedPlugins; + + const buildParameters: BuildParameters = { + cacheBreaker: CACHE_BREAKER, + computeSha1: options.computeSha1 || false, + enableSymlinks: options.enableSymlinks || false, + extensions: options.extensions, + forceNodeFilesystemAPI: !!options.forceNodeFilesystemAPI, + ignorePattern, + plugins, + retainAllFiles: options.retainAllFiles, + rootDir: options.rootDir, + roots: Array.from(new Set(options.roots)), + }; + + this.#options = { + ...buildParameters, + healthCheck: options.healthCheck, + perfLoggerFactory: options.perfLoggerFactory, + resetCache: options.resetCache, + useWatchman: options.useWatchman == null ? true : options.useWatchman, + watch: !!options.watch, + watchmanDeferStates: options.watchmanDeferStates ?? [], + }; + + const cacheFactoryOptions: CacheManagerFactoryOptions = { + buildParameters, + }; + this.#cacheManager = options.cacheManagerFactory + ? 
options.cacheManagerFactory.call(null, cacheFactoryOptions) + : new DiskCacheManager(cacheFactoryOptions, {}); + + this.#fileProcessor = new FileProcessor({ + maxFilesPerWorker: options.maxFilesPerWorker, + maxWorkers: options.maxWorkers, + perfLogger: this.#startupPerfLogger, + pluginWorkers, + rootDir: options.rootDir, + }); + + this.#buildPromise = null; + this.#pathUtils = new RootPathUtils(options.rootDir); + this.#startupPerfLogger?.point('constructor_end'); + this.#crawlerAbortController = new AbortController(); + this.#changeID = 0; + } + + build(): Promise { + this.#startupPerfLogger?.point('build_start'); + if (!this.#buildPromise) { + this.#buildPromise = (async () => { + let initialData: CacheData | undefined | null; + if (this.#options.resetCache !== true) { + initialData = await this.read(); + } + if (!initialData) { + debug('Not using a cache'); + } else { + debug('Cache loaded (%d clock(s))', initialData.clocks.size); + } + + const rootDir = this.#options.rootDir; + this.#startupPerfLogger?.point('constructFileSystem_start'); + const processFile: ProcessFileFunction = (normalPath, metadata, opts) => { + const result = this.#fileProcessor.processRegularFile(normalPath, metadata, { + computeSha1: opts.computeSha1, + maybeReturnContent: true, + }); + debug('Lazily processed file: %s', normalPath); + // Emit an event to inform caches that there is new data to save. + this.emit('metadata'); + return result?.content; + }; + const fileSystem = + initialData != null + ? TreeFS.fromDeserializedSnapshot({ + // Typed `mixed` because we've read this from an external + // source. It'd be too expensive to validate at runtime, so + // trust our cache manager that this is correct. 
+ fileSystemData: initialData.fileSystemData as any, + processFile, + rootDir, + }) + : new TreeFS({ processFile, rootDir }); + this.#startupPerfLogger?.point('constructFileSystem_end'); + + const plugins = this.#plugins; + + // Initialize plugins from cached file system and plugin state while + // crawling to build a diff of current state vs cached. `fileSystem` + // is not mutated during either operation. + const [fileDelta] = await Promise.all([ + this.#buildFileDelta({ + clocks: initialData?.clocks ?? new Map(), + fileSystem, + }), + Promise.all( + plugins.map(({ plugin, dataIdx }) => + plugin.initialize({ + files: { + lookup: (mixedPath) => { + const result = fileSystem.lookup(mixedPath); + if (!result.exists) { + return { exists: false }; + } + if (result.type === 'd') { + return { exists: true, type: 'd' }; + } + return { + exists: true, + type: 'f', + pluginData: dataIdx != null ? result.metadata[dataIdx] : null, + }; + }, + fileIterator: (opts) => + mapIterable( + fileSystem.metadataIterator(opts), + ({ baseName, canonicalPath, metadata }) => ({ + baseName, + canonicalPath, + pluginData: dataIdx != null ? metadata[dataIdx] : null, + }) + ), + }, + pluginState: initialData?.plugins.get(plugin.name), + }) + ) + ), + ]); + + // Update `fileSystem` and plugins based on the file delta. + const actualChanges = await this.#applyFileDelta(fileSystem, plugins, fileDelta); + + const changeSize = actualChanges.getSize(); + + // Validate plugins before persisting them. + plugins.forEach(({ plugin }) => plugin.assertValid()); + + const watchmanClocks = new Map('clocks' in fileDelta ? 
fileDelta.clocks : []); + await this.#takeSnapshotAndPersist(fileSystem, watchmanClocks, plugins, changeSize > 0); + debug('Finished mapping files (%d changes).', changeSize); + + await this.#watch(fileSystem, watchmanClocks, plugins); + return { fileSystem }; + })(); + } + return this.#buildPromise.then((result) => { + this.#startupPerfLogger?.point('build_end'); + return result; + }); + } + + /** + * 1. read data from the cache or create an empty structure. + */ + async read(): Promise { + let data: CacheData | undefined | null; + this.#startupPerfLogger?.point('read_start'); + try { + data = await this.#cacheManager.read(); + } catch (e: any) { + this.#console.warn('Error while reading cache, falling back to a full crawl:\n', e); + this.#startupPerfLogger?.annotate({ + string: { cacheReadError: e.toString() }, + }); + } + this.#startupPerfLogger?.point('read_end'); + return data; + } + + /** + * 2. crawl the file system. + */ + async #buildFileDelta(previousState: CrawlerOptions['previousState']): Promise { + this.#startupPerfLogger?.point('buildFileDelta_start'); + + const { + computeSha1, + enableSymlinks, + extensions, + forceNodeFilesystemAPI, + ignorePattern, + retainAllFiles, + roots, + rootDir, + watch, + watchmanDeferStates, + } = this.#options; + + this.#watcher = new Watcher({ + abortSignal: this.#crawlerAbortController.signal, + computeSha1, + console: this.#console, + enableSymlinks, + extensions, + forceNodeFilesystemAPI, + healthCheckFilePrefix: this.#options.healthCheck.filePrefix, + // TODO: Refactor out the two different ignore strategies here. 
+ ignoreForCrawl: (filePath) => { + const ignoreMatched = ignorePattern.test(filePath); + return ignoreMatched || (!retainAllFiles && filePath.includes(NODE_MODULES)); + }, + ignorePatternForWatch: ignorePattern, + perfLogger: this.#startupPerfLogger, + previousState, + rootDir, + roots, + useWatchman: await this.#shouldUseWatchman(), + watch, + watchmanDeferStates, + }); + const watcher = this.#watcher; + + watcher.on('status', (status) => this.emit('status', status)); + + const result = await watcher.crawl(); + this.#startupPerfLogger?.point('buildFileDelta_end'); + return result; + } + + #maybeReadLink(normalPath: Path, fileMetadata: FileMetadata): Promise | undefined | null { + // If we only need to read a link, it's more efficient to do it in-band + // (with async file IO) than to have the overhead of worker IO. + if (fileMetadata[H.SYMLINK] === 1) { + return fsPromises + .readlink(this.#pathUtils.normalToAbsolute(normalPath)) + .then((symlinkTarget) => { + fileMetadata[H.VISITED] = 1; + fileMetadata[H.SYMLINK] = symlinkTarget; + }); + } + return null; + } + + async #applyFileDelta( + fileSystem: MutableFileSystem, + plugins: readonly IndexedPlugin[], + delta: Readonly<{ + changedFiles: FileData; + removedFiles: ReadonlySet; + clocks?: WatchmanClocks; + }> + ): Promise { + this.#startupPerfLogger?.point('applyFileDelta_start'); + const { changedFiles, removedFiles } = delta; + this.#startupPerfLogger?.point('applyFileDelta_preprocess_start'); + // Remove files first so that we don't mistake moved modules + // modules as duplicates. 
+ this.#startupPerfLogger?.point('applyFileDelta_remove_start'); + const changeAggregator = new FileSystemChangeAggregator(); + for (const relativeFilePath of removedFiles) { + fileSystem.remove(relativeFilePath, changeAggregator); + } + this.#startupPerfLogger?.point('applyFileDelta_remove_end'); + + const readLinkPromises: Promise[] = []; + const readLinkErrors: { + normalFilePath: string; + error: Error & { code?: string }; + }[] = []; + const filesToProcess: [string, FileMetadata][] = []; + + for (const [normalFilePath, fileData] of changedFiles) { + // A crawler may preserve the H.VISITED flag to indicate that the file + // contents are unchaged and it doesn't need visiting again. + if (fileData[H.VISITED] === 1) { + continue; + } + + if (fileData[H.SYMLINK] === 0) { + filesToProcess.push([normalFilePath, fileData]); + } else { + const maybeReadLink = this.#maybeReadLink(normalFilePath, fileData); + if (maybeReadLink) { + readLinkPromises.push( + maybeReadLink.catch((error) => { + readLinkErrors.push({ normalFilePath, error }); + }) + ); + } + } + } + this.#startupPerfLogger?.point('applyFileDelta_preprocess_end'); + + debug( + 'Found %d added/modified files and %d symlinks.', + filesToProcess.length, + readLinkPromises.length + ); + + this.#startupPerfLogger?.point('applyFileDelta_process_start'); + const [batchResult] = await Promise.all([ + this.#fileProcessor.processBatch(filesToProcess, { + computeSha1: this.#options.computeSha1, + maybeReturnContent: false, + }), + Promise.all(readLinkPromises), + ]); + this.#startupPerfLogger?.point('applyFileDelta_process_end'); + + // It's possible that a file could be deleted between being seen by the + // crawler and our attempt to process it. For our purposes, this is + // equivalent to the file being deleted before the crawl, being absent + // from `changedFiles`, and (if we loaded from cache, and the file + // existed previously) possibly being reported in `removedFiles`. 
+ // + // Treat the file accordingly - don't add it to `FileSystem`, and remove + // it if it already exists. We're not emitting events at this point in + // startup, so there's nothing more to do. + this.#startupPerfLogger?.point('applyFileDelta_missing_start'); + for (const { normalFilePath, error } of batchResult.errors.concat(readLinkErrors)) { + if (['ENOENT', 'EACCESS'].includes(error.code ?? '')) { + delta.changedFiles.delete(normalFilePath); + fileSystem.remove(normalFilePath, changeAggregator); + } else { + // Anything else is fatal. + throw error; + } + } + + this.#startupPerfLogger?.point('applyFileDelta_missing_end'); + + this.#startupPerfLogger?.point('applyFileDelta_add_start'); + fileSystem.bulkAddOrModify(changedFiles, changeAggregator); + this.#startupPerfLogger?.point('applyFileDelta_add_end'); + + this.#startupPerfLogger?.point('applyFileDelta_updatePlugins_start'); + this.#plugins.forEach(({ plugin, dataIdx }) => { + plugin.onChanged( + changeAggregator.getMappedView( + dataIdx != null ? (metadata) => metadata[dataIdx] : () => null + ) + ); + }); + this.#startupPerfLogger?.point('applyFileDelta_updatePlugins_end'); + this.#startupPerfLogger?.point('applyFileDelta_end'); + + return changeAggregator; + } + + /** + * 4. 
Serialize a snapshot of our raw data via the configured cache manager + */ + async #takeSnapshotAndPersist( + fileSystem: FileSystem, + clocks: WatchmanClocks, + plugins: readonly IndexedPlugin[], + changedSinceCacheRead: boolean + ) { + this.#startupPerfLogger?.point('persist_start'); + await this.#cacheManager.write( + () => ({ + clocks: new Map(clocks), + fileSystemData: fileSystem.getSerializableSnapshot(), + plugins: new Map( + plugins.map(({ plugin }) => [plugin.name, plugin.getSerializableSnapshot()]) + ), + }), + { + changedSinceCacheRead, + eventSource: { + onChange: (cb) => { + // Inform the cache about changes to internal state, including: + // - File system changes + this.on('change', cb); + // - Changes to stored metadata, e.g. on lazy processing. + this.on('metadata', cb); + return () => { + this.removeListener('change', cb); + this.removeListener('metadata', cb); + }; + }, + }, + onWriteError: (error) => { + this.#console.warn('[metro-file-map] Cache write error\n:', error); + }, + } + ); + this.#startupPerfLogger?.point('persist_end'); + } + + /** + * Watch mode + */ + async #watch( + fileSystem: MutableFileSystem, + clocks: WatchmanClocks, + plugins: readonly IndexedPlugin[] + ): Promise { + this.#startupPerfLogger?.point('watch_start'); + if (!this.#options.watch) { + this.#startupPerfLogger?.point('watch_end'); + return; + } + + const hasWatchedExtension = (filePath: string) => + this.#options.extensions.some((ext) => filePath.endsWith(ext)); + + let nextEmit: + | { + events: InternalEnqueuedEvent[]; + firstEventTimestamp: number; + firstEnqueuedTimestamp: number; + } + | undefined + | null = null; + + const emitChange = () => { + if (nextEmit == null) { + // Nothing to emit + return; + } + const { events, firstEventTimestamp, firstEnqueuedTimestamp } = nextEmit; + + const changeAggregator = new FileSystemChangeAggregator(); + + // Process a sequence of events. 
Note that preserving ordering is + // important here - a file may be both removed and added in the same + // batch. + // `changeAggregator` flattens this over time into the net change from + // this sequence. + for (const event of events) { + const { relativeFilePath, clock } = event; + if (event.type === 'delete') { + fileSystem.remove(relativeFilePath, changeAggregator); + } else { + fileSystem.addOrModify(relativeFilePath, event.metadata, changeAggregator); + } + this.#updateClock(clocks, clock); + } + + const changeSize = changeAggregator.getSize(); + + if (changeSize === 0) { + // We had events, but they've exactly cancelled each other out, reset + // so that timers are correct for the next change. + nextEmit = null; + return; + } + + this.#plugins.forEach(({ plugin, dataIdx }) => { + plugin.onChanged( + changeAggregator.getMappedView( + dataIdx != null ? (metadata) => metadata[dataIdx] : () => null + ) + ); + }); + + const toPublicMetadata = (metadata: Readonly): ChangedFileMetadata => ({ + isSymlink: metadata[H.SYMLINK] !== 0, + modifiedTime: metadata[H.MTIME] ?? 
null, + }); + + const changesWithMetadata = changeAggregator.getMappedView(toPublicMetadata); + + const hmrPerfLogger = this.#options.perfLoggerFactory?.('HMR', { + key: this.#getNextChangeID(), + }); + if (hmrPerfLogger != null) { + hmrPerfLogger.start({ timestamp: firstEventTimestamp }); + hmrPerfLogger.point('waitingForChangeInterval_start', { + timestamp: firstEnqueuedTimestamp, + }); + hmrPerfLogger.point('waitingForChangeInterval_end'); + hmrPerfLogger.annotate({ int: { changeSize } }); + hmrPerfLogger.point('fileChange_start'); + } + const changeEvent: ChangeEvent = { + changes: changesWithMetadata, + logger: hmrPerfLogger, + rootDir: this.#options.rootDir, + }; + this.emit('change', changeEvent); + nextEmit = null; + }; + + let changeQueue: Promise = Promise.resolve(); + + const onChange = (change: WatcherBackendChangeEvent) => { + // Recrawl events bypass normal filtering - they trigger a full subdirectory scan + if ( + change.event !== 'recrawl' && + change.metadata && + // Ignore all directory events + (change.metadata.type === 'd' || + // Ignore regular files with unwatched extensions + (change.metadata.type === 'f' && !hasWatchedExtension(change.relativePath)) || + // Don't emit events relating to symlinks if enableSymlinks: false + (!this.#options.enableSymlinks && change.metadata?.type === 'l')) + ) { + return; + } + + const absoluteFilePath = path.join( + change.root, + normalizePathSeparatorsToSystem(change.relativePath) + ); + + // Ignore files (including symlinks) whose path matches ignorePattern + // (we don't ignore node_modules in watch mode) + if (this.#options.ignorePattern.test(absoluteFilePath)) { + return; + } + + const relativeFilePath = this.#pathUtils.absoluteToNormal(absoluteFilePath); + + const onChangeStartTime = performance.timeOrigin + performance.now(); + + const enqueueEvent = (event: InternalEnqueuedEvent) => { + nextEmit ??= { + events: [], + firstEnqueuedTimestamp: performance.timeOrigin + performance.now(), + 
firstEventTimestamp: onChangeStartTime, + }; + nextEmit.events.push(event); + }; + + changeQueue = changeQueue + .then(async () => { + // If we get duplicate events for the same file, ignore them. + if ( + nextEmit != null && + nextEmit.events.find( + (event) => + event.type === change.event && + event.relativeFilePath === relativeFilePath && + ((!('metadata' in event) && !change.metadata) || + ('metadata' in event && + change.metadata && + event.metadata[H.MTIME] != null && + change.metadata.modifiedTime != null && + event.metadata[H.MTIME] === change.metadata.modifiedTime)) + ) + ) { + return null; + } + + // If the file was added or modified, + // parse it and update the file map. + if (change.event === 'touch') { + invariant( + change.metadata.size != null, + 'since the file exists or changed, it should have known size' + ); + const fileMetadata: FileMetadata = [ + change.metadata.modifiedTime ?? null, + change.metadata.size, + 0, + null, + change.metadata.type === 'l' ? 1 : 0, + null, + ]; + + try { + if (change.metadata.type === 'l') { + await this.#maybeReadLink(relativeFilePath, fileMetadata); + } else { + await this.#fileProcessor.processRegularFile(relativeFilePath, fileMetadata, { + computeSha1: this.#options.computeSha1, + maybeReturnContent: false, + }); + } + enqueueEvent({ + clock: change.clock, + relativeFilePath, + metadata: fileMetadata, + type: change.event, + }); + } catch (e: any) { + if (!['ENOENT', 'EACCESS'].includes(e.code)) { + throw e; + } + // Swallow ENOENT/ACCESS errors silently. Safe because either: + // - We never knew about the file, so neither did any consumers. + // Or, + // - The watcher will soon (or has already) report a "delete" + // event for it, and we'll clean up in the usual way at that + // point. 
+ } + } else if (change.event === 'delete') { + enqueueEvent({ + clock: change.clock, + relativeFilePath, + type: 'delete', + }); + } else if (change.event === 'recrawl') { + // Recrawl event: flush pending changes and re-crawl the directory + emitChange(); + + // The relativePath is relative to the watcher root (change.root), + // but we need a path relative to rootDir for the recrawl. + const absoluteDirPath = path.join( + change.root, + normalizePathSeparatorsToSystem(change.relativePath) + ); + const subpath = this.#pathUtils.absoluteToNormal(absoluteDirPath); + + // Crawl the specific subdirectory + const watcher = this.#watcher; + invariant(watcher != null, 'Watcher must be initialized'); + const crawlResult = await watcher.recrawl(subpath, fileSystem); + + // Skip if no changes + if (crawlResult.changedFiles.size === 0 && crawlResult.removedFiles.size === 0) { + return null; + } + + // Reuse the same batch processing logic as build() + const recrawlChangeAggregator = await this.#applyFileDelta( + fileSystem, + this.#plugins, + crawlResult + ); + + // Update clock if provided + this.#updateClock(clocks, change.clock); + + // Skip emit if no changes after processing + if (recrawlChangeAggregator.getSize() === 0) { + return null; + } + + // Emit changes directly + const toPublicMetadata = (metadata: Readonly): ChangedFileMetadata => ({ + isSymlink: metadata[H.SYMLINK] !== 0, + modifiedTime: metadata[H.MTIME] ?? 
null, + }); + + const changesWithMetadata = recrawlChangeAggregator.getMappedView(toPublicMetadata); + + const changeEvent: ChangeEvent = { + changes: changesWithMetadata, + logger: null, + rootDir: this.#options.rootDir, + }; + this.emit('change', changeEvent); + } else { + throw new Error( + `metro-file-map: Unrecognized event type from watcher: ${(change as any).event}` + ); + } + return null; + }) + .catch((error: Error) => { + this.#console.error(`metro-file-map: watch error:\n ${error.stack}\n`); + }); + }; + + this.#changeInterval = setInterval(emitChange, CHANGE_INTERVAL); + + invariant(this.#watcher != null, 'Expected #watcher to have been initialised by build()'); + await this.#watcher.watch(onChange); + + if (this.#options.healthCheck.enabled) { + const performHealthCheck = () => { + if (!this.#watcher) { + return; + } + // eslint-disable-next-line @typescript-eslint/no-floating-promises + this.#watcher.checkHealth(this.#options.healthCheck.timeout).then((result) => { + this.emit('healthCheck', result); + }); + }; + performHealthCheck(); + this.#healthCheckInterval = setInterval( + performHealthCheck, + this.#options.healthCheck.interval + ); + } + this.#startupPerfLogger?.point('watch_end'); + } + + async end(): Promise { + if (this.#changeInterval) { + clearInterval(this.#changeInterval); + } + if (this.#healthCheckInterval) { + clearInterval(this.#healthCheckInterval); + } + + this.#crawlerAbortController.abort(); + + await Promise.all([ + this.#fileProcessor.end(), + this.#watcher?.close(), + this.#cacheManager.end(), + ]); + } + + async #shouldUseWatchman(): Promise { + if (!this.#options.useWatchman) { + return false; + } + if (!this.#canUseWatchmanPromise) { + this.#canUseWatchmanPromise = checkWatchmanCapabilities(WATCHMAN_REQUIRED_CAPABILITIES) + .then(({ version }) => { + this.#startupPerfLogger?.annotate({ + string: { + watchmanVersion: version, + }, + }); + return true; + }) + .catch((e: any) => { + // TODO: Advise people to either install 
Watchman or set + // `useWatchman: false` here? + this.#startupPerfLogger?.annotate({ + string: { + watchmanFailedCapabilityCheck: e?.message ?? '[missing]', + }, + }); + return false; + }); + } + return this.#canUseWatchmanPromise; + } + + #getNextChangeID(): number { + if (this.#changeID >= Number.MAX_SAFE_INTEGER) { + this.#changeID = 0; + } + return ++this.#changeID; + } + + #updateClock(clocks: WatchmanClocks, newClock: ChangeEventClock | undefined | null): void { + if (newClock == null) { + return; + } + const [absoluteWatchRoot, clockSpec] = newClock; + const relativeFsRoot = this.#pathUtils.absoluteToNormal(absoluteWatchRoot); + clocks.set(normalizePathSeparatorsToPosix(relativeFsRoot), clockSpec); + } + + static H: HType = H; +} + +// TODO: Replace with it.map() from Node 22+ +function mapIterable(it: Iterable, fn: (item: T) => S): Iterable { + return (function* mapped() { + for (const item of it) { + yield fn(item); + } + })(); +} diff --git a/packages/@expo/metro-file-map/src/lib/FileProcessor.ts b/packages/@expo/metro-file-map/src/lib/FileProcessor.ts new file mode 100644 index 00000000000000..b5cd3fa1705a50 --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/FileProcessor.ts @@ -0,0 +1,270 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import { Worker as JestWorker } from 'jest-worker'; +import { sep } from 'path'; + +import H from '../constants'; +import type { + FileMapPluginWorker, + FileMetadata, + PerfLogger, + WorkerMessage, + WorkerMetadata, + WorkerSetupArgs, +} from '../types'; +import { Worker } from '../worker'; +import { RootPathUtils } from './RootPathUtils'; + +const debug = require('debug')('Metro:FileMap'); + +interface ProcessFileRequest { + /** + * Populate metadata[H.SHA1] with the SHA1 of the file's contents. 
+ */ + readonly computeSha1: boolean; + /** + * Only if processing has already required reading the file's contents, return + * the contents as a Buffer - null otherwise. Not supported for batches. + */ + readonly maybeReturnContent: boolean; +} + +interface AsyncWorker { + processFile(message: WorkerMessage): Promise; + end(): Promise; +} + +interface MaybeCodedError extends Error { + code?: string; +} + +const NODE_MODULES_SEP = 'node_modules' + sep; +const MAX_FILES_PER_WORKER = 100; + +export class FileProcessor { + #maxFilesPerWorker: number; + #maxWorkers: number; + #perfLogger: PerfLogger | undefined | null; + #pluginWorkers: readonly FileMapPluginWorker[]; + #inBandWorker: Worker; + #rootPathUtils: RootPathUtils; + + constructor( + opts: Readonly<{ + maxFilesPerWorker?: number | null; + maxWorkers: number; + pluginWorkers?: readonly FileMapPluginWorker[] | null; + perfLogger?: PerfLogger | null; + rootDir: string; + }> + ) { + this.#maxFilesPerWorker = opts.maxFilesPerWorker ?? MAX_FILES_PER_WORKER; + this.#maxWorkers = opts.maxWorkers; + this.#pluginWorkers = opts.pluginWorkers ?? 
[]; + this.#inBandWorker = new Worker({ + plugins: this.#pluginWorkers.map((plugin) => plugin.worker), + }); + this.#perfLogger = opts.perfLogger; + this.#rootPathUtils = new RootPathUtils(opts.rootDir); + } + + async processBatch( + files: readonly [relativePath: string, FileMetadata][], + req: ProcessFileRequest + ): Promise<{ + errors: { + normalFilePath: string; + error: MaybeCodedError; + }[]; + }> { + const errors: { normalFilePath: string; error: MaybeCodedError }[] = []; + + const workerJobs = files + .map(([normalFilePath, fileMetadata]): [WorkerMessage, FileMetadata] | null => { + const maybeWorkerInput = this.#getWorkerInput(normalFilePath, fileMetadata, req); + if (!maybeWorkerInput) { + return null; + } + return [maybeWorkerInput, fileMetadata]; + }) + .filter((x) => x != null); + + const numWorkers = Math.min( + this.#maxWorkers, + Math.ceil(workerJobs.length / this.#maxFilesPerWorker) + ); + const batchWorker = this.#getBatchWorker(numWorkers); + + if (req.maybeReturnContent) { + throw new Error('Batch processing does not support returning file contents'); + } + + await Promise.all( + workerJobs.map(([workerInput, fileMetadata]) => { + return batchWorker + .processFile(workerInput) + .then((reply) => processWorkerReply(reply, workerInput.pluginsToRun, fileMetadata)) + .catch((error) => + errors.push({ + normalFilePath: this.#rootPathUtils.absoluteToNormal(workerInput.filePath), + error: normalizeWorkerError(error), + }) + ); + }) + ); + await batchWorker.end(); + return { errors }; + } + + processRegularFile( + normalPath: string, + fileMetadata: FileMetadata, + req: ProcessFileRequest + ): { content: Buffer | undefined | null } | null { + const workerInput = this.#getWorkerInput(normalPath, fileMetadata, req); + return workerInput + ? 
{ + content: processWorkerReply( + this.#inBandWorker.processFile(workerInput), + workerInput.pluginsToRun, + fileMetadata + ), + } + : null; + } + + #getWorkerInput( + normalPath: string, + fileMetadata: FileMetadata, + req: ProcessFileRequest + ): WorkerMessage | null { + if (fileMetadata[H.SYMLINK] !== 0) { + // Only process regular files + return null; + } + + const computeSha1 = req.computeSha1 && fileMetadata[H.SHA1] == null; + const { maybeReturnContent } = req; + + const nodeModulesIdx = normalPath.indexOf(NODE_MODULES_SEP); + // Path may begin 'node_modules/' or contain '/node_modules/'. + const isNodeModules = + nodeModulesIdx === 0 || (nodeModulesIdx > 0 && normalPath[nodeModulesIdx - 1] === sep); + + // Indices of plugins with a passing filter + const pluginsToRun = + this.#pluginWorkers?.reduce((prev, plugin, idx) => { + if (plugin.filter({ isNodeModules, normalPath })) { + prev.push(idx); + } + return prev; + }, [] as number[]) ?? []; + + if (!computeSha1 && pluginsToRun.length === 0) { + // Nothing to process + return null; + } + + // Use a cheaper worker configuration for node_modules files, because + // they may never be Haste modules or packages. + // + // Note that we'd only expect node_modules files to reach this point if + // retainAllFiles is true, or they're touched during watch mode. + if (isNodeModules) { + if (computeSha1) { + return { + computeSha1: true, + filePath: this.#rootPathUtils.normalToAbsolute(normalPath), + maybeReturnContent, + pluginsToRun, + }; + } + return null; + } + + return { + computeSha1, + filePath: this.#rootPathUtils.normalToAbsolute(normalPath), + maybeReturnContent, + pluginsToRun, + }; + } + + /** + * Creates workers or parses files and extracts metadata in-process. 
+ */ + #getBatchWorker(numWorkers: number): AsyncWorker { + if (numWorkers <= 1) { + // In-band worker with the same interface as a Jest worker farm + return { + processFile: async (message) => this.#inBandWorker.processFile(message), + end: async () => {}, + }; + } + const workerPath = require.resolve('../worker'); + debug('Creating worker farm of %d worker threads', numWorkers); + this.#perfLogger?.point('initWorkers_start'); + const jestWorker = new JestWorker(workerPath, { + exposedMethods: ['processFile'], + maxRetries: 3, + numWorkers, + enableWorkerThreads: true, + forkOptions: { + // Don't pass Node arguments down to workers. In particular, avoid + // unnecessarily registering Babel when we're running Metro from + // source (our worker is plain CommonJS). + execArgv: [], + }, + setupArgs: [ + { + plugins: this.#pluginWorkers.map((plugin) => plugin.worker), + } as WorkerSetupArgs, + ], + }) as JestWorker & AsyncWorker; + this.#perfLogger?.point('initWorkers_end'); + // Only log worker init once + this.#perfLogger = null; + return jestWorker; + } + + async end(): Promise {} +} + +function processWorkerReply( + metadata: WorkerMetadata, + pluginsRun: readonly number[], + fileMetadata: FileMetadata +): Buffer | undefined | null { + fileMetadata[H.VISITED] = 1; + const pluginData = metadata.pluginData; + if (pluginData) { + for (const [i, pluginIdx] of pluginsRun.entries()) { + fileMetadata[H.PLUGINDATA + pluginIdx] = pluginData[i]; + } + } + + if (metadata.sha1 != null) { + fileMetadata[H.SHA1] = metadata.sha1; + } + + return metadata.content; +} + +function normalizeWorkerError(mixedError: Error | string | null | undefined): MaybeCodedError { + if ( + mixedError == null || + typeof mixedError !== 'object' || + mixedError.message == null || + mixedError.stack == null + ) { + const error = new Error(mixedError as string); + error.stack = ''; // Remove stack for stack-less errors. 
+ return error; + } + return mixedError; +} diff --git a/packages/@expo/metro-file-map/src/lib/FileSystemChangeAggregator.ts b/packages/@expo/metro-file-map/src/lib/FileSystemChangeAggregator.ts new file mode 100644 index 00000000000000..db6a8f62e3bd42 --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/FileSystemChangeAggregator.ts @@ -0,0 +1,133 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { + CanonicalPath, + FileMetadata, + FileSystemListener, + ReadonlyFileSystemChanges, +} from '../types'; + +export class FileSystemChangeAggregator implements FileSystemListener { + // Mutually exclusive with removedDirectories + readonly #addedDirectories: Set = new Set(); + // Mutually exclusive with addedDirectories + readonly #removedDirectories: Set = new Set(); + + // Mutually exclusive with modified and removed files + readonly #addedFiles: Map = new Map(); + // Mutually exclusive with added and removed files + readonly #modifiedFiles: Map = new Map(); + // Mutually exclusive with added and modified files + readonly #removedFiles: Map = new Map(); + + // Removed files must be paired with the file's metadata the last time it was + // observable by consumers - ie, immediately *before* this batch. To report + // this accurately with minimal overhead, we'll note the current metadata of + // a file the first time it is modified or removed within a batch. If it is + // re-added, modified and removed again, we still have the initial metadata. + // This is particularly important if, say, a regular file is replaced by a + // symlink, or vice-versa. + readonly #initialMetadata: Map = new Map(); + + directoryAdded(canonicalPath: CanonicalPath): void { + // Only add to newDirectories if this directory wasn't previously removed + // (i.e., it's truly new). 
If it was removed and re-added, the net effect + // is no directory change. + if (!this.#removedDirectories.delete(canonicalPath)) { + this.#addedDirectories.add(canonicalPath); + } + } + + directoryRemoved(canonicalPath: CanonicalPath): void { + if (!this.#addedDirectories.delete(canonicalPath)) { + this.#removedDirectories.add(canonicalPath); + } + } + + fileAdded(canonicalPath: CanonicalPath, data: FileMetadata): void { + if (this.#removedFiles.delete(canonicalPath)) { + // File was removed then re-added in the same batch - treat as modification + this.#modifiedFiles.set(canonicalPath, data); + } else { + // New file + this.#addedFiles.set(canonicalPath, data); + } + } + + fileModified(canonicalPath: CanonicalPath, oldData: FileMetadata, newData: FileMetadata): void { + if (this.#addedFiles.has(canonicalPath)) { + // File did not exist before this batch. Further modification only + // updates metadata + this.#addedFiles.set(canonicalPath, newData); + } else { + if (!this.#initialMetadata.has(canonicalPath)) { + this.#initialMetadata.set(canonicalPath, oldData); + } + this.#modifiedFiles.set(canonicalPath, newData); + } + } + + fileRemoved(canonicalPath: CanonicalPath, data: FileMetadata): void { + // Check if this file was added in the same batch + if (!this.#addedFiles.delete(canonicalPath)) { + let initialData = this.#initialMetadata.get(canonicalPath); + if (!initialData) { + initialData = data; + this.#initialMetadata.set(canonicalPath, initialData); + } + + // File was not added in this batch, so add to removed with last metadata + this.#modifiedFiles.delete(canonicalPath); + this.#removedFiles.set(canonicalPath, initialData); + } + // else: File was added then removed in the same batch - no net change + } + + getSize(): number { + return ( + this.#addedDirectories.size + + this.#removedDirectories.size + + this.#addedFiles.size + + this.#modifiedFiles.size + + this.#removedFiles.size + ); + } + + getView(): ReadonlyFileSystemChanges { + return { + 
addedDirectories: this.#addedDirectories, + removedDirectories: this.#removedDirectories, + addedFiles: this.#addedFiles, + modifiedFiles: this.#modifiedFiles, + removedFiles: this.#removedFiles, + }; + } + + getMappedView(metadataMapFn: (metadata: FileMetadata) => T): ReadonlyFileSystemChanges { + return { + addedDirectories: this.#addedDirectories, + removedDirectories: this.#removedDirectories, + addedFiles: mapIterable(this.#addedFiles, metadataMapFn), + modifiedFiles: mapIterable(this.#modifiedFiles, metadataMapFn), + removedFiles: mapIterable(this.#removedFiles, metadataMapFn), + }; + } +} + +function mapIterable( + map: Map, + metadataMapFn: (metadata: FileMetadata) => T +): Iterable> { + return { + *[Symbol.iterator](): Iterator> { + for (const [path, metadata] of map) { + yield [path, metadataMapFn(metadata)]; + } + }, + }; +} diff --git a/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts b/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts new file mode 100644 index 00000000000000..6b86965a28be07 --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts @@ -0,0 +1,301 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import invariant from 'invariant'; +import path from 'path'; + +/** + * This module provides path utility functions - similar to `node:path` - + * optimised for Metro's use case (many paths, few roots) under assumptions + * typically safe to make within Metro - namely: + * + * - All input path separators must be system-native. + * - Double/redundant separators like '/foo//bar' are not supported. + * - All characters except separators are assumed to be valid in path segments. + * + * - A "well-formed" path is any path following the rules above. + * - A "normal" path is a root-relative well-formed path with no redundant + * indirections. 
 * Normal paths have no leading './', and the normal path of
 * the root is the empty string.
 *
 * Output and input paths are at least well-formed (normal where indicated by
 * naming).
 *
 * Trailing path separators are preserved, except for fs roots in
 * normalToAbsolute (fs roots always have a trailing separator), and the
 * project root in absoluteToNormal and relativeToNormal (the project root is
 * always the empty string, and is always a directory, so a trailing separator
 * is redundant).
 *
 * As of Node 20, absoluteToNormal is ~8x faster than `path.relative` and
 * `normalToAbsolute` is ~20x faster than `path.resolve`, benchmarked on the
 * real inputs from building FB's product graph. Some well-formed inputs
 * (e.g., /project/./foo/../bar), are handled but not optimised, and we fall
 * back to `node:path` equivalents in those cases.
 */

// '../' - one "up" indirection followed by a separator (3 chars on all OSes).
const UP_FRAGMENT_SEP = '..' + path.sep;
// '/..' - a separator followed by one trailing "up" indirection.
const SEP_UP_FRAGMENT = path.sep + '..';
const UP_FRAGMENT_SEP_LENGTH = UP_FRAGMENT_SEP.length;
// './' - a same-directory indirection.
const CURRENT_FRAGMENT = '.' + path.sep;

export class RootPathUtils {
  #rootDir: string;
  // [rootDir, dirname(rootDir), dirname(dirname(rootDir)), ..., fsRoot]
  #rootDirnames: readonly string[];
  // rootDir split on path.sep (with no spurious trailing empty segment when
  // rootDir is itself a filesystem root).
  #rootParts: readonly string[];
  // Number of ancestors of rootDir, i.e. #rootDirnames.length - 1.
  #rootDepth: number;

  constructor(rootDir: string) {
    this.#rootDir = rootDir;
    // Walk dirname() up to the filesystem root; dirname is a fixed point
    // there (dirname('/') === '/'), which terminates the loop.
    const rootDirnames = [];
    for (
      let next = rootDir, previous = null;
      previous !== next;
      previous = next, next = path.dirname(next)
    ) {
      rootDirnames.push(next);
    }
    this.#rootDirnames = rootDirnames;

    this.#rootDepth = rootDirnames.length - 1;

    const rootParts = rootDir.split(path.sep);
    // If rootDir is a filesystem root (C:\ or /), it will end in a separator and
    // give a spurious empty entry at the end of rootParts.
    if (this.#rootDepth === 0) {
      rootParts.pop();
    }

    this.#rootParts = rootParts;
  }

  // Basename of the nth ancestor of the root (n=0 is the root itself).
  getBasenameOfNthAncestor(n: number): string {
    return this.#rootParts[this.#rootParts.length - 1 - n]!;
  }

  getParts(): readonly string[] {
    return this.#rootParts;
  }

  // absolutePath may be any well-formed absolute path.
  // Converts it to a normal (root-relative) path, using the fast prefix-match
  // against #rootParts where possible and path.relative as a fallback.
  absoluteToNormal(absolutePath: string): string {
    let endOfMatchingPrefix = 0;
    let lastMatchingPartIdx = 0;

    for (
      let nextPart: string | undefined = this.#rootParts[0], nextLength = nextPart!.length;
      nextPart != null &&
      // Check that absolutePath is equal to nextPart + '/' or ends with
      // nextPart, starting from endOfMatchingPrefix.
      absolutePath.startsWith(nextPart, endOfMatchingPrefix) &&
      (absolutePath.length === endOfMatchingPrefix + nextLength ||
        absolutePath[endOfMatchingPrefix + nextLength] === path.sep);

    ) {
      // Move our matching pointer forward and load the next part.
      endOfMatchingPrefix += nextLength + 1;
      nextPart = this.#rootParts[++lastMatchingPartIdx];
      nextLength = nextPart?.length ?? 0;
    }

    // If our root is /project/root and we're given /project/bar/foo.js, we
    // have matched up to '/project', and will need to return a path
    // beginning '../' (one prepended indirection, to go up from 'root').
    //
    // If we're given /project/../project2/otherroot, we have one level of
    // indirection up to prepend in the same way as above. There's another
    // explicit indirection already present in the input - we'll account for
    // that in tryCollapseIndirectionsInSuffix.
    const upIndirectionsToPrepend = this.#rootParts.length - lastMatchingPartIdx;

    return (
      this.#tryCollapseIndirectionsInSuffix(
        absolutePath,
        endOfMatchingPrefix,
        upIndirectionsToPrepend
      )?.collapsedPath ?? this.#slowAbsoluteToNormal(absolutePath)
    );
  }

  // Fallback for inputs the fast path can't handle (e.g. mid-path './' or
  // non-leading '..'), preserving any trailing separator the input had.
  #slowAbsoluteToNormal(absolutePath: string): string {
    const endsWithSep = absolutePath.endsWith(path.sep);
    const result = path.relative(this.#rootDir, absolutePath);
    return endsWithSep && !result.endsWith(path.sep) ? result + path.sep : result;
  }

  // `normalPath` is assumed to be normal (root-relative, no redundant
  // indirection), per the definition above.
  normalToAbsolute(normalPath: string): string {
    let left = this.#rootDir;
    let i = 0;
    let pos = 0;
    // Consume each leading '../' (or a bare trailing '..') by stepping `left`
    // one precomputed ancestor up, clamping at the filesystem root.
    while (
      normalPath.startsWith(UP_FRAGMENT_SEP, pos) ||
      (normalPath.endsWith('..') && normalPath.length === 2 + pos)
    ) {
      left = this.#rootDirnames[i === this.#rootDepth ? this.#rootDepth : ++i]!;
      pos += UP_FRAGMENT_SEP_LENGTH;
    }
    const right = pos === 0 ? normalPath : normalPath.slice(pos);
    if (right.length === 0) {
      return left;
    }
    // left may already end in a path separator only if it is a filesystem root,
    // '/' or 'X:\'.
    if (i === this.#rootDepth) {
      return left + right;
    }
    return left + path.sep + right;
  }

  // Normalises a root-relative path, collapsing leading indirections via the
  // fast path and falling back to node:path otherwise.
  relativeToNormal(relativePath: string): string {
    return (
      this.#tryCollapseIndirectionsInSuffix(relativePath, 0, 0)?.collapsedPath ??
      path.relative(this.#rootDir, path.join(this.#rootDir, relativePath))
    );
  }

  // If a path is a direct ancestor of the project root (or the root itself),
  // return a number with the degrees of separation, e.g. root=0, parent=1,..
  // or null otherwise.
  getAncestorOfRootIdx(normalPath: string): number | null {
    if (normalPath === '') {
      return 0;
    }
    if (normalPath === '..') {
      return 1;
    }
    // Otherwise a *normal* path is only a root ancestor if it is a sequence of
    // '../' segments followed by '..', so the length tells us the number of
    // up fragments: n fragments take 3n-1 chars, hence (length + 1) / 3.
    if (normalPath.endsWith(SEP_UP_FRAGMENT)) {
      return (normalPath.length + 1) / 3;
    }
    return null;
  }

  // Takes a normal and relative path, and joins them efficiently into a normal
  // path, including collapsing trailing '..' in the first part with leading
  // project root segments in the relative part.
  joinNormalToRelative(
    normalPath: string,
    relativePath: string
  ): { normalPath: string; collapsedSegments: number } {
    if (normalPath === '') {
      return { collapsedSegments: 0, normalPath: relativePath };
    }
    if (relativePath === '') {
      return { collapsedSegments: 0, normalPath };
    }
    const left = normalPath + path.sep;
    const rawPath = left + relativePath;
    // Only a left side ending in '..' can collapse against the right side;
    // any other normal path joins by plain concatenation.
    if (normalPath === '..' || normalPath.endsWith(SEP_UP_FRAGMENT)) {
      const collapsed = this.#tryCollapseIndirectionsInSuffix(rawPath, 0, 0);
      invariant(collapsed != null, 'Failed to collapse');
      return {
        collapsedSegments: collapsed.collapsedSegments,
        normalPath: collapsed.collapsedPath,
      };
    }
    return {
      collapsedSegments: 0,
      normalPath: rawPath,
    };
  }

  // Plain delegation to node:path - provided for interface completeness.
  relative(from: string, to: string): string {
    return path.relative(from, to);
  }

  // Internal: Tries to collapse sequences like `../root/foo` for root
  // `/project/root` down to the normal 'foo'. Returns null when the input
  // needs the slow node:path fallback.
  #tryCollapseIndirectionsInSuffix(
    fullPath: string, // A string ending with the relative path to process
    startOfRelativePart: number, // Index of the start of part to process
    implicitUpIndirections: number // 0=root-relative, 1=dirname(root)-relative...
  ): { collapsedPath: string; collapsedSegments: number } | null {
    let totalUpIndirections = implicitUpIndirections;
    let collapsedSegments = 0;
    // Allow any sequence of indirection fragments at the start of the
    // unmatched suffix e.g /project/[../../foo], but bail out to Node's
    // path.relative if we find a possible indirection after any later segment,
    // or on any "./" that isn't a "../".
    for (let pos = startOfRelativePart; ; pos += UP_FRAGMENT_SEP_LENGTH) {
      const nextIndirection = fullPath.indexOf(CURRENT_FRAGMENT, pos);
      if (nextIndirection === -1) {
        // If we have any indirections, they may "collapse" if a subsequent
        // segment re-enters a directory we had previously exited, e.g:
        // /project/root/../root/foo should collapse to /project/root/foo' and
        // return foo, not ../root/foo.
        //
        // We match each segment following redirections, in turn, against the
        // part of the root path they may collapse into, and break on the first
        // mismatch.
        while (totalUpIndirections > 0) {
          const segmentToMaybeCollapse =
            this.#rootParts[this.#rootParts.length - totalUpIndirections]!;
          if (
            fullPath.startsWith(segmentToMaybeCollapse, pos) &&
            // The following character should be either a separator or end of
            // string
            (fullPath.length === segmentToMaybeCollapse.length + pos ||
              fullPath[segmentToMaybeCollapse.length + pos] === path.sep)
          ) {
            pos += segmentToMaybeCollapse.length + 1;
            collapsedSegments++;
            totalUpIndirections--;
          } else {
            break;
          }
        }
        // After collapsing we may have no more segments remaining (following
        // '..' indirections). Ensure that we don't drop or add a trailing
        // separator in this case by taking .slice(pos-1). In any other case,
        // we know that fullPath[pos] is a separator.
        if (pos >= fullPath.length) {
          return {
            collapsedPath:
              totalUpIndirections > 0
                ? UP_FRAGMENT_SEP.repeat(totalUpIndirections - 1) + '..' + fullPath.slice(pos - 1)
                : '',
            collapsedSegments,
          };
        }
        const right = pos > 0 ? fullPath.slice(pos) : fullPath;
        if (right === '..' && totalUpIndirections >= this.#rootParts.length - 1) {
          // If we have no right side (or an indirection that would take us
          // below the root), just ensure we don't include a trailing separator.
          return {
            collapsedPath: UP_FRAGMENT_SEP.repeat(totalUpIndirections).slice(0, -1),
            collapsedSegments,
          };
        }
        // Optimisation for the common case, saves a concatenation.
        if (totalUpIndirections === 0) {
          return { collapsedPath: right, collapsedSegments };
        }
        return {
          collapsedPath: UP_FRAGMENT_SEP.repeat(totalUpIndirections) + right,
          collapsedSegments,
        };
      }

      // Cap the number of indirections at the total number of root segments.
      // File systems treat '..' at the root as '.'.
      if (totalUpIndirections < this.#rootParts.length - 1) {
        totalUpIndirections++;
      }

      if (
        nextIndirection !== pos + 1 || // Fallback when ./ later in the path, or leading
        fullPath[pos] !== '.' // and for anything other than a leading ../
      ) {
        return null;
      }
    }
  }
}
+ */ + +import invariant from 'invariant'; +import path from 'path'; + +import H from '../constants'; +import type { + CacheData, + FileData, + FileMetadata, + FileStats, + FileSystemListener, + LookupResult, + MutableFileSystem, + Path, + ProcessFileFunction, +} from '../types'; +import { RootPathUtils } from './RootPathUtils'; + +type DirectoryNode = Map; +type FileNode = FileMetadata; +type MixedNode = FileNode | DirectoryNode; + +function isDirectory(node: MixedNode | null | undefined): node is DirectoryNode { + return node instanceof Map; +} + +function isRegularFile(node: FileNode): boolean { + return node[H.SYMLINK] === 0; +} + +interface NormalizedSymlinkTarget { + ancestorOfRootIdx: number | null; + normalPath: string; + startOfBasenameIdx: number; +} + +interface DeserializedSnapshotInput { + rootDir: string; + fileSystemData: DirectoryNode; + processFile: ProcessFileFunction; +} + +interface TreeFSOptions { + rootDir: Path; + files?: FileData; + processFile: ProcessFileFunction; +} + +interface MatchFilesOptions { + /* Filter relative paths against a pattern. */ + readonly filter?: RegExp | null; + /* `filter` is applied against absolute paths, vs rootDir-relative. (default: false) */ + readonly filterCompareAbsolute?: boolean; + /* `filter` is applied against posix-delimited paths, even on Windows. (default: false) */ + readonly filterComparePosix?: boolean; + /* Follow symlinks when enumerating paths. (default: false) */ + readonly follow?: boolean; + /* Should search for files recursively. (default: true) */ + readonly recursive?: boolean; + /* Match files under a given root, or null for all files */ + readonly rootDir?: Path | null; +} + +interface MetadataIteratorOptions { + readonly includeSymlinks: boolean; + readonly includeNodeModules: boolean; +} + +/** + * OVERVIEW: + * + * TreeFS is Metro's in-memory representation of the file system. 
It is + * structured as a tree of non-empty maps and leaves (tuples), with the root + * node representing the given `rootDir`, typically Metro's _project root_ + * (not a filesystem root). Map keys are path segments, and branches outside + * the project root are accessed via `'..'`. + * + * EXAMPLE: + * + * For a root dir '/data/project', the file '/data/other/app/index.js' would + * have metadata at #rootNode.get('..').get('other').get('app').get('index.js') + * + * SERIALISATION: + * + * #rootNode is designed to be directly serialisable and directly portable (for + * a given project) between different root directories and operating systems. + * + * SYMLINKS: + * + * Symlinks are represented as nodes whose metadata contains their literal + * target. Literal targets are resolved to normal paths at runtime, and cached. + * If a symlink is encountered during traversal, we restart traversal at the + * root node targeting join(normal symlink target, remaining path suffix). + * + * NODE TYPES: + * + * - A directory (including a parent directory at '..') is represented by a + * `Map` of basenames to any other node type. + * - A file is represented by an `Array` (tuple) of metadata, of which: + * - A regular file has node[H.SYMLINK] === 0 + * - A symlink has node[H.SYMLINK] === 1 or + * typeof node[H.SYMLINK] === 'string', where a string is the literal + * content of the symlink (i.e. from readlink), if known. + * + * TERMINOLOGY: + * + * - mixedPath + * A root-relative or absolute path + * - relativePath + * A root-relative path + * - normalPath + * A root-relative, normalised path (no extraneous '.' 
or '..'), may have a + * single trailing slash + * - canonicalPath + * A root-relative, normalised, real path (no symlinks in dirname), never has + * a trailing slash + */ +export default class TreeFS implements MutableFileSystem { + readonly #cachedNormalSymlinkTargets: WeakMap = new WeakMap(); + readonly #pathUtils: RootPathUtils; + readonly #processFile: ProcessFileFunction; + readonly #rootDir: Path; + #rootNode: DirectoryNode = new Map(); + + constructor(opts: TreeFSOptions) { + const { rootDir, files, processFile } = opts; + this.#rootDir = rootDir; + this.#pathUtils = new RootPathUtils(rootDir); + this.#processFile = processFile; + if (files != null) { + this.bulkAddOrModify(files); + } + } + + getSerializableSnapshot(): CacheData['fileSystemData'] { + return this.#cloneTree(this.#rootNode); + } + + static fromDeserializedSnapshot(args: DeserializedSnapshotInput): TreeFS { + const { rootDir, fileSystemData, processFile } = args; + const tfs = new TreeFS({ processFile, rootDir }); + tfs.#rootNode = fileSystemData; + return tfs; + } + + getSize(mixedPath: Path): number | null { + const fileMetadata = this.#getFileData(mixedPath); + return (fileMetadata && fileMetadata[H.SIZE]) ?? null; + } + + getDifference( + files: FileData, + options?: { + /** + * Only consider files under this normal subdirectory when computing + * removedFiles. If not provided, all files in the file system are + * considered. 
+ */ + readonly subpath?: string; + } + ): { + changedFiles: FileData; + removedFiles: Set; + } { + const changedFiles: FileData = new Map(files); + const removedFiles: Set = new Set(); + const subpath = options?.subpath; + + // If a subpath is specified, start iteration from that node + let rootNode: DirectoryNode = this.#rootNode; + let prefix: string = ''; + if (subpath != null && subpath !== '') { + const lookupResult = this.#lookupByNormalPath(subpath, { + followLeaf: true, + }); + if (!lookupResult.exists || !isDirectory(lookupResult.node)) { + // Directory doesn't exist, nothing to compare - all files are new + return { changedFiles, removedFiles }; + } + rootNode = lookupResult.node; + prefix = lookupResult.canonicalPath; + } + + for (const { canonicalPath, metadata } of this.#metadataIterator( + rootNode, + { + includeNodeModules: true, + includeSymlinks: true, + }, + prefix + )) { + const newMetadata = files.get(canonicalPath); + if (newMetadata) { + if (isRegularFile(newMetadata) !== isRegularFile(metadata)) { + // Types differ, file has changed + continue; + } + if ( + newMetadata[H.MTIME] != null && + // TODO: Remove when mtime is null if not populated + newMetadata[H.MTIME] !== 0 && + newMetadata[H.MTIME] === metadata[H.MTIME] + ) { + // Types and modified time match - not changed. + changedFiles.delete(canonicalPath); + } else if ( + newMetadata[H.SHA1] != null && + newMetadata[H.SHA1] === metadata[H.SHA1] && + metadata[H.VISITED] === 1 + ) { + // Content matches - update modified time but don't revisit + const updatedMetadata = [...metadata] as FileMetadata; + updatedMetadata[H.MTIME] = newMetadata[H.MTIME]; + changedFiles.set(canonicalPath, updatedMetadata); + } + } else { + removedFiles.add(canonicalPath); + } + } + return { + changedFiles, + removedFiles, + }; + } + + getSha1(mixedPath: Path): string | null { + const fileMetadata = this.#getFileData(mixedPath); + return (fileMetadata && fileMetadata[H.SHA1]) ?? 
null; + } + + async getOrComputeSha1( + mixedPath: Path + ): Promise<{ sha1: string; content?: Buffer } | null | undefined> { + const normalPath = this.#normalizePath(mixedPath); + const result = this.#lookupByNormalPath(normalPath, { + followLeaf: true, + }); + if (!result.exists || isDirectory(result.node)) { + return null; + } + const { canonicalPath, node: fileMetadata } = result; + + // Empty strings + const existing = fileMetadata[H.SHA1]; + if (existing != null && existing.length > 0) { + return { sha1: existing }; + } + + // Mutate the metadata we first retrieved. This may be orphaned or about + // to be overwritten if the file changes while we are processing it - + // by only mutating the original metadata, we don't risk caching a stale + // SHA-1 after a change event. + const maybeContent = await this.#processFile(canonicalPath, fileMetadata, { + computeSha1: true, + }); + const sha1 = fileMetadata[H.SHA1]; + invariant( + sha1 != null && sha1.length > 0, + "File processing didn't populate a SHA-1 hash for %s", + canonicalPath + ); + + return maybeContent + ? 
{ + content: maybeContent, + sha1, + } + : { sha1 }; + } + + exists(mixedPath: Path): boolean { + const result = this.#getFileData(mixedPath); + return result != null; + } + + lookup(mixedPath: Path): LookupResult { + const normalPath = this.#normalizePath(mixedPath); + const links = new Set(); + const result = this.#lookupByNormalPath(normalPath, { + collectLinkPaths: links, + followLeaf: true, + }); + if (!result.exists) { + const { canonicalMissingPath } = result; + return { + exists: false, + links, + missing: this.#pathUtils.normalToAbsolute(canonicalMissingPath), + }; + } + const { canonicalPath, node } = result; + const realPath = this.#pathUtils.normalToAbsolute(canonicalPath); + if (isDirectory(node)) { + return { exists: true, links, realPath, type: 'd' }; + } + invariant( + isRegularFile(node), + 'lookup follows symlinks, so should never return one (%s -> %s)', + mixedPath, + canonicalPath + ); + return { exists: true, links, realPath, type: 'f', metadata: node }; + } + + getAllFiles(): Path[] { + return Array.from( + this.metadataIterator({ + includeNodeModules: true, + includeSymlinks: false, + }), + ({ canonicalPath }) => this.#pathUtils.normalToAbsolute(canonicalPath) + ); + } + + linkStats(mixedPath: Path): FileStats | null { + const fileMetadata = this.#getFileData(mixedPath, { followLeaf: false }); + if (fileMetadata == null) { + return null; + } + const fileType = isRegularFile(fileMetadata) ? 'f' : 'l'; + return { + fileType, + modifiedTime: fileMetadata[H.MTIME], + size: fileMetadata[H.SIZE], + }; + } + + /** + * Given a search context, return a list of file paths matching the query. + * The query matches against normalized paths which start with `./`, + * for example: `a/b.js` -> `./a/b.js` + */ + *matchFiles(opts: MatchFilesOptions): Generator { + const { + filter = null, + filterCompareAbsolute = false, + filterComparePosix = false, + follow = false, + recursive = true, + rootDir = null, + } = opts; + const normalRoot = rootDir == null ? 
'' : this.#normalizePath(rootDir); + const contextRootResult = this.#lookupByNormalPath(normalRoot); + if (!contextRootResult.exists) { + return; + } + const { + ancestorOfRootIdx, + canonicalPath: rootRealPath, + node: contextRoot, + parentNode: contextRootParent, + } = contextRootResult; + if (!isDirectory(contextRoot)) { + return; + } + const contextRootAbsolutePath = + rootRealPath === '' ? this.#rootDir : path.join(this.#rootDir, rootRealPath); + + const prefix = filterComparePosix ? './' : '.' + path.sep; + + const contextRootAbsolutePathForComparison = + filterComparePosix && path.sep !== '/' + ? contextRootAbsolutePath.replaceAll(path.sep, '/') + : contextRootAbsolutePath; + + for (const relativePathForComparison of this.#pathIterator( + contextRoot, + contextRootParent, + ancestorOfRootIdx, + { + alwaysYieldPosix: filterComparePosix, + canonicalPathOfRoot: rootRealPath, + follow, + recursive, + subtreeOnly: rootDir != null, + } + )) { + if ( + filter == null || + filter.test( + // NOTE(EvanBacon): Ensure files start with `./` for matching purposes + // this ensures packages work across Metro and Webpack (ex: Storybook for React DOM / React Native). + // `a/b.js` -> `./a/b.js` + filterCompareAbsolute === true + ? path.join(contextRootAbsolutePathForComparison, relativePathForComparison) + : prefix + relativePathForComparison + ) + ) { + const relativePath = + filterComparePosix === true && path.sep !== '/' + ? relativePathForComparison.replaceAll('/', path.sep) + : relativePathForComparison; + + yield path.join(contextRootAbsolutePath, relativePath); + } + } + } + + addOrModify(mixedPath: Path, metadata: FileMetadata, changeListener?: FileSystemListener): void { + const normalPath = this.#normalizePath(mixedPath); + // Walk the tree to find the *real* path of the parent node, creating + // directories as we need. 
+ const parentDirNode = this.#lookupByNormalPath(path.dirname(normalPath), { + changeListener, + makeDirectories: true, + }); + if (!parentDirNode.exists) { + throw new Error(`TreeFS: Failed to make parent directory entry for ${mixedPath}`); + } + // Normalize the resulting path to account for the parent node being root. + const canonicalPath = this.#normalizePath( + parentDirNode.canonicalPath + path.sep + path.basename(normalPath) + ); + this.bulkAddOrModify(new Map([[canonicalPath, metadata]]), changeListener); + } + + bulkAddOrModify(addedOrModifiedFiles: FileData, changeListener?: FileSystemListener): void { + // Optimisation: Bulk FileData are typically clustered by directory, so we + // optimise for that case by remembering the last directory we looked up. + // Experiments with large result sets show this to be significantly (~30%) + // faster than caching all lookups in a Map, and 70% faster than no cache. + let lastDir: string | undefined; + let directoryNode: DirectoryNode | undefined; + + for (const [normalPath, metadata] of addedOrModifiedFiles) { + const lastSepIdx = normalPath.lastIndexOf(path.sep); + const dirname = lastSepIdx === -1 ? '' : normalPath.slice(0, lastSepIdx); + const basename = lastSepIdx === -1 ? normalPath : normalPath.slice(lastSepIdx + 1); + + if (directoryNode == null || dirname !== lastDir) { + const lookup = this.#lookupByNormalPath(dirname, { + changeListener, + followLeaf: false, + makeDirectories: true, + }); + if (!lookup.exists) { + // This should only be possible if the input is non-real and + // lookup hits a broken symlink. + throw new Error( + `TreeFS: Unexpected error adding ${normalPath}.\nMissing: ` + + lookup.canonicalMissingPath + ); + } + if (!isDirectory(lookup.node)) { + throw new Error( + `TreeFS: Could not add directory ${dirname}, adding ${normalPath}. 
` + + `${dirname} already exists in the file map as a file.` + ); + } + lastDir = dirname; + directoryNode = lookup.node; + } + if (changeListener != null) { + const existingNode = directoryNode.get(basename); + if (existingNode != null) { + invariant( + !isDirectory(existingNode), + 'Detected addition or modification of file %s, but it is tracked as a non-empty directory', + normalPath + ); + // File already exists - this is a modification + changeListener.fileModified(normalPath, existingNode, metadata); + } else { + // New file + changeListener.fileAdded(normalPath, metadata); + } + } + directoryNode.set(basename, metadata); + } + } + + remove(mixedPath: Path, changeListener?: FileSystemListener): void { + const normalPath = this.#normalizePath(mixedPath); + const result = this.#lookupByNormalPath(normalPath, { followLeaf: false }); + if (!result.exists) { + return; + } + const { parentNode, canonicalPath, node } = result; + + if (isDirectory(node) && node.size > 0) { + for (const basename of node.keys()) { + this.remove(canonicalPath + path.sep + basename, changeListener); + } + // Removing the last file will delete this directory + return; + } + if (parentNode != null) { + if (changeListener != null) { + if (isDirectory(node)) { + changeListener.directoryRemoved(canonicalPath); + } else { + changeListener.fileRemoved(canonicalPath, node); + } + } + parentNode.delete(path.basename(canonicalPath)); + if (parentNode.size === 0 && parentNode !== this.#rootNode) { + // NB: This isn't the most efficient algorithm - in the case of + // removing the last file in a deep hierarchy it's O(depth^2), but + // that's not expected to be a case common enough to justify + // implementation complexity, or slowing down more common uses of + // _lookupByNormalPath. 
+ this.remove(path.dirname(canonicalPath), changeListener); + } + } + } + + /** + * The core traversal algorithm of TreeFS - takes a normal path and traverses + * through a tree of maps keyed on path segments, returning the node, + * canonical path, and other metadata if successful, or the first missing + * segment otherwise. + * + * When a symlink is encountered, we set a new target of the symlink's + * normalised target path plus the remainder of the original target path. In + * this way, the eventual target path in a successful lookup has all symlinks + * resolved, and gives us the real path "for free". Similarly if a traversal + * fails, we automatically have the real path of the first non-existent node. + * + * Note that this code is extremely hot during resolution, being the most + * expensive part of a file existence check. Benchmark any modifications! + */ + #lookupByNormalPath( + requestedNormalPath: string, + opts: { + collectAncestors?: { + ancestorOfRootIdx: number | null | undefined; + node: DirectoryNode; + normalPath: string; + segmentName: string; + }[]; + /** + * Mutable Set into which absolute real paths of traversed symlinks will + * be added. Omit for performance if not needed. + */ + collectLinkPaths?: Set | null | undefined; + + /** + * Low-level callbacks called on mutations of TreeFS data. + * Omit for performance if not needed. + */ + changeListener?: FileSystemListener; + + /** + * Like lstat vs stat, whether to follow a symlink at the basename of + * the given path, or return the details of the symlink itself. + */ + followLeaf?: boolean; + /** + * Whether to (recursively) create missing directory nodes during + * traversal, useful when adding files. Will throw if an expected + * directory is already present as a file. 
+ */ + makeDirectories?: boolean; + startPathIdx?: number; + startNode?: DirectoryNode; + start?: { + ancestorOfRootIdx: number | null | undefined; + node: DirectoryNode; + pathIdx: number; + }; + } = { followLeaf: true, makeDirectories: false } + ): + | { + ancestorOfRootIdx: number | null | undefined; + canonicalPath: string; + exists: true; + node: MixedNode; + parentNode: DirectoryNode; + } + | { + ancestorOfRootIdx: number | null | undefined; + canonicalPath: string; + exists: true; + node: DirectoryNode; + parentNode: null; + } + | { + canonicalMissingPath: string; + missingSegmentName: string; + exists: false; + } { + // We'll update the target if we hit a symlink. + let targetNormalPath = requestedNormalPath; + // Lazy-initialised set of seen target paths, to detect symlink cycles. + let seen: Set | undefined; + // Pointer to the first character of the current path segment in + // targetNormalPath. + let fromIdx = opts.start?.pathIdx ?? 0; + // The parent of the current segment. + let parentNode = opts.start?.node ?? this.#rootNode; + // If a returned node is (an ancestor of) the root, this is the number of + // levels below the root, i.e. '' is 0, '..' is 1, '../..' is 2, otherwise + // null. + let ancestorOfRootIdx: number | null | undefined = opts.start?.ancestorOfRootIdx ?? 0; + + const { collectAncestors, changeListener } = opts; + + // Used only when collecting ancestors, to avoid double-counting nodes and + // paths when traversing a symlink takes us back to rootNode and out again. + // This tracks the first character of the first segment not already + // collected. + let unseenPathFromIdx = 0; + + while (targetNormalPath.length > fromIdx) { + const nextSepIdx = targetNormalPath.indexOf(path.sep, fromIdx); + const isLastSegment = nextSepIdx === -1; + const segmentName = isLastSegment + ? targetNormalPath.slice(fromIdx) + : targetNormalPath.slice(fromIdx, nextSepIdx); + const isUnseen = fromIdx >= unseenPathFromIdx; + fromIdx = !isLastSegment ? 
nextSepIdx + 1 : targetNormalPath.length; + + if (segmentName === '.') { + continue; + } + + let segmentNode = parentNode.get(segmentName); + + // In normal paths all indirections are at the prefix, so we are at the + // nth ancestor of the root iff the path so far is n '..' segments. + if (segmentName === '..' && ancestorOfRootIdx != null) { + ancestorOfRootIdx++; + } else if (segmentNode != null) { + ancestorOfRootIdx = null; + } + + if (segmentNode == null) { + if (opts.makeDirectories !== true && segmentName !== '..') { + return { + canonicalMissingPath: isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1), + exists: false, + missingSegmentName: segmentName, + }; + } + segmentNode = new Map(); + if (opts.makeDirectories === true) { + if (changeListener != null) { + const canonicalPath = isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1); + changeListener.directoryAdded(canonicalPath); + } + parentNode.set(segmentName, segmentNode); + } + } + + // We are done if... + if ( + // ...at a directory node and the only subsequent character is `/`, or + (nextSepIdx === targetNormalPath.length - 1 && isDirectory(segmentNode)) || + // there are no subsequent `/`, and this node is anything but a symlink + // we're required to resolve due to followLeaf. + (isLastSegment && + (isDirectory(segmentNode) || isRegularFile(segmentNode) || opts.followLeaf === false)) + ) { + return { + ancestorOfRootIdx, + canonicalPath: isLastSegment ? targetNormalPath : targetNormalPath.slice(0, -1), // remove trailing `/` + exists: true, + node: segmentNode, + parentNode, + }; + } + + // If the next node is a directory, go into it + if (isDirectory(segmentNode)) { + parentNode = segmentNode; + if (collectAncestors && isUnseen) { + const currentPath = isLastSegment + ? 
targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1); + collectAncestors.push({ + ancestorOfRootIdx, + node: segmentNode, + normalPath: currentPath, + segmentName, + }); + } + } else { + const currentPath = isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1); + + if (isRegularFile(segmentNode)) { + // Regular file in a directory path + return { + canonicalMissingPath: currentPath, + exists: false, + missingSegmentName: segmentName, + }; + } + + // Symlink in a directory path + const normalSymlinkTarget = this.#resolveSymlinkTargetToNormalPath( + segmentNode, + currentPath + ); + if (opts.collectLinkPaths) { + opts.collectLinkPaths.add(this.#pathUtils.normalToAbsolute(currentPath)); + } + + const remainingTargetPath = isLastSegment ? '' : targetNormalPath.slice(fromIdx); + + // Append any subsequent path segments to the symlink target, and reset + // with our new target. + const joinedResult = this.#pathUtils.joinNormalToRelative( + normalSymlinkTarget.normalPath, + remainingTargetPath + ); + + targetNormalPath = joinedResult.normalPath; + + // Two special cases (covered by unit tests): + // + // If the symlink target is the root, the root should be a counted as + // an ancestor. We'd otherwise miss counting it because we normally + // push new ancestors only when entering a directory. + // + // If the symlink target is an ancestor of the root *and* joining it + // with the remaining path results in collapsing segments, e.g: + // '../..' + 'parentofroot/root/foo.js' = 'foo.js', then we must add + // parentofroot and root as ancestors. + if ( + collectAncestors && + !isLastSegment && + // No-op optimisation to bail out the common case of nothing to do. 
+ (normalSymlinkTarget.ancestorOfRootIdx === 0 || joinedResult.collapsedSegments > 0) + ) { + let node: MixedNode = this.#rootNode; + let collapsedPath = ''; + const reverseAncestors = []; + for (let i = 0; i <= joinedResult.collapsedSegments && isDirectory(node); i++) { + if ( + // Add the root only if the target is the root or we have + // collapsed segments. + i > 0 || + normalSymlinkTarget.ancestorOfRootIdx === 0 || + joinedResult.collapsedSegments > 0 + ) { + reverseAncestors.push({ + ancestorOfRootIdx: i, + node, + normalPath: collapsedPath, + segmentName: this.#pathUtils.getBasenameOfNthAncestor(i), + }); + } + node = node.get('..') ?? new Map(); + collapsedPath = collapsedPath === '' ? '..' : collapsedPath + path.sep + '..'; + } + collectAncestors.push(...reverseAncestors.reverse()); + } + + // For the purpose of collecting ancestors: Ignore the traversal to + // the symlink target, and start collecting ancestors only + // from the target itself (ie, the basename of the normal target path) + // onwards. 
+ unseenPathFromIdx = normalSymlinkTarget.startOfBasenameIdx; + + if (seen == null) { + // Optimisation: set this lazily only when we've encountered a symlink + seen = new Set([requestedNormalPath]); + } + if (seen.has(targetNormalPath)) { + // TODO: Warn `Symlink cycle detected: ${[...seen, node].join(' -> ')}` + return { + canonicalMissingPath: targetNormalPath, + exists: false, + missingSegmentName: segmentName, + }; + } + seen.add(targetNormalPath); + fromIdx = 0; + parentNode = this.#rootNode; + ancestorOfRootIdx = 0; + } + } + invariant(parentNode === this.#rootNode, 'Unexpectedly escaped traversal'); + return { + ancestorOfRootIdx: 0, + canonicalPath: targetNormalPath, + exists: true, + node: this.#rootNode, + parentNode: null, + }; + } + + /** + * Given a start path (which need not exist), a subpath and type, and + * optionally a 'breakOnSegment', performs the following: + * + * X = mixedStartPath + * do + * if basename(X) === opts.breakOnSegment + * return null + * if X + subpath exists and has type opts.subpathType + * return { + * absolutePath: realpath(X + subpath) + * containerRelativePath: relative(mixedStartPath, X) + * } + * X = dirname(X) + * while X !== dirname(X) + * + * If opts.invalidatedBy is given, collects all absolute, real paths that if + * added or removed may invalidate this result. + * + * Useful for finding the closest package scope (subpath: package.json, + * type f, breakOnSegment: node_modules) or closest potential package root + * (subpath: node_modules/pkg, type: d) in Node.js resolution. 
+ */ + hierarchicalLookup( + mixedStartPath: string, + subpath: string, + opts: { + breakOnSegment: string | null | undefined; + invalidatedBy: Set | null | undefined; + subpathType: 'f' | 'd'; + } + ): + | { + absolutePath: string; + containerRelativePath: string; + } + | null + | undefined { + const ancestorsOfInput: { + ancestorOfRootIdx: number | null | undefined; + node: DirectoryNode; + normalPath: string; + segmentName: string; + }[] = []; + const normalPath = this.#normalizePath(mixedStartPath); + const invalidatedBy = opts.invalidatedBy; + const closestLookup = this.#lookupByNormalPath(normalPath, { + collectAncestors: ancestorsOfInput, + collectLinkPaths: invalidatedBy, + }); + + if (closestLookup.exists && isDirectory(closestLookup.node)) { + const maybeAbsolutePathMatch = this.#checkCandidateHasSubpath( + closestLookup.canonicalPath, + subpath, + opts.subpathType, + invalidatedBy, + null + ); + if (maybeAbsolutePathMatch != null) { + return { + absolutePath: maybeAbsolutePathMatch, + containerRelativePath: '', + }; + } + } else { + if (invalidatedBy && (!closestLookup.exists || !isDirectory(closestLookup.node))) { + invalidatedBy.add( + this.#pathUtils.normalToAbsolute( + closestLookup.exists ? closestLookup.canonicalPath : closestLookup.canonicalMissingPath + ) + ); + } + if ( + opts.breakOnSegment != null && + !closestLookup.exists && + closestLookup.missingSegmentName === opts.breakOnSegment + ) { + return null; + } + } + + // Let the "common root" be the nearest common ancestor of this.rootDir + // and the input path. We'll look for a match in two stages: + // 1. Every collected ancestor of the input path, from nearest to furthest, + // that is a descendent of the common root + // 2. The common root, and its ancestors. + let commonRoot = this.#rootNode; + let commonRootDepth = 0; + + // Collected ancestors do not include the lookup result itself, so go one + // further if the input path is itself a root ancestor. 
+ if (closestLookup.exists && closestLookup.ancestorOfRootIdx != null) { + commonRootDepth = closestLookup.ancestorOfRootIdx; + invariant(isDirectory(closestLookup.node), 'ancestors of the root must be directories'); + commonRoot = closestLookup.node; + } else { + // Establish the common root by counting the '..' segments at the start + // of the collected ancestors. + for (const ancestor of ancestorsOfInput) { + if (ancestor.ancestorOfRootIdx == null) { + break; + } + commonRootDepth = ancestor.ancestorOfRootIdx; + commonRoot = ancestor.node; + } + } + + // Phase 1: Consider descendenants of the common root, from deepest to + // shallowest. + for ( + let candidateIdx = ancestorsOfInput.length - 1; + candidateIdx >= commonRootDepth; + --candidateIdx + ) { + const candidate = ancestorsOfInput[candidateIdx]!; + if (candidate.segmentName === opts.breakOnSegment) { + return null; + } + const maybeAbsolutePathMatch = this.#checkCandidateHasSubpath( + candidate.normalPath, + subpath, + opts.subpathType, + invalidatedBy, + { + ancestorOfRootIdx: candidate.ancestorOfRootIdx, + node: candidate.node, + pathIdx: candidate.normalPath.length > 0 ? candidate.normalPath.length + 1 : 0, + } + ); + if (maybeAbsolutePathMatch != null) { + // Determine the input path relative to the current candidate. Note + // that the candidate path will always be canonical (real), whereas the + // input may contain symlinks, so the candidate is not necessarily a + // prefix of the input. Use the fact that each remaining candidate + // corresponds to a leading segment of the input normal path, and + // discard the first candidateIdx + 1 segments of the input path. + // + // The next 5 lines are equivalent to (but faster than) + // normalPath.split('/').slice(candidateIdx + 1).join('/'). 
+ let prefixLength = commonRootDepth * 3; // Leading '../' + for (let i = commonRootDepth; i <= candidateIdx; i++) { + prefixLength = normalPath.indexOf(path.sep, prefixLength + 1); + } + const containerRelativePath = normalPath.slice(prefixLength + 1); + return { + absolutePath: maybeAbsolutePathMatch, + containerRelativePath, + }; + } + } + + // Phase 2: Consider the common root and its ancestors + + // This will be '', '..', '../..', etc. + let candidateNormalPath = + commonRootDepth > 0 ? normalPath.slice(0, 3 * commonRootDepth - 1) : ''; + const remainingNormalPath = normalPath.slice(commonRootDepth * 3); + + let nextNode: MixedNode | null | undefined = commonRoot; + let depthBelowCommonRoot = 0; + + while (isDirectory(nextNode)) { + const maybeAbsolutePathMatch = this.#checkCandidateHasSubpath( + candidateNormalPath, + subpath, + opts.subpathType, + invalidatedBy, + null + ); + if (maybeAbsolutePathMatch != null) { + const rootDirParts = this.#pathUtils.getParts(); + const relativeParts = + depthBelowCommonRoot > 0 + ? rootDirParts.slice( + -(depthBelowCommonRoot + commonRootDepth), + commonRootDepth > 0 ? -commonRootDepth : undefined + ) + : []; + if (remainingNormalPath !== '') { + (relativeParts as string[]).push(remainingNormalPath); + } + return { + absolutePath: maybeAbsolutePathMatch, + containerRelativePath: relativeParts.join(path.sep), + }; + } + depthBelowCommonRoot++; + candidateNormalPath = + candidateNormalPath === '' ? '..' 
: candidateNormalPath + path.sep + '..'; + nextNode = nextNode.get('..'); + } + return null; + } + + #checkCandidateHasSubpath( + normalCandidatePath: string, + subpath: string, + subpathType: 'f' | 'd', + invalidatedBy: Set | null | undefined, + start: + | { + ancestorOfRootIdx: number | null | undefined; + node: DirectoryNode; + pathIdx: number; + } + | null + | undefined + ): string | null { + const lookupResult = this.#lookupByNormalPath( + this.#pathUtils.joinNormalToRelative(normalCandidatePath, subpath).normalPath, + { + collectLinkPaths: invalidatedBy, + } + ); + if ( + lookupResult.exists && + // Should be a Map iff subpathType is directory + isDirectory(lookupResult.node) === (subpathType === 'd') + ) { + return this.#pathUtils.normalToAbsolute(lookupResult.canonicalPath); + } else if (invalidatedBy) { + invalidatedBy.add( + this.#pathUtils.normalToAbsolute( + lookupResult.exists ? lookupResult.canonicalPath : lookupResult.canonicalMissingPath + ) + ); + } + return null; + } + + *metadataIterator(opts: MetadataIteratorOptions): Generator<{ + baseName: string; + canonicalPath: string; + metadata: FileMetadata; + }> { + yield* this.#metadataIterator(this.#rootNode, opts); + } + + *#metadataIterator( + rootNode: DirectoryNode, + opts: Readonly<{ includeSymlinks: boolean; includeNodeModules: boolean }>, + prefix: string = '' + ): Generator<{ + baseName: string; + canonicalPath: string; + metadata: FileMetadata; + }> { + for (const [name, node] of rootNode) { + if (!opts.includeNodeModules && isDirectory(node) && name === 'node_modules') { + continue; + } + const prefixedName = prefix === '' ? 
name : prefix + path.sep + name; + if (isDirectory(node)) { + yield* this.#metadataIterator(node, opts, prefixedName); + } else if (isRegularFile(node) || opts.includeSymlinks) { + yield { baseName: name, canonicalPath: prefixedName, metadata: node }; + } + } + } + + #normalizePath(relativeOrAbsolutePath: Path): string { + return path.isAbsolute(relativeOrAbsolutePath) + ? this.#pathUtils.absoluteToNormal(relativeOrAbsolutePath) + : this.#pathUtils.relativeToNormal(relativeOrAbsolutePath); + } + + *#directoryNodeIterator( + node: DirectoryNode, + parent: DirectoryNode | null | undefined, + ancestorOfRootIdx: number | null | undefined + ): Generator<[string, MixedNode]> { + if (ancestorOfRootIdx != null && ancestorOfRootIdx > 0 && parent) { + yield [this.#pathUtils.getBasenameOfNthAncestor(ancestorOfRootIdx - 1), parent]; + } + yield* node.entries(); + } + + /** + * Enumerate paths under a given node, including symlinks and through + * symlinks (if `follow` is enabled). + */ + *#pathIterator( + iterationRootNode: DirectoryNode, + iterationRootParentNode: DirectoryNode | null | undefined, + ancestorOfRootIdx: number | null | undefined, + opts: Readonly<{ + alwaysYieldPosix: boolean; + canonicalPathOfRoot: string; + follow: boolean; + recursive: boolean; + subtreeOnly: boolean; + }>, + pathPrefix: string = '', + followedLinks: ReadonlySet = new Set() + ): Iterable { + const pathSep = opts.alwaysYieldPosix ? '/' : path.sep; + const prefixWithSep = pathPrefix === '' ? pathPrefix : pathPrefix + pathSep; + for (const [name, node] of this.#directoryNodeIterator( + iterationRootNode, + iterationRootParentNode, + ancestorOfRootIdx + )) { + if (opts.subtreeOnly && name === '..') { + continue; + } + + const nodePath = prefixWithSep + name; + if (!isDirectory(node)) { + if (isRegularFile(node)) { + // regular file + yield nodePath; + } else { + // symlink + const nodePathWithSystemSeparators = + pathSep === path.sep ? 
nodePath : nodePath.replaceAll(pathSep, path.sep); + + // Although both paths are normal, the node path may begin '..' so we + // can't simply concatenate. + const normalPathOfSymlink = path.join( + opts.canonicalPathOfRoot, + nodePathWithSystemSeparators + ); + + // We can't resolve the symlink directly here because we only have + // its normal path, and we need a canonical path for resolution + // (imagine our normal path contains a symlink 'bar' -> '.', and we + // are at /foo/bar/baz where baz -> '..' - that should resolve to + // /foo, not /foo/bar). We *can* use _lookupByNormalPath to walk to + // the canonical symlink, and then to its target. + const resolved = this.#lookupByNormalPath(normalPathOfSymlink, { + followLeaf: true, + }); + if (!resolved.exists) { + // Symlink goes nowhere, nothing to report. + continue; + } + const target = resolved.node; + if (!isDirectory(target)) { + // Symlink points to a file, just yield the path of the symlink. + yield nodePath; + } else if (opts.recursive && opts.follow && !followedLinks.has(node)) { + // Symlink points to a directory - iterate over its contents using + // the path where we found the symlink as a prefix. + yield* this.#pathIterator( + target, + resolved.parentNode, + resolved.ancestorOfRootIdx, + opts, + nodePath, + new Set([...followedLinks, node]) + ); + } + } + } else if (opts.recursive) { + yield* this.#pathIterator( + node, + iterationRootParentNode, + ancestorOfRootIdx != null && ancestorOfRootIdx > 0 ? 
ancestorOfRootIdx - 1 : null, + opts, + nodePath, + followedLinks + ); + } + } + } + + #resolveSymlinkTargetToNormalPath( + symlinkNode: FileMetadata, + canonicalPathOfSymlink: Path + ): NormalizedSymlinkTarget { + const cachedResult = this.#cachedNormalSymlinkTargets.get(symlinkNode); + if (cachedResult != null) { + return cachedResult; + } + + const literalSymlinkTarget = symlinkNode[H.SYMLINK]; + invariant(typeof literalSymlinkTarget === 'string', 'Expected symlink target to be populated.'); + const absoluteSymlinkTarget = path.resolve( + this.#rootDir, + canonicalPathOfSymlink, + '..', // Symlink target is relative to its containing directory. + literalSymlinkTarget // May be absolute, in which case the above are ignored + ); + const normalSymlinkTarget = path.relative(this.#rootDir, absoluteSymlinkTarget); + const result = { + ancestorOfRootIdx: this.#pathUtils.getAncestorOfRootIdx(normalSymlinkTarget), + normalPath: normalSymlinkTarget, + startOfBasenameIdx: normalSymlinkTarget.lastIndexOf(path.sep) + 1, + }; + this.#cachedNormalSymlinkTargets.set(symlinkNode, result); + return result; + } + + #getFileData( + filePath: Path, + opts: { followLeaf: boolean } = { followLeaf: true } + ): FileMetadata | null { + const normalPath = this.#normalizePath(filePath); + const result = this.#lookupByNormalPath(normalPath, { + followLeaf: opts.followLeaf, + }); + if (!result.exists || isDirectory(result.node)) { + return null; + } + return result.node; + } + + #cloneTree(root: DirectoryNode): DirectoryNode { + const clone: DirectoryNode = new Map(); + for (const [name, node] of root) { + if (isDirectory(node)) { + clone.set(name, this.#cloneTree(node)); + } else { + clone.set(name, [...node]); + } + } + return clone; + } +} diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/FileProcessor.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/FileProcessor.test.ts new file mode 100644 index 00000000000000..02e5405bb2247f --- /dev/null +++ 
b/packages/@expo/metro-file-map/src/lib/__tests__/FileProcessor.test.ts @@ -0,0 +1,343 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import path from 'path'; + +import H from '../../constants'; +import type { FileMapPluginWorker, FileMetadata, WorkerMessage, WorkerMetadata } from '../../types'; + +const MockJestWorker = jest.fn().mockImplementation(() => ({ + processFile: async () => ({}), + end: async () => {}, +})); +const mockWorkerFn = jest.fn().mockReturnValue({}); + +// Convenience function to write paths with posix separators but convert them +// to system separators +const p = (filePath: string): string => + process.platform === 'win32' ? filePath.replace(/\//g, '\\').replace(/^\\/, 'C:\\') : filePath; + +const defaultOptions = { + maxWorkers: 5, + perfLogger: null, + pluginWorkers: [] as readonly FileMapPluginWorker[], + rootDir: process.platform === 'win32' ? 
'C:\\root' : '/root', +}; + +describe('processBatch', () => { + let FileProcessor: typeof import('../FileProcessor').FileProcessor; + + beforeEach(() => { + jest.resetModules(); + jest.clearAllMocks(); + jest.mock('jest-worker', () => ({ + Worker: MockJestWorker, + })); + jest.mock('../../worker', () => ({ + setup: () => {}, + processFile: mockWorkerFn, + Worker: class { + processFile: (msg: WorkerMessage) => WorkerMetadata = mockWorkerFn; + }, + })); + FileProcessor = require('../FileProcessor').FileProcessor; + }); + + test('never creates more than maxWorkers', async () => { + const processor = new FileProcessor({ + ...defaultOptions, + maxWorkers: 5, + maxFilesPerWorker: 1, + }); + + await processor.processBatch(getNMockFiles(100), { + computeSha1: true, + maybeReturnContent: false, + }); + + expect(MockJestWorker).toHaveBeenCalledWith( + expect.stringContaining('worker'), + expect.objectContaining({ + numWorkers: 5, + }) + ); + }); + + test('processes in band if workload <= maxFilesPerWorker', async () => { + const processor = new FileProcessor({ + ...defaultOptions, + maxWorkers: 5, + maxFilesPerWorker: 50, + }); + + await processor.processBatch(getNMockFiles(50), { + computeSha1: true, + maybeReturnContent: false, + }); + + expect(MockJestWorker).not.toHaveBeenCalled(); + expect(mockWorkerFn).toHaveBeenCalledTimes(50); + }); + + test('calculates number of workers based on actual jobs after filtering no-ops', async () => { + const processor = new FileProcessor({ + ...defaultOptions, + maxWorkers: 10, + maxFilesPerWorker: 10, + }); + + // Create 100 files, but some already have SHA1 hashes (no-op jobs) + const filesWithSomeAlreadyHashed: [string, FileMetadata][] = new Array(100) + .fill(null) + .map((_, i) => { + const metadata: FileMetadata = + i < 50 + ? 
// First 50 files already have SHA1 hashes + [123, 234, 0, 'existing-sha1-hash', 0] + : // Last 50 files need SHA1 computation + [123, 234, 0, null, 0]; + return [`file${i}.js`, metadata]; + }); + + await processor.processBatch(filesWithSomeAlreadyHashed, { + computeSha1: true, + maybeReturnContent: false, + }); + + // Should create workers based on 50 actual jobs, not 100 total files + // 50 jobs / 10 maxFilesPerWorker = 5 workers + expect(MockJestWorker).toHaveBeenCalledWith( + expect.stringContaining('worker'), + expect.objectContaining({ + numWorkers: 5, + }) + ); + }); + + test('plugin filters are called with correct arguments', async () => { + const mockFilter = jest.fn().mockReturnValue(true); + + const processor = new FileProcessor({ + ...defaultOptions, + pluginWorkers: [ + { + worker: { + modulePath: 'mock-plugin-1', + setupArgs: {}, + }, + filter: mockFilter, + }, + ], + }); + + await processor.processBatch( + [ + [p('src/Component.js'), [123, 234, 0, null, 0, null]], + [p('node_modules/lib/index.js'), [123, 234, 0, null, 0, null]], + [p('packages/node_modules/foo.js'), [123, 234, 0, null, 0, null]], + ], + { + computeSha1: true, + maybeReturnContent: false, + } + ); + + // Filter should be called for regular file with isNodeModules = false + expect(mockFilter).toHaveBeenCalledWith({ + normalPath: p('src/Component.js'), + isNodeModules: false, + }); + + // Filter should be called for node_modules files with isNodeModules = true + expect(mockFilter).toHaveBeenCalledWith({ + normalPath: p('node_modules/lib/index.js'), + isNodeModules: true, + }); + expect(mockFilter).toHaveBeenCalledWith({ + normalPath: p('packages/node_modules/foo.js'), + isNodeModules: true, + }); + }); + + test('pluginsToRun is correctly passed to workers based on filter results', async () => { + const mockFilter1 = jest.fn().mockReturnValue(true); + const mockFilter2 = jest.fn().mockReturnValue(false); + const mockFilter3 = jest.fn().mockReturnValue(true); + + const processor = new 
FileProcessor({ + ...defaultOptions, + pluginWorkers: [ + { + worker: { modulePath: 'mock-plugin-1', setupArgs: {} }, + filter: mockFilter1, + }, + { + worker: { modulePath: 'mock-plugin-2', setupArgs: {} }, + filter: mockFilter2, + }, + { + worker: { modulePath: 'mock-plugin-3', setupArgs: {} }, + filter: mockFilter3, + }, + ], + }); + + await processor.processBatch([[p('src/Component.js'), [123, 234, 0, null, 0, null]]], { + computeSha1: true, + maybeReturnContent: false, + }); + + // Worker should be called with pluginsToRun containing indices 0 and 2 + expect(mockWorkerFn).toHaveBeenCalledWith( + expect.objectContaining({ + pluginsToRun: [0, 2], + }) + ); + }); + + test('worker reply plugin data is mapped to correct fileMetadata indices', async () => { + const mockFilter1 = jest.fn().mockReturnValue(true); + const mockFilter2 = jest.fn().mockReturnValue(false); + const mockFilter3 = jest.fn().mockReturnValue(true); + + mockWorkerFn.mockReturnValue({ + dependencies: null, + sha1: 'abc123', + pluginData: ['plugin0-data', 'plugin2-data'], + }); + + const processor = new FileProcessor({ + ...defaultOptions, + pluginWorkers: [ + { + worker: { modulePath: 'mock-plugin-1', setupArgs: {} }, + filter: mockFilter1, + }, + { + worker: { modulePath: 'mock-plugin-2', setupArgs: {} }, + filter: mockFilter2, + }, + { + worker: { modulePath: 'mock-plugin-3', setupArgs: {} }, + filter: mockFilter3, + }, + ], + }); + + const fileMetadata: FileMetadata = [123, 234, 0, null, 0, null]; + + await processor.processBatch([[p('src/Component.js'), fileMetadata]], { + computeSha1: true, + maybeReturnContent: false, + }); + + // Plugin 0 data at H.PLUGINDATA + 0 + expect(fileMetadata[H.PLUGINDATA + 0]).toBe('plugin0-data'); + // Plugin 2 data at H.PLUGINDATA + 2 (not at +1, because it's plugin index 2) + expect(fileMetadata[H.PLUGINDATA + 2]).toBe('plugin2-data'); + // Plugin 1 should not have data (filter returned false) + expect(fileMetadata[H.PLUGINDATA + 1]).toBeUndefined(); + + 
expect(fileMetadata[H.SHA1]).toBe('abc123'); + expect(fileMetadata[H.VISITED]).toBe(1); + }); + + test('file is skipped if no plugins match and no other work needed', async () => { + const mockFilter = jest.fn().mockReturnValue(false); + + const processor = new FileProcessor({ + ...defaultOptions, + pluginWorkers: [ + { + worker: { modulePath: 'mock-plugin', setupArgs: {} }, + filter: mockFilter, + }, + ], + }); + + const fileMetadata: FileMetadata = [123, 234, 0, 'existing-sha1', 0, null]; + + await processor.processBatch([[p('src/Component.js'), fileMetadata]], { + computeSha1: false, + maybeReturnContent: false, + }); + + expect(mockWorkerFn).not.toHaveBeenCalled(); + }); + + test('file is processed if at least one plugin matches', async () => { + const mockFilter = jest.fn().mockReturnValue(true); + + const processor = new FileProcessor({ + ...defaultOptions, + pluginWorkers: [ + { + worker: { modulePath: 'mock-plugin', setupArgs: {} }, + filter: mockFilter, + }, + ], + }); + + const fileMetadata: FileMetadata = [123, 234, 0, 'existing-sha1', 0, null]; + + await processor.processBatch([[p('src/Component.js'), fileMetadata]], { + computeSha1: false, + maybeReturnContent: false, + }); + + expect(mockWorkerFn).toHaveBeenCalledWith( + expect.objectContaining({ + pluginsToRun: [0], + }) + ); + }); +}); + +describe('processRegularFile', () => { + let FileProcessor: typeof import('../FileProcessor').FileProcessor; + const mockReadFileSync = jest.fn(); + + beforeEach(() => { + jest.resetModules(); + jest.clearAllMocks(); + jest.unmock('../../worker'); + jest.mock('graceful-fs', () => ({ + readFileSync: mockReadFileSync, + })); + FileProcessor = require('../FileProcessor').FileProcessor; + }); + + test('synchronously populates metadata', () => { + const processor = new FileProcessor(defaultOptions); + const [normalFilePath, metadata] = getNMockFiles(1)[0]!; + expect(metadata[H.SHA1]).toBeFalsy(); + + const fileContent = Buffer.from('hello world'); + 
mockReadFileSync.mockReturnValue(fileContent); + + const result = processor.processRegularFile(normalFilePath, metadata, { + computeSha1: true, + maybeReturnContent: true, + }); + + expect(mockReadFileSync).toHaveBeenCalledWith( + path.resolve(defaultOptions.rootDir, normalFilePath) + ); + + expect(result).toEqual({ + content: fileContent, + }); + + expect(metadata[H.SHA1]).toMatch(/^[a-f0-9]{40}$/); + }); +}); + +function getNMockFiles(numFiles: number): [string, FileMetadata][] { + return new Array(numFiles) + .fill(null) + .map((_, i) => [`file${i}.js`, [123, 234, 0, null, 0, null] as FileMetadata]); +} diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/FileSystemChangeAggregator.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/FileSystemChangeAggregator.test.ts new file mode 100644 index 00000000000000..ed0aced20ee782 --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/__tests__/FileSystemChangeAggregator.test.ts @@ -0,0 +1,89 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type { FileMetadata } from '../../types'; +import { FileSystemChangeAggregator } from '../FileSystemChangeAggregator'; + +let aggregator: FileSystemChangeAggregator; + +beforeEach(() => { + aggregator = new FileSystemChangeAggregator(); +}); + +const FOO = 'foo.js'; + +test('removing, adding, modifying and removing a file records initial data', () => { + aggregator.fileRemoved(FOO, makeData(0)); + aggregator.fileAdded(FOO, makeData(1)); + aggregator.fileModified(FOO, makeData(1), makeData(2)); + aggregator.fileRemoved(FOO, makeData(2)); + const changes = getData(aggregator); + expect(changes.removedFiles.size).toBe(1); + expect(changes.removedFiles.get(FOO)).toEqual(makeData(0)); +}); + +test('modifying then removing a file records initial data', () => { + aggregator.fileModified(FOO, makeData(0), makeData(1)); + aggregator.fileRemoved(FOO, makeData(1)); + const changes = getData(aggregator); + expect(changes.removedFiles.size).toBe(1); + expect(changes.modifiedFiles.size).toBe(0); + expect(changes.removedFiles.get(FOO)).toEqual(makeData(0)); +}); + +test('adding, modifying then removing a file records empty changes', () => { + aggregator.fileAdded(FOO, makeData(0)); + aggregator.fileModified(FOO, makeData(0), makeData(1)); + aggregator.fileRemoved(FOO, makeData(1)); + const changes = getData(aggregator); + expect(changes.addedFiles.size).toBe(0); + expect(changes.modifiedFiles.size).toBe(0); + expect(changes.removedFiles.size).toBe(0); +}); + +afterEach(() => { + // assert mutual exclusivity + const changes = aggregator.getView(); + const addedDirectories = new Set(changes.addedDirectories); + const removedDirectories = new Set(changes.removedDirectories); + const addedFilePaths = new Set(Array.from(changes.addedFiles, ([path]) => path)); + const modifiedFilePaths = new Set(Array.from(changes.modifiedFiles, ([path]) => path)); + const removedFilePaths = new Set(Array.from(changes.removedFiles, ([path]) => path)); + for (const dir of 
addedDirectories) { + expect(removedDirectories.has(dir)).toBe(false); + } + for (const dir of removedDirectories) { + expect(addedDirectories.has(dir)).toBe(false); + } + for (const filePath of addedFilePaths) { + expect(modifiedFilePaths.has(filePath)).toBe(false); + expect(removedFilePaths.has(filePath)).toBe(false); + } + for (const filePath of modifiedFilePaths) { + expect(addedFilePaths.has(filePath)).toBe(false); + expect(removedFilePaths.has(filePath)).toBe(false); + } + for (const filePath of removedFilePaths) { + expect(addedFilePaths.has(filePath)).toBe(false); + expect(modifiedFilePaths.has(filePath)).toBe(false); + } +}); + +function makeData(mtime: number = 0): FileMetadata { + return [mtime, 1, 0, null, 0]; +} + +function getData(agg: FileSystemChangeAggregator) { + const view = agg.getView(); + return { + addedDirectories: new Set(view.addedDirectories), + removedDirectories: new Set(view.removedDirectories), + addedFiles: new Map(Array.from(view.addedFiles, ([k, v]) => [k, v])), + modifiedFiles: new Map(Array.from(view.modifiedFiles, ([k, v]) => [k, v])), + removedFiles: new Map(Array.from(view.removedFiles, ([k, v]) => [k, v])), + }; +} diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts new file mode 100644 index 00000000000000..e4f5349f758270 --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts @@ -0,0 +1,143 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type { RootPathUtils as RootPathUtilsT } from '../RootPathUtils'; + +let mockPathModule: typeof import('path'); +jest.mock('path', () => mockPathModule); + +describe.each([['win32'], ['posix']] as const)('RootPathUtils on %s', (platform) => { + // Convenience function to write paths with posix separators but convert them + // to system separators + const p = (filePath: string): string => + platform === 'win32' ? filePath.replace(/\//g, '\\').replace(/^\\/, 'C:\\') : filePath; + + let RootPathUtils: typeof RootPathUtilsT; + let pathUtils: RootPathUtilsT; + let pathRelative: jest.SpyInstance; + let sep: string; + + beforeEach(() => { + jest.resetModules(); + mockPathModule = jest.requireActual('path')[platform]; + sep = mockPathModule.sep; + pathRelative = jest.spyOn(mockPathModule, 'relative'); + RootPathUtils = require('../RootPathUtils').RootPathUtils; + }); + + test.each([ + p('/project/root/baz/foobar'), + p('/project/root/../root2/foobar'), + p('/project/root/../../project2/foo'), + p('/project/root/../../project/foo'), + p('/project/root/../../project/foo/'), + p('/project/root/../../project/root'), + p('/project/root/../../project/root/'), + p('/project/root/../../project/root/foo.js'), + p('/project/bar'), + p('/project/bar/'), + p('/project/../outside/bar'), + p('/project/baz/foobar'), + p('/project/rootfoo/baz'), + p('/project'), + p('/project/'), + p('/'), + p('/outside'), + p('/outside/'), + ])(`absoluteToNormal('%s') is correct and optimised`, (absolutePath) => { + const rootDir = p('/project/root'); + pathUtils = new RootPathUtils(rootDir); + let expected = mockPathModule.relative(rootDir, absolutePath); + // Unlike path.relative, we expect to preserve trailing separators. 
+ if (absolutePath.endsWith(sep) && expected !== '') { + expected += sep; + } + pathRelative.mockClear(); + expect(pathUtils.absoluteToNormal(absolutePath)).toEqual(expected); + expect(pathRelative).not.toHaveBeenCalled(); + }); + + describe.each([p('/project/root'), p('/')] as const)('root: %s', (rootDir) => { + beforeEach(() => { + pathRelative.mockClear(); + pathUtils = new RootPathUtils(rootDir); + }); + + test.each([ + p('/project/root/../root2/../root3/foo'), + p('/project/root/./baz/foo/bar'), + p('/project/root/a./../foo'), + p('/project/root/../a./foo'), + p('/project/root/.././foo'), + p('/project/root/.././foo/'), + ])(`absoluteToNormal('%s') falls back to path.relative`, (absolutePath) => { + let expected = mockPathModule.relative(rootDir, absolutePath); + // Unlike path.relative, we expect to preserve trailing separators. + if (absolutePath.endsWith(sep) && !expected.endsWith(sep)) { + expected += sep; + } + pathRelative.mockClear(); + expect(pathUtils.absoluteToNormal(absolutePath)).toEqual(expected); + expect(pathRelative).toHaveBeenCalled(); + }); + + test.each([ + p('..'), + p('../..'), + p('../../'), + p('normal/path'), + p('normal/path/'), + p('../normal/path'), + p('../normal/path/'), + p('../../normal/path'), + p('../../../normal/path'), + ])(`normalToAbsolute('%s') matches path.resolve`, (normalPath) => { + let expected = mockPathModule.resolve(rootDir, normalPath); + // Unlike path.resolve, we expect to preserve trailing separators. 
+ if (normalPath.endsWith(sep) && !expected.endsWith(sep)) { + expected += sep; + } + expect(pathUtils.normalToAbsolute(normalPath)).toEqual(expected); + }); + + test.each([ + p('..'), + p('../root'), + p('../root/path'), + p('../project'), + p('../project/'), + p('../../project/root'), + p('../../project/root/'), + p('../../../normal/path'), + p('../../../normal/path/'), + p('../../..'), + ])(`relativeToNormal('%s') matches path.resolve + path.relative`, (relativePath) => { + let expected = mockPathModule.relative( + rootDir, + mockPathModule.resolve(rootDir, relativePath) + ); + // Unlike native path.resolve + path.relative, we expect to preserve + // trailing separators. (Consistent with path.normalize.) + if (relativePath.endsWith(sep) && !expected.endsWith(sep) && expected !== '') { + expected += sep; + } + expect(pathUtils.relativeToNormal(relativePath)).toEqual(expected); + }); + }); + + test.each([ + ['foo', null], + ['', 0], + ['..', 1], + [p('../..'), 2], + [p('../../..'), 3], + [p('../../../foo'), null], + [p('../../../..foo'), null], + ] as const)('getAncestorOfRootIdx (%s => %s)', (input, expected) => { + expect(pathUtils.getAncestorOfRootIdx(input)).toEqual(expected); + }); +}); diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts new file mode 100644 index 00000000000000..851cdef7d9b21a --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts @@ -0,0 +1,1077 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import H from '../../constants'; +import type { CanonicalPath, FileData, FileMetadata, FileSystemListener } from '../../types'; +import type TreeFSType from '../TreeFS'; + +let mockPathModule: typeof import('path'); +jest.mock('path', () => mockPathModule); + +describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { + // Convenience function to write paths with posix separators but convert them + // to system separators + const p = (filePath: string): string => + platform === 'win32' ? filePath.replace(/\//g, '\\').replace(/^\\/, 'C:\\') : filePath; + + let tfs: TreeFSType; + let TreeFS: typeof TreeFSType; + + beforeEach(() => { + jest.resetModules(); + mockPathModule = jest.requireActual('path')[platform]; + TreeFS = require('../TreeFS').default; + tfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('foo/another.js'), [123, 2, 0, null, 0, 'another']], + [p('foo/owndir'), [0, 0, 0, null, '.', null]], + [p('foo/link-to-bar.js'), [0, 0, 0, null, p('../bar.js'), null]], + [p('foo/link-to-another.js'), [0, 0, 0, null, p('another.js'), null]], + [p('../outside/external.js'), [0, 0, 0, null, 0, null]], + [p('bar.js'), [234, 3, 0, null, 0, 'bar']], + [p('link-to-foo'), [456, 0, 0, null, p('./../project/foo'), null]], + [p('abs-link-out'), [456, 0, 0, null, p('/outside/./baz/..'), null]], + [p('root'), [0, 0, 0, null, '..', null]], + [p('link-to-nowhere'), [123, 0, 0, null, p('./nowhere'), null]], + [p('link-to-self'), [123, 0, 0, null, p('./link-to-self'), null]], + [p('link-cycle-1'), [123, 0, 0, null, p('./link-cycle-2'), null]], + [p('link-cycle-2'), [123, 0, 0, null, p('./link-cycle-1'), null]], + [p('node_modules/pkg/a.js'), [123, 0, 0, null, 0, 'a']], + [p('node_modules/pkg/package.json'), [123, 0, 0, null, 0, 'pkg']], + ]), + processFile: () => { + throw new Error('Not implemented'); + }, + }); + }); + + test('all files iterator returns all regular files by real path', () => { + 
expect(tfs.getAllFiles().sort()).toEqual([ + p('/outside/external.js'), + p('/project/bar.js'), + p('/project/foo/another.js'), + p('/project/node_modules/pkg/a.js'), + p('/project/node_modules/pkg/package.json'), + ]); + }); + + test.each([ + p('/outside/external.js'), + p('/project/bar.js'), + p('/project/foo/another.js'), + p('/project/foo/link-to-another.js'), + p('/project/link-to-foo/another.js'), + p('/project/link-to-foo/link-to-another.js'), + p('/project/root/outside/external.js'), + ])('existence check passes for regular files via symlinks: %s', (filePath) => { + expect(tfs.exists(filePath)).toBe(true); + }); + + test('existence check fails for directories, symlinks to directories, or symlinks to nowhere', () => { + expect(tfs.exists(p('/project/foo'))).toBe(false); + expect(tfs.exists(p('/project/link-to-foo'))).toBe(false); + expect(tfs.exists(p('/project/link-to-nowhere'))).toBe(false); + }); + + test('implements linkStats()', () => { + expect(tfs.linkStats(p('/project/link-to-foo/another.js'))).toEqual({ + fileType: 'f', + modifiedTime: 123, + size: 2, + }); + expect(tfs.linkStats(p('bar.js'))).toEqual({ + fileType: 'f', + modifiedTime: 234, + size: 3, + }); + expect(tfs.linkStats(p('./link-to-foo'))).toEqual({ + fileType: 'l', + modifiedTime: 456, + size: 0, + }); + }); + + describe('lookup', () => { + test.each([ + [ + p('/project/foo/link-to-another.js'), + p('/project/foo/another.js'), + [p('/project/foo/link-to-another.js')], + ], + [p('/project/foo/link-to-bar.js'), p('/project/bar.js'), [p('/project/foo/link-to-bar.js')]], + [ + p('link-to-foo/link-to-another.js'), + p('/project/foo/another.js'), + [p('/project/link-to-foo'), p('/project/foo/link-to-another.js')], + ], + [p('/project/root/outside/external.js'), p('/outside/external.js'), [p('/project/root')]], + [p('/outside/../project/bar.js'), p('/project/bar.js'), []], + [p('root/project/bar.js'), p('/project/bar.js'), [p('/project/root')]], + ])('%s -> %s through expected symlinks', 
(givenPath, expectedRealPath, expectedSymlinks) => + expect(tfs.lookup(givenPath)).toEqual({ + exists: true, + links: new Set(expectedSymlinks), + realPath: expectedRealPath, + type: 'f', + metadata: expect.any(Array), + }) + ); + + test.each([ + [p('/project/bar.js/bad-parent'), [], p('/project/bar.js')], + [p('/project/bar.js/'), [], p('/project/bar.js')], + [p('/project/link-to-nowhere'), [p('/project/link-to-nowhere')], p('/project/nowhere')], + [p('/project/not/exists'), [], p('/project/not')], + [p('/project/root/missing'), [p('/project/root')], p('/missing')], + [p('/project/../missing'), [], p('/missing')], + [p('/project/foo/../../missing'), [], p('/missing')], + [p('/project/foo/../../project/missing'), [], p('/project/missing')], + ])( + 'non-existence for bad paths, missing files or broken links %s', + (givenPath, expectedSymlinks, missingPath) => + expect(tfs.lookup(givenPath)).toEqual({ + exists: false, + links: new Set(expectedSymlinks), + missing: missingPath, + }) + ); + + test.each([ + [p('/project/foo'), p('/project/foo')], + [p('/project/foo/'), p('/project/foo')], + [p('/project/root/outside'), p('/outside')], + ])('returns type: d for %s', (givenPath, expectedRealPath) => + expect(tfs.lookup(givenPath)).toMatchObject({ + exists: true, + type: 'd', + realPath: expectedRealPath, + }) + ); + + test('traversing the same symlink multiple times does not imply a cycle', () => { + expect(tfs.lookup(p('/project/foo/owndir/owndir/another.js'))).toMatchObject({ + exists: true, + realPath: p('/project/foo/another.js'), + type: 'f', + }); + }); + + test('ancestors of the root are not reported as missing', () => { + const innerTfs = new TreeFS({ + rootDir: p('/deep/project/root'), + files: new Map([ + [p('foo/index.js'), [123, 0, 0, null, 0, null]], + [p('link-up'), [123, 0, 0, null, p('..'), null]], + ]), + processFile: () => { + throw new Error('Not implemented'); + }, + }); + expect(innerTfs.lookup(p('/deep/missing/bar.js'))).toMatchObject({ + exists: 
false, + missing: p('/deep/missing'), + }); + expect(innerTfs.lookup(p('link-up/bar.js'))).toMatchObject({ + exists: false, + missing: p('/deep/project/bar.js'), + }); + expect(innerTfs.lookup(p('../../baz.js'))).toMatchObject({ + exists: false, + missing: p('/deep/baz.js'), + }); + expect(innerTfs.lookup(p('../../project/root/baz.js'))).toMatchObject({ + exists: false, + missing: p('/deep/project/root/baz.js'), + }); + }); + }); + + describe('symlinks to an ancestor of the project root', () => { + beforeEach(() => { + tfs.addOrModify(p('foo/link-up-2'), [0, 0, 0, null, p('../..'), null]); + }); + + test.each([ + [p('foo/link-up-2/project/bar.js'), p('/project/bar.js'), [p('/project/foo/link-up-2')]], + [ + p('foo/link-up-2/project/foo/link-up-2/project/bar.js'), + p('/project/bar.js'), + [p('/project/foo/link-up-2')], + ], + [ + p('foo/link-up-2/project/foo/link-up-2/outside/external.js'), + p('/outside/external.js'), + [p('/project/foo/link-up-2')], + ], + ])( + 'lookup can find files that go back towards the project root (%s)', + (mixedPath, expectedRealPath, expectedSymlinks) => { + expect(tfs.lookup(mixedPath)).toEqual({ + exists: true, + realPath: expectedRealPath, + links: new Set(expectedSymlinks), + type: 'f', + metadata: expect.any(Array), + }); + } + ); + + test('matchFiles follows links up', () => { + const matches = [ + ...tfs.matchFiles({ + rootDir: p('/project/foo'), + follow: true, + recursive: true, + }), + ]; + expect(matches).toContain(p('/project/foo/link-up-2/project/foo/another.js')); + // Only follow a symlink cycle once. + expect(matches).not.toContain( + p('/project/foo/link-up-2/project/foo/link-up-2/project/foo/another.js') + ); + }); + }); + + describe('getDifference', () => { + test('returns changed (inc. 
new) and removed files in given FileData', () => { + const newFiles: FileData = new Map([ + [p('new-file'), [789, 0, 0, null, 0, null]], + [p('link-to-foo'), [456, 0, 0, null, p('./foo'), null]], + // Different modified time, expect new mtime in changedFiles + [p('foo/another.js'), [124, 0, 0, null, 0, null]], + [p('link-cycle-1'), [123, 0, 0, null, p('./link-cycle-2'), null]], + [p('link-cycle-2'), [123, 0, 0, null, p('./link-cycle-1'), null]], + // Was a symlink, now a regular file + [p('link-to-self'), [123, 0, 0, null, 0, null]], + [p('link-to-nowhere'), [123, 0, 0, null, p('./nowhere'), null]], + [p('node_modules/pkg/a.js'), [123, 0, 0, null, 0, 'a']], + [p('node_modules/pkg/package.json'), [123, 0, 0, null, 0, 'pkg']], + ]); + expect(tfs.getDifference(newFiles)).toEqual({ + changedFiles: new Map([ + [p('new-file'), [789, 0, 0, null, 0, null]], + [p('foo/another.js'), [124, 0, 0, null, 0, null]], + [p('link-to-self'), [123, 0, 0, null, 0, null]], + ]), + removedFiles: new Set([ + p('foo/owndir'), + p('foo/link-to-bar.js'), + p('foo/link-to-another.js'), + p('../outside/external.js'), + p('bar.js'), + p('abs-link-out'), + p('root'), + ]), + }); + }); + + test('with subpath only considers files under that path', () => { + const newFiles: FileData = new Map([ + [p('foo/another.js'), [124, 0, 0, null, 0, null]], + ]); + + expect(tfs.getDifference(newFiles, { subpath: p('foo') })).toEqual({ + changedFiles: new Map([ + [p('foo/another.js'), [124, 0, 0, null, 0, null]], + ]), + removedFiles: new Set([ + p('foo/owndir'), + p('foo/link-to-bar.js'), + p('foo/link-to-another.js'), + ]), + }); + }); + + test('with subpath detects new files under that path', () => { + const newFiles: FileData = new Map([ + [p('foo/another.js'), [123, 2, 0, null, 0, 'another']], + [p('foo/new-file.js'), [456, 0, 0, null, 0, null]], + ]); + + const result = tfs.getDifference(newFiles, { subpath: p('foo') }); + + expect(result.changedFiles.has(p('foo/new-file.js'))).toBe(true); + 
expect(result.removedFiles).toEqual( + new Set([p('foo/owndir'), p('foo/link-to-bar.js'), p('foo/link-to-another.js')]) + ); + expect(result.removedFiles.has(p('bar.js'))).toBe(false); + expect(result.removedFiles.has(p('../outside/external.js'))).toBe(false); + }); + + test('with subpath for non-existent directory returns all as new', () => { + const newFiles: FileData = new Map([ + [p('nonexistent/file.js'), [123, 0, 0, null, 0, null]], + ]); + + expect(tfs.getDifference(newFiles, { subpath: p('nonexistent') })).toEqual({ + changedFiles: new Map([ + [p('nonexistent/file.js'), [123, 0, 0, null, 0, null]], + ]), + removedFiles: new Set(), + }); + }); + + test('with empty subpath behaves like no subdirectory specified', () => { + const newFiles: FileData = new Map([ + [p('foo/another.js'), [123, 0, 0, null, 0, null]], + ]); + + const withEmpty = tfs.getDifference(newFiles, { subpath: '' }); + const withUndefined = tfs.getDifference(newFiles); + + expect(withEmpty).toEqual(withUndefined); + }); + }); + + describe('hierarchicalLookup', () => { + let hlTfs: TreeFSType; + + beforeEach(() => { + hlTfs = new TreeFS({ + rootDir: p('/A/B/C'), + files: new Map( + ( + [ + [p('a/1/package.json'), [0, 0, 0, null, './real-package.json', null]], + [p('a/2/package.json'), [0, 0, 0, null, './notexist-package.json', null]], + [p('a/b/c/d/link-to-C'), [0, 0, 0, null, p('../../../..'), null]], + [p('a/b/c/d/link-to-B'), [0, 0, 0, null, p('../../../../..'), null]], + [p('a/b/c/d/link-to-A'), [0, 0, 0, null, p('../../../../../..'), null]], + [p('n_m/workspace/link-to-pkg'), [0, 0, 0, null, p('../../../workspace-pkg'), null]], + ] as [CanonicalPath, FileMetadata][] + ).concat( + [ + 'a/package.json', + 'a/b/package.json/index.js', + 'a/b/c/package.json', + 'a/b/c/d/foo.js', + 'a/1/real-package.json', + 'a/b/bar.js', + 'a/n_m/pkg/package.json', + 'a/n_m/pkg/foo.js', + 'a/n_m/pkg/subpath/deep/bar.js', + 'a/n_m/pkg/subpath/package.json', + 'a/n_m/pkg/n_m/pkg2/index.js', + 
'a/n_m/pkg/n_m/pkg2/package.json', + '../../package.json', + '../../../a/b/package.json', + '../workspace-pkg/package.json', + ].map( + (posixPath) => + [p(posixPath), [0, 0, 0, null, 0, null]] as [CanonicalPath, FileMetadata] + ) + ) + ), + processFile: () => { + throw new Error('Not implemented'); + }, + }); + }); + + test.each([ + ['/A/B/C/a', '/A/B/C/a/package.json', '', []], + ['/A/B/C/a/b', '/A/B/C/a/package.json', 'b', ['/A/B/C/a/b/package.json']], + ['/A/B/C/a/package.json', '/A/B/C/a/package.json', 'package.json', ['/A/B/C/a/package.json']], + [ + '/A/B/C/a/b/notexists', + '/A/B/C/a/package.json', + 'b/notexists', + ['/A/B/C/a/b/notexists', '/A/B/C/a/b/package.json'], + ], + ['/A/B/C/a/b/c', '/A/B/C/a/b/c/package.json', '', []], + [ + '/A/B/C/other', + '/A/package.json', + 'B/C/other', + ['/A/B/C/other', '/A/B/C/package.json', '/A/B/package.json'], + ], + ['/A/B/C', '/A/package.json', 'B/C', ['/A/B/C/package.json', '/A/B/package.json']], + ['/A/B', '/A/package.json', 'B', ['/A/B/package.json']], + ['/A/B/foo', '/A/package.json', 'B/foo', ['/A/B/foo', '/A/B/package.json']], + ['/A/foo', '/A/package.json', 'foo', ['/A/foo']], + ['/foo', null, null, ['/foo', '/package.json']], + [ + '/A/B/C/a/b/c/d/link-to-C/foo.js', + '/A/B/C/a/b/c/package.json', + 'd/link-to-C/foo.js', + [ + '/A/B/C/a/b/c/d/link-to-C', + '/A/B/C/a/b/c/d/package.json', + '/A/B/C/foo.js', + '/A/B/C/package.json', + ], + ], + [ + '/A/B/C/a/b/c/d/link-to-B/C/foo.js', + '/A/B/C/a/b/c/package.json', + 'd/link-to-B/C/foo.js', + [ + '/A/B/C/a/b/c/d/link-to-B', + '/A/B/C/a/b/c/d/package.json', + '/A/B/C/foo.js', + '/A/B/C/package.json', + '/A/B/package.json', + ], + ], + [ + '/A/B/C/a/b/c/d/link-to-A/B/C/foo.js', + '/A/package.json', + 'B/C/foo.js', + ['/A/B/C/a/b/c/d/link-to-A', '/A/B/C/foo.js', '/A/B/C/package.json', '/A/B/package.json'], + ], + [ + '/A/B/C/a/1/foo.js', + '/A/B/C/a/1/real-package.json', + 'foo.js', + ['/A/B/C/a/1/foo.js', '/A/B/C/a/1/package.json'], + ], + [ + '/A/B/C/a/2/foo.js', 
+ '/A/B/C/a/package.json', + '2/foo.js', + ['/A/B/C/a/2/foo.js', '/A/B/C/a/2/notexist-package.json', '/A/B/C/a/2/package.json'], + ], + [ + '/A/B/C/a/n_m/pkg/notexist.js', + '/A/B/C/a/n_m/pkg/package.json', + 'notexist.js', + ['/A/B/C/a/n_m/pkg/notexist.js'], + ], + [ + '/A/B/C/a/n_m/pkg/subpath/notexist.js', + '/A/B/C/a/n_m/pkg/subpath/package.json', + 'notexist.js', + ['/A/B/C/a/n_m/pkg/subpath/notexist.js'], + ], + [ + '/A/B/C/a/n_m/pkg/otherpath/notexist.js', + '/A/B/C/a/n_m/pkg/package.json', + 'otherpath/notexist.js', + ['/A/B/C/a/n_m/pkg/otherpath'], + ], + ['/A/B/C/a/n_m/pkg3/foo.js', null, null, ['/A/B/C/a/n_m/pkg3']], + ['/A/B/C/a/b/n_m/pkg/foo', null, null, ['/A/B/C/a/b/n_m']], + [ + '/A/B/C/n_m/workspace/link-to-pkg/subpath', + '/A/B/workspace-pkg/package.json', + 'subpath', + ['/A/B/C/n_m/workspace/link-to-pkg', '/A/B/workspace-pkg/subpath'], + ], + ])( + '%s => %s (relative %s, invalidatedBy %s)', + (startPath, expectedPath, expectedRelativeSubpath, expectedInvalidatedBy) => { + const pathMap = (normalPosixPath: string) => + mockPathModule.resolve(p('/A/B/C'), p(normalPosixPath)); + const invalidatedBy = new Set(); + expect( + hlTfs.hierarchicalLookup(p(startPath), 'package.json', { + breakOnSegment: 'n_m', + invalidatedBy, + subpathType: 'f', + }) + ).toEqual( + expectedPath == null + ? 
null + : { + absolutePath: pathMap(expectedPath), + containerRelativePath: p(expectedRelativeSubpath!), + } + ); + expect(invalidatedBy).toEqual(new Set(expectedInvalidatedBy.map(p))); + } + ); + }); + + describe('matchFiles', () => { + test('non-recursive, skipping deep paths', () => { + expect( + Array.from( + tfs.matchFiles({ + filter: new RegExp(/^\.\/.*/), + filterComparePosix: true, + follow: true, + recursive: false, + rootDir: p('/project'), + }) + ) + ).toEqual([p('/project/bar.js')]); + }); + + test('inner directory', () => { + expect( + Array.from( + tfs.matchFiles({ + filter: new RegExp(/.*/), + follow: true, + recursive: true, + rootDir: p('/project/foo'), + }) + ) + ).toEqual([ + p('/project/foo/another.js'), + p('/project/foo/owndir/another.js'), + p('/project/foo/owndir/link-to-bar.js'), + p('/project/foo/owndir/link-to-another.js'), + p('/project/foo/link-to-bar.js'), + p('/project/foo/link-to-another.js'), + ]); + }); + + test('outside rootDir', () => { + expect( + Array.from( + tfs.matchFiles({ + filter: new RegExp(/.*/), + follow: true, + recursive: true, + rootDir: p('/outside'), + }) + ) + ).toEqual([p('/outside/external.js')]); + }); + + test('ancestor of project root includes project root', () => { + expect( + Array.from( + tfs.matchFiles({ + filter: new RegExp(/^\.\/.*\/bar\.js/), + filterComparePosix: true, + follow: true, + recursive: true, + rootDir: p('/'), + }) + ) + ).toEqual([p('/project/bar.js')]); + }); + + test('recursive', () => { + expect( + Array.from( + tfs.matchFiles({ + filter: new RegExp(/.*/), + follow: true, + recursive: true, + rootDir: p('/project'), + }) + ) + ).toEqual([ + p('/project/foo/another.js'), + p('/project/foo/owndir/another.js'), + p('/project/foo/owndir/link-to-bar.js'), + p('/project/foo/owndir/link-to-another.js'), + p('/project/foo/link-to-bar.js'), + p('/project/foo/link-to-another.js'), + p('/project/bar.js'), + p('/project/link-to-foo/another.js'), + p('/project/link-to-foo/owndir/another.js'), + 
p('/project/link-to-foo/owndir/link-to-bar.js'), + p('/project/link-to-foo/owndir/link-to-another.js'), + p('/project/link-to-foo/link-to-bar.js'), + p('/project/link-to-foo/link-to-another.js'), + p('/project/abs-link-out/external.js'), + p('/project/root/project/foo/another.js'), + p('/project/root/project/foo/owndir/another.js'), + p('/project/root/project/foo/owndir/link-to-bar.js'), + p('/project/root/project/foo/owndir/link-to-another.js'), + p('/project/root/project/foo/link-to-bar.js'), + p('/project/root/project/foo/link-to-another.js'), + p('/project/root/project/bar.js'), + p('/project/root/project/link-to-foo/another.js'), + p('/project/root/project/link-to-foo/owndir/another.js'), + p('/project/root/project/link-to-foo/owndir/link-to-bar.js'), + p('/project/root/project/link-to-foo/owndir/link-to-another.js'), + p('/project/root/project/link-to-foo/link-to-bar.js'), + p('/project/root/project/link-to-foo/link-to-another.js'), + p('/project/root/project/abs-link-out/external.js'), + p('/project/root/project/node_modules/pkg/a.js'), + p('/project/root/project/node_modules/pkg/package.json'), + p('/project/root/outside/external.js'), + p('/project/node_modules/pkg/a.js'), + p('/project/node_modules/pkg/package.json'), + ]); + }); + + test('recursive, no follow', () => { + expect( + Array.from( + tfs.matchFiles({ + filter: new RegExp(/.*/), + follow: false, + recursive: true, + rootDir: p('/project'), + }) + ) + ).toEqual([ + p('/project/foo/another.js'), + p('/project/foo/link-to-bar.js'), + p('/project/foo/link-to-another.js'), + p('/project/bar.js'), + p('/project/node_modules/pkg/a.js'), + p('/project/node_modules/pkg/package.json'), + ]); + }); + + test('recursive with filter', () => { + expect( + Array.from( + tfs.matchFiles({ + filter: new RegExp(/\/another\.js/), + filterComparePosix: true, + follow: true, + recursive: true, + rootDir: p('/project'), + }) + ) + ).toEqual([ + p('/project/foo/another.js'), + p('/project/foo/owndir/another.js'), + 
p('/project/link-to-foo/another.js'), + p('/project/link-to-foo/owndir/another.js'), + p('/project/root/project/foo/another.js'), + p('/project/root/project/foo/owndir/another.js'), + p('/project/root/project/link-to-foo/another.js'), + p('/project/root/project/link-to-foo/owndir/another.js'), + ]); + }); + + test('outside root, null rootDir returns matches', () => { + expect( + Array.from( + tfs.matchFiles({ + filter: new RegExp(/external/), + follow: false, + recursive: true, + rootDir: null, + }) + ) + ).toEqual([p('/outside/external.js')]); + }); + + test('outside root, rootDir set to root has no matches', () => { + expect( + Array.from( + tfs.matchFiles({ + filter: new RegExp(/external/), + follow: false, + recursive: true, + rootDir: '', + }) + ) + ).toEqual([]); + }); + }); + + test('compare absolute', () => { + expect( + Array.from( + tfs.matchFiles({ + filter: new RegExp(/project/), + filterCompareAbsolute: true, + follow: false, + recursive: true, + rootDir: null, + }) + ) + ).toEqual([ + p('/project/foo/another.js'), + p('/project/foo/link-to-bar.js'), + p('/project/foo/link-to-another.js'), + p('/project/bar.js'), + p('/project/node_modules/pkg/a.js'), + p('/project/node_modules/pkg/package.json'), + ]); + }); + + describe('mutation', () => { + describe('addOrModify', () => { + test('accepts non-real and absolute paths', () => { + tfs.addOrModify(p('link-to-foo/new.js'), [0, 1, 0, null, 0, null]); + tfs.addOrModify(p('/project/fileatroot.js'), [0, 2, 0, null, 0, null]); + expect(tfs.getAllFiles().sort()).toEqual([ + p('/outside/external.js'), + p('/project/bar.js'), + p('/project/fileatroot.js'), + p('/project/foo/another.js'), + p('/project/foo/new.js'), + p('/project/node_modules/pkg/a.js'), + p('/project/node_modules/pkg/package.json'), + ]); + }); + }); + + describe('bulkAddOrModify', () => { + test('adds new files and modifies existing, new symlinks work', () => { + tfs.bulkAddOrModify( + new Map([ + [p('newdir/link-to-link-to-bar.js'), [0, 0, 0, 
null, p('../foo/link-to-bar.js'), null]], + [p('foo/baz.js'), [0, 0, 0, null, 0, null]], + [p('bar.js'), [999, 1, 0, null, 0, null]], + ]) + ); + + expect(tfs.getAllFiles().sort()).toEqual([ + p('/outside/external.js'), + p('/project/bar.js'), + p('/project/foo/another.js'), + p('/project/foo/baz.js'), + p('/project/node_modules/pkg/a.js'), + p('/project/node_modules/pkg/package.json'), + ]); + + expect(tfs.lookup(p('/project/newdir/link-to-link-to-bar.js')).realPath).toEqual( + p('/project/bar.js') + ); + + expect(tfs.linkStats('bar.js')).toEqual({ + modifiedTime: 999, + fileType: 'f', + size: 1, + }); + }); + }); + + describe('remove', () => { + test.each([ + [p('bar.js')], + [p('./bar.js')], + [p('./link-to-foo/.././bar.js')], + [p('/outside/../project/./bar.js')], + ])('removes a file: %s', (mixedPath) => { + expect(tfs.linkStats(mixedPath)).not.toBeNull(); + tfs.remove(mixedPath); + expect(tfs.linkStats(mixedPath)).toBeNull(); + }); + + test('deletes a symlink, not its target', () => { + expect(tfs.linkStats(p('foo/link-to-bar.js'))).not.toBeNull(); + expect(tfs.linkStats(p('bar.js'))).not.toBeNull(); + tfs.remove(p('foo/link-to-bar.js')); + expect(tfs.linkStats(p('foo/link-to-bar.js'))).toBeNull(); + expect(tfs.linkStats(p('bar.js'))).not.toBeNull(); + }); + + test('deletes empty ancestor directories', () => { + tfs.remove(p('node_modules/pkg/a.js')); + expect(tfs.lookup(p('node_modules/pkg'))).toMatchObject({ + exists: true, + type: 'd', + }); + tfs.remove(p('node_modules/pkg/package.json')); + expect(tfs.lookup(p('node_modules/pkg')).exists).toBe(false); + expect(tfs.lookup(p('node_modules')).exists).toBe(false); + }); + + test('deleting a non-empty directory also removes its empty parent', () => { + expect(tfs.lookup(p('node_modules/pkg')).exists).toBe(true); + expect(tfs.lookup(p('node_modules')).exists).toBe(true); + tfs.remove(p('node_modules/pkg')); + expect(tfs.lookup(p('node_modules/pkg/a.js')).exists).toBe(false); + 
expect(tfs.lookup(p('node_modules/pkg/package.json')).exists).toBe(false); + expect(tfs.lookup(p('node_modules/pkg')).exists).toBe(false); + expect(tfs.lookup(p('node_modules')).exists).toBe(false); + }); + + test('deleting all files leaves an empty map', () => { + for (const { canonicalPath } of tfs.metadataIterator({ + includeSymlinks: true, + includeNodeModules: true, + })) { + tfs.remove(canonicalPath); + } + expect(tfs.lookup(p('node_modules')).exists).toBe(false); + expect(tfs.lookup(p('foo')).exists).toBe(false); + }); + + test('no-op for a non-existent file', () => { + expect(() => tfs.remove('notexists.js')).not.toThrow(); + }); + }); + }); + + describe('metadataIterator', () => { + test('iterates over all files with Haste names, skipping node_modules and symlinks', () => { + expect([ + ...tfs.metadataIterator({ + includeSymlinks: false, + includeNodeModules: false, + }), + ]).toEqual([ + { + baseName: 'another.js', + canonicalPath: p('foo/another.js'), + metadata: [123, 2, 0, null, 0, 'another'], + }, + { + baseName: 'external.js', + canonicalPath: p('../outside/external.js'), + metadata: [0, 0, 0, null, 0, null], + }, + { + baseName: 'bar.js', + canonicalPath: p('bar.js'), + metadata: [234, 3, 0, null, 0, 'bar'], + }, + ]); + }); + + test('iterates over all files including node_modules, skipping symlinks', () => { + expect([ + ...tfs.metadataIterator({ + includeSymlinks: false, + includeNodeModules: true, + }), + ]).toEqual( + expect.arrayContaining([ + { + baseName: 'a.js', + canonicalPath: p('node_modules/pkg/a.js'), + metadata: [123, 0, 0, null, 0, 'a'], + }, + ]) + ); + }); + + test('iterates over all files including symlinks, skipping node_modules', () => { + expect([ + ...tfs.metadataIterator({ + includeSymlinks: true, + includeNodeModules: false, + }), + ]).toEqual( + expect.arrayContaining([ + { + baseName: 'link-to-bar.js', + canonicalPath: p('foo/link-to-bar.js'), + metadata: [0, 0, 0, null, p('../bar.js'), null], + }, + ]) + ); + }); + }); + + 
describe('getOrComputeSha1', () => { + const mockProcessFile = jest.fn(); + + beforeEach(() => { + tfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('foo.js'), [123, 0, 0, 'def456', 0, null]], + [p('bar.js'), [123, 0, 0, null, 0, null]], + [p('link-to-bar'), [456, 0, 0, null, p('./bar.js'), null]], + ]), + processFile: mockProcessFile, + }); + mockProcessFile.mockImplementation((_filePath: string, metadata: FileMetadata) => { + metadata[H.SHA1] = 'abc123'; + return; + }); + mockProcessFile.mockClear(); + }); + + test('returns the precomputed SHA-1 of a file if set', async () => { + expect(await tfs.getOrComputeSha1(p('foo.js'))).toEqual({ sha1: 'def456' }); + expect(mockProcessFile).not.toHaveBeenCalled(); + }); + + test('calls processFile exactly once if SHA-1 not initially set', async () => { + expect(await tfs.getOrComputeSha1(p('bar.js'))).toEqual({ sha1: 'abc123' }); + expect(mockProcessFile).toHaveBeenCalledWith(p('bar.js'), expect.any(Array), { + computeSha1: true, + }); + mockProcessFile.mockClear(); + expect(await tfs.getOrComputeSha1(p('bar.js'))).toEqual({ sha1: 'abc123' }); + expect(mockProcessFile).not.toHaveBeenCalled(); + }); + + test('returns file contents alongside SHA-1 if processFile provides it', async () => { + mockProcessFile.mockImplementationOnce((_filePath: string, metadata: FileMetadata) => { + metadata[H.SHA1] = 'bcd234'; + return Buffer.from('content'); + }); + expect(await tfs.getOrComputeSha1(p('bar.js'))).toEqual({ + sha1: 'bcd234', + content: Buffer.from('content'), + }); + expect(mockProcessFile).toHaveBeenCalledWith(p('bar.js'), expect.any(Array), { + computeSha1: true, + }); + mockProcessFile.mockClear(); + expect(await tfs.getOrComputeSha1(p('bar.js'))).toEqual({ + sha1: 'bcd234', + content: undefined, + }); + expect(mockProcessFile).not.toHaveBeenCalled(); + }); + + test('calls processFile on resolved symlink targets', async () => { + expect(await tfs.getOrComputeSha1(p('link-to-bar'))).toEqual({ sha1: 
'abc123' }); + expect(mockProcessFile).toHaveBeenCalledWith(p('bar.js'), expect.any(Array), { + computeSha1: true, + }); + }); + + test('clears stored SHA-1 on modification', async () => { + let resolve: (sha1: string) => void; + const processPromise = new Promise((r) => (resolve = r)); + mockProcessFile.mockImplementationOnce(async (_filePath: string, metadata: FileMetadata) => { + metadata[H.SHA1] = await processPromise; + }); + const getOrComputePromise = tfs.getOrComputeSha1(p('bar.js')); + expect(mockProcessFile).toHaveBeenCalledWith(p('bar.js'), expect.any(Array), { + computeSha1: true, + }); + // Simulate the file being modified while we're waiting for the SHA1. + tfs.addOrModify(p('bar.js'), [123, 0, 0, null, 0, null]); + resolve!('newsha1'); + expect(await getOrComputePromise).toEqual({ sha1: 'newsha1' }); + // A second call re-computes + expect(await tfs.getOrComputeSha1(p('bar.js'))).toEqual({ sha1: 'abc123' }); + expect(mockProcessFile).toHaveBeenCalledTimes(2); + }); + }); + + describe('change listener', () => { + let simpleTfs: TreeFSType; + const logChange = jest.fn(); + const listener: FileSystemListener = { + fileAdded: (...args) => logChange('fileAdded', ...args), + fileModified: (...args) => logChange('fileModified', ...args), + fileRemoved: (...args) => logChange('fileRemoved', ...args), + directoryAdded: (...args) => logChange('directoryAdded', ...args), + directoryRemoved: (...args) => logChange('directoryRemoved', ...args), + }; + + beforeEach(() => { + logChange.mockClear(); + simpleTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('existing.js'), [123, 0, 0, '', 0]], + [p('dir/nested.js'), [456, 0, 0, '', 0]], + [p('mylink'), [0, 0, 0, '', p('./dir')]], + ]), + processFile: () => { + throw new Error('Not implemented'); + }, + }); + }); + + describe('addOrModify with listener', () => { + test('tracks added files when adding a new file', () => { + simpleTfs.addOrModify(p('new.js'), [789, 0, 0, '', 0], listener); + + 
expect(logChange.mock.calls).toEqual([['fileAdded', p('new.js'), [789, 0, 0, '', 0]]]); + }); + + test('tracks modified files when modifying an existing file', () => { + simpleTfs.addOrModify(p('existing.js'), [999, 0, 0, '', 0], listener); + + expect(logChange.mock.calls).toEqual([ + ['fileModified', p('existing.js'), [123, 0, 0, '', 0], [999, 0, 0, '', 0]], + ]); + }); + + test('tracks new directories when adding a file in a new directory', () => { + simpleTfs.addOrModify(p('newdir/file.js'), [123, 0, 0, '', '', 0, null], listener); + + expect(logChange.mock.calls).toEqual([ + ['directoryAdded', p('newdir')], + ['fileAdded', p('newdir/file.js'), [123, 0, 0, '', '', 0, null]], + ]); + }); + + test('tracks multiple new directories for deeply nested paths', () => { + simpleTfs.addOrModify(p('a/b/c/file.js'), [123, 0, 0, '', '', 0, null], listener); + expect(logChange.mock.calls).toEqual([ + ['directoryAdded', p('a')], + ['directoryAdded', p('a/b')], + ['directoryAdded', p('a/b/c')], + ['fileAdded', p('a/b/c/file.js'), [123, 0, 0, '', '', 0, null]], + ]); + }); + + test('does not track existing directories as new', () => { + simpleTfs.addOrModify(p('dir/another.js'), [789, 0, 0, '', '', 0, null], listener); + + expect(logChange.mock.calls).toEqual([ + ['fileAdded', p('dir/another.js'), [789, 0, 0, '', '', 0, null]], + ]); + }); + }); + + describe('bulkAddOrModify with listener', () => { + test('tracks multiple added files', () => { + simpleTfs.bulkAddOrModify( + new Map([ + [p('file1.js'), [1, 0, 0, '', '', 0, null]], + [p('file2.js'), [2, 0, 0, '', '', 0, null]], + [p('file3.js'), [3, 0, 0, '', '', 0, null]], + ]), + listener + ); + + expect(logChange.mock.calls).toEqual([ + ['fileAdded', p('file1.js'), [1, 0, 0, '', '', 0, null]], + ['fileAdded', p('file2.js'), [2, 0, 0, '', '', 0, null]], + ['fileAdded', p('file3.js'), [3, 0, 0, '', '', 0, null]], + ]); + }); + }); + + test('accumulates changes across multiple operations', () => { + 
simpleTfs.addOrModify(p('new1.js'), [1, 0, 0, '', 0], listener); + simpleTfs.addOrModify(p('new2/file.js'), [2, 0, 0, '', 0], listener); + simpleTfs.addOrModify(p('new2/file.js'), [3, 0, 0, '', 0], listener); + simpleTfs.addOrModify(p('new3/nested/file.js'), [3, 0, 0, '', 0], listener); + simpleTfs.remove(p('existing.js'), listener); + simpleTfs.remove(p('new2/file.js'), listener); + + expect(logChange.mock.calls).toEqual([ + ['fileAdded', p('new1.js'), [1, 0, 0, '', 0]], + ['directoryAdded', p('new2')], + ['fileAdded', p('new2/file.js'), [2, 0, 0, '', 0]], + ['fileModified', p('new2/file.js'), [2, 0, 0, '', 0], [3, 0, 0, '', 0]], + ['directoryAdded', p('new3')], + ['directoryAdded', p('new3/nested')], + ['fileAdded', p('new3/nested/file.js'), [3, 0, 0, '', 0]], + ['fileRemoved', p('existing.js'), [123, 0, 0, '', 0]], + ['fileRemoved', p('new2/file.js'), [3, 0, 0, '', 0]], + ['directoryRemoved', p('new2')], + ]); + }); + + describe('remove with listener', () => { + test('tracks removed files and directories when deleting a non-empty directory', () => { + simpleTfs.remove(p('dir'), listener); + + expect(logChange.mock.calls).toEqual([ + ['fileRemoved', p('dir/nested.js'), [456, 0, 0, '', 0]], + ['directoryRemoved', p('dir')], + ]); + }); + }); + + describe('symlinks with listener', () => { + test('tracks added files when adding a symlink', () => { + simpleTfs.addOrModify(p('link-to-existing'), [0, 0, 0, '', p('./existing.js')], listener); + + expect(logChange.mock.calls).toEqual([ + ['fileAdded', p('link-to-existing'), [0, 0, 0, '', p('./existing.js')]], + ]); + }); + + test('tracks removed symlinks with their metadata', () => { + simpleTfs.remove(p('mylink'), listener); + expect(logChange.mock.calls).toEqual([ + ['fileRemoved', p('mylink'), [0, 0, 0, '', p('./dir')]], + ]); + }); + }); + }); +}); diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/checkWatchmanCapabilities.test.ts 
b/packages/@expo/metro-file-map/src/lib/__tests__/checkWatchmanCapabilities.test.ts new file mode 100644 index 00000000000000..93ef6352333d5a --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/__tests__/checkWatchmanCapabilities.test.ts @@ -0,0 +1,77 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import checkWatchmanCapabilities from '../checkWatchmanCapabilities'; + +const mockExecFile = jest.fn(); +jest.mock('child_process', () => ({ + execFile: (...args: unknown[]) => mockExecFile(...args), +})); + +const mockSuccessResponse = JSON.stringify({ + version: 'v123', + capabilities: ['c1', 'c2'], +}); + +function setMockExecFileResponse(err: unknown, stdout?: unknown) { + mockExecFile.mockImplementation( + (file: string, _args: string[], cb: (err: unknown, result: unknown) => void) => { + expect(file).toBe('watchman'); + cb(err, err == null ? 
{ stdout } : null); + } + ); +} + +describe('checkWatchmanCapabilities', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + test('executes watchman list-capabilities and resolves on success', async () => { + setMockExecFileResponse(null, mockSuccessResponse); + await expect(checkWatchmanCapabilities(['c1', 'c2'])).resolves.toEqual({ + version: 'v123', + }); + expect(mockExecFile).toHaveBeenCalledWith( + 'watchman', + ['list-capabilities', '--output-encoding=json', '--no-pretty', '--no-spawn'], + expect.any(Function) + ); + }); + + test('rejects when execFile reports ENOENT', async () => { + setMockExecFileResponse({ code: 'ENOENT' }); + await expect(checkWatchmanCapabilities([])).rejects.toMatchInlineSnapshot( + `[Error: Watchman is not installed or not available on PATH]` + ); + expect(mockExecFile).toHaveBeenCalled(); + }); + + test('rejects when execFile fails', async () => { + setMockExecFileResponse(new Error('execFile error')); + await expect(checkWatchmanCapabilities([])).rejects.toMatchInlineSnapshot( + `[Error: execFile error]` + ); + expect(mockExecFile).toHaveBeenCalled(); + }); + + test('rejects when the response is not JSON', async () => { + setMockExecFileResponse(null, 'not json'); + await expect(checkWatchmanCapabilities([])).rejects.toMatchInlineSnapshot( + `[Error: Failed to parse response from \`watchman list-capabilities\`]` + ); + expect(mockExecFile).toHaveBeenCalled(); + }); + + test('rejects when we are missing a required capability', async () => { + setMockExecFileResponse(null, mockSuccessResponse); + await expect(checkWatchmanCapabilities(['c1', 'other-cap'])).rejects.toMatchInlineSnapshot( + `[Error: The installed version of Watchman (v123) is missing required capabilities: other-cap]` + ); + expect(mockExecFile).toHaveBeenCalled(); + }); +}); diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/normalizePathSeparatorsToSystem.test.ts 
b/packages/@expo/metro-file-map/src/lib/__tests__/normalizePathSeparatorsToSystem.test.ts new file mode 100644 index 00000000000000..3995b1cd03aa51 --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/__tests__/normalizePathSeparatorsToSystem.test.ts @@ -0,0 +1,22 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +describe('normalizePathSeparatorsToSystem', () => { + test('does nothing on posix', () => { + jest.resetModules(); + jest.mock('path', () => jest.requireActual('path').posix); + const normalizePathSeparatorsToSystem = require('../normalizePathSeparatorsToSystem').default; + expect(normalizePathSeparatorsToSystem('foo/bar/baz.js')).toEqual('foo/bar/baz.js'); + }); + + test('replace slashes on windows', () => { + jest.resetModules(); + jest.mock('path', () => jest.requireActual('path').win32); + const normalizePathSeparatorsToSystem = require('../normalizePathSeparatorsToSystem').default; + expect(normalizePathSeparatorsToSystem('foo/bar/baz.js')).toEqual('foo\\bar\\baz.js'); + }); +}); diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/rootRelativeCacheKeys.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/rootRelativeCacheKeys.test.ts new file mode 100644 index 00000000000000..998ecc6b0ff843 --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/__tests__/rootRelativeCacheKeys.test.ts @@ -0,0 +1,109 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type { BuildParameters, FileMapPlugin } from '../../types'; +import rootRelativeCacheKeys from '../rootRelativeCacheKeys'; + +const getMockPlugin = (cacheKey: string) => + ({ + getCacheKey: jest.fn(() => cacheKey), + }) as unknown as FileMapPlugin; + +const buildParameters: BuildParameters = { + computeSha1: false, + enableSymlinks: false, + extensions: ['a'], + forceNodeFilesystemAPI: false, + ignorePattern: /a/, + plugins: [getMockPlugin('1')], + retainAllFiles: false, + rootDir: '/root', + roots: ['a', 'b'], + cacheBreaker: 'a', +}; + +test('returns a distinct cache key for any change', () => { + const { rootDir: __, plugins: ___, ...simpleParameters } = buildParameters; + + const varyDefault = ( + key: T, + newVal: BuildParameters[T] + ): BuildParameters => { + return { ...buildParameters, [key]: newVal }; + }; + + const configs = (Object.keys(simpleParameters) as (keyof typeof simpleParameters)[]).map( + (key) => { + switch (key) { + // Boolean + case 'computeSha1': + case 'enableSymlinks': + case 'forceNodeFilesystemAPI': + case 'retainAllFiles': + return varyDefault(key, !buildParameters[key]); + // Strings + case 'cacheBreaker': + return varyDefault(key, 'foo'); + // String arrays + case 'extensions': + case 'roots': + return varyDefault(key, ['foo']); + // Regexp + case 'ignorePattern': + return varyDefault(key, /foo/); + default: + key satisfies never; + throw new Error('Unrecognised key in build parameters: ' + key); + } + } + ); + configs.push(buildParameters); + configs.push({ ...buildParameters, plugins: [] }); + configs.push({ ...buildParameters, plugins: [getMockPlugin('2')] }); + + // Generate hashes for each config + const configHashes = configs.map((config) => rootRelativeCacheKeys(config).relativeConfigHash); + + // We expect them all to have distinct hashes + const seen = new Map(); + for (const [i, configHash] of configHashes.entries()) { + const seenIndex = seen.get(configHash); + if (seenIndex != null) { + // Two configs have 
the same hash - let Jest print the differences + expect(configs[seenIndex]).toEqual(configs[i]); + } + seen.set(configHash, i); + } +}); + +describe('cross-platform cache keys', () => { + afterEach(() => { + jest.unmock('path'); + }); + + test('returns the same cache key for Windows and POSIX path parameters', () => { + let mockPathModule: typeof import('path'); + jest.mock('path', () => mockPathModule); + + jest.resetModules(); + mockPathModule = jest.requireActual('path').posix; + const configHashPosix = require('../rootRelativeCacheKeys').default({ + ...buildParameters, + rootDir: '/root', + roots: ['/root/a', '/b/c'], + }).relativeConfigHash; + + jest.resetModules(); + mockPathModule = jest.requireActual('path').win32; + const configHashWin32 = require('../rootRelativeCacheKeys').default({ + ...buildParameters, + rootDir: 'c:\\root', + roots: ['c:\\root\\a', 'c:\\b\\c'], + }).relativeConfigHash; + expect(configHashWin32).toEqual(configHashPosix); + }); +}); diff --git a/packages/@expo/metro-file-map/src/lib/checkWatchmanCapabilities.ts b/packages/@expo/metro-file-map/src/lib/checkWatchmanCapabilities.ts new file mode 100644 index 00000000000000..8c0ff5c7358573 --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/checkWatchmanCapabilities.ts @@ -0,0 +1,60 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import { execFile } from 'child_process'; +import { promisify } from 'util'; + +export default async function checkWatchmanCapabilities( + requiredCapabilities: readonly string[] +): Promise<{ version: string }> { + const execFilePromise = promisify(execFile); + + let rawResponse; + try { + const result = await execFilePromise('watchman', [ + 'list-capabilities', + '--output-encoding=json', + '--no-pretty', + '--no-spawn', // The client can answer this, so don't spawn a server + ]); + rawResponse = result.stdout; + } catch (e) { + if ((e as any)?.code === 'ENOENT') { + throw new Error('Watchman is not installed or not available on PATH'); + } + throw e; + } + + let parsedResponse: unknown; + try { + parsedResponse = JSON.parse(rawResponse); + } catch { + throw new Error('Failed to parse response from `watchman list-capabilities`'); + } + + if ( + parsedResponse == null || + typeof parsedResponse !== 'object' || + typeof (parsedResponse as any).version !== 'string' || + !Array.isArray((parsedResponse as any).capabilities) + ) { + throw new Error('Unexpected response from `watchman list-capabilities`'); + } + const version: string = (parsedResponse as any).version; + const capabilities = new Set((parsedResponse as any).capabilities); + const missingCapabilities = requiredCapabilities.filter( + (requiredCapability) => !capabilities.has(requiredCapability) + ); + if (missingCapabilities.length > 0) { + throw new Error( + `The installed version of Watchman (${version}) is missing required capabilities: ${missingCapabilities.join( + ', ' + )}` + ); + } + return { version }; +} diff --git a/packages/@expo/metro-file-map/src/lib/normalizePathSeparatorsToPosix.ts b/packages/@expo/metro-file-map/src/lib/normalizePathSeparatorsToPosix.ts new file mode 100644 index 00000000000000..ba50e641682aa5 --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/normalizePathSeparatorsToPosix.ts @@ -0,0 +1,17 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import path from 'path'; + +let normalizePathSeparatorsToPosix: (filePath: string) => string; +if (path.sep === '/') { + normalizePathSeparatorsToPosix = (filePath: string): string => filePath; +} else { + normalizePathSeparatorsToPosix = (filePath: string): string => filePath.replace(/\\/g, '/'); +} + +export default normalizePathSeparatorsToPosix; diff --git a/packages/@expo/metro-file-map/src/lib/normalizePathSeparatorsToSystem.ts b/packages/@expo/metro-file-map/src/lib/normalizePathSeparatorsToSystem.ts new file mode 100644 index 00000000000000..b932f2a32b076c --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/normalizePathSeparatorsToSystem.ts @@ -0,0 +1,17 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import path from 'path'; + +let normalizePathSeparatorsToSystem: (filePath: string) => string; +if (path.sep === '/') { + normalizePathSeparatorsToSystem = (filePath: string): string => filePath; +} else { + normalizePathSeparatorsToSystem = (filePath: string): string => filePath.replace(/\//g, path.sep); +} + +export default normalizePathSeparatorsToSystem; diff --git a/packages/@expo/metro-file-map/src/lib/rootRelativeCacheKeys.ts b/packages/@expo/metro-file-map/src/lib/rootRelativeCacheKeys.ts new file mode 100644 index 00000000000000..09ceae5e11f56b --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/rootRelativeCacheKeys.ts @@ -0,0 +1,62 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import { createHash } from 'crypto'; + +import type { BuildParameters } from '../types'; +import { RootPathUtils } from './RootPathUtils'; +import normalizePathSeparatorsToPosix from './normalizePathSeparatorsToPosix'; + +export default function rootRelativeCacheKeys(buildParameters: BuildParameters): { + rootDirHash: string; + relativeConfigHash: string; +} { + const { rootDir, plugins, ...otherParameters } = buildParameters; + const rootDirHash = createHash('md5') + .update(normalizePathSeparatorsToPosix(rootDir)) + .digest('hex'); + const pathUtils = new RootPathUtils(rootDir); + + const cacheComponents = (Object.keys(otherParameters) as (keyof typeof otherParameters)[]) + .sort() + .map((key) => { + switch (key) { + case 'roots': + return buildParameters[key].map((root) => + normalizePathSeparatorsToPosix(pathUtils.absoluteToNormal(root)) + ); + case 'cacheBreaker': + case 'extensions': + case 'computeSha1': + case 'enableSymlinks': + case 'forceNodeFilesystemAPI': + case 'retainAllFiles': + return buildParameters[key] ?? null; + case 'ignorePattern': + return buildParameters[key].toString(); + default: + key satisfies never; + throw new Error('Unrecognised key in build parameters: ' + key); + } + }); + + for (const plugin of plugins) { + cacheComponents.push(plugin.getCacheKey()); + } + + // JSON.stringify is stable here because we only deal in (nested) arrays of + // primitives. Use a different approach if this is expanded to include + // objects/Sets/Maps, etc. + const relativeConfigHash = createHash('md5') + .update(JSON.stringify(cacheComponents)) + .digest('hex'); + + return { + rootDirHash, + relativeConfigHash, + }; +} diff --git a/packages/@expo/metro-file-map/src/lib/sorting.ts b/packages/@expo/metro-file-map/src/lib/sorting.ts new file mode 100644 index 00000000000000..178a3b8510db0e --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/sorting.ts @@ -0,0 +1,32 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// Utilities for working with Array.prototype.sort + +export function compareStrings(a: null | string, b: null | string): number { + if (a == null) { + return b == null ? 0 : -1; + } + if (b == null) { + return 1; + } + return a.localeCompare(b); +} + +export function chainComparators( + ...comparators: ((a: T, b: T) => number)[] +): (a: T, b: T) => number { + return (a, b) => { + for (const comparator of comparators) { + const result = comparator(a, b); + if (result !== 0) { + return result; + } + } + return 0; + }; +} diff --git a/packages/@expo/metro-file-map/src/plugins/DependencyPlugin.ts b/packages/@expo/metro-file-map/src/plugins/DependencyPlugin.ts new file mode 100644 index 00000000000000..7ecb6476038ab3 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/DependencyPlugin.ts @@ -0,0 +1,68 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { Path } from '../types'; +import excludedExtensions from '../workerExclusionList'; +import FileDataPlugin from './FileDataPlugin'; + +export interface DependencyPluginOptions { + /** Path to custom dependency extractor module */ + readonly dependencyExtractor: string | null; + /** Whether to compute dependencies (performance optimization) */ + readonly computeDependencies: boolean; +} + +export default class DependencyPlugin extends FileDataPlugin { + constructor(options: DependencyPluginOptions) { + const { dependencyExtractor, computeDependencies } = options; + + let cacheKey: string; + if (dependencyExtractor != null) { + const mod = require(dependencyExtractor); + const getCacheKey = + mod?.getCacheKey ?? + (mod.__esModule === true && 'default' in mod ? 
mod.default : mod).getCacheKey; + cacheKey = getCacheKey?.() ?? dependencyExtractor; + } else { + cacheKey = 'default-dependency-extractor'; + } + + super({ + name: 'dependencies', + cacheKey, + worker: { + modulePath: require.resolve('./dependencies/worker'), + setupArgs: { + dependencyExtractor: dependencyExtractor ?? null, + }, + }, + filter: ({ normalPath, isNodeModules }) => { + if (!computeDependencies) { + return false; + } + if (isNodeModules) { + return false; + } + const ext = normalPath.substr(normalPath.lastIndexOf('.')); + return !excludedExtensions.has(ext); + }, + }); + } + + /** + * Get the list of dependencies for a given file. + * @param mixedPath Absolute or project-relative path to the file + * @returns Array of dependency module names, or null if the file doesn't exist + */ + getDependencies(mixedPath: Path): readonly string[] | null | undefined { + const result = this.getFileSystem().lookup(mixedPath); + if (result.exists && result.type === 'f') { + return result.pluginData ?? []; + } + return null; + } +} diff --git a/packages/@expo/metro-file-map/src/plugins/FileDataPlugin.ts b/packages/@expo/metro-file-map/src/plugins/FileDataPlugin.ts new file mode 100644 index 00000000000000..c2b920ebc1ae53 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/FileDataPlugin.ts @@ -0,0 +1,69 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { + FileMapPlugin, + FileMapPluginInitOptions, + FileMapPluginWorker, + ReadonlyFileSystemChanges, + V8Serializable, +} from '../types'; + +export interface FileDataPluginOptions extends FileMapPluginWorker { + readonly name: string; + readonly cacheKey: string; +} + +/** + * Base class for FileMap plugins that store per-file data via a worker and + * have no separate serializable state. 
Provides default no-op implementations + * of lifecycle methods that subclasses can override as needed. + */ +export default class FileDataPlugin< + PerFileData extends undefined | V8Serializable = undefined | V8Serializable, +> implements FileMapPlugin +{ + readonly name: string; + + #worker: FileMapPluginWorker; + #cacheKey: string; + #files: FileMapPluginInitOptions['files'] | undefined | null; + + constructor({ name, worker, filter, cacheKey }: FileDataPluginOptions) { + this.name = name; + this.#worker = { worker, filter }; + this.#cacheKey = cacheKey; + } + + async initialize(initOptions: FileMapPluginInitOptions): Promise { + this.#files = initOptions.files; + } + + getFileSystem(): FileMapPluginInitOptions['files'] { + const files = this.#files; + if (files == null) { + throw new Error(`${this.name} plugin has not been initialized`); + } + return files; + } + + onChanged(_changes: ReadonlyFileSystemChanges): void {} + + assertValid(): void {} + + getSerializableSnapshot(): null { + return null; + } + + getCacheKey(): string { + return this.#cacheKey; + } + + getWorker(): FileMapPluginWorker { + return this.#worker; + } +} diff --git a/packages/@expo/metro-file-map/src/plugins/HastePlugin.ts b/packages/@expo/metro-file-map/src/plugins/HastePlugin.ts new file mode 100644 index 00000000000000..2550f289f89fd3 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/HastePlugin.ts @@ -0,0 +1,464 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import path from 'path'; + +import H from '../constants'; +import { RootPathUtils } from '../lib/RootPathUtils'; +import { chainComparators, compareStrings } from '../lib/sorting'; +import type { + Console, + DuplicatesIndex, + DuplicatesSet, + FileMapPlugin, + FileMapPluginInitOptions, + FileMapPluginWorker, + HasteConflict, + HasteMap, + HasteMapItem, + HasteMapItemMetadata, + HTypeValue, + Path, + PerfLogger, + ReadonlyFileSystemChanges, +} from '../types'; +import { DuplicateHasteCandidatesError } from './haste/DuplicateHasteCandidatesError'; +import { HasteConflictsError } from './haste/HasteConflictsError'; +import getPlatformExtension from './haste/getPlatformExtension'; + +const EMPTY_OBJ: Readonly<{ [key: string]: HasteMapItemMetadata }> = {}; +const EMPTY_MAP: ReadonlyMap = new Map(); +const PACKAGE_JSON = /(?:[/\\]|^)package\.json$/; + +// Periodically yield to the event loop to allow parallel I/O, etc. +// Based on 200k files taking up to 800ms => max 40ms between yields. 
+const YIELD_EVERY_NUM_HASTE_FILES = 10000; + +export interface HasteMapOptions { + readonly console?: Console | null; + readonly enableHastePackages: boolean; + readonly hasteImplModulePath: string | null; + readonly perfLogger?: PerfLogger | null; + readonly platforms: ReadonlySet; + readonly rootDir: Path; + readonly failValidationOnConflicts: boolean; +} + +export default class HastePlugin implements HasteMap, FileMapPlugin { + readonly name: 'haste' = 'haste'; + + readonly #console: Console | undefined | null; + readonly #duplicates: DuplicatesIndex = new Map(); + readonly #enableHastePackages: boolean; + readonly #failValidationOnConflicts: boolean; + #getModuleNameByPath: ((mixedPath: string) => string | null | undefined) | undefined; + readonly #hasteImplModulePath: string | undefined | null; + readonly #map: Map = new Map(); + readonly #pathUtils: RootPathUtils; + readonly #perfLogger: PerfLogger | undefined | null; + readonly #platforms: ReadonlySet; + + constructor(options: HasteMapOptions) { + this.#console = options.console ?? globalThis.console; + this.#enableHastePackages = options.enableHastePackages; + this.#hasteImplModulePath = options.hasteImplModulePath; + this.#perfLogger = options.perfLogger; + this.#platforms = options.platforms; + this.#pathUtils = new RootPathUtils(options.rootDir); + this.#failValidationOnConflicts = options.failValidationOnConflicts; + } + + async initialize({ files }: FileMapPluginInitOptions): Promise { + this.#perfLogger?.point('constructHasteMap_start'); + let hasteFiles = 0; + for (const { baseName, canonicalPath, pluginData: hasteId } of files.fileIterator({ + // Symlinks and node_modules are never Haste modules or packages. + includeNodeModules: false, + includeSymlinks: false, + })) { + if (hasteId == null) { + continue; + } + this.setModule(hasteId, [ + canonicalPath, + this.#enableHastePackages && baseName === 'package.json' ? 
H.PACKAGE : H.MODULE, + ]); + if (++hasteFiles % YIELD_EVERY_NUM_HASTE_FILES === 0) { + await new Promise(setImmediate); + } + } + this.#getModuleNameByPath = (mixedPath) => { + const result = files.lookup(mixedPath); + return result.exists && result.type === 'f' && typeof result.pluginData === 'string' + ? result.pluginData + : null; + }; + this.#perfLogger?.point('constructHasteMap_end'); + this.#perfLogger?.annotate({ int: { hasteFiles } }); + } + + getSerializableSnapshot(): null { + // Haste is not serialised, but built from traversing the file metadata + // on each run. This turns out to have comparable performance to + // serialisation, at least when Haste is dense, and makes for a much + // smaller cache. + return null; + } + + getModule( + name: string, + platform?: string | undefined | null, + supportsNativePlatform?: boolean | undefined | null, + type?: HTypeValue | undefined | null + ): Path | undefined | null { + const module = this.#getModuleMetadata(name, platform, !!supportsNativePlatform); + if (module && module[H.TYPE] === (type ?? H.MODULE)) { + const modulePath = module[H.PATH]; + return modulePath && this.#pathUtils.normalToAbsolute(modulePath); + } + return null; + } + + getModuleNameByPath(mixedPath: Path): string | undefined | null { + if (this.#getModuleNameByPath == null) { + throw new Error('HastePlugin has not been initialized before getModuleNameByPath'); + } + return this.#getModuleNameByPath(mixedPath) ?? null; + } + + getPackage( + name: string, + platform: string | undefined | null, + _supportsNativePlatform?: boolean | undefined | null + ): Path | undefined | null { + return this.getModule(name, platform, null, H.PACKAGE); + } + + /** + * When looking up a module's data, we walk through each eligible platform for + * the query. For each platform, we want to check if there are known + * duplicates for that name+platform pair. 
The duplication logic normally + * removes elements from the `map` object, but we want to check upfront to be + * extra sure. If metadata exists both in the `duplicates` object and the + * `map`, this would be a bug. + */ + #getModuleMetadata( + name: string, + platform: string | undefined | null, + supportsNativePlatform: boolean + ): HasteMapItemMetadata | null { + const map = this.#map.get(name) || EMPTY_OBJ; + const dupMap = this.#duplicates.get(name) || EMPTY_MAP; + if (platform != null) { + this.#assertNoDuplicates(name, platform, supportsNativePlatform, dupMap.get(platform)); + if (map[platform] != null) { + return map[platform]!; + } + } + if (supportsNativePlatform) { + this.#assertNoDuplicates( + name, + H.NATIVE_PLATFORM, + supportsNativePlatform, + dupMap.get(H.NATIVE_PLATFORM) + ); + if (map[H.NATIVE_PLATFORM]) { + return map[H.NATIVE_PLATFORM]!; + } + } + this.#assertNoDuplicates( + name, + H.GENERIC_PLATFORM, + supportsNativePlatform, + dupMap.get(H.GENERIC_PLATFORM) + ); + if (map[H.GENERIC_PLATFORM]) { + return map[H.GENERIC_PLATFORM]!; + } + return null; + } + + #assertNoDuplicates( + name: string, + platform: string, + supportsNativePlatform: boolean, + relativePathSet: DuplicatesSet | undefined | null + ): void { + if (relativePathSet == null) { + return; + } + const duplicates = new Map(); + + for (const [relativePath, type] of relativePathSet) { + const duplicatePath = this.#pathUtils.normalToAbsolute(relativePath); + duplicates.set(duplicatePath, type); + } + + throw new DuplicateHasteCandidatesError(name, platform, supportsNativePlatform, duplicates); + } + + onChanged(delta: ReadonlyFileSystemChanges): void { + // Process removals first so that moves aren't treated as duplicates. 
+ for (const [canonicalPath, maybeHasteId] of delta.removedFiles) { + this.#onRemovedFile(canonicalPath, maybeHasteId); + } + for (const [canonicalPath, maybeHasteId] of delta.addedFiles) { + this.#onNewFile(canonicalPath, maybeHasteId); + } + } + + #onNewFile(canonicalPath: string, id: string | null | undefined) { + if (id == null) { + // Not a Haste module or package + return; + } + + const module: HasteMapItemMetadata = [ + canonicalPath, + this.#enableHastePackages && path.basename(canonicalPath) === 'package.json' + ? H.PACKAGE + : H.MODULE, + ]; + + this.setModule(id, module); + } + + setModule(id: string, module: HasteMapItemMetadata) { + let hasteMapItem = this.#map.get(id); + if (!hasteMapItem) { + hasteMapItem = Object.create(null) as HasteMapItem; + this.#map.set(id, hasteMapItem); + } + const platform = getPlatformExtension(module[H.PATH], this.#platforms) || H.GENERIC_PLATFORM; + + const existingModule = hasteMapItem[platform]; + + if (existingModule && existingModule[H.PATH] !== module[H.PATH]) { + if (this.#console) { + this.#console.warn( + [ + 'metro-file-map: Haste module naming collision: ' + id, + ' The following files share their name; please adjust your hasteImpl:', + ' * ' + path.sep + existingModule[H.PATH], + ' * ' + path.sep + module[H.PATH], + '', + ].join('\n') + ); + } + + // We do NOT want consumers to use a module that is ambiguous. 
+ delete hasteMapItem[platform]; + + if (Object.keys(hasteMapItem).length === 0) { + this.#map.delete(id); + } + + let dupsByPlatform = this.#duplicates.get(id); + if (dupsByPlatform == null) { + dupsByPlatform = new Map(); + this.#duplicates.set(id, dupsByPlatform); + } + + const dups = new Map([ + [module[H.PATH], module[H.TYPE]], + [existingModule[H.PATH], existingModule[H.TYPE]], + ]); + dupsByPlatform.set(platform, dups); + + return; + } + + const dupsByPlatform = this.#duplicates.get(id); + if (dupsByPlatform != null) { + const dups = dupsByPlatform.get(platform); + if (dups != null) { + dups.set(module[H.PATH], module[H.TYPE]); + } + return; + } + + hasteMapItem[platform] = module; + } + + #onRemovedFile(canonicalPath: string, moduleName: string | null | undefined) { + if (moduleName == null) { + // Not a Haste module or package + return; + } + + const platform = getPlatformExtension(canonicalPath, this.#platforms) || H.GENERIC_PLATFORM; + + const hasteMapItem = this.#map.get(moduleName); + if (hasteMapItem != null) { + delete hasteMapItem[platform]; + if (Object.keys(hasteMapItem).length === 0) { + this.#map.delete(moduleName); + } else { + this.#map.set(moduleName, hasteMapItem); + } + } + + this.#recoverDuplicates(moduleName, canonicalPath); + } + + assertValid(): void { + if (!this.#failValidationOnConflicts) { + return; + } + const conflicts = this.computeConflicts(); + if (conflicts.length > 0) { + throw new HasteConflictsError(conflicts); + } + } + + /** + * This function should be called when the file under `filePath` is removed + * or changed. When that happens, we want to figure out if that file was + * part of a group of files that had the same ID. If it was, we want to + * remove it from the group. Furthermore, if there is only one file + * remaining in the group, then we want to restore that single file as the + * correct resolution for its ID, and cleanup the duplicates index. 
+ */ + #recoverDuplicates(moduleName: string, relativeFilePath: string) { + let dupsByPlatform = this.#duplicates.get(moduleName); + if (dupsByPlatform == null) { + return; + } + + const platform = getPlatformExtension(relativeFilePath, this.#platforms) || H.GENERIC_PLATFORM; + let dups = dupsByPlatform.get(platform); + if (dups == null) { + return; + } + + dupsByPlatform = new Map(dupsByPlatform); + this.#duplicates.set(moduleName, dupsByPlatform); + + dups = new Map(dups); + dupsByPlatform.set(platform, dups); + dups.delete(relativeFilePath); + + if (dups.size !== 1) { + return; + } + + const uniqueModule = dups.entries().next().value; + + if (!uniqueModule) { + return; + } + + let dedupMap: HasteMapItem | undefined | null = this.#map.get(moduleName); + + if (dedupMap == null) { + dedupMap = Object.create(null) as HasteMapItem; + this.#map.set(moduleName, dedupMap); + } + dedupMap[platform] = uniqueModule; + dupsByPlatform.delete(platform); + if (dupsByPlatform.size === 0) { + this.#duplicates.delete(moduleName); + } + } + + computeConflicts(): HasteConflict[] { + const conflicts: HasteConflict[] = []; + + // Add literal duplicates tracked in the #duplicates map + for (const [id, dupsByPlatform] of this.#duplicates.entries()) { + for (const [platform, conflictingModules] of dupsByPlatform) { + conflicts.push({ + absolutePaths: [...conflictingModules.keys()] + .map((modulePath) => this.#pathUtils.normalToAbsolute(modulePath)) + // Sort for ease of testing + .sort(), + id, + platform: platform === H.GENERIC_PLATFORM ? null : platform, + type: 'duplicate', + }); + } + } + + // Add cases of "shadowing at a distance": a module with a platform suffix and + // a module with a lower priority platform suffix (or no suffix), in different + // directories. 
+ for (const [id, data] of this.#map) { + const conflictPaths = new Set(); + const basePaths: string[] = []; + for (const basePlatform of [H.NATIVE_PLATFORM, H.GENERIC_PLATFORM]) { + if (data[basePlatform] == null) { + continue; + } + const basePath = data[basePlatform]![0]; + basePaths.push(basePath); + const basePathDir = path.dirname(basePath); + // Find all platforms that can shadow basePlatform + // Given that X.(specific platform).js > x.native.js > X.js + // and basePlatform is either 'native' or generic (no platform). + for (const platform of Object.keys(data)) { + if (platform === basePlatform || platform === H.GENERIC_PLATFORM /* lowest priority */) { + continue; + } + const platformPath = data[platform]![0]; + if (path.dirname(platformPath) !== basePathDir) { + conflictPaths.add(platformPath); + } + } + } + if (conflictPaths.size) { + conflicts.push({ + absolutePaths: [...new Set([...conflictPaths, ...basePaths])] + .map((modulePath) => this.#pathUtils.normalToAbsolute(modulePath)) + // Sort for ease of testing + .sort(), + id, + platform: null, + type: 'shadowing', + }); + } + } + + // Sort for ease of testing + conflicts.sort( + chainComparators( + (a, b) => compareStrings(a.type, b.type), + (a, b) => compareStrings(a.id, b.id), + (a, b) => compareStrings(a.platform, b.platform) + ) + ); + + return conflicts; + } + + getCacheKey(): string { + return JSON.stringify([ + this.#enableHastePackages, + this.#hasteImplModulePath != null ? require(this.#hasteImplModulePath).getCacheKey() : null, + [...this.#platforms].sort(), + ]); + } + + getWorker(): FileMapPluginWorker { + return { + worker: { + modulePath: require.resolve('./haste/worker.js'), + setupArgs: { + hasteImplModulePath: this.#hasteImplModulePath ?? 
null, + }, + }, + filter: ({ isNodeModules, normalPath }) => { + if (isNodeModules) { + return false; + } + if (PACKAGE_JSON.test(normalPath)) { + return this.#enableHastePackages; + } + return this.#hasteImplModulePath != null; + }, + }; + } +} diff --git a/packages/@expo/metro-file-map/src/plugins/MockPlugin.ts b/packages/@expo/metro-file-map/src/plugins/MockPlugin.ts new file mode 100644 index 00000000000000..fa848c4d85aa74 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/MockPlugin.ts @@ -0,0 +1,198 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import path from 'path'; + +import { RootPathUtils } from '../lib/RootPathUtils'; +import normalizePathSeparatorsToPosix from '../lib/normalizePathSeparatorsToPosix'; +import normalizePathSeparatorsToSystem from '../lib/normalizePathSeparatorsToSystem'; +import type { + Console, + FileMapPlugin, + FileMapPluginInitOptions, + FileMapPluginWorker, + MockMap as IMockMap, + Path, + RawMockMap, + ReadonlyFileSystemChanges, +} from '../types'; +import getMockName from './mocks/getMockName'; + +export const CACHE_VERSION = 2; + +export interface MockMapOptions { + readonly console: Console; + readonly mocksPattern: RegExp; + readonly rawMockMap?: RawMockMap; + readonly rootDir: Path; + readonly throwOnModuleCollision: boolean; +} + +export default class MockPlugin implements FileMapPlugin, IMockMap { + readonly name: 'mocks' = 'mocks'; + + readonly #mocksPattern: RegExp; + #raw: RawMockMap; + readonly #pathUtils: RootPathUtils; + readonly #console: typeof console; + #throwOnModuleCollision: boolean; + + constructor({ + console, + mocksPattern, + rawMockMap = { + duplicates: new Map(), + mocks: new Map(), + version: CACHE_VERSION, + }, + rootDir, + throwOnModuleCollision, + }: MockMapOptions) { + this.#mocksPattern = mocksPattern; + if (rawMockMap.version !== 
CACHE_VERSION) { + throw new Error('Incompatible state passed to MockPlugin'); + } + this.#raw = rawMockMap; + this.#console = console; + this.#pathUtils = new RootPathUtils(rootDir); + this.#throwOnModuleCollision = throwOnModuleCollision; + } + + async initialize({ files, pluginState }: FileMapPluginInitOptions): Promise { + if (pluginState != null && (pluginState as RawMockMap).version === this.#raw.version) { + // Use cached state directly if available + this.#raw = pluginState as RawMockMap; + } else { + // Otherwise, traverse all files to rebuild + for (const { canonicalPath } of files.fileIterator({ + includeNodeModules: false, + includeSymlinks: false, + })) { + this.#onFileAdded(canonicalPath); + } + } + } + + getMockModule(name: string): Path | undefined | null { + const mockPosixRelativePath = this.#raw.mocks.get(name) || this.#raw.mocks.get(name + '/index'); + if (typeof mockPosixRelativePath !== 'string') { + return null; + } + return this.#pathUtils.normalToAbsolute(normalizePathSeparatorsToSystem(mockPosixRelativePath)); + } + + onChanged(delta: ReadonlyFileSystemChanges): void { + // Process removals first so that moves aren't treated as duplicates. 
+ for (const [canonicalPath] of delta.removedFiles) { + this.#onFileRemoved(canonicalPath); + } + for (const [canonicalPath] of delta.addedFiles) { + this.#onFileAdded(canonicalPath); + } + } + + #onFileAdded(canonicalPath: Path): void { + const absoluteFilePath = this.#pathUtils.normalToAbsolute(canonicalPath); + if (!this.#mocksPattern.test(absoluteFilePath)) { + return; + } + + const mockName = getMockName(absoluteFilePath); + const posixRelativePath = normalizePathSeparatorsToPosix(canonicalPath); + + const existingMockPosixPath = this.#raw.mocks.get(mockName); + if (existingMockPosixPath != null) { + if (existingMockPosixPath !== posixRelativePath) { + let duplicates = this.#raw.duplicates.get(mockName); + if (duplicates == null) { + duplicates = new Set([existingMockPosixPath, posixRelativePath]); + this.#raw.duplicates.set(mockName, duplicates); + } else { + duplicates.add(posixRelativePath); + } + + this.#console.warn(this.#getMessageForDuplicates(mockName, duplicates)); + } + } + + // If there are duplicates and we don't throw, the latest mock wins. + // This is to preserve backwards compatibility, but it's unpredictable. + this.#raw.mocks.set(mockName, posixRelativePath); + } + + #onFileRemoved(canonicalPath: Path): void { + const absoluteFilePath = this.#pathUtils.normalToAbsolute(canonicalPath); + if (!this.#mocksPattern.test(absoluteFilePath)) { + return; + } + const mockName = getMockName(absoluteFilePath); + const duplicates = this.#raw.duplicates.get(mockName); + if (duplicates != null) { + const posixRelativePath = normalizePathSeparatorsToPosix(canonicalPath); + duplicates.delete(posixRelativePath); + if (duplicates.size === 1) { + this.#raw.duplicates.delete(mockName); + } + // Set the mock to a remaining duplicate. Should never be empty. 
+ // Size was checked as 1 above, so this is always defined + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const remaining = duplicates.values().next().value!; + this.#raw.mocks.set(mockName, remaining); + } else { + this.#raw.mocks.delete(mockName); + } + } + + getSerializableSnapshot(): RawMockMap { + return { + duplicates: new Map([...this.#raw.duplicates].map(([k, v]) => [k, new Set(v)])), + mocks: new Map(this.#raw.mocks), + version: this.#raw.version, + }; + } + + assertValid(): void { + if (!this.#throwOnModuleCollision) { + return; + } + // Throw an aggregate error for each duplicate. + const errors: string[] = []; + for (const [mockName, relativePosixPaths] of this.#raw.duplicates) { + errors.push(this.#getMessageForDuplicates(mockName, relativePosixPaths)); + } + if (errors.length > 0) { + throw new Error( + `Mock map has ${errors.length} error${errors.length > 1 ? 's' : ''}:\n${errors.join('\n')}` + ); + } + } + + #getMessageForDuplicates(mockName: string, relativePosixPaths: ReadonlySet): string { + return ( + 'Duplicate manual mock found for `' + + mockName + + '`:\n' + + [...relativePosixPaths] + .map( + (relativePosixPath) => + ' * ' + + path.sep + + this.#pathUtils.absoluteToNormal(normalizePathSeparatorsToSystem(relativePosixPath)) + + '\n' + ) + .join('') + ); + } + + getCacheKey(): string { + return this.#mocksPattern.source.replaceAll('\\\\', '\\/') + ',' + this.#mocksPattern.flags; + } + + getWorker(): FileMapPluginWorker | undefined | null { + return null; + } +} diff --git a/packages/@expo/metro-file-map/src/plugins/dependencies/__tests__/dependencyExtractor.test.ts b/packages/@expo/metro-file-map/src/plugins/dependencies/__tests__/dependencyExtractor.test.ts new file mode 100644 index 00000000000000..0d293f50abd051 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/dependencies/__tests__/dependencyExtractor.test.ts @@ -0,0 +1,238 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import { extract } from '../dependencyExtractor'; + +describe('dependencyExtractor', () => { + test('should not extract dependencies inside comments', () => { + const code = ` + // import a from 'ignore-line-comment'; + // import 'ignore-line-comment'; + // import './ignore-line-comment'; + // require('ignore-line-comment'); + /* + * import a from 'ignore-block-comment'; + * import './ignore-block-comment'; + * import 'ignore-block-comment'; + * require('ignore-block-comment'); + */ + `; + expect(extract(code)).toEqual(new Set()); + }); + + test('should not extract dependencies inside comments (windows line endings)', () => { + const code = [ + '// const module1 = require("module1");', + '/**', + ' * const module2 = require("module2");', + ' */', + ].join('\r\n'); + + expect(extract(code)).toEqual(new Set([])); + }); + + test('should not extract dependencies inside comments (unicode line endings)', () => { + const code = [ + '// const module1 = require("module1");\u2028', + '// const module1 = require("module2");\u2029', + '/*\u2028', + 'const module2 = require("module3");\u2029', + ' */', + ].join(''); + + expect(extract(code)).toEqual(new Set([])); + }); + + test('should extract dependencies from `import` statements', () => { + const code = ` + // Good + import * as depNS from 'dep1'; + import { + a as aliased_a, + b, + } from 'dep2'; + import depDefault from 'dep3'; + import * as depNS, { + a as aliased_a, + b, + }, depDefault from 'dep4'; + + // Bad + foo . import ('inv1'); + foo . 
export ('inv2'); + `; + expect(extract(code)).toEqual(new Set(['dep1', 'dep2', 'dep3', 'dep4'])); + }); + + test('should extract dependencies from side-effect only `import` statements', () => { + const code = ` + // Good + import './side-effect-dep1'; + import 'side-effect-dep2'; + + // Bad + import ./inv1; + import inv2 + `; + expect(extract(code)).toEqual(new Set(['./side-effect-dep1', 'side-effect-dep2'])); + }); + + test('should not extract dependencies from `import type/typeof` statements', () => { + const code = ` + // Bad + import typeof {foo} from 'inv1'; + import type {foo} from 'inv2'; + `; + expect(extract(code)).toEqual(new Set([])); + }); + + test('should extract dependencies from `export` statements', () => { + const code = ` + // Good + export * as depNS from 'dep1'; + export { + a as aliased_a, + b, + } from 'dep2'; + export depDefault from 'dep3'; + export * as depNS, { + a as aliased_a, + b, + }, depDefault from 'dep4'; + + // Bad + foo . export ('inv1'); + foo . export ('inv2'); + `; + expect(extract(code)).toEqual(new Set(['dep1', 'dep2', 'dep3', 'dep4'])); + }); + + test('should not extract dependencies from `export type/typeof` statements', () => { + const code = ` + // Bad + export typeof {foo} from 'inv1'; + export type {foo} from 'inv2'; + `; + expect(extract(code)).toEqual(new Set([])); + }); + + test('should extract dependencies from dynamic `import` calls', () => { + const code = ` + // Good + import('dep1').then(); + const dep2 = await import( + "dep2", + ); + if (await import(\`dep3\`)) {} + + // Bad + await foo . import('inv1') + await ximport('inv2'); + importx('inv3'); + import('inv4', 'inv5'); + `; + expect(extract(code)).toEqual(new Set(['dep1', 'dep2', 'dep3'])); + }); + + test('should extract dependencies from `require` calls', () => { + const code = ` + // Good + require('dep1'); + const dep2 = require( + "dep2", + ); + if (require(\`dep3\`).cond) {} + + // Bad + foo . 
require('inv1') + xrequire('inv2'); + requirex('inv3'); + require('inv4', 'inv5'); + `; + expect(extract(code)).toEqual(new Set(['dep1', 'dep2', 'dep3'])); + }); + + test('should extract dependencies from `jest.requireActual` calls', () => { + const code = ` + // Good + jest.requireActual('dep1'); + const dep2 = jest.requireActual( + "dep2", + ); + if (jest.requireActual(\`dep3\`).cond) {} + jest + .requireActual('dep4'); + + // Bad + foo . jest.requireActual('inv1') + xjest.requireActual('inv2'); + jest.requireActualx('inv3'); + jest.requireActual('inv4', 'inv5'); + `; + expect(extract(code)).toEqual(new Set(['dep1', 'dep2', 'dep3', 'dep4'])); + }); + + test('should extract dependencies from `jest.requireMock` calls', () => { + const code = ` + // Good + jest.requireMock('dep1'); + const dep2 = jest.requireMock( + "dep2", + ); + if (jest.requireMock(\`dep3\`).cond) {} + jest + .requireMock('dep4'); + + // Bad + foo . jest.requireMock('inv1') + xjest.requireMock('inv2'); + jest.requireMockx('inv3'); + jest.requireMock('inv4', 'inv5'); + `; + expect(extract(code)).toEqual(new Set(['dep1', 'dep2', 'dep3', 'dep4'])); + }); + + test('should extract dependencies from `jest.genMockFromModule` calls', () => { + const code = ` + // Good + jest.genMockFromModule('dep1'); + const dep2 = jest.genMockFromModule( + "dep2", + ); + if (jest.genMockFromModule(\`dep3\`).cond) {} + jest + .requireMock('dep4'); + + // Bad + foo . jest.genMockFromModule('inv1') + xjest.genMockFromModule('inv2'); + jest.genMockFromModulex('inv3'); + jest.genMockFromModule('inv4', 'inv5'); + `; + expect(extract(code)).toEqual(new Set(['dep1', 'dep2', 'dep3', 'dep4'])); + }); + + test('should extract dependencies from `jest.createMockFromModule` calls', () => { + const code = ` + // Good + jest.createMockFromModule('dep1'); + const dep2 = jest.createMockFromModule( + "dep2", + ); + if (jest.createMockFromModule(\`dep3\`).cond) {} + jest + .requireMock('dep4'); + + // Bad + foo . 
jest.createMockFromModule('inv1') + xjest.createMockFromModule('inv2'); + jest.createMockFromModulex('inv3'); + jest.createMockFromModule('inv4', 'inv5'); + `; + expect(extract(code)).toEqual(new Set(['dep1', 'dep2', 'dep3', 'dep4'])); + }); +}); diff --git a/packages/@expo/metro-file-map/src/plugins/dependencies/dependencyExtractor.ts b/packages/@expo/metro-file-map/src/plugins/dependencies/dependencyExtractor.ts new file mode 100644 index 00000000000000..35af8f10d4073f --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/dependencies/dependencyExtractor.ts @@ -0,0 +1,86 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +const NOT_A_DOT = '(? `([\`'"])([^'"\`]*?)(?:\\${pos})`; +const WORD_SEPARATOR = '\\b'; +const LEFT_PARENTHESIS = '\\('; +const RIGHT_PARENTHESIS = '\\)'; +const WHITESPACE = '\\s*'; +const OPTIONAL_COMMA = '(:?,\\s*)?'; + +function createRegExp(parts: readonly string[], flags: string) { + return new RegExp(parts.join(''), flags); +} + +function alternatives(...parts: readonly string[]) { + return `(?:${parts.join('|')})`; +} + +function functionCallStart(...names: readonly string[]) { + return [ + NOT_A_DOT, + WORD_SEPARATOR, + alternatives(...names), + WHITESPACE, + LEFT_PARENTHESIS, + WHITESPACE, + ]; +} + +const BLOCK_COMMENT_RE = /\/\*[^]*?\*\//g; +const LINE_COMMENT_RE = /\/\/.*/g; + +const REQUIRE_OR_DYNAMIC_IMPORT_RE = createRegExp( + [ + ...functionCallStart('require', 'import'), + CAPTURE_STRING_LITERAL(1), + WHITESPACE, + OPTIONAL_COMMA, + RIGHT_PARENTHESIS, + ], + 'g' +); + +const IMPORT_OR_EXPORT_RE = createRegExp( + [ + '\\b(?:import|export)\\s+(?!type(?:of)?\\s+)(?:[^\'"]+\\s+from\\s+)?', + CAPTURE_STRING_LITERAL(1), + ], + 'g' +); + +const JEST_EXTENSIONS_RE = createRegExp( + [ + ...functionCallStart( + 
'jest\\s*\\.\\s*(?:requireActual|requireMock|genMockFromModule|createMockFromModule)' + ), + CAPTURE_STRING_LITERAL(1), + WHITESPACE, + OPTIONAL_COMMA, + RIGHT_PARENTHESIS, + ], + 'g' +); + +export function extract(code: string): Set { + const dependencies: Set = new Set(); + + const addDependency = (match: string, _: string, dep: string) => { + dependencies.add(dep); + return match; + }; + + code + .replace(BLOCK_COMMENT_RE, '') + .replace(LINE_COMMENT_RE, '') + .replace(IMPORT_OR_EXPORT_RE, addDependency) + .replace(REQUIRE_OR_DYNAMIC_IMPORT_RE, addDependency) + .replace(JEST_EXTENSIONS_RE, addDependency); + + return dependencies; +} diff --git a/packages/@expo/metro-file-map/src/plugins/dependencies/worker.ts b/packages/@expo/metro-file-map/src/plugins/dependencies/worker.ts new file mode 100644 index 00000000000000..553fcf5aa9182a --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/dependencies/worker.ts @@ -0,0 +1,42 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + */ + +'use strict'; + +import { extract } from './dependencyExtractor'; +import type { + DependencyExtractor, + MetadataWorker, + V8Serializable, + WorkerMessage, +} from '../../types'; + +export default class DependencyExtractorWorker implements MetadataWorker { + readonly #dependencyExtractor: DependencyExtractor | undefined | null; + + constructor({ dependencyExtractor }: Readonly<{ dependencyExtractor: string | null }>) { + if (dependencyExtractor != null) { + const mod = require(dependencyExtractor); + this.#dependencyExtractor = mod.__esModule === true && 'default' in mod ? 
mod.default : mod; + } + } + + processFile(data: WorkerMessage, utils: Readonly<{ getContent: () => Buffer }>): V8Serializable { + const content = utils.getContent().toString(); + const { filePath } = data; + + const dependencies = + this.#dependencyExtractor != null + ? this.#dependencyExtractor.extract(content, filePath, extract) + : extract(content); + + // Return as array (PerFileData type) + return Array.from(dependencies); + } +} diff --git a/packages/@expo/metro-file-map/src/plugins/haste/DuplicateHasteCandidatesError.ts b/packages/@expo/metro-file-map/src/plugins/haste/DuplicateHasteCandidatesError.ts new file mode 100644 index 00000000000000..49c37906d75ebc --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/haste/DuplicateHasteCandidatesError.ts @@ -0,0 +1,60 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import H from '../../constants'; +import type { DuplicatesSet } from '../../types'; + +export class DuplicateHasteCandidatesError extends Error { + hasteName: string; + platform: string | null; + supportsNativePlatform: boolean; + duplicatesSet: DuplicatesSet; + + constructor( + name: string, + platform: string, + supportsNativePlatform: boolean, + duplicatesSet: DuplicatesSet + ) { + const platformMessage = getPlatformMessage(platform); + super( + `The name \`${name}\` was looked up in the Haste module map. It ` + + 'cannot be resolved, because there exists several different ' + + 'files, or packages, that provide a module for ' + + `that particular name and platform. 
${platformMessage} You must ` + + 'delete or exclude files until there remains only one of these:\n\n' + + Array.from(duplicatesSet) + .map( + ([dupFilePath, dupFileType]) => + ` * \`${dupFilePath}\` (${getTypeMessage(dupFileType)})\n` + ) + .sort() + .join('') + ); + this.hasteName = name; + this.platform = platform; + this.supportsNativePlatform = supportsNativePlatform; + this.duplicatesSet = duplicatesSet; + } +} + +function getPlatformMessage(platform: string) { + if (platform === H.GENERIC_PLATFORM) { + return 'The platform is generic (no extension).'; + } + return `The platform extension is \`${platform}\`.`; +} + +function getTypeMessage(type: number) { + switch (type) { + case H.MODULE: + return 'module'; + case H.PACKAGE: + return 'package'; + } + return 'unknown'; +} diff --git a/packages/@expo/metro-file-map/src/plugins/haste/HasteConflictsError.ts b/packages/@expo/metro-file-map/src/plugins/haste/HasteConflictsError.ts new file mode 100644 index 00000000000000..9cb0d8ea735bc4 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/haste/HasteConflictsError.ts @@ -0,0 +1,58 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import path from 'path'; + +import type { HasteConflict } from '../../types'; + +export class HasteConflictsError extends Error { + #conflicts: readonly HasteConflict[]; + + constructor(conflicts: readonly HasteConflict[]) { + super( + `Found ${conflicts.length} Haste conflict(s). 
Haste module IDs must be globally unique in the codebase.` + ); + this.#conflicts = conflicts; + } + + getDetailedMessage(pathsRelativeToRoot: string | null): string { + const messages: string[] = []; + const conflicts = this.#conflicts; + if (conflicts.some((conflict) => conflict.type === 'duplicate')) { + messages.push( + 'Advice: Resolve conflicts of type "duplicate" by renaming one or both of the conflicting modules, or by excluding conflicting paths from Haste.' + ); + } + if (conflicts.some((conflict) => conflict.type === 'shadowing')) { + messages.push( + 'Advice: Resolve conflicts of type "shadowing" by moving the modules to the same folder, or by excluding conflicting paths from Haste.' + ); + } + let index = 0; + for (const conflict of conflicts) { + const itemHeader = index + 1 + '. '; + const indent = ' '.repeat(itemHeader.length + 2); + messages.push( + '\n' + + itemHeader + + conflict.id + + (conflict.platform != null ? `.${conflict.platform}` : '') + + ` (${conflict.type})` + ); + for (const modulePath of conflict.absolutePaths) { + messages.push( + indent + + (pathsRelativeToRoot != null + ? path.relative(pathsRelativeToRoot, modulePath) + : modulePath) + ); + } + ++index; + } + return messages.join('\n'); + } +} diff --git a/packages/@expo/metro-file-map/src/plugins/haste/__tests__/getPlatformExtension.test.ts b/packages/@expo/metro-file-map/src/plugins/haste/__tests__/getPlatformExtension.test.ts new file mode 100644 index 00000000000000..4e77ec4064548a --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/haste/__tests__/getPlatformExtension.test.ts @@ -0,0 +1,22 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import getPlatformExtension from '../getPlatformExtension'; + +const PLATFORMS = new Set(['ios', 'android']); + +describe('getPlatformExtension', () => { + test('should get platform ext', () => { + expect(getPlatformExtension('a.ios.js', PLATFORMS)).toBe('ios'); + expect(getPlatformExtension('a.android.js', PLATFORMS)).toBe('android'); + expect(getPlatformExtension('c.android/a.ios.js', PLATFORMS)).toBe('ios'); + expect(getPlatformExtension('/b/c/a.ios.js', PLATFORMS)).toBe('ios'); + expect(getPlatformExtension('/b/c/a@1.5x.ios.png', PLATFORMS)).toBe('ios'); + expect(getPlatformExtension('/b/c/a@1.5x.lol.png', PLATFORMS)).toBe(null); + expect(getPlatformExtension('/b/c/a.lol.png', PLATFORMS)).toBe(null); + }); +}); diff --git a/packages/@expo/metro-file-map/src/plugins/haste/computeConflicts.ts b/packages/@expo/metro-file-map/src/plugins/haste/computeConflicts.ts new file mode 100644 index 00000000000000..a355b1ba567bf0 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/haste/computeConflicts.ts @@ -0,0 +1,93 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import path from 'path'; + +import H from '../../constants'; +import { chainComparators, compareStrings } from '../../lib/sorting'; +import type { HasteMapItem } from '../../types'; + +interface Conflict { + id: string; + platform: string | null; + absolutePaths: string[]; + type: 'duplicate' | 'shadowing'; +} + +export function computeHasteConflicts(options: { + readonly duplicates: ReadonlyMap>>; + readonly map: ReadonlyMap; + readonly rootDir: string; +}): Conflict[] { + const { duplicates, map, rootDir } = options; + const conflicts: Conflict[] = []; + + // Add duplicates reported by metro-file-map + for (const [id, dupsByPlatform] of duplicates.entries()) { + for (const [platform, conflictingModules] of dupsByPlatform) { + conflicts.push({ + id, + platform: platform === H.GENERIC_PLATFORM ? null : platform, + absolutePaths: [...conflictingModules.keys()] + .map((modulePath) => path.resolve(rootDir, modulePath)) + // Sort for ease of testing + .sort(), + type: 'duplicate', + }); + } + } + + // Add cases of "shadowing at a distance": a module with a platform suffix and + // a module with a lower priority platform suffix (or no suffix), in different + // directories. + for (const [id, data] of map) { + const conflictPaths = new Set(); + const basePaths: string[] = []; + for (const basePlatform of [H.NATIVE_PLATFORM, H.GENERIC_PLATFORM]) { + if (data[basePlatform] == null) { + continue; + } + const basePath = data[basePlatform]![0]; + basePaths.push(basePath); + const basePathDir = path.dirname(basePath); + // Find all platforms that can shadow basePlatform + // Given that X.(specific platform).js > x.native.js > X.js + // and basePlatform is either 'native' or generic (no platform). 
+ for (const platform of Object.keys(data)) { + if (platform === basePlatform || platform === H.GENERIC_PLATFORM /* lowest priority */) { + continue; + } + const platformPath = data[platform]![0]; + if (path.dirname(platformPath) !== basePathDir) { + conflictPaths.add(platformPath); + } + } + } + if (conflictPaths.size) { + conflicts.push({ + id, + platform: null, + absolutePaths: [...new Set([...conflictPaths, ...basePaths])] + .map((modulePath) => path.resolve(rootDir, modulePath)) + // Sort for ease of testing + .sort(), + type: 'shadowing', + }); + } + } + + // Sort for ease of testing + conflicts.sort( + chainComparators( + (a, b) => compareStrings(a.type, b.type), + (a, b) => compareStrings(a.id, b.id), + (a, b) => compareStrings(a.platform, b.platform) + ) + ); + + return conflicts; +} diff --git a/packages/@expo/metro-file-map/src/plugins/haste/getPlatformExtension.ts b/packages/@expo/metro-file-map/src/plugins/haste/getPlatformExtension.ts new file mode 100644 index 00000000000000..62bac2d7df4904 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/haste/getPlatformExtension.ts @@ -0,0 +1,20 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// Extract platform extension: index.ios.js -> ios +export default function getPlatformExtension( + file: string, + platforms: ReadonlySet +): string | null { + const last = file.lastIndexOf('.'); + const secondToLast = file.lastIndexOf('.', last - 1); + if (secondToLast === -1) { + return null; + } + const platform = file.substring(secondToLast + 1, last); + return platforms.has(platform) ? 
platform : null; +} diff --git a/packages/@expo/metro-file-map/src/plugins/haste/worker.ts b/packages/@expo/metro-file-map/src/plugins/haste/worker.ts new file mode 100644 index 00000000000000..b5a3f1a8495080 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/haste/worker.ts @@ -0,0 +1,48 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import path from 'path'; + +import type { MetadataWorker, V8Serializable, WorkerMessage } from '../../types'; +import excludedExtensions from '../../workerExclusionList'; + +const PACKAGE_JSON: string = path.sep + 'package.json'; + +export default class Worker implements MetadataWorker { + #hasteImpl: { readonly getHasteName: (filePath: string) => string | null | undefined } | null = + null; + + constructor({ hasteImplModulePath }: Readonly<{ hasteImplModulePath: string | null }>) { + if (hasteImplModulePath != null) { + const mod = require(hasteImplModulePath); + this.#hasteImpl = mod.__esModule === true && 'default' in mod ? mod.default : mod; + } + } + + processFile(data: WorkerMessage, utils: Readonly<{ getContent: () => Buffer }>): V8Serializable { + let hasteName: string | null = null; + const { filePath } = data; + if (filePath.endsWith(PACKAGE_JSON)) { + // Process a package.json that is returned as a PACKAGE type with its name. + try { + const fileData = JSON.parse(utils.getContent().toString()); + if (fileData.name) { + hasteName = fileData.name; + } + } catch (err: any) { + throw new Error(`Cannot parse ${filePath} as JSON: ${err.message}`); + } + } else if (!excludedExtensions.has(filePath.substr(filePath.lastIndexOf('.')))) { + if (!this.#hasteImpl) { + throw new Error('computeHaste is true but hasteImplModulePath not set'); + } + // Process a random file that is returned as a MODULE. 
+ hasteName = this.#hasteImpl.getHasteName(filePath) || null; + } + return hasteName; + } +} diff --git a/packages/@expo/metro-file-map/src/plugins/mocks/__tests__/getMockName.test.ts b/packages/@expo/metro-file-map/src/plugins/mocks/__tests__/getMockName.test.ts new file mode 100644 index 00000000000000..94affd66a04306 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/mocks/__tests__/getMockName.test.ts @@ -0,0 +1,20 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import path from 'path'; + +import getMockName from '../getMockName'; + +describe('getMockName', () => { + test('extracts mock name from file path', () => { + expect(getMockName(path.join('a', '__mocks__', 'c.js'))).toBe('c'); + + expect(getMockName(path.join('a', '__mocks__', 'c', 'd.js'))).toBe( + path.join('c', 'd').replace(/\\/g, '/') + ); + }); +}); diff --git a/packages/@expo/metro-file-map/src/plugins/mocks/getMockName.ts b/packages/@expo/metro-file-map/src/plugins/mocks/getMockName.ts new file mode 100644 index 00000000000000..58ba427da2a622 --- /dev/null +++ b/packages/@expo/metro-file-map/src/plugins/mocks/getMockName.ts @@ -0,0 +1,15 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import path from 'path'; + +const MOCKS_PATTERN = path.sep + '__mocks__' + path.sep; + +export default (filePath: string): string => { + const mockPath = filePath.split(MOCKS_PATTERN)[1]!; + return mockPath.substring(0, mockPath.lastIndexOf(path.extname(mockPath))).replaceAll('\\', '/'); +}; diff --git a/packages/@expo/metro-file-map/src/ts-declarations/fb-watchman.ts b/packages/@expo/metro-file-map/src/ts-declarations/fb-watchman.ts new file mode 100644 index 00000000000000..d39da2e1913c8f --- /dev/null +++ b/packages/@expo/metro-file-map/src/ts-declarations/fb-watchman.ts @@ -0,0 +1,22 @@ +import * as __fbWatchman from 'fb-watchman'; + +declare module 'fb-watchman' { + /** Information about a changed file */ + export interface FileChange { + // See: https://github.com/facebook/metro/blob/6a63a34/flow-typed/fb-watchman.js#L77-L97 + dev?: number; + cclock?: string; + gid?: number; + ino?: number; + mode?: number; + mtime?: number; + mtime_us?: number; + mtime_ns?: number; + mtime_f?: number; + new?: boolean; + nlink?: number; + uid?: number; + 'content.sha1hex'?: string; + symlink_target?: string; + } +} diff --git a/packages/@expo/metro-file-map/src/types.ts b/packages/@expo/metro-file-map/src/types.ts new file mode 100644 index 00000000000000..2ef10914c13dd2 --- /dev/null +++ b/packages/@expo/metro-file-map/src/types.ts @@ -0,0 +1,539 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { PerfLogger, RootPerfLogger } from '@expo/metro/metro-config'; + +import type { HType, HTypeValue } from './constants'; + +export type { HType, HTypeValue }; + +export type { PerfLoggerFactory, PerfLogger } from '@expo/metro/metro-config'; + +// These inputs affect the internal data collected for a given filesystem +// state, and changes may invalidate a cache. 
+export interface BuildParameters { + readonly computeSha1: boolean; + readonly enableSymlinks: boolean; + readonly extensions: readonly string[]; + readonly forceNodeFilesystemAPI: boolean; + readonly ignorePattern: RegExp; + readonly plugins: readonly InputFileMapPlugin[]; + readonly retainAllFiles: boolean; + readonly rootDir: string; + readonly roots: readonly string[]; + readonly cacheBreaker: string; +} + +export interface BuildResult { + fileSystem: FileSystem; +} + +export interface CacheData { + readonly clocks: WatchmanClocks; + readonly fileSystemData: unknown; + readonly plugins: ReadonlyMap; +} + +export interface CacheManager { + /** + * Called during startup to load initial state, if available. Provided to + * a crawler, which will return the delta between the initial state and the + * current file system state. + */ + read(): Promise; + + /** + * Called when metro-file-map `build()` has applied changes returned by the + * crawler - i.e. internal state reflects the current file system state. + * + * getSnapshot may be retained and called at any time before end(), such as + * in response to eventSource 'change' events. + */ + write(getSnapshot: () => CacheData, opts: CacheManagerWriteOptions): Promise; + + /** + * The last call that will be made to this CacheManager. Any handles should + * be closed by the time this settles. + */ + end(): Promise; +} + +export interface CacheManagerEventSource { + onChange(listener: () => void): () => void /* unsubscribe */; +} + +export type CacheManagerFactory = (options: CacheManagerFactoryOptions) => CacheManager; + +export interface CacheManagerFactoryOptions { + readonly buildParameters: BuildParameters; +} + +export interface CacheManagerWriteOptions { + readonly changedSinceCacheRead: boolean; + readonly eventSource: CacheManagerEventSource; + readonly onWriteError: (error: Error) => void; +} + +// A path that is +// - Relative to the contextual `rootDir` +// - Normalised (no extraneous '.' 
or '..') +// - Real (no symlinks in path, though the path itself may be a symlink) +export type CanonicalPath = string; + +export interface ChangedFileMetadata { + readonly isSymlink: boolean; + readonly modifiedTime?: number | undefined | null; +} + +export interface ChangeEvent { + readonly logger: RootPerfLogger | undefined | null; + readonly changes: ReadonlyFileSystemChanges; + readonly rootDir: string; +} + +export interface ChangeEventMetadata { + modifiedTime: number | undefined | null; // Epoch ms + size: number | undefined | null; // Bytes + type: 'f' | 'd' | 'l'; // Regular file / Directory / Symlink +} + +export type Console = typeof globalThis.console; + +interface CrawlerPreviousState { + readonly clocks: ReadonlyMap; + readonly fileSystem: FileSystem; +} + +export interface CrawlerOptions { + abortSignal: AbortSignal | undefined | null; + computeSha1: boolean; + console: Console; + extensions: readonly string[]; + forceNodeFilesystemAPI: boolean; + ignore: IgnoreMatcher; + includeSymlinks: boolean; + perfLogger?: PerfLogger | null | undefined; + previousState: CrawlerPreviousState; + rootDir: string; + roots: readonly string[]; + onStatus: (status: WatcherStatus) => void; + // Only consider files under this normalized subdirectory when computing + // removedFiles. If not provided, all files in the file system are considered. 
+ subpath?: string; +} + +export type CrawlResult = + | { + changedFiles: FileData; + removedFiles: Set; + clocks: WatchmanClocks; + } + | { + changedFiles: FileData; + removedFiles: Set; + }; + +export type DependencyExtractor = { + extract: ( + content: string, + absoluteFilePath: string, + defaultExtractor?: DependencyExtractor['extract'] + ) => Set; + getCacheKey: () => string; +}; + +export type WatcherStatus = + | { + type: 'watchman_slow_command'; + timeElapsed: number; + command: 'watch-project' | 'query'; + } + | { + type: 'watchman_slow_command_complete'; + timeElapsed: number; + command: 'watch-project' | 'query'; + } + | { + type: 'watchman_warning'; + warning: unknown; + command: 'watch-project' | 'query'; + }; + +export type DuplicatesSet = Map; +export type DuplicatesIndex = Map>; + +interface FileMapPluginInitOptionsFiles { + fileIterator( + opts: Readonly<{ + includeNodeModules: boolean; + includeSymlinks: boolean; + }> + ): Iterable<{ + baseName: string; + canonicalPath: string; + readonly pluginData: PerFileData | null | undefined; + }>; + lookup( + mixedPath: string + ): + | { exists: false } + | { exists: true; type: 'f'; readonly pluginData: PerFileData } + | { exists: true; type: 'd' }; +} + +export interface FileMapPluginInitOptions { + readonly files: FileMapPluginInitOptionsFiles; + readonly pluginState: SerializableState | undefined | null; +} + +interface FileMapPluginWorkerOptions { + readonly modulePath: string; + readonly setupArgs: JsonData; +} + +export interface FileMapPluginWorker { + readonly worker: FileMapPluginWorkerOptions; + readonly filter: (input: { normalPath: string; isNodeModules: boolean }) => boolean; +} + +export type V8Serializable = + | string + | number + | boolean + | null + | readonly V8Serializable[] + | ReadonlySet + | ReadonlyMap + | Readonly<{ [key: string]: V8Serializable }>; + +export interface FileMapPlugin< + SerializableState extends undefined | V8Serializable = undefined | V8Serializable, + PerFileData 
extends undefined | V8Serializable = undefined | V8Serializable, +> { + readonly name: string; + initialize(initOptions: FileMapPluginInitOptions): Promise; + assertValid(): void; + onChanged(changes: ReadonlyFileSystemChanges): void; + getSerializableSnapshot(): void | V8Serializable; + getCacheKey(): string; + getWorker(): FileMapPluginWorker | undefined | null; +} + +export type InputFileMapPlugin = FileMapPlugin; + +export interface MetadataWorkerParams { + getContent(): Buffer; +} + +export interface MetadataWorker { + processFile(message: WorkerMessage, params: MetadataWorkerParams): V8Serializable; +} + +export type IgnoreMatcher = (item: string) => boolean; + +export type FileData = Map; + +export type FileMetadata = [ + mtime: number | null, + size: number, + visited: 0 | 1, + sha1: string | null, + symlink: 0 | 1 | string, // string specifies target, if known + /* plugindata */ + ...any[], +]; + +export interface FileStats { + readonly fileType: 'f' | 'l'; + readonly modifiedTime: number | undefined | null; + readonly size: number | undefined | null; +} + +export interface FileSystem { + exists(file: Path): boolean; + getAllFiles(): Path[]; + + /** + * Given a map of files, determine which of them are new or modified + * (changedFiles), and which of them are missing from the input + * (removedFiles), vs the current state of this instance of FileSystem. + */ + getDifference( + files: FileData, + options?: Readonly<{ + /** + * Only consider files under this subpath (which should be a directory) + * when computing removedFiles. If not provided, all files in the file + * system are considered. 
+ */ + subpath?: string; + }> + ): { + changedFiles: FileData; + removedFiles: Set; + }; + + getSerializableSnapshot(): CacheData['fileSystemData']; + getSha1(file: Path): string | undefined | null; + getOrComputeSha1(file: Path): Promise<{ sha1: string; content?: Buffer } | undefined | null>; + + /** + * Given a start path (which need not exist), a subpath and type, and + * optionally a 'breakOnSegment', performs the following: + * + * X = mixedStartPath + * do + * if basename(X) === opts.breakOnSegment + * return null + * if X + subpath exists and has type opts.subpathType + * return { + * absolutePath: realpath(X + subpath) + * containerRelativePath: relative(mixedStartPath, X) + * } + * X = dirname(X) + * while X !== dirname(X) + * + * If opts.invalidatedBy is given, collects all absolute, real paths that if + * added or removed may invalidate this result. + * + * Useful for finding the closest package scope (subpath: package.json, + * type f, breakOnSegment: node_modules) or closest potential package root + * (subpath: node_modules/pkg, type: d) in Node.js resolution. + */ + hierarchicalLookup( + mixedStartPath: string, + subpath: string, + opts: { + breakOnSegment: string | undefined | null; + invalidatedBy: Set | undefined | null; + subpathType: 'f' | 'd'; + } + ): + | { + absolutePath: string; + containerRelativePath: string; + } + | undefined + | null; + + /** + * Analogous to posix lstat. If the file at `file` is a symlink, return + * information about the symlink without following it. + */ + linkStats(file: Path): FileStats | undefined | null; + + /** + * Return information about the given path, whether a directory or file. + * Always follow symlinks, and return a real path if it exists. + */ + lookup(mixedPath: Path): LookupResult; + + matchFiles(opts: { + /* Filter relative paths against a pattern. */ + filter?: RegExp | null | undefined; + /* `filter` is applied against absolute paths, vs rootDir-relative. 
(default: false) */ + filterCompareAbsolute?: boolean | undefined; + /* `filter` is applied against posix-delimited paths, even on Windows. (default: false) */ + filterComparePosix?: boolean | undefined; + /* Follow symlinks when enumerating paths. (default: false) */ + follow?: boolean | undefined; + /* Should search for files recursively. (default: true) */ + recursive?: boolean | undefined; + /* Match files under a given root, or null for all files */ + rootDir?: Path | null | undefined; + }): Iterable; +} + +export type Glob = string; + +export type JsonData = string | number | boolean | null | JsonData[] | { [key: string]: JsonData }; + +export type LookupResult = + | { + // The node is missing from the FileSystem implementation (note this + // could indicate an unwatched path, or a directory containing no watched + // files). + exists: false; + // The real, normal, absolute paths of any symlinks traversed. + links: ReadonlySet; + // The real, normal, absolute path of the first path segment + // encountered that does not exist, or cannot be navigated through. + missing: string; + } + | { + exists: true; + // The real, normal, absolute paths of any symlinks traversed. + links: ReadonlySet; + // The real, normal, absolute path of the directory. + realPath: string; + // Currently lookup always follows symlinks, so can only return + // directories or regular files, but this may be extended. + type: 'd'; + } + | { + exists: true; + // The real, normal, absolute paths of any symlinks traversed. + links: ReadonlySet; + // The real, normal, absolute path of the file. + realPath: string; + // Currently lookup always follows symlinks, so can only return + // directories or regular files, but this may be extended. + type: 'f'; + // The file's metadata tuple. Must only be mutated via FileProcessor. 
+ metadata: FileMetadata; + }; + +export interface MockMap { + getMockModule(name: string): Path | undefined | null; +} + +export interface HasteConflict { + id: string; + platform: string | null; + absolutePaths: string[]; + type: 'duplicate' | 'shadowing'; +} + +export interface HasteMap { + getModule( + name: string, + platform?: string | undefined | null, + supportsNativePlatform?: boolean | undefined | null, + type?: HTypeValue | undefined | null + ): Path | undefined | null; + + getModuleNameByPath(file: Path): string | undefined | null; + + getPackage( + name: string, + platform: string | undefined | null, + _supportsNativePlatform: boolean | undefined | null + ): Path | undefined | null; + + computeConflicts(): HasteConflict[]; +} + +export type HasteMapData = Map; + +// In the Flow source, this type includes `__proto__: null` to indicate that +// instances are created via `Object.create(null)` and have no prototype. +// TypeScript has no equivalent syntax, but this contract is maintained at +// runtime in HastePlugin.ts via `Object.create(null)`. 
+export type HasteMapItem = { + [platform: string]: HasteMapItemMetadata; +}; + +export type HasteMapItemMetadata = [/* path */ string, /* type */ number]; + +export interface FileSystemListener { + directoryAdded(canonicalPath: CanonicalPath): void; + directoryRemoved(canonicalPath: CanonicalPath): void; + + fileAdded(canonicalPath: CanonicalPath, data: FileMetadata): void; + fileModified(canonicalPath: CanonicalPath, oldData: FileMetadata, newData: FileMetadata): void; + fileRemoved(canonicalPath: CanonicalPath, data: FileMetadata): void; +} + +export interface ReadonlyFileSystemChanges { + readonly addedDirectories: Iterable; + readonly removedDirectories: Iterable; + + readonly addedFiles: Iterable>; + readonly modifiedFiles: Iterable>; + readonly removedFiles: Iterable>; +} + +export interface MutableFileSystem extends FileSystem { + remove(filePath: Path, listener?: FileSystemListener | undefined): void; + addOrModify( + filePath: Path, + fileMetadata: FileMetadata, + listener?: FileSystemListener | undefined + ): void; + bulkAddOrModify(addedOrModifiedFiles: FileData, listener?: FileSystemListener | undefined): void; +} + +export type Path = string; + +export type ProcessFileFunction = ( + normalPath: string, + metadata: FileMetadata, + request: Readonly<{ computeSha1: boolean }> +) => Buffer | undefined | null; + +export type RawMockMap = { + /** posix-separated mock name to posix-separated project-relative paths */ + readonly duplicates: Map>; + /** posix-separated mock name to posix-separated project-relative path */ + readonly mocks: Map; + readonly version: number; +}; + +export interface ReadOnlyRawMockMap { + readonly duplicates: ReadonlyMap>; + readonly mocks: ReadonlyMap; + readonly version: number; +} + +export interface WatcherBackend { + getPauseReason(): string | undefined | null; + onError(listener: (error: Error) => void): () => void; + onFileEvent(listener: (event: WatcherBackendChangeEvent) => void): () => void; + startWatching(): Promise; + 
stopWatching(): Promise; +} + +export type ChangeEventClock = [absoluteWatchRoot: string, opaqueClock: string]; + +export type WatcherBackendChangeEvent = + | { + readonly event: 'touch'; + readonly clock?: ChangeEventClock | undefined; + readonly relativePath: string; + readonly root: string; + readonly metadata: ChangeEventMetadata; + } + | { + readonly event: 'delete'; + readonly clock?: ChangeEventClock | undefined; + readonly relativePath: string; + readonly root: string; + readonly metadata?: undefined; + } + | { + readonly event: 'recrawl'; + readonly clock?: ChangeEventClock | undefined; + readonly relativePath: string; + readonly root: string; + }; + +export interface WatcherBackendOptions { + readonly ignored: RegExp | undefined | null; + readonly globs: readonly string[]; + readonly dot: boolean; +} + +export type WatchmanClockSpec = string | { readonly scm: { readonly 'mergebase-with': string } }; + +export type WatchmanClocks = Map; + +export interface WorkerMessage { + readonly computeSha1: boolean; + readonly filePath: string; + readonly maybeReturnContent: boolean; + readonly pluginsToRun: readonly number[]; +} + +export interface WorkerMetadata { + readonly sha1?: string | undefined | null; + readonly content?: Buffer | undefined | null; + readonly pluginData?: readonly V8Serializable[]; +} + +export interface WorkerSetupArgs { + readonly plugins?: readonly FileMapPluginWorker['worker'][]; +} diff --git a/packages/@expo/metro-file-map/src/watchers/AbstractWatcher.ts b/packages/@expo/metro-file-map/src/watchers/AbstractWatcher.ts new file mode 100644 index 00000000000000..3aff1e816a6537 --- /dev/null +++ b/packages/@expo/metro-file-map/src/watchers/AbstractWatcher.ts @@ -0,0 +1,80 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import EventEmitter from 'events'; +import * as path from 'path'; + +import type { WatcherBackend, WatcherBackendChangeEvent, WatcherBackendOptions } from '../types'; +import { posixPathMatchesPattern } from './common'; + +// Distributive Omit that works correctly with union types +type EachOmit = T extends any ? Omit : never; +export type WatcherBackendChangeEventWithoutRoot = EachOmit; + +export interface Listeners { + onFileEvent(event: WatcherBackendChangeEvent): void; + onError(error: Error): void; +} + +export class AbstractWatcher implements WatcherBackend { + readonly root: string; + readonly ignored: RegExp | undefined | null; + readonly globs: readonly string[]; + readonly dot: boolean; + readonly doIgnore: (path: string) => boolean; + + #emitter: EventEmitter = new EventEmitter(); + + constructor(dir: string, opts: WatcherBackendOptions) { + const { ignored, globs, dot } = opts; + this.dot = dot || false; + this.ignored = ignored; + this.globs = globs; + this.doIgnore = ignored + ? 
(filePath: string) => posixPathMatchesPattern(ignored, filePath) + : () => false; + + this.root = path.resolve(dir); + } + + onFileEvent(listener: (event: WatcherBackendChangeEvent) => void): () => void { + this.#emitter.on('fileevent', listener); + return () => { + this.#emitter.removeListener('fileevent', listener); + }; + } + + onError(listener: (error: Error) => void): () => void { + this.#emitter.on('error', listener); + return () => { + this.#emitter.removeListener('error', listener); + }; + } + + async startWatching(): Promise { + // Must be implemented by subclasses + } + + async stopWatching(): Promise { + this.#emitter.removeAllListeners(); + } + + emitFileEvent(event: WatcherBackendChangeEventWithoutRoot) { + this.#emitter.emit('fileevent', { + ...event, + root: this.root, + }); + } + + emitError(error: Error) { + this.#emitter.emit('error', error); + } + + getPauseReason(): string | undefined | null { + return null; + } +} diff --git a/packages/@expo/metro-file-map/src/watchers/FallbackWatcher.ts b/packages/@expo/metro-file-map/src/watchers/FallbackWatcher.ts new file mode 100644 index 00000000000000..7f3fd78fe121dc --- /dev/null +++ b/packages/@expo/metro-file-map/src/watchers/FallbackWatcher.ts @@ -0,0 +1,451 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @format + */ + +/** + * Originally vendored from https://github.com/amasad/sane/blob/64ff3a870c42e84f744086884bf55a4f9c22d376/src/node_watcher.js + */ + +import type { FSWatcher, Stats } from 'fs'; +import fs from 'fs'; +import os from 'os'; +import path from 'path'; + +import { AbstractWatcher, type WatcherBackendChangeEventWithoutRoot } from './AbstractWatcher'; +import * as common from './common'; +import type { ChangeEventMetadata } from '../types'; + +// NOTE(@kitten): No typings +const walker = require('walker'); + +const platform = os.platform(); + +const fsPromises = fs.promises; + +const TOUCH_EVENT = common.TOUCH_EVENT; +const DELETE_EVENT = common.DELETE_EVENT; + +/** + * This setting delays all events. It suppresses 'change' events that + * immediately follow an 'add', and debounces successive 'change' events to + * only emit the latest. + */ +const DEBOUNCE_MS = 100; + +export default class FallbackWatcher extends AbstractWatcher { + readonly #changeTimers: Map> = new Map(); + readonly #dirRegistry: { + [directory: string]: { [file: string]: true }; + } = Object.create(null); + readonly #watched: { [key: string]: FSWatcher } = Object.create(null); + + async startWatching(): Promise { + this.#watchdir(this.root); + + await new Promise((resolve) => { + recReaddir( + this.root, + (dir) => { + this.#watchdir(dir); + }, + (filename) => { + this.#register(filename, 'f'); + }, + (symlink) => { + this.#register(symlink, 'l'); + }, + () => { + resolve(); + }, + this.#checkedEmitError, + this.ignored + ); + }); + } + + /** + * Register files that matches our globs to know what to type of event to + * emit in the future. + * + * Registry looks like the following: + * + * dirRegister => Map { + * dirpath => Map { + * filename => true + * } + * } + * + * Return false if ignored or already registered. 
+ */ + #register(filepath: string, type: ChangeEventMetadata['type']): boolean { + const dir = path.dirname(filepath); + const filename = path.basename(filepath); + if (this.#dirRegistry[dir] && this.#dirRegistry[dir][filename]) { + return false; + } + + const relativePath = path.relative(this.root, filepath); + if ( + this.doIgnore(relativePath) || + (type === 'f' && !common.includedByGlob('f', this.globs, this.dot, relativePath)) + ) { + return false; + } + + if (!this.#dirRegistry[dir]) { + this.#dirRegistry[dir] = Object.create(null); + } + + this.#dirRegistry[dir]![filename] = true; + + return true; + } + + /** + * Removes a file from the registry. + */ + #unregister(filepath: string) { + const dir = path.dirname(filepath); + if (this.#dirRegistry[dir]) { + const filename = path.basename(filepath); + delete this.#dirRegistry[dir][filename]; + } + } + + /** + * Removes a dir from the registry, returning all files that were registered + * under it (recursively). + */ + #unregisterDir(dirpath: string): string[] { + const removedFiles: string[] = []; + + // Find and remove all entries under this directory + for (const registeredDir of Object.keys(this.#dirRegistry)) { + if (registeredDir === dirpath || registeredDir.startsWith(dirpath + path.sep)) { + // Collect all files in this directory + for (const filename of Object.keys(this.#dirRegistry[registeredDir]!)) { + removedFiles.push(path.join(registeredDir, filename)); + } + delete this.#dirRegistry[registeredDir]; + } + } + + return removedFiles; + } + + /** + * Checks if a file or directory exists in the registry. 
+ */ + #registered(fullpath: string): boolean { + const dir = path.dirname(fullpath); + return !!( + this.#dirRegistry[fullpath] || + (this.#dirRegistry[dir] && this.#dirRegistry[dir][path.basename(fullpath)]) + ); + } + + /** + * Emit "error" event if it's not an ignorable event + */ + #checkedEmitError: (error: Error) => void = (error) => { + if (!isIgnorableFileError(error)) { + this.emitError(error); + } + }; + + /** + * Watch a directory. + */ + #watchdir: (dir: string) => boolean = (dir: string) => { + if (this.#watched[dir]) { + return false; + } + const watcher = fs.watch(dir, { persistent: true }, (event, filename) => + this.#normalizeChange(dir, event, filename as string) + ); + this.#watched[dir] = watcher; + + watcher.on('error', this.#checkedEmitError); + + if (this.root !== dir) { + this.#register(dir, 'd'); + } + return true; + }; + + /** + * Stop watching a directory. + */ + async #stopWatching(dir: string): Promise { + const watcher = this.#watched[dir]; + if (watcher) { + await new Promise((resolve) => { + watcher.once('close', () => process.nextTick(resolve)); + watcher.close(); + delete this.#watched[dir]; + }); + } + } + + /** + * End watching. + */ + async stopWatching(): Promise { + await super.stopWatching(); + const promises = Object.keys(this.#watched).map((dir) => this.#stopWatching(dir)); + await Promise.all(promises); + } + + /** + * On some platforms, as pointed out on the fs docs (most likely just win32) + * the file argument might be missing from the fs event. Try to detect what + * change by detecting if something was deleted or the most recent file change. 
+ */ + #detectChangedFile(dir: string, event: string, callback: (file: string) => void) { + if (!this.#dirRegistry[dir]) { + return; + } + + let found = false; + let closest: Readonly<{ file: string; mtime: Stats['mtime'] }> | null = null; + let c = 0; + Object.keys(this.#dirRegistry[dir]).forEach((file, i, arr) => { + fs.lstat(path.join(dir, file), (error, stat) => { + if (found) { + return; + } + + if (error) { + if (isIgnorableFileError(error)) { + found = true; + callback(file); + } else { + this.emitError(error); + } + } else { + if (closest == null || stat.mtime > closest.mtime) { + closest = { file, mtime: stat.mtime }; + } + if (arr.length === ++c) { + callback(closest.file); + } + } + }); + }); + } + + /** + * Normalize fs events and pass it on to be processed. + */ + #normalizeChange(dir: string, event: string, file: string) { + if (!file) { + this.#detectChangedFile(dir, event, (actualFile) => { + if (actualFile) { + this.#processChange(dir, event, actualFile).catch((error) => { + this.emitError(error); + }); + } + }); + } else { + this.#processChange(dir, event, path.normalize(file)).catch((error) => { + this.emitError(error); + }); + } + } + + /** + * Process changes. + */ + async #processChange(dir: string, event: string, file: string) { + const fullPath = path.join(dir, file); + const relativePath = path.join(path.relative(this.root, dir), file); + + const registered = this.#registered(fullPath); + + try { + const stat = await fsPromises.lstat(fullPath); + if (stat.isDirectory()) { + // win32 emits usless change events on dirs. 
+ if (event === 'change') { + return; + } + + if ( + this.doIgnore(relativePath) || + !common.includedByGlob('d', this.globs, this.dot, relativePath) + ) { + return; + } + recReaddir( + path.resolve(this.root, relativePath), + (dir, stats) => { + if (this.#watchdir(dir)) { + this.#emitEvent({ + event: TOUCH_EVENT, + relativePath: path.relative(this.root, dir), + metadata: { + modifiedTime: stats.mtime.getTime(), + size: stats.size, + type: 'd', + }, + }); + } + }, + (file, stats) => { + if (this.#register(file, 'f')) { + this.#emitEvent({ + event: TOUCH_EVENT, + relativePath: path.relative(this.root, file), + metadata: { + modifiedTime: stats.mtime.getTime(), + size: stats.size, + type: 'f', + }, + }); + } + }, + (symlink, stats) => { + if (this.#register(symlink, 'l')) { + this.emitFileEvent({ + event: TOUCH_EVENT, + relativePath: path.relative(this.root, symlink), + metadata: { + modifiedTime: stats.mtime.getTime(), + size: stats.size, + type: 'l', + }, + }); + } + }, + function endCallback() {}, + this.#checkedEmitError, + this.ignored + ); + } else { + const type = common.typeFromStat(stat); + if (type == null) { + return; + } + const metadata: ChangeEventMetadata = { + modifiedTime: stat.mtime.getTime(), + size: stat.size, + type, + }; + if (registered) { + this.#emitEvent({ event: TOUCH_EVENT, relativePath, metadata }); + } else { + if (this.#register(fullPath, type)) { + this.#emitEvent({ event: TOUCH_EVENT, relativePath, metadata }); + } + } + } + } catch (error: any) { + if (!isIgnorableFileError(error)) { + this.emitError(error); + return; + } + this.#unregister(fullPath); + // When a directory is deleted, emit delete events for all files we + // knew about under that directory + const removedFiles = this.#unregisterDir(fullPath); + for (const removedFile of removedFiles) { + this.#emitEvent({ + event: DELETE_EVENT, + relativePath: path.relative(this.root, removedFile), + }); + } + if (registered) { + this.#emitEvent({ event: DELETE_EVENT, relativePath 
}); + } + await this.#stopWatching(fullPath); + } + } + + /** + * Emits the given event after debouncing, to emit only the latest + * information when we receive several events in quick succession. E.g., + * Linux emits two events for every new file. + * + * See also note above for DEBOUNCE_MS. + */ + #emitEvent(change: WatcherBackendChangeEventWithoutRoot) { + const { event, relativePath } = change; + const key = event + '-' + relativePath; + const existingTimer = this.#changeTimers.get(key); + if (existingTimer) { + clearTimeout(existingTimer); + } + this.#changeTimers.set( + key, + setTimeout(() => { + this.#changeTimers.delete(key); + this.emitFileEvent(change); + }, DEBOUNCE_MS) + ); + } + + getPauseReason(): string | undefined | null { + return null; + } +} + +/** + * Determine if a given FS error can be ignored + */ +function isIgnorableFileError(error: Error & { code?: string }) { + return ( + error.code === 'ENOENT' || + // Workaround Windows EPERM on watched folder deletion, and when + // reading locked files (pending further writes or pending deletion). + // In such cases, we'll receive a subsequent event when the file is + // deleted or ready to read. + // https://github.com/facebook/metro/issues/1001 + // https://github.com/nodejs/node-v0.x-archive/issues/4337 + (error.code === 'EPERM' && platform === 'win32') + ); +} + +/** + * Traverse a directory recursively calling `callback` on every directory. 
+ */ +function recReaddir( + dir: string, + dirCallback: (dir: string, stats: Stats) => void, + fileCallback: (file: string, stats: Stats) => void, + symlinkCallback: (symlink: string, stats: Stats) => void, + endCallback: () => void, + errorCallback: (error: Error) => void, + ignored: RegExp | undefined | null +) { + const walk = walker(dir); + if (ignored) { + walk.filterDir((currentDir: string) => !common.posixPathMatchesPattern(ignored, currentDir)); + } + walk + .on('dir', normalizeProxy(dirCallback)) + .on('file', normalizeProxy(fileCallback)) + .on('symlink', normalizeProxy(symlinkCallback)) + .on('error', errorCallback) + .on('end', () => { + if (platform === 'win32') { + setTimeout(endCallback, 1000); + } else { + endCallback(); + } + }); +} + +/** + * Returns a callback that when called will normalize a path and call the + * original callback + */ +function normalizeProxy( + callback: (filepath: string, stats: Stats) => T +): (filepath: string, stats: Stats) => T { + return (filepath: string, stats: Stats) => callback(path.normalize(filepath), stats); +} diff --git a/packages/@expo/metro-file-map/src/watchers/NativeWatcher.ts b/packages/@expo/metro-file-map/src/watchers/NativeWatcher.ts new file mode 100644 index 00000000000000..af902492ddd02b --- /dev/null +++ b/packages/@expo/metro-file-map/src/watchers/NativeWatcher.ts @@ -0,0 +1,142 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */
+
+import type { FSWatcher } from 'fs';
+import { promises as fsPromises, watch } from 'fs';
+import { platform } from 'os';
+import * as path from 'path';
+
+import type { WatcherBackendOptions } from '../types';
+import { AbstractWatcher } from './AbstractWatcher';
+import { includedByGlob, typeFromStat } from './common';
+
+const debug = require('debug')('Metro:NativeWatcher');
+
+const TOUCH_EVENT = 'touch';
+const DELETE_EVENT = 'delete';
+const RECRAWL_EVENT = 'recrawl';
+
+/**
+ * NativeWatcher uses Node's native fs.watch API with recursive: true.
+ *
+ * Supported on macOS (and potentially Windows), because both natively have a
+ * concept of recursive watching, via FSEvents and ReadDirectoryChangesW
+ * respectively. Notably Linux lacks this capability at the OS level.
+ *
+ * Node.js has at times supported the `recursive` option to fs.watch on Linux
+ * by walking the directory tree and creating a watcher on each directory, but
+ * this fits poorly with the synchronous `watch` API - either it must block for
+ * arbitrarily large IO, or it may drop changes after `watch` returns. See:
+ * https://github.com/nodejs/node/issues/48437
+ *
+ * Therefore, we retain a fallback to our own application-level recursive
+ * FallbackWatcher for Linux, which has async `startWatching`.
+ *
+ * On Windows, this watcher could be used in principle, but needs work around
+ * some Windows-specific edge cases handled in FallbackWatcher, like
+ * deduping file change events, ignoring directory changes, and handling EPERM.
+ */ +export default class NativeWatcher extends AbstractWatcher { + #fsWatcher: FSWatcher | undefined | null; + + static isSupported(): boolean { + return platform() === 'darwin'; + } + + // eslint-disable-next-line @typescript-eslint/no-useless-constructor + constructor(dir: string, opts: WatcherBackendOptions) { + // NOTE(@kitten): `!NativeWatcher.isSupported` was always truthy, so omitting check here + super(dir, opts); + } + + async startWatching(): Promise { + this.#fsWatcher = watch( + this.root, + { + // Don't hold the process open if we forget to close() + persistent: false, + // FSEvents or ReadDirectoryChangesW should mean this is cheap and + // ~instant on macOS or Windows. + recursive: true, + }, + (event, relativePath) => { + this._handleEvent(event, relativePath).catch((error) => { + this.emitError(error); + }); + } + ); + + debug('Watching %s', this.root); + } + + /** + * End watching. + */ + async stopWatching(): Promise { + await super.stopWatching(); + if (this.#fsWatcher) { + this.#fsWatcher.close(); + } + } + + async _handleEvent(event: string, relativePath: string | null) { + if (relativePath == null) { + return; + } + const absolutePath = path.resolve(this.root, relativePath); + if (this.doIgnore(relativePath)) { + debug('Ignoring event "%s" on %s (root: %s)', event, relativePath, this.root); + return; + } + debug('Handling event "%s" on %s (root: %s)', event, relativePath, this.root); + + try { + const stat = await fsPromises.lstat(absolutePath); + const type = typeFromStat(stat); + + // Ignore files of an unrecognized type + if (!type) { + return; + } + + if (!includedByGlob(type, this.globs, this.dot, relativePath)) { + return; + } + + // For directory "rename" events, notify that we need a recrawl since we + // wont' receive events for unmodified files underneath a moved (or + // cloned) directory. Renames are fired by the OS on moves, clones, and + // creations. 
We ignore "change" events because they indicate a change
+      // to directory metadata, rather than its path or existence.
+      if (type === 'd' && event === 'rename') {
+        debug('Directory rename detected on %s, requesting recrawl', relativePath);
+        this.emitFileEvent({
+          event: RECRAWL_EVENT,
+          relativePath,
+        });
+        return;
+      }
+
+      this.emitFileEvent({
+        event: TOUCH_EVENT,
+        relativePath,
+        metadata: {
+          type,
+          modifiedTime: stat.mtime.getTime(),
+          size: stat.size,
+        },
+      });
+    } catch (error: any) {
+      if (error?.code !== 'ENOENT') {
+        this.emitError(error);
+        return;
+      }
+
+      this.emitFileEvent({ event: DELETE_EVENT, relativePath });
+    }
+  }
+}
diff --git a/packages/@expo/metro-file-map/src/watchers/RecrawlWarning.ts b/packages/@expo/metro-file-map/src/watchers/RecrawlWarning.ts
new file mode 100644
index 00000000000000..31f1c724e7c635
--- /dev/null
+++ b/packages/@expo/metro-file-map/src/watchers/RecrawlWarning.ts
@@ -0,0 +1,62 @@
+/**
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ * + * Originally vendored from + * https://github.com/amasad/sane/blob/64ff3a870c42e84f744086884bf55a4f9c22d376/src/utils/recrawl-warning-dedupe.js + */ + +export default class RecrawlWarning { + static RECRAWL_WARNINGS: RecrawlWarning[] = []; + static REGEXP: RegExp = /Recrawled this watch (\d+) times?, most recently because:\n([^:]+)/; + + root: string; + count: number; + + constructor(root: string, count: number) { + this.root = root; + this.count = count; + } + + static findByRoot(root: string): RecrawlWarning | undefined { + for (let i = 0; i < this.RECRAWL_WARNINGS.length; i++) { + const warning = this.RECRAWL_WARNINGS[i]!; + if (warning.root === root) { + return warning; + } + } + + return undefined; + } + + static isRecrawlWarningDupe(warningMessage: unknown): boolean { + if (typeof warningMessage !== 'string') { + return false; + } + const match = warningMessage.match(this.REGEXP); + if (!match) { + return false; + } + const count = Number(match[1]); + const root = match[2]!; + + const warning = this.findByRoot(root); + + if (warning) { + // only keep the highest count, assume count to either stay the same or + // increase. + if (warning.count >= count) { + return true; + } else { + // update the existing warning to the latest (highest) count + warning.count = count; + return false; + } + } else { + this.RECRAWL_WARNINGS.push(new RecrawlWarning(root, count)); + return false; + } + } +} diff --git a/packages/@expo/metro-file-map/src/watchers/WatchmanWatcher.ts b/packages/@expo/metro-file-map/src/watchers/WatchmanWatcher.ts new file mode 100644 index 00000000000000..ebca8a9ed1bb46 --- /dev/null +++ b/packages/@expo/metro-file-map/src/watchers/WatchmanWatcher.ts @@ -0,0 +1,358 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import assert from 'assert'; +import { createHash } from 'crypto'; +import type { Client, FileChange, SubscribeResponse, WatchProjectResponse } from 'fb-watchman'; +import watchman from 'fb-watchman'; +import invariant from 'invariant'; + +import { AbstractWatcher } from './AbstractWatcher'; +import RecrawlWarning from './RecrawlWarning'; +import * as common from './common'; +import type { WatcherOptions } from './common'; +import normalizePathSeparatorsToSystem from '../lib/normalizePathSeparatorsToSystem'; + +// NOTE(@kitten): Local type aliases for Watchman types not exported by @types/fb-watchman +interface WatchmanClockResponse { + clock: string; + warning?: string; +} + +interface WatchmanFileChange extends FileChange { + new?: boolean; +} + +interface WatchmanQuery { + fields?: string[]; + expression?: any; + since?: string; + defer?: string[]; + relative_root?: string; +} + +interface WatchmanSubscribeResponse extends SubscribeResponse { + 'asserted-states'?: string[]; + warning?: string; +} + +interface WatchmanSubscriptionEvent { + subscription: string; + is_fresh_instance?: boolean; + files?: WatchmanFileChange[]; + 'state-enter'?: string; + 'state-leave'?: string; + clock?: string; +} + +const debug = require('debug')('Metro:WatchmanWatcher'); + +const DELETE_EVENT = common.DELETE_EVENT; +const TOUCH_EVENT = common.TOUCH_EVENT; +const SUB_PREFIX = 'metro-file-map'; + +/** + * Watches `dir`. 
+ */ +export default class WatchmanWatcher extends AbstractWatcher { + #client: Client | undefined; + readonly subscriptionName: string; + #watchProjectInfo: + | Readonly<{ + relativePath: string; + root: string; + }> + | undefined + | null; + readonly #watchmanDeferStates: readonly string[]; + #deferringStates: Set | null = null; + + constructor(dir: string, opts: WatcherOptions) { + const { watchmanDeferStates, ...baseOpts } = opts; + super(dir, baseOpts); + + this.#watchmanDeferStates = watchmanDeferStates; + + // Use a unique subscription name per process per watched directory + const watchKey = createHash('md5').update(this.root).digest('hex'); + const readablePath = this.root + .replace(/[/\\]/g, '-') // \ and / to - + .replace(/[^\-\w]/g, ''); // Remove non-word/hyphen + this.subscriptionName = `${SUB_PREFIX}-${process.pid}-${readablePath}-${watchKey}`; + } + + async startWatching(): Promise { + await new Promise((resolve, reject) => this.#init(resolve, reject)); + } + + /** + * Run the watchman `watch` command on the root and subscribe to changes. + */ + #init(onReady: () => void, onError: (error: Error) => void) { + if (this.#client) { + this.#client.removeAllListeners(); + } + + const self = this; + this.#client = new watchman.Client(); + this.#client.on('error', (error: Error) => { + this.emitError(error); + }); + this.#client.on('subscription', (changeEvent: WatchmanSubscriptionEvent) => + this.#handleChangeEvent(changeEvent) + ); + this.#client.on('end', () => { + console.warn('[metro-file-map] Warning: Lost connection to Watchman, reconnecting..'); + self.#init( + () => {}, + (error) => self.emitError(error) + ); + }); + + this.#watchProjectInfo = null; + + function getWatchRoot() { + return self.#watchProjectInfo ? 
self.#watchProjectInfo.root : self.root; + } + + function onWatchProject(error: Error | null | undefined, resp: WatchProjectResponse) { + if (error) { + onError(error); + return; + } + debug('Received watch-project response: %s', resp.relative_path); + + handleWarning(resp); + + // NB: Watchman outputs posix-separated paths even on Windows, convert + // them to system-native separators. + self.#watchProjectInfo = { + relativePath: resp.relative_path ? normalizePathSeparatorsToSystem(resp.relative_path) : '', + root: normalizePathSeparatorsToSystem(resp.watch), + }; + + self.#client!.command(['clock', getWatchRoot()], onClock); + } + + function onClock(error: Error | null | undefined, resp: WatchmanClockResponse) { + if (error) { + onError(error); + return; + } + + debug('Received clock response: %s', resp.clock); + const watchProjectInfo = self.#watchProjectInfo; + + invariant( + watchProjectInfo != null, + 'watch-project response should have been set before clock response' + ); + + handleWarning(resp); + + const options: WatchmanQuery = { + fields: ['name', 'exists', 'new', 'type', 'size', 'mtime_ms'], + since: resp.clock, + defer: self.#watchmanDeferStates as string[], + relative_root: watchProjectInfo.relativePath, + }; + + // Make sure we honor the dot option if even we're not using globs. 
+ if (self.globs.length === 0 && !self.dot) { + options.expression = [ + 'match', + '**', + 'wholename', + { + includedotfiles: false, + }, + ]; + } + + (self.#client!.command as Function)( + ['subscribe', getWatchRoot(), self.subscriptionName, options], + onSubscribe + ); + } + + const onSubscribe = (error: Error | null | undefined, resp: WatchmanSubscribeResponse) => { + if (error) { + onError(error); + return; + } + debug('Received subscribe response: %s', resp.subscribe); + + handleWarning(resp); + + if (resp['asserted-states'] != null) { + this.#deferringStates = new Set(resp['asserted-states']); + } + + onReady(); + }; + + self.#client!.command(['watch-project', getWatchRoot()], onWatchProject); + } + + /** + * Handles a change event coming from the subscription. + */ + #handleChangeEvent(resp: WatchmanSubscriptionEvent) { + debug( + 'Received subscription response: %s (fresh: %s, files: %s, enter: %s, leave: %s, clock: %s)', + resp.subscription, + resp.is_fresh_instance, + resp.files?.length, + resp['state-enter'], + resp['state-leave'], + resp.clock + ); + + assert.equal(resp.subscription, this.subscriptionName, 'Invalid subscription event.'); + + if (Array.isArray(resp.files)) { + resp.files.forEach((change: WatchmanFileChange) => + this.#handleFileChange(change, resp.clock) + ); + } + const { 'state-enter': stateEnter, 'state-leave': stateLeave } = resp; + if (stateEnter != null && (this.#watchmanDeferStates ?? []).includes(stateEnter)) { + this.#deferringStates?.add(stateEnter); + debug('Watchman reports "%s" just started. Filesystem notifications are paused.', stateEnter); + } + if (stateLeave != null && (this.#watchmanDeferStates ?? []).includes(stateLeave)) { + this.#deferringStates?.delete(stateLeave); + debug('Watchman reports "%s" ended. Filesystem notifications resumed.', stateLeave); + } + } + + /** + * Handles a single change event record. 
+ */ + #handleFileChange( + changeDescriptor: WatchmanFileChange, + rawClock: WatchmanSubscriptionEvent['clock'] + ) { + const self = this; + const watchProjectInfo = self.#watchProjectInfo; + + invariant( + watchProjectInfo != null, + 'watch-project response should have been set before receiving subscription events' + ); + + const { + name: relativePosixPath, + new: isNew = false, + exists = false, + type, + mtime_ms, + size, + } = changeDescriptor; + + // Watchman emits posix-separated paths on Windows, which is inconsistent + // with other watchers. Normalize to system-native separators. + const relativePath = normalizePathSeparatorsToSystem(relativePosixPath); + + debug( + 'Handling change to: %s (new: %s, exists: %s, type: %s)', + relativePath, + isNew, + exists, + type + ); + + // Ignore files of an unrecognized type + if (type != null && !(type === 'f' || type === 'd' || type === 'l')) { + return; + } + + if ( + this.doIgnore(relativePath) || + !common.includedByGlob(type, this.globs, this.dot, relativePath) + ) { + return; + } + + const clock = + typeof rawClock === 'string' && this.#watchProjectInfo != null + ? ([this.#watchProjectInfo.root, rawClock] as [string, string]) + : undefined; + + if (!exists) { + self.emitFileEvent({ event: DELETE_EVENT, clock, relativePath }); + } else { + invariant( + type != null && mtime_ms != null && size != null, + 'Watchman file change event for "%s" missing some requested metadata. ' + + 'Got type: %s, mtime_ms: %s, size: %s', + relativePath, + type, + mtime_ms, + size + ); + + if ( + // Change event on dirs are mostly useless. + !(type === 'd' && !isNew) + ) { + const mtime = Number(mtime_ms); + self.emitFileEvent({ + event: TOUCH_EVENT, + clock, + relativePath, + metadata: { + modifiedTime: mtime !== 0 ? mtime : null, + size, + type, + }, + }); + } + } + } + + /** + * Closes the watcher. 
+ */ + async stopWatching(): Promise { + await super.stopWatching(); + if (this.#client) { + this.#client.removeAllListeners(); + this.#client.end(); + } + this.#deferringStates = null; + } + + getPauseReason(): string | undefined | null { + if (this.#deferringStates == null || this.#deferringStates.size === 0) { + return null; + } + const states = [...this.#deferringStates]; + if (states.length === 1) { + return `The watch is in the '${states[0]}' state.`; + } + return `The watch is in the ${states + .slice(0, -1) + .map((s) => `'${s}'`) + .join(', ')} and '${states[states.length - 1]}' states.`; + } +} + +/** + * Handles a warning in the watchman resp object. + */ +function handleWarning(resp: Readonly<{ warning?: unknown }>) { + if ('warning' in resp) { + if (RecrawlWarning.isRecrawlWarningDupe(resp.warning)) { + return true; + } + console.warn(resp.warning); + return true; + } else { + return false; + } +} diff --git a/packages/@expo/metro-file-map/src/watchers/common.ts b/packages/@expo/metro-file-map/src/watchers/common.ts new file mode 100644 index 00000000000000..48452082b38c5a --- /dev/null +++ b/packages/@expo/metro-file-map/src/watchers/common.ts @@ -0,0 +1,75 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *
+ * Originally vendored from
+ * https://github.com/amasad/sane/blob/64ff3a870c42e84f744086884bf55a4f9c22d376/src/common.js
+ */
+
+import type { Stats } from 'fs';
+import micromatch from 'micromatch';
+import path from 'path';
+
+import type { ChangeEventMetadata } from '../types';
+
+export const DELETE_EVENT = 'delete';
+export const TOUCH_EVENT = 'touch';
+export const RECRAWL_EVENT = 'recrawl';
+export const ALL_EVENT = 'all';
+
+export interface WatcherOptions {
+  readonly globs: readonly string[];
+  readonly dot: boolean;
+  readonly ignored: RegExp | null | undefined;
+  readonly watchmanDeferStates: readonly string[];
+  readonly watchman?: unknown;
+  readonly watchmanPath?: string;
+}
+
+/**
+ * Checks a file relative path against the globs array.
+ */
+export function includedByGlob(
+  type: 'f' | 'l' | 'd' | null | undefined,
+  globs: readonly string[],
+  dot: boolean,
+  relativePath: string
+): boolean {
+  // For non-regular files or if there are no glob matchers, just respect the
+  // `dot` option to filter dotfiles if dot === false.
+  if (globs.length === 0 || type !== 'f') {
+    return dot || micromatch.some(relativePath, '**/*');
+  }
+  return micromatch.some(relativePath, globs, { dot });
+}
+
+/**
+ * Whether the given filePath matches the given RegExp, after converting
+ * (on Windows only) system separators to posix separators.
+ *
+ * Conversion to posix is for backwards compatibility with the previous
+ * anymatch matcher, which normalises all inputs[1]. This may not be consistent
+ * with other parts of metro-file-map.
+ *
+ * [1]: https://github.com/micromatch/anymatch/blob/3.1.1/index.js#L50
+ */
+export const posixPathMatchesPattern: (pattern: RegExp, filePath: string) => boolean =
+  path.sep === '/'
+    ? 
(pattern, filePath) => pattern.test(filePath) + : (pattern, filePath) => pattern.test(filePath.replaceAll(path.sep, '/')); + +export function typeFromStat(stat: Stats): ChangeEventMetadata['type'] | null { + // Note: These tests are not mutually exclusive - a symlink passes isFile + if (stat.isSymbolicLink()) { + return 'l'; + } + if (stat.isDirectory()) { + return 'd'; + } + if (stat.isFile()) { + return 'f'; // "Regular" file + } + return null; +} diff --git a/packages/@expo/metro-file-map/src/worker.ts b/packages/@expo/metro-file-map/src/worker.ts new file mode 100644 index 00000000000000..a6d99a8845cadc --- /dev/null +++ b/packages/@expo/metro-file-map/src/worker.ts @@ -0,0 +1,82 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import { createHash } from 'crypto'; +import fs from 'graceful-fs'; + +import type { MetadataWorker, WorkerMessage, WorkerMetadata, WorkerSetupArgs } from './types'; + +function sha1hex(content: string | Buffer): string { + return createHash('sha1').update(content).digest('hex'); +} + +/** + * Exposed for use outside a jest-worker context, ie when processing in-band. + */ +export class Worker { + #plugins: readonly MetadataWorker[]; + + constructor({ plugins = [] }: WorkerSetupArgs) { + this.#plugins = plugins.map(({ modulePath, setupArgs }) => { + const mod = require(modulePath); + const PluginWorker = mod.__esModule === true && 'default' in mod ? 
mod.default : mod; + return new PluginWorker(setupArgs); + }); + } + + processFile(data: WorkerMessage): WorkerMetadata { + let content: Buffer | undefined; + let sha1: WorkerMetadata['sha1']; + + const { computeSha1, filePath, pluginsToRun } = data; + + const getContent = (): Buffer => { + if (content == null) { + content = fs.readFileSync(filePath) as Buffer; + } + + return content!; + }; + + const workerUtils = { getContent }; + const pluginData = pluginsToRun.map((pluginIdx) => + this.#plugins[pluginIdx]!.processFile(data, workerUtils) + ); + + // If a SHA-1 is requested on update, compute it. + if (computeSha1) { + sha1 = sha1hex(getContent()); + } + + return content && data.maybeReturnContent + ? { content, pluginData, sha1 } + : { pluginData, sha1 }; + } +} + +let singletonWorker: Worker | undefined; + +/** + * Called automatically by jest-worker before the first call to `worker` when + * this module is used as worker thread or child process. + */ +export function setup(args: WorkerSetupArgs): void { + if (singletonWorker) { + throw new Error('metro-file-map: setup() should only be called once'); + } + singletonWorker = new Worker(args); +} + +/** + * Called by jest-worker with each workload + */ +export function processFile(data: WorkerMessage): WorkerMetadata { + if (!singletonWorker) { + throw new Error('metro-file-map: setup() must be called before processFile()'); + } + return singletonWorker.processFile(data); +} diff --git a/packages/@expo/metro-file-map/src/workerExclusionList.ts b/packages/@expo/metro-file-map/src/workerExclusionList.ts new file mode 100644 index 00000000000000..e9ad62947e6217 --- /dev/null +++ b/packages/@expo/metro-file-map/src/workerExclusionList.ts @@ -0,0 +1,62 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +// This list is compiled after the MDN list of the most common MIME types (see +// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/ +// Complete_list_of_MIME_types). +// +// Only MIME types starting with "image/", "video/", "audio/" and "font/" are +// reflected in the list. Adding "application/" is too risky since some text +// file formats (like ".js" and ".json") have an "application/" MIME type. +// +// Feel free to add any extensions that cannot be a Haste module. + +const extensions: ReadonlySet = new Set([ + // JSONs are never haste modules, except for "package.json", which is handled. + '.json', + + // Image extensions. + '.bmp', + '.gif', + '.ico', + '.jpeg', + '.jpg', + '.png', + '.svg', + '.tiff', + '.tif', + '.webp', + + // Video extensions. + '.avi', + '.mp4', + '.mpeg', + '.mpg', + '.ogv', + '.webm', + '.3gp', + '.3g2', + + // Audio extensions. + '.aac', + '.midi', + '.mid', + '.mp3', + '.oga', + '.wav', + '.3gp', + '.3g2', + + // Font extensions. 
+ '.eot', + '.otf', + '.ttf', + '.woff', + '.woff2', +]); + +export default extensions; diff --git a/packages/@expo/metro-file-map/tsconfig.json b/packages/@expo/metro-file-map/tsconfig.json new file mode 100644 index 00000000000000..b6dc711bba653d --- /dev/null +++ b/packages/@expo/metro-file-map/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "expo-module-scripts/tsconfig.node", + "include": ["./src"], + "exclude": ["**/__mocks__/*", "**/__tests__/*"], + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build", + "esModuleInterop": true, + "sourceMap": false + } +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1fdfdfde279eb7..c4ddc766b66e47 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1809,6 +1809,9 @@ importers: '@expo/metro-config': specifier: workspace:~55.0.9 version: link:../metro-config + '@expo/metro-file-map': + specifier: workspace:55.0.0-0 + version: link:../metro-file-map '@expo/osascript': specifier: workspace:^2.4.2 version: link:../osascript @@ -2603,6 +2606,58 @@ importers: specifier: ^1.60.0 version: 1.98.0 + packages/@expo/metro-file-map: + dependencies: + debug: + specifier: ^4.3.4 + version: 4.4.3 + fb-watchman: + specifier: ^2.0.2 + version: 2.0.2 + graceful-fs: + specifier: ^4.2.4 + version: 4.2.11 + invariant: + specifier: ^2.2.4 + version: 2.2.4 + jest-worker: + specifier: ^29.7.0 + version: 29.7.0 + micromatch: + specifier: ^4.0.4 + version: 4.0.8 + walker: + specifier: ^1.0.8 + version: 1.0.8 + devDependencies: + '@expo/metro': + specifier: 56.0.0-rc.2 + version: 56.0.0-rc.2 + '@types/debug': + specifier: ^4.1.7 + version: 4.1.12 + '@types/fb-watchman': + specifier: ^2.0.6 + version: 2.0.6 + '@types/graceful-fs': + specifier: ^4.1.9 + version: 4.1.9 + '@types/invariant': + specifier: ^2.2.37 + version: 2.2.37 + '@types/micromatch': + specifier: ^4.0.10 + version: 4.0.10 + '@types/node': + specifier: ^22.14.0 + version: 22.19.15 + expo-module-scripts: + specifier: workspace:* + version: link:../../expo-module-scripts + 
memfs: + specifier: ^3.6.0 + version: 3.6.0 + packages/@expo/metro-runtime: dependencies: '@expo/log-box': @@ -7409,7 +7464,7 @@ packages: '@expo/bunyan@4.0.1': resolution: {integrity: sha512-+Lla7nYSiHZirgK+U/uYzsLv/X+HaJienbD5AKX1UQZHYfWaP+9uuQluRB4GrEVWF0GZ7vEVp/jzaOT9k/SQlg==} - engines: {node: '>=0.10.0'} + engines: {'0': node >=0.10.0} '@expo/code-signing-certificates@0.0.6': resolution: {integrity: sha512-iNe0puxwBNEcuua9gmTGzq+SuMDa0iATai1FlFTMHJ/vUmKvN/V//drXoLJkVb5i5H3iE/n/qIJxyoBnXouD0w==} @@ -9158,6 +9213,9 @@ packages: '@types/express@5.0.6': resolution: {integrity: sha512-sKYVuV7Sv9fbPIt/442koC7+IIwK5olP1KWeD88e/idgoJqDm3JV/YUiPwkoKK92ylff2MGxSz1CSjsXelx0YA==} + '@types/fb-watchman@2.0.6': + resolution: {integrity: sha512-1uLrko2lamBq7v5gZQOEnHa+GzK7wkPeGndFijhhK5kjqqXLVfzfeL6bVLiKCqVNaM5BVVcKRQ2Z57AzH4HcyA==} + '@types/fbemitter@2.0.35': resolution: {integrity: sha512-Xem6d7qUfmouCHntCrRYgDBwbf+WWRd6G+7WEFlEZFZ67LZXiYRvT2LV8wcZa6mIaAil95+ABQdKgB6hPIsnng==} @@ -19390,6 +19448,10 @@ snapshots: '@types/express-serve-static-core': 5.1.1 '@types/serve-static': 2.2.0 + '@types/fb-watchman@2.0.6': + dependencies: + '@types/node': 22.19.15 + '@types/fbemitter@2.0.35': {} '@types/folder-hash@4.0.4': {} From a16e978558d39e4e432952a44136665e07bb031e Mon Sep 17 00:00:00 2001 From: Phil Pluckthun Date: Tue, 5 May 2026 17:02:33 +0100 Subject: [PATCH 02/26] feat(metro-file-map): Port metro-file-map changes filed upstream to fork (#45378) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # Why Stacked on #45373 This ports the open PRs that have been filed to Metro, as referenced in the commit history. 
The changes are: - Drop `graceful-fs` dependency - Drop native find binary crawler (it's generally just slower, and can be accidentally re-enabled by users modifying the Metro config) - Tweak Node crawler hot-path (generic speed-up refactor) - Lazily stat files and populate symlinks for Node crawled file trees (avoids larger chunks of work for unaccessed files when diffing the file tree) - Pre-resolve symlink targets and store normal posix paths (moves work off of TreeFS to the symlink resolution, where it makes more sense) Additional changes are: - Disable watchman by default — we'd like the Node crawler to be the default now - Refactor worker and plugin file processors to be non-blocking — this is a slight uplift in the file processor during concurrent in-band file processor calls # How - Pick changes from upstream branches and cross-check with #44567 - Disable watchman by default by flipping `useWatchman` default to `false` - Convert worker and file processor `getContent` and return value to be promises # Test Plan - Unit tests updated accordingly - CI should pass unchanged - Tested manually against `apps/router-e2e` - Delete `$TMPDIR/metro-file-map-*` - Start `apps/router-e2e` with `expo start --clear` - Observe that a `$TMPDIR/metro-file-map-expo-*` file gets created # Checklist - [x] I added a `changelog.md` entry and rebuilt the package sources according to [this short guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting) - [ ] This diff will work correctly for `npx expo prebuild` & EAS Build (eg: updated a module plugin). 
- [ ] Conforms with the [Documentation Writing Style Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md) --- packages/@expo/cli/CHANGELOG.md | 1 + .../start/server/metro/createFileMap-fork.ts | 5 +- packages/@expo/metro-file-map/CHANGELOG.md | 11 + .../@expo/metro-file-map/build/Watcher.d.ts | 3 +- .../crawlers/node/hasNativeFindSupport.js | 28 - .../build/crawlers/node/index.js | 146 ++--- .../build/crawlers/watchman/index.js | 3 + packages/@expo/metro-file-map/build/index.js | 17 +- .../build/lib/FileProcessor.d.ts | 4 +- .../metro-file-map/build/lib/FileProcessor.js | 4 +- .../build/lib/RootPathUtils.d.ts | 1 + .../metro-file-map/build/lib/RootPathUtils.js | 17 + .../metro-file-map/build/lib/TreeFS.d.ts | 1 + .../@expo/metro-file-map/build/lib/TreeFS.js | 86 ++- .../removeOverlappingRoots.d.ts} | 5 +- .../build/lib/removeOverlappingRoots.js | 37 ++ .../build/lib/rootRelativeCacheKeys.js | 3 +- .../build/plugins/dependencies/worker.d.ts | 6 +- .../build/plugins/dependencies/worker.js | 4 +- .../build/plugins/haste/worker.d.ts | 6 +- .../build/plugins/haste/worker.js | 4 +- .../@expo/metro-file-map/build/types.d.ts | 13 +- .../@expo/metro-file-map/build/worker.d.ts | 4 +- packages/@expo/metro-file-map/build/worker.js | 33 +- packages/@expo/metro-file-map/jest.setup.ts | 1 - packages/@expo/metro-file-map/package.json | 2 - packages/@expo/metro-file-map/src/Watcher.ts | 3 +- .../src/crawlers/node/__tests__/index.test.ts | 183 +++--- .../src/crawlers/node/hasNativeFindSupport.ts | 26 - .../metro-file-map/src/crawlers/node/index.ts | 181 +++--- .../src/crawlers/watchman/index.ts | 5 + packages/@expo/metro-file-map/src/index.ts | 19 +- .../metro-file-map/src/lib/FileProcessor.ts | 6 +- .../metro-file-map/src/lib/RootPathUtils.ts | 20 + .../@expo/metro-file-map/src/lib/TreeFS.ts | 98 ++- .../src/lib/__tests__/FileProcessor.test.ts | 20 +- .../src/lib/__tests__/RootPathUtils.test.ts | 29 + 
.../src/lib/__tests__/TreeFS.test.ts | 574 ++++++++++++++++-- .../__tests__/removeOverlappingRoots.test.ts | 92 +++ .../__tests__/rootRelativeCacheKeys.test.ts | 1 - .../src/lib/removeOverlappingRoots.ts | 33 + .../src/lib/rootRelativeCacheKeys.ts | 3 +- .../src/plugins/dependencies/worker.ts | 7 +- .../src/plugins/haste/worker.ts | 7 +- packages/@expo/metro-file-map/src/types.ts | 16 +- packages/@expo/metro-file-map/src/worker.ts | 38 +- pnpm-lock.yaml | 6 - 47 files changed, 1285 insertions(+), 527 deletions(-) delete mode 100644 packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.js rename packages/@expo/metro-file-map/build/{crawlers/node/hasNativeFindSupport.d.ts => lib/removeOverlappingRoots.d.ts} (60%) create mode 100644 packages/@expo/metro-file-map/build/lib/removeOverlappingRoots.js delete mode 100644 packages/@expo/metro-file-map/src/crawlers/node/hasNativeFindSupport.ts create mode 100644 packages/@expo/metro-file-map/src/lib/__tests__/removeOverlappingRoots.test.ts create mode 100644 packages/@expo/metro-file-map/src/lib/removeOverlappingRoots.ts diff --git a/packages/@expo/cli/CHANGELOG.md b/packages/@expo/cli/CHANGELOG.md index caaa315981a9da..f37e82202635c0 100644 --- a/packages/@expo/cli/CHANGELOG.md +++ b/packages/@expo/cli/CHANGELOG.md @@ -65,6 +65,7 @@ - Implement freestanding, faster TypeScript resolver ([#45227](https://github.com/expo/expo/pull/45227) by [@kitten](https://github.com/kitten)) - Provide Babel config path hint to Expo Metro transformer ([#45260](https://github.com/expo/expo/pull/45260) by [@kitten](https://github.com/kitten)) - Add `@expo/metro-file-map` fork ([#45373](https://github.com/expo/expo/pull/45373) by [@kitten](https://github.com/kitten)) +- Disable watchman by default ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) ## 55.0.12 — 2026-02-25 diff --git a/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts 
b/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts index 0ee10cd4b2a720..ae8fc80854c573 100644 --- a/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts +++ b/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts @@ -96,7 +96,8 @@ export default function createFileMap(config: ConfigT, options?: CreateFileMapOp ...config.watcher.additionalExts, ]) ), - forceNodeFilesystemAPI: !config.resolver.useWatchman, + // NOTE(@kitten): Native find crawler support has been dropped + forceNodeFilesystemAPI: true, healthCheck: config.watcher.healthCheck, ignorePattern: getIgnorePattern(config), maxWorkers: config.maxWorkers, @@ -105,7 +106,7 @@ export default function createFileMap(config: ConfigT, options?: CreateFileMapOp resetCache: config.resetCache, rootDir: config.projectRoot, roots: config.watchFolders, - useWatchman: config.resolver.useWatchman, + useWatchman: config.resolver.useWatchman ?? false, watch, watchmanDeferStates: config.watcher.watchman.deferStates, }); diff --git a/packages/@expo/metro-file-map/CHANGELOG.md b/packages/@expo/metro-file-map/CHANGELOG.md index a2ba70da60600e..932357fcee3406 100644 --- a/packages/@expo/metro-file-map/CHANGELOG.md +++ b/packages/@expo/metro-file-map/CHANGELOG.md @@ -4,8 +4,19 @@ ### 🛠 Breaking changes +- Convert worker and plugin file processors to be non-blocking ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) +- Disable watchman by default ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) + ### 🎉 New features +- Lazily stat files and populate symlinks for Node crawled file trees ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) +- Pre-resolve symlink targets and store normal POSIX paths ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) + ### 🐛 Bug fixes ### 💡 Others + +- Initial fork/implementation 
([#45373](https://github.com/expo/expo/pull/45373) by [@kitten](https://github.com/kitten)) +- Drop `graceful-fs` ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) +- Drop native `find` binary crawler ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) +- [Performance] Tweak Node crawler hot-path and remove overlapping roots ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) diff --git a/packages/@expo/metro-file-map/build/Watcher.d.ts b/packages/@expo/metro-file-map/build/Watcher.d.ts index 78a366ae0a2f4e..ec54ba94458931 100644 --- a/packages/@expo/metro-file-map/build/Watcher.d.ts +++ b/packages/@expo/metro-file-map/build/Watcher.d.ts @@ -12,7 +12,8 @@ interface WatcherOptions { console: Console; enableSymlinks: boolean; extensions: readonly string[]; - forceNodeFilesystemAPI: boolean; + /** @deprecated */ + forceNodeFilesystemAPI?: boolean; healthCheckFilePrefix: string; ignoreForCrawl: (filePath: string) => boolean; ignorePatternForWatch: RegExp; diff --git a/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.js b/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.js deleted file mode 100644 index 2466394e6a7538..00000000000000 --- a/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = hasNativeFindSupport; -const child_process_1 = require("child_process"); -async function hasNativeFindSupport() { - try { - return await new Promise((resolve) => { - // Check the find binary supports the non-POSIX -iname parameter wrapped in parens. 
- const args = ['.', '-type', 'f', '(', '-iname', '*.ts', '-o', '-iname', '*.js', ')']; - const child = (0, child_process_1.spawn)('find', args, { cwd: __dirname }); - child.on('error', () => { - resolve(false); - }); - child.on('exit', (code) => { - resolve(code === 0); - }); - }); - } - catch { - return false; - } -} diff --git a/packages/@expo/metro-file-map/build/crawlers/node/index.js b/packages/@expo/metro-file-map/build/crawlers/node/index.js index 1ab4585093ce96..bce92137bc3c31 100644 --- a/packages/@expo/metro-file-map/build/crawlers/node/index.js +++ b/packages/@expo/metro-file-map/build/crawlers/node/index.js @@ -40,23 +40,20 @@ var __importStar = (this && this.__importStar) || (function () { return result; }; })(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = nodeCrawl; -const child_process_1 = require("child_process"); -const fs = __importStar(require("graceful-fs")); -const os_1 = require("os"); +const fs = __importStar(require("fs")); const path = __importStar(require("path")); -const hasNativeFindSupport_1 = __importDefault(require("./hasNativeFindSupport")); const RootPathUtils_1 = require("../../lib/RootPathUtils"); -const debug = require('debug')('Metro:NodeCrawler'); -function find(roots, extensions, ignore, includeSymlinks, rootDir, console, callback) { +function find(roots, extensions, ignore, includeSymlinks, rootDir, console, previousFileSystem, callback) { const result = new Map(); let activeCalls = 0; const pathUtils = new RootPathUtils_1.RootPathUtils(rootDir); - function search(directory) { + const exts = extensions.reduce((acc, ext) => { + acc[ext] = true; + return acc; + }, {}); + function search(directory, dirNormal, isWithinRoot) { activeCalls++; fs.readdir(directory, { withFileTypes: true }, (err, entries) => { activeCalls--; @@ -64,39 +61,56 @@ function 
find(roots, extensions, ignore, includeSymlinks, rootDir, console, call console.warn(`Error "${err.code ?? err.message}" reading contents of "${directory}", skipping. Add this directory to your ignore list to exclude it.`); } else { - entries.forEach((entry) => { - const file = path.join(directory, entry.name.toString()); - if (ignore(file)) { - return; - } - if (entry.isSymbolicLink() && !includeSymlinks) { - return; + for (let idx = 0; idx < entries.length; idx++) { + const entry = entries[idx]; + const name = entry.name.toString(); + const file = directory + path.sep + name; + const isSymbolicLink = entry.isSymbolicLink(); + if (ignore(file) || (!includeSymlinks && isSymbolicLink)) { + continue; } + // Deriving a normal path above the root dir requires slicing off an up-fragment + // then checking if the target matches the next segment of the root dir. It's therefore + // easier to fall back to `pathUtils.absoluteToNormal` + const childNormal = !isWithinRoot + ? pathUtils.absoluteToNormal(file) + : dirNormal === '' + ? name + : dirNormal + path.sep + name; if (entry.isDirectory()) { - search(file); - return; + search(file, childNormal, isWithinRoot || childNormal === ''); + continue; + } + const ext = path.extname(file).substr(1); + if (!isSymbolicLink && !exts[ext]) { + continue; + } + const mtime = previousFileSystem?.getMtimeByNormalPath(childNormal); + if (mtime == null || mtime === 0) { + // When we're in a cold start or a previous file doesn't exist, we can skip + // the mtime/size lstat now and treat the file as new + result.set(childNormal, [null, 0, 0, null, isSymbolicLink ? 
1 : 0, null]); } - activeCalls++; - fs.lstat(file, (err, stat) => { - activeCalls--; - if (!err && stat) { - const ext = path.extname(file).substr(1); - if (stat.isSymbolicLink() || extensions.includes(ext)) { - result.set(pathUtils.absoluteToNormal(file), [ + else { + activeCalls++; + fs.lstat(file, (err, stat) => { + activeCalls--; + if (!err && stat) { + result.set(childNormal, [ stat.mtime.getTime(), stat.size, 0, null, - stat.isSymbolicLink() ? 1 : 0, + isSymbolicLink ? 1 : 0, null, ]); } - } - if (activeCalls === 0) { - callback(result); - } - }); - }); + if (activeCalls === 0) { + callback(result); + } + }); + } + } } if (activeCalls === 0) { callback(result); @@ -104,67 +118,20 @@ function find(roots, extensions, ignore, includeSymlinks, rootDir, console, call }); } if (roots.length > 0) { - roots.forEach(search); + for (const root of roots) { + const rootNormal = pathUtils.absoluteToNormal(root); + const isWithinRoot = !rootNormal.startsWith('..' + path.sep); + search(root, rootNormal, isWithinRoot); + } } else { callback(result); } } -function findNative(roots, extensions, ignore, includeSymlinks, rootDir, console, callback) { - // Examples: - // ( ( -type f ( -iname *.js ) ) ) - // ( ( -type f ( -iname *.js -o -iname *.ts ) ) ) - // ( ( -type f ( -iname *.js ) ) -o -type l ) - // ( ( -type f ) -o -type l ) - const extensionClause = extensions.length - ? `( ${extensions.map((ext) => `-iname *.${ext}`).join(' -o ')} )` - : ''; // Empty inner expressions eg "( )" are not allowed - const expression = `( ( -type f ${extensionClause} ) ${includeSymlinks ? '-o -type l ' : ''})`; - const pathUtils = new RootPathUtils_1.RootPathUtils(rootDir); - const child = (0, child_process_1.spawn)('find', [...roots, ...expression.split(' ')]); - let stdout = ''; - if (child.stdout == null) { - throw new Error('stdout is null - this should never happen. 
Please open up an issue at https://github.com/facebook/metro'); - } - child.stdout.setEncoding('utf-8'); - child.stdout.on('data', (data) => (stdout += data)); - child.stdout.on('close', () => { - const lines = stdout - .trim() - .split('\n') - .filter((x) => !ignore(x)); - const result = new Map(); - let count = lines.length; - if (!count) { - callback(new Map()); - } - else { - lines.forEach((filePath) => { - fs.lstat(filePath, (err, stat) => { - if (!err && stat) { - result.set(pathUtils.absoluteToNormal(filePath), [ - stat.mtime.getTime(), - stat.size, - 0, - null, - stat.isSymbolicLink() ? 1 : 0, - null, - ]); - } - if (--count === 0) { - callback(result); - } - }); - }); - } - }); -} async function nodeCrawl(options) { - const { console, previousState, extensions, forceNodeFilesystemAPI, ignore, rootDir, includeSymlinks, perfLogger, roots, abortSignal, subpath, } = options; + const { console, previousState, extensions, ignore, rootDir, includeSymlinks, perfLogger, roots, abortSignal, subpath, } = options; abortSignal?.throwIfAborted(); perfLogger?.point('nodeCrawl_start'); - const useNativeFind = !forceNodeFilesystemAPI && (0, os_1.platform)() !== 'win32' && (await (0, hasNativeFindSupport_1.default)()); - debug('Using system find: %s', useNativeFind); return new Promise((resolve, reject) => { const callback = (fileData) => { const difference = previousState.fileSystem.getDifference(fileData, { @@ -180,11 +147,6 @@ async function nodeCrawl(options) { } resolve(difference); }; - if (useNativeFind) { - findNative(roots, extensions, ignore, includeSymlinks, rootDir, console, callback); - } - else { - find(roots, extensions, ignore, includeSymlinks, rootDir, console, callback); - } + find(roots, extensions, ignore, includeSymlinks, rootDir, console, previousState.fileSystem, callback); }); } diff --git a/packages/@expo/metro-file-map/build/crawlers/watchman/index.js b/packages/@expo/metro-file-map/build/crawlers/watchman/index.js index 
dac9241b02f15d..3432701efcbe47 100644 --- a/packages/@expo/metro-file-map/build/crawlers/watchman/index.js +++ b/packages/@expo/metro-file-map/build/crawlers/watchman/index.js @@ -259,6 +259,9 @@ async function watchmanCrawl({ abortSignal, computeSha1, extensions, ignore, inc if (fileData.type === 'l') { symlinkInfo = fileData['symlink_target'] ?? 1; } + if (typeof symlinkInfo === 'string') { + symlinkInfo = (0, normalizePathSeparatorsToPosix_1.default)(pathUtils.resolveSymlinkToNormal(relativeFilePath, symlinkInfo)); + } const nextData = [mtime, size, 0, sha1hex ?? null, symlinkInfo, null]; // If watchman is fresh, the removed files map starts with all files // and we remove them as we verify they still exist. diff --git a/packages/@expo/metro-file-map/build/index.js b/packages/@expo/metro-file-map/build/index.js index cc9b698f821f86..f4a7abb9cc08bb 100644 --- a/packages/@expo/metro-file-map/build/index.js +++ b/packages/@expo/metro-file-map/build/index.js @@ -58,6 +58,7 @@ const TreeFS_1 = __importDefault(require("./lib/TreeFS")); const checkWatchmanCapabilities_1 = __importDefault(require("./lib/checkWatchmanCapabilities")); const normalizePathSeparatorsToPosix_1 = __importDefault(require("./lib/normalizePathSeparatorsToPosix")); const normalizePathSeparatorsToSystem_1 = __importDefault(require("./lib/normalizePathSeparatorsToSystem")); +const removeOverlappingRoots_1 = __importDefault(require("./lib/removeOverlappingRoots")); const debug = require('debug')('Metro:FileMap'); var DiskCacheManager_2 = require("./cache/DiskCacheManager"); Object.defineProperty(exports, "DiskCacheManager", { enumerable: true, get: function () { return DiskCacheManager_2.DiskCacheManager; } }); @@ -72,7 +73,7 @@ Object.defineProperty(exports, "HastePlugin", { enumerable: true, get: function // This should be bumped whenever a code change to `metro-file-map` itself // would cause a change to the cache data structure and/or content (for a given // filesystem state and build 
parameters). -const CACHE_BREAKER = '11'; +const CACHE_BREAKER = '12'; const CHANGE_INTERVAL = 30; const NODE_MODULES = path.sep + 'node_modules' + path.sep; const VCS_DIRECTORIES = /[/\\]\.(git|hg)[/\\]/.source; @@ -227,14 +228,14 @@ class FileMap extends events_1.default { plugins, retainAllFiles: options.retainAllFiles, rootDir: options.rootDir, - roots: Array.from(new Set(options.roots)), + roots: (0, removeOverlappingRoots_1.default)(options.roots), }; this.#options = { ...buildParameters, healthCheck: options.healthCheck, perfLoggerFactory: options.perfLoggerFactory, resetCache: options.resetCache, - useWatchman: options.useWatchman == null ? true : options.useWatchman, + useWatchman: options.useWatchman ?? false, watch: !!options.watch, watchmanDeferStates: options.watchmanDeferStates ?? [], }; @@ -273,8 +274,8 @@ class FileMap extends events_1.default { } const rootDir = this.#options.rootDir; this.#startupPerfLogger?.point('constructFileSystem_start'); - const processFile = (normalPath, metadata, opts) => { - const result = this.#fileProcessor.processRegularFile(normalPath, metadata, { + const processFile = async (normalPath, metadata, opts) => { + const result = await this.#fileProcessor.processRegularFile(normalPath, metadata, { computeSha1: opts.computeSha1, maybeReturnContent: true, }); @@ -405,7 +406,7 @@ class FileMap extends events_1.default { .readlink(this.#pathUtils.normalToAbsolute(normalPath)) .then((symlinkTarget) => { fileMetadata[constants_1.default.VISITED] = 1; - fileMetadata[constants_1.default.SYMLINK] = symlinkTarget; + fileMetadata[constants_1.default.SYMLINK] = (0, normalizePathSeparatorsToPosix_1.default)(this.#pathUtils.resolveSymlinkToNormal(normalPath, symlinkTarget)); }); } return null; @@ -434,7 +435,9 @@ class FileMap extends events_1.default { if (fileData[constants_1.default.SYMLINK] === 0) { filesToProcess.push([normalFilePath, fileData]); } - else { + else if (fileData[constants_1.default.MTIME] != null && 
fileData[constants_1.default.MTIME] !== 0) { + // The symlink will only be updated, if it's been accessed before + // If this is a newly crawled entry, it's skipped const maybeReadLink = this.#maybeReadLink(normalFilePath, fileData); if (maybeReadLink) { readLinkPromises.push(maybeReadLink.catch((error) => { diff --git a/packages/@expo/metro-file-map/build/lib/FileProcessor.d.ts b/packages/@expo/metro-file-map/build/lib/FileProcessor.d.ts index 29a970f11a7a8b..559e22ea0a9105 100644 --- a/packages/@expo/metro-file-map/build/lib/FileProcessor.d.ts +++ b/packages/@expo/metro-file-map/build/lib/FileProcessor.d.ts @@ -34,9 +34,9 @@ export declare class FileProcessor { error: MaybeCodedError; }[]; }>; - processRegularFile(normalPath: string, fileMetadata: FileMetadata, req: ProcessFileRequest): { + processRegularFile(normalPath: string, fileMetadata: FileMetadata, req: ProcessFileRequest): Promise<{ content: Buffer | undefined | null; - } | null; + } | null>; end(): Promise; } export {}; diff --git a/packages/@expo/metro-file-map/build/lib/FileProcessor.js b/packages/@expo/metro-file-map/build/lib/FileProcessor.js index b4b23c7d0a91b9..51b64da7cdd07c 100644 --- a/packages/@expo/metro-file-map/build/lib/FileProcessor.js +++ b/packages/@expo/metro-file-map/build/lib/FileProcessor.js @@ -63,11 +63,11 @@ class FileProcessor { await batchWorker.end(); return { errors }; } - processRegularFile(normalPath, fileMetadata, req) { + async processRegularFile(normalPath, fileMetadata, req) { const workerInput = this.#getWorkerInput(normalPath, fileMetadata, req); return workerInput ? 
{ - content: processWorkerReply(this.#inBandWorker.processFile(workerInput), workerInput.pluginsToRun, fileMetadata), + content: processWorkerReply(await this.#inBandWorker.processFile(workerInput), workerInput.pluginsToRun, fileMetadata), } : null; } diff --git a/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts b/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts index 946f6475090fbb..bd6eb7cdca906f 100644 --- a/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts +++ b/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts @@ -12,6 +12,7 @@ export declare class RootPathUtils { absoluteToNormal(absolutePath: string): string; normalToAbsolute(normalPath: string): string; relativeToNormal(relativePath: string): string; + resolveSymlinkToNormal(symlinkNormalPath: string, readlinkResult: string): string; getAncestorOfRootIdx(normalPath: string): number | null; joinNormalToRelative(normalPath: string, relativePath: string): { normalPath: string; diff --git a/packages/@expo/metro-file-map/build/lib/RootPathUtils.js b/packages/@expo/metro-file-map/build/lib/RootPathUtils.js index dd96b48e80caf3..fee7dfdcf8a04c 100644 --- a/packages/@expo/metro-file-map/build/lib/RootPathUtils.js +++ b/packages/@expo/metro-file-map/build/lib/RootPathUtils.js @@ -12,6 +12,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.RootPathUtils = void 0; const invariant_1 = __importDefault(require("invariant")); const path_1 = __importDefault(require("path")); +const normalizePathSeparatorsToSystem_1 = __importDefault(require("./normalizePathSeparatorsToSystem")); /** * This module provides path utility functions - similar to `node:path` - * optimised for Metro's use case (many paths, few roots) under assumptions @@ -129,6 +130,22 @@ class RootPathUtils { return (this.#tryCollapseIndirectionsInSuffix(relativePath, 0, 0)?.collapsedPath ?? 
path_1.default.relative(this.#rootDir, path_1.default.join(this.#rootDir, relativePath))); } + resolveSymlinkToNormal(symlinkNormalPath, readlinkResult) { + let target = (0, normalizePathSeparatorsToSystem_1.default)(readlinkResult); + // WARN: This only applies to Windows + Node 20 case, where the value is completely + // unnormalized and a trailing slash may be returned + if (target[target.length - 1] === path_1.default.sep) { + target = target.slice(0, -1); + } + if (path_1.default.isAbsolute(target)) { + return this.absoluteToNormal(target); + } + // Resolve relative to the symlink's containing directory, expressed as + // a root-relative (possibly non-normal) path, then normalize + const sepIdx = symlinkNormalPath.lastIndexOf(path_1.default.sep); + const rootRelativeTarget = sepIdx === -1 ? target : symlinkNormalPath.slice(0, sepIdx) + path_1.default.sep + target; + return this.relativeToNormal(rootRelativeTarget); + } // If a path is a direct ancestor of the project root (or the root itself), // return a number with the degrees of separation, e.g. root=0, parent=1,.. // or null otherwise. 
diff --git a/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts b/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts index 5929d40345072e..a42b73e39ba5a3 100644 --- a/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts +++ b/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts @@ -96,6 +96,7 @@ export default class TreeFS implements MutableFileSystem { changedFiles: FileData; removedFiles: Set; }; + getMtimeByNormalPath(normalPath: Path): number | null; getSha1(mixedPath: Path): string | null; getOrComputeSha1(mixedPath: Path): Promise<{ sha1: string; diff --git a/packages/@expo/metro-file-map/build/lib/TreeFS.js b/packages/@expo/metro-file-map/build/lib/TreeFS.js index 7605a51ce50df3..7fa238df8b2e52 100644 --- a/packages/@expo/metro-file-map/build/lib/TreeFS.js +++ b/packages/@expo/metro-file-map/build/lib/TreeFS.js @@ -9,9 +9,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); +const fs_1 = __importDefault(require("fs")); const invariant_1 = __importDefault(require("invariant")); const path_1 = __importDefault(require("path")); const constants_1 = __importDefault(require("../constants")); +const normalizePathSeparatorsToPosix_1 = __importDefault(require("./normalizePathSeparatorsToPosix")); +const normalizePathSeparatorsToSystem_1 = __importDefault(require("./normalizePathSeparatorsToSystem")); const RootPathUtils_1 = require("./RootPathUtils"); function isDirectory(node) { return node instanceof Map; @@ -69,7 +72,6 @@ function isRegularFile(node) { * a trailing slash */ class TreeFS { - #cachedNormalSymlinkTargets = new WeakMap(); #pathUtils; #processFile; #rootDir; @@ -125,12 +127,16 @@ class TreeFS { continue; } if (newMetadata[constants_1.default.MTIME] != null && - // TODO: Remove when mtime is null if not populated newMetadata[constants_1.default.MTIME] !== 0 && newMetadata[constants_1.default.MTIME] === 
metadata[constants_1.default.MTIME]) { // Types and modified time match - not changed. changedFiles.delete(canonicalPath); } + else if ((newMetadata[constants_1.default.MTIME] == null || newMetadata[constants_1.default.MTIME] === 0) && + (metadata[constants_1.default.MTIME] == null || metadata[constants_1.default.MTIME] === 0)) { + // If file is still untouched then mark it as unchanged + changedFiles.delete(canonicalPath); + } else if (newMetadata[constants_1.default.SHA1] != null && newMetadata[constants_1.default.SHA1] === metadata[constants_1.default.SHA1] && metadata[constants_1.default.VISITED] === 1) { @@ -149,6 +155,12 @@ class TreeFS { removedFiles, }; } + getMtimeByNormalPath(normalPath) { + const result = this.#lookupByNormalPath(normalPath, { + followLeaf: false, + }); + return result.exists && !isDirectory(result.node) ? result.node[constants_1.default.MTIME] : null; + } getSha1(mixedPath) { const fileMetadata = this.#getFileData(mixedPath); return (fileMetadata && fileMetadata[constants_1.default.SHA1]) ?? 
null; @@ -162,6 +174,18 @@ class TreeFS { return null; } const { canonicalPath, node: fileMetadata } = result; + // Populate mtime and size on demand + if (fileMetadata[constants_1.default.MTIME] == null || fileMetadata[constants_1.default.MTIME] === 0) { + fileMetadata[constants_1.default.SHA1] = null; + const absolutePath = this.#pathUtils.normalToAbsolute(canonicalPath); + try { + const stat = await fs_1.default.promises.lstat(absolutePath); + const diskMtime = stat.mtime.getTime(); + fileMetadata[constants_1.default.MTIME] = diskMtime; + fileMetadata[constants_1.default.SIZE] = stat.size; + } + catch { } + } // Empty strings const existing = fileMetadata[constants_1.default.SHA1]; if (existing != null && existing.length > 0) { @@ -486,13 +510,20 @@ class TreeFS { } // Symlink in a directory path const normalSymlinkTarget = this.#resolveSymlinkTargetToNormalPath(segmentNode, currentPath); + if (normalSymlinkTarget == null) { + return { + canonicalMissingPath: currentPath, + exists: false, + missingSegmentName: segmentName, + }; + } if (opts.collectLinkPaths) { opts.collectLinkPaths.add(this.#pathUtils.normalToAbsolute(currentPath)); } const remainingTargetPath = isLastSegment ? '' : targetNormalPath.slice(fromIdx); // Append any subsequent path segments to the symlink target, and reset // with our new target. - const joinedResult = this.#pathUtils.joinNormalToRelative(normalSymlinkTarget.normalPath, remainingTargetPath); + const joinedResult = this.#pathUtils.joinNormalToRelative(normalSymlinkTarget, remainingTargetPath); targetNormalPath = joinedResult.normalPath; // Two special cases (covered by unit tests): // @@ -507,7 +538,8 @@ class TreeFS { if (collectAncestors && !isLastSegment && // No-op optimisation to bail out the common case of nothing to do. 
- (normalSymlinkTarget.ancestorOfRootIdx === 0 || joinedResult.collapsedSegments > 0)) { + ((ancestorOfRootIdx = this.#pathUtils.getAncestorOfRootIdx(normalSymlinkTarget)) === 0 || + joinedResult.collapsedSegments > 0)) { let node = this.#rootNode; let collapsedPath = ''; const reverseAncestors = []; @@ -516,7 +548,7 @@ class TreeFS { // Add the root only if the target is the root or we have // collapsed segments. i > 0 || - normalSymlinkTarget.ancestorOfRootIdx === 0 || + ancestorOfRootIdx === 0 || joinedResult.collapsedSegments > 0) { reverseAncestors.push({ ancestorOfRootIdx: i, @@ -534,7 +566,7 @@ class TreeFS { // the symlink target, and start collecting ancestors only // from the target itself (ie, the basename of the normal target path) // onwards. - unseenPathFromIdx = normalSymlinkTarget.startOfBasenameIdx; + unseenPathFromIdx = normalSymlinkTarget.lastIndexOf(path_1.default.sep) + 1; if (seen == null) { // Optimisation: set this lazily only when we've encountered a symlink seen = new Set([requestedNormalPath]); @@ -794,23 +826,31 @@ class TreeFS { } } #resolveSymlinkTargetToNormalPath(symlinkNode, canonicalPathOfSymlink) { - const cachedResult = this.#cachedNormalSymlinkTargets.get(symlinkNode); - if (cachedResult != null) { - return cachedResult; - } - const literalSymlinkTarget = symlinkNode[constants_1.default.SYMLINK]; - (0, invariant_1.default)(typeof literalSymlinkTarget === 'string', 'Expected symlink target to be populated.'); - const absoluteSymlinkTarget = path_1.default.resolve(this.#rootDir, canonicalPathOfSymlink, '..', // Symlink target is relative to its containing directory. 
- literalSymlinkTarget // May be absolute, in which case the above are ignored - ); - const normalSymlinkTarget = path_1.default.relative(this.#rootDir, absoluteSymlinkTarget); - const result = { - ancestorOfRootIdx: this.#pathUtils.getAncestorOfRootIdx(normalSymlinkTarget), - normalPath: normalSymlinkTarget, - startOfBasenameIdx: normalSymlinkTarget.lastIndexOf(path_1.default.sep) + 1, - }; - this.#cachedNormalSymlinkTargets.set(symlinkNode, result); - return result; + const symlinkTarget = symlinkNode[constants_1.default.SYMLINK]; + if (symlinkTarget === 1) { + // Symlink target not yet resolved — read it lazily on first traversal + const absoluteSymlink = this.#pathUtils.normalToAbsolute(canonicalPathOfSymlink); + try { + const literalSymlinkTarget = fs_1.default.readlinkSync(absoluteSymlink); + const normalTarget = this.#pathUtils.resolveSymlinkToNormal(canonicalPathOfSymlink, literalSymlinkTarget); + symlinkNode[constants_1.default.SYMLINK] = (0, normalizePathSeparatorsToPosix_1.default)(normalTarget); + symlinkNode[constants_1.default.VISITED] = 1; + return normalTarget; + } + catch { + return null; + } + } + else if (symlinkTarget === 0 || symlinkTarget == null) { + // WARN: We shouldn't call this method on non-symlinks. Outside of tests + // this condition shouldn't trigger. 
It's fine not to resolve a symlink if + // it does trigger however + return null; + } + else { + (0, invariant_1.default)(typeof symlinkTarget === 'string', 'Expected symlink target to be populated.'); + return (0, normalizePathSeparatorsToSystem_1.default)(symlinkTarget); + } } #getFileData(filePath, opts = { followLeaf: true }) { const normalPath = this.#normalizePath(filePath); diff --git a/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.d.ts b/packages/@expo/metro-file-map/build/lib/removeOverlappingRoots.d.ts similarity index 60% rename from packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.d.ts rename to packages/@expo/metro-file-map/build/lib/removeOverlappingRoots.d.ts index 88de38315757e8..0471e9e41d425b 100644 --- a/packages/@expo/metro-file-map/build/crawlers/node/hasNativeFindSupport.d.ts +++ b/packages/@expo/metro-file-map/build/lib/removeOverlappingRoots.d.ts @@ -3,5 +3,8 @@ * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. + * + * @flow strict + * @format */ -export default function hasNativeFindSupport(): Promise; +export default function removeOverlappingRoots(roots: readonly string[]): readonly string[]; diff --git a/packages/@expo/metro-file-map/build/lib/removeOverlappingRoots.js b/packages/@expo/metro-file-map/build/lib/removeOverlappingRoots.js new file mode 100644 index 00000000000000..f5458081f7302b --- /dev/null +++ b/packages/@expo/metro-file-map/build/lib/removeOverlappingRoots.js @@ -0,0 +1,37 @@ +"use strict"; +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict + * @format + */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = removeOverlappingRoots; +const path_1 = __importDefault(require("path")); +function removeOverlappingRoots(roots) { + const sorted = roots + .map((r) => path_1.default.resolve(r)) + .sort((a, b) => { + const aRoot = a + path_1.default.sep; + const bRoot = b + path_1.default.sep; + return aRoot < bRoot ? -1 : aRoot > bRoot ? 1 : 0; + }); + if (sorted.length === 0) { + return sorted; + } + const result = [sorted[0]]; + for (let i = 1; i < sorted.length; i++) { + const rootPath = sorted[i] + path_1.default.sep; + const prevPath = result[result.length - 1] + path_1.default.sep; + if (!rootPath.startsWith(prevPath)) { + result.push(sorted[i]); + } + } + return result; +} diff --git a/packages/@expo/metro-file-map/build/lib/rootRelativeCacheKeys.js b/packages/@expo/metro-file-map/build/lib/rootRelativeCacheKeys.js index 7df40e303b4d03..21277bab7a3926 100644 --- a/packages/@expo/metro-file-map/build/lib/rootRelativeCacheKeys.js +++ b/packages/@expo/metro-file-map/build/lib/rootRelativeCacheKeys.js @@ -29,11 +29,12 @@ function rootRelativeCacheKeys(buildParameters) { case 'extensions': case 'computeSha1': case 'enableSymlinks': - case 'forceNodeFilesystemAPI': case 'retainAllFiles': return buildParameters[key] ?? 
null; case 'ignorePattern': return buildParameters[key].toString(); + case 'forceNodeFilesystemAPI': + return null; default: key; throw new Error('Unrecognised key in build parameters: ' + key); diff --git a/packages/@expo/metro-file-map/build/plugins/dependencies/worker.d.ts b/packages/@expo/metro-file-map/build/plugins/dependencies/worker.d.ts index a983c644aa03e2..226712410e2ba7 100644 --- a/packages/@expo/metro-file-map/build/plugins/dependencies/worker.d.ts +++ b/packages/@expo/metro-file-map/build/plugins/dependencies/worker.d.ts @@ -12,7 +12,7 @@ export default class DependencyExtractorWorker implements MetadataWorker { constructor({ dependencyExtractor }: Readonly<{ dependencyExtractor: string | null; }>); - processFile(data: WorkerMessage, utils: Readonly<{ - getContent: () => Buffer; - }>): V8Serializable; + processFile(data: WorkerMessage, utils: { + readonly getContent: () => Promise; + }): Promise; } diff --git a/packages/@expo/metro-file-map/build/plugins/dependencies/worker.js b/packages/@expo/metro-file-map/build/plugins/dependencies/worker.js index 67096ac6ef3899..a3075309c1853c 100644 --- a/packages/@expo/metro-file-map/build/plugins/dependencies/worker.js +++ b/packages/@expo/metro-file-map/build/plugins/dependencies/worker.js @@ -17,8 +17,8 @@ class DependencyExtractorWorker { this.#dependencyExtractor = mod.__esModule === true && 'default' in mod ? mod.default : mod; } } - processFile(data, utils) { - const content = utils.getContent().toString(); + async processFile(data, utils) { + const content = (await utils.getContent()).toString(); const { filePath } = data; const dependencies = this.#dependencyExtractor != null ? 
this.#dependencyExtractor.extract(content, filePath, dependencyExtractor_1.extract) diff --git a/packages/@expo/metro-file-map/build/plugins/haste/worker.d.ts b/packages/@expo/metro-file-map/build/plugins/haste/worker.d.ts index 0f258f62bb727e..aff19cf9453e0d 100644 --- a/packages/@expo/metro-file-map/build/plugins/haste/worker.d.ts +++ b/packages/@expo/metro-file-map/build/plugins/haste/worker.d.ts @@ -10,7 +10,7 @@ export default class Worker implements MetadataWorker { constructor({ hasteImplModulePath }: Readonly<{ hasteImplModulePath: string | null; }>); - processFile(data: WorkerMessage, utils: Readonly<{ - getContent: () => Buffer; - }>): V8Serializable; + processFile(data: WorkerMessage, utils: { + readonly getContent: () => Promise; + }): Promise; } diff --git a/packages/@expo/metro-file-map/build/plugins/haste/worker.js b/packages/@expo/metro-file-map/build/plugins/haste/worker.js index 99418e2f289c83..5989598f38af44 100644 --- a/packages/@expo/metro-file-map/build/plugins/haste/worker.js +++ b/packages/@expo/metro-file-map/build/plugins/haste/worker.js @@ -20,13 +20,13 @@ class Worker { this.#hasteImpl = mod.__esModule === true && 'default' in mod ? mod.default : mod; } } - processFile(data, utils) { + async processFile(data, utils) { let hasteName = null; const { filePath } = data; if (filePath.endsWith(PACKAGE_JSON)) { // Process a package.json that is returned as a PACKAGE type with its name. 
try { - const fileData = JSON.parse(utils.getContent().toString()); + const fileData = JSON.parse((await utils.getContent()).toString()); if (fileData.name) { hasteName = fileData.name; } diff --git a/packages/@expo/metro-file-map/build/types.d.ts b/packages/@expo/metro-file-map/build/types.d.ts index 01ffed0cd62a55..ff61f2372bb77b 100644 --- a/packages/@expo/metro-file-map/build/types.d.ts +++ b/packages/@expo/metro-file-map/build/types.d.ts @@ -12,7 +12,8 @@ export interface BuildParameters { readonly computeSha1: boolean; readonly enableSymlinks: boolean; readonly extensions: readonly string[]; - readonly forceNodeFilesystemAPI: boolean; + /** @deprecated */ + readonly forceNodeFilesystemAPI?: boolean; readonly ignorePattern: RegExp; readonly plugins: readonly InputFileMapPlugin[]; readonly retainAllFiles: boolean; @@ -86,7 +87,8 @@ export interface CrawlerOptions { computeSha1: boolean; console: Console; extensions: readonly string[]; - forceNodeFilesystemAPI: boolean; + /** @deprecated */ + forceNodeFilesystemAPI?: boolean; ignore: IgnoreMatcher; includeSymlinks: boolean; perfLogger?: PerfLogger | null | undefined; @@ -172,10 +174,10 @@ export interface FileMapPlugin; export interface MetadataWorkerParams { - getContent(): Buffer; + getContent(): Promise; } export interface MetadataWorker { - processFile(message: WorkerMessage, params: MetadataWorkerParams): V8Serializable; + processFile(message: WorkerMessage, params: MetadataWorkerParams): V8Serializable | Promise; } export type IgnoreMatcher = (item: string) => boolean; export type FileData = Map; @@ -212,6 +214,7 @@ export interface FileSystem { removedFiles: Set; }; getSerializableSnapshot(): CacheData['fileSystemData']; + getMtimeByNormalPath(file: Path): number | undefined | null; getSha1(file: Path): string | undefined | null; getOrComputeSha1(file: Path): Promise<{ sha1: string; @@ -329,7 +332,7 @@ export interface MutableFileSystem extends FileSystem { export type Path = string; export type 
ProcessFileFunction = (normalPath: string, metadata: FileMetadata, request: Readonly<{ computeSha1: boolean; -}>) => Buffer | undefined | null; +}>) => Promise; export type RawMockMap = { /** posix-separated mock name to posix-separated project-relative paths */ readonly duplicates: Map>; diff --git a/packages/@expo/metro-file-map/build/worker.d.ts b/packages/@expo/metro-file-map/build/worker.d.ts index dce26c934a76c2..4d88b7483237f1 100644 --- a/packages/@expo/metro-file-map/build/worker.d.ts +++ b/packages/@expo/metro-file-map/build/worker.d.ts @@ -11,7 +11,7 @@ import type { WorkerMessage, WorkerMetadata, WorkerSetupArgs } from './types'; export declare class Worker { #private; constructor({ plugins }: WorkerSetupArgs); - processFile(data: WorkerMessage): WorkerMetadata; + processFile(data: WorkerMessage): Promise; } /** * Called automatically by jest-worker before the first call to `worker` when @@ -21,4 +21,4 @@ export declare function setup(args: WorkerSetupArgs): void; /** * Called by jest-worker with each workload */ -export declare function processFile(data: WorkerMessage): WorkerMetadata; +export declare function processFile(data: WorkerMessage): Promise; diff --git a/packages/@expo/metro-file-map/build/worker.js b/packages/@expo/metro-file-map/build/worker.js index 17548368f4f10c..1dd3dcb1a76358 100644 --- a/packages/@expo/metro-file-map/build/worker.js +++ b/packages/@expo/metro-file-map/build/worker.js @@ -13,7 +13,7 @@ exports.Worker = void 0; exports.setup = setup; exports.processFile = processFile; const crypto_1 = require("crypto"); -const graceful_fs_1 = __importDefault(require("graceful-fs")); +const fs_1 = __importDefault(require("fs")); function sha1hex(content) { return (0, crypto_1.createHash)('sha1').update(content).digest('hex'); } @@ -29,25 +29,32 @@ class Worker { return new PluginWorker(setupArgs); }); } - processFile(data) { - let content; - let sha1; + async processFile(data) { + let contentPromise; + let sha1Promise; const { 
computeSha1, filePath, pluginsToRun } = data; const getContent = () => { - if (content == null) { - content = graceful_fs_1.default.readFileSync(filePath); + if (contentPromise == null) { + contentPromise = fs_1.default.promises.readFile(filePath); } - return content; + return contentPromise; }; const workerUtils = { getContent }; - const pluginData = pluginsToRun.map((pluginIdx) => this.#plugins[pluginIdx].processFile(data, workerUtils)); + const pluginDataPromise = Promise.all(pluginsToRun.map((pluginIdx) => this.#plugins[pluginIdx].processFile(data, workerUtils))); // If a SHA-1 is requested on update, compute it. if (computeSha1) { - sha1 = sha1hex(getContent()); + sha1Promise = getContent().then(sha1hex); } - return content && data.maybeReturnContent - ? { content, pluginData, sha1 } - : { pluginData, sha1 }; + return contentPromise != null && data.maybeReturnContent + ? { + content: await contentPromise, + pluginData: await pluginDataPromise, + sha1: await sha1Promise, + } + : { + pluginData: await pluginDataPromise, + sha1: await sha1Promise, + }; } } exports.Worker = Worker; @@ -65,7 +72,7 @@ function setup(args) { /** * Called by jest-worker with each workload */ -function processFile(data) { +async function processFile(data) { if (!singletonWorker) { throw new Error('metro-file-map: setup() must be called before processFile()'); } diff --git a/packages/@expo/metro-file-map/jest.setup.ts b/packages/@expo/metro-file-map/jest.setup.ts index 626a566fff3d1d..b664909ddadc76 100644 --- a/packages/@expo/metro-file-map/jest.setup.ts +++ b/packages/@expo/metro-file-map/jest.setup.ts @@ -1,7 +1,6 @@ // Prevent real filesystem access in tests — use memfs via __mocks__/ jest.mock('fs'); jest.mock('fs/promises'); -jest.mock('graceful-fs'); // Redirect node: prefixed built-in modules to their mocked equivalents jest.mock('node:fs', () => require('fs')); diff --git a/packages/@expo/metro-file-map/package.json b/packages/@expo/metro-file-map/package.json index 
a29bc0f64c83a7..290dc68849010e 100644 --- a/packages/@expo/metro-file-map/package.json +++ b/packages/@expo/metro-file-map/package.json @@ -33,7 +33,6 @@ "dependencies": { "debug": "^4.3.4", "fb-watchman": "^2.0.2", - "graceful-fs": "^4.2.4", "invariant": "^2.2.4", "jest-worker": "^29.7.0", "micromatch": "^4.0.4", @@ -43,7 +42,6 @@ "@expo/metro": "56.0.0-rc.2", "@types/debug": "^4.1.7", "@types/fb-watchman": "^2.0.6", - "@types/graceful-fs": "^4.1.9", "@types/invariant": "^2.2.37", "@types/micromatch": "^4.0.10", "@types/node": "^22.14.0", diff --git a/packages/@expo/metro-file-map/src/Watcher.ts b/packages/@expo/metro-file-map/src/Watcher.ts index 3a85bd51571233..8adbaa6e67278f 100644 --- a/packages/@expo/metro-file-map/src/Watcher.ts +++ b/packages/@expo/metro-file-map/src/Watcher.ts @@ -44,7 +44,8 @@ interface WatcherOptions { console: Console; enableSymlinks: boolean; extensions: readonly string[]; - forceNodeFilesystemAPI: boolean; + /** @deprecated */ + forceNodeFilesystemAPI?: boolean; healthCheckFilePrefix: string; ignoreForCrawl: (filePath: string) => boolean; ignorePatternForWatch: RegExp; diff --git a/packages/@expo/metro-file-map/src/crawlers/node/__tests__/index.test.ts b/packages/@expo/metro-file-map/src/crawlers/node/__tests__/index.test.ts index 4bf377fe7e9609..91fc01cd820c3c 100644 --- a/packages/@expo/metro-file-map/src/crawlers/node/__tests__/index.test.ts +++ b/packages/@expo/metro-file-map/src/crawlers/node/__tests__/index.test.ts @@ -5,32 +5,15 @@ * LICENSE file in the root directory of this source tree. 
*/ -import { spawn } from 'child_process'; -import { EventEmitter } from 'events'; import { vol } from 'memfs'; import H from '../../../constants'; import TreeFS from '../../../lib/TreeFS'; import type { CrawlerOptions, FileData, FileMetadata, PerfLogger } from '../../../types'; -import hasNativeFindSupport from '../hasNativeFindSupport'; import nodeCrawl from '../index'; -jest.mock('../hasNativeFindSupport', () => ({ - __esModule: true, - default: jest.fn().mockResolvedValue(false), -})); -jest.mock('child_process', () => ({ - spawn: jest.fn(), -})); -jest.mock('os', () => ({ - ...jest.requireActual('os'), - platform: () => 'linux', -})); - const rootDir = '/project'; -const processFile = () => null; -const mockedSpawn = jest.mocked(spawn); -const mockedHasNativeFindSupport = jest.mocked(hasNativeFindSupport); +const processFile = async () => null; function makeTreeFS(files?: FileData): TreeFS { return new TreeFS({ rootDir, files, processFile }); @@ -65,8 +48,6 @@ function sorted(iter: IterableIterator): string[] { describe('node crawler', () => { beforeEach(() => { vol.reset(); - mockedHasNativeFindSupport.mockResolvedValue(false); - mockedSpawn.mockReset(); }); test('discovers files by extension', async () => { @@ -133,12 +114,16 @@ describe('node crawler', () => { '/project/fruits/tomato.js': 'same', }); - // Get the mtime that memfs assigned to tomato so we can match it + // Get the mtime that memfs assigned to each file const tomatoStat = vol.statSync('/project/fruits/tomato.js'); + const strawberryStat = vol.statSync('/project/fruits/strawberry.js'); const previousFiles: FileData = new Map([ // strawberry has a different mtime → will be reported as changed - ['fruits/strawberry.js', [0, 0, 1, null, 0, null] as FileMetadata], + [ + 'fruits/strawberry.js', + [strawberryStat.mtime.getTime() - 1000, 0, 1, null, 0, null] as FileMetadata, + ], // tomato has matching mtime → unchanged, excluded from changedFiles [ 'fruits/tomato.js', @@ -234,7 +219,7 @@ 
describe('node crawler', () => { expect(changedFiles.get('fruits/apple.js')![H.SYMLINK]).toBe(0); }); - test('populates file metadata correctly', async () => { + test('populates file metadata with null mtime on cold start', async () => { vol.fromJSON({ '/project/fruits/apple.js': 'hello', }); @@ -243,85 +228,119 @@ describe('node crawler', () => { const meta = changedFiles.get('fruits/apple.js')!; expect(meta).toBeDefined(); - expect(meta[H.MTIME]).toBeGreaterThan(0); - expect(meta[H.SIZE]).toBe(5); // 'hello'.length + // On cold start (empty previous FS), stat is deferred + expect(meta[H.MTIME]).toBeNull(); + expect(meta[H.SIZE]).toBe(0); expect(meta[H.VISITED]).toBe(0); expect(meta[H.SHA1]).toBeNull(); expect(meta[H.SYMLINK]).toBe(0); }); - describe('native find', () => { - function mockSpawnFind(filePaths: string[]) { - mockedSpawn.mockImplementation((() => { - const stdout = new EventEmitter() as EventEmitter & { - setEncoding: jest.Mock; - }; - stdout.setEncoding = jest.fn(); - process.nextTick(() => { - stdout.emit('data', filePaths.join('\n')); - process.nextTick(() => stdout.emit('close')); - }); - return { stdout, on: jest.fn() }; - }) as any); - } - - beforeEach(() => { - mockedHasNativeFindSupport.mockResolvedValue(true); + test('skips lstat for files with no prior mtime', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + '/project/fruits/banana.js': 'b', }); - test('uses native find when available', async () => { - vol.fromJSON({ - '/project/fruits/apple.js': 'a', - '/project/fruits/pear.js': 'b', - '/project/fruits/tomato.js': 'c', - }); - - mockSpawnFind([ - '/project/fruits/apple.js', - '/project/fruits/pear.js', - '/project/fruits/tomato.js', - ]); + const previousFiles: FileData = new Map([ + ['fruits/apple.js', [null, 0, 0, null, 0, null] as FileMetadata], + ['fruits/banana.js', [null, 0, 0, null, 0, null] as FileMetadata], + ]); - const { changedFiles } = await crawl({ - forceNodeFilesystemAPI: false, - ignore: (p: string) => 
/pear/.test(p), - }); + const { changedFiles, removedFiles } = await crawl({ + previousState: { + fileSystem: makeTreeFS(previousFiles), + clocks: new Map(), + }, + }); - expect(mockedSpawn).toHaveBeenCalledWith('find', expect.arrayContaining(['/project/fruits'])); + // Both files had null mtime → stat deferred, getDifference treats as unchanged + expect(changedFiles).toEqual(new Map()); + expect(removedFiles).toEqual(new Set()); + }); - expect(sorted(changedFiles.keys())).toEqual(['fruits/apple.js', 'fruits/tomato.js']); + test('calls lstat only for files with existing mtime', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + '/project/fruits/banana.js': 'b', }); - test('constructs correct find expression for extensions', async () => { - vol.fromJSON({ - '/project/fruits/apple.js': 'a', - }); + const appleStat = vol.statSync('/project/fruits/apple.js'); - mockSpawnFind(['/project/fruits/apple.js']); + const previousFiles: FileData = new Map([ + // apple has a real mtime → will be lstat'd, mtime differs → changed + ['fruits/apple.js', [appleStat.mtime.getTime() - 1000, 0, 1, null, 0, null] as FileMetadata], + // banana has null mtime → stat is deferred + ['fruits/banana.js', [null, 0, 0, null, 0, null] as FileMetadata], + ]); - await crawl({ - forceNodeFilesystemAPI: false, - extensions: ['js', 'json'], - }); + const { changedFiles, removedFiles } = await crawl({ + previousState: { + fileSystem: makeTreeFS(previousFiles), + clocks: new Map(), + }, + }); + + // apple was lstat'd (real mtime in result), banana was not (absent from changedFiles) + expect(changedFiles).toEqual( + new Map([ + [ + 'fruits/apple.js', + [appleStat.mtime.getTime(), appleStat.size, 0, null, 0, null] as FileMetadata, + ], + ]) + ); + expect(removedFiles).toEqual(new Set()); + }); - const spawnArgs = mockedSpawn.mock.calls[0]![1] as string[]; - expect(spawnArgs).toContain('-iname'); - expect(spawnArgs).toContain('*.js'); - expect(spawnArgs).toContain('*.json'); + 
test('excludes unchanged files when lstat mtime matches cache', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + '/project/fruits/banana.js': 'b', }); - test('falls back to node fs when forceNodeFilesystemAPI is true', async () => { - vol.fromJSON({ - '/project/fruits/apple.js': 'a', - }); + const appleStat = vol.statSync('/project/fruits/apple.js'); + const bananaStat = vol.statSync('/project/fruits/banana.js'); - const { changedFiles } = await crawl({ - forceNodeFilesystemAPI: true, - }); + const previousFiles: FileData = new Map([ + [ + 'fruits/apple.js', + [appleStat.mtime.getTime(), appleStat.size, 1, null, 0, null] as FileMetadata, + ], + [ + 'fruits/banana.js', + [bananaStat.mtime.getTime(), bananaStat.size, 1, null, 0, null] as FileMetadata, + ], + ]); - expect(mockedSpawn).not.toHaveBeenCalled(); - expect(sorted(changedFiles.keys())).toEqual(['fruits/apple.js']); + const { changedFiles, removedFiles } = await crawl({ + previousState: { + fileSystem: makeTreeFS(previousFiles), + clocks: new Map(), + }, }); + + // Both files have matching mtime → lstat'd but unchanged + expect(changedFiles).toEqual(new Map()); + expect(removedFiles).toEqual(new Set()); + }); + + test('marks symlinks correctly when stat is skipped', async () => { + vol.fromJSON({ + '/project/fruits/apple.js': 'a', + '/project/fruits/target.js': 'target', + }); + vol.symlinkSync('/project/fruits/target.js', '/project/fruits/link.js'); + + const { changedFiles } = await crawl({ includeSymlinks: true }); + + const linkMeta = changedFiles.get('fruits/link.js')!; + expect(linkMeta).toBeDefined(); + // On cold start, mtime is deferred + expect(linkMeta[H.MTIME]).toBeNull(); + expect(linkMeta[H.SIZE]).toBe(0); + // But symlink flag is still correctly set + expect(linkMeta[H.SYMLINK]).toBe(1); }); describe('abort signal', () => { diff --git a/packages/@expo/metro-file-map/src/crawlers/node/hasNativeFindSupport.ts 
b/packages/@expo/metro-file-map/src/crawlers/node/hasNativeFindSupport.ts deleted file mode 100644 index 13dce3ae9bb667..00000000000000 --- a/packages/@expo/metro-file-map/src/crawlers/node/hasNativeFindSupport.ts +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -import { spawn } from 'child_process'; - -export default async function hasNativeFindSupport(): Promise { - try { - return await new Promise((resolve) => { - // Check the find binary supports the non-POSIX -iname parameter wrapped in parens. - const args = ['.', '-type', 'f', '(', '-iname', '*.ts', '-o', '-iname', '*.js', ')']; - const child = spawn('find', args, { cwd: __dirname }); - child.on('error', () => { - resolve(false); - }); - child.on('exit', (code) => { - resolve(code === 0); - }); - }); - } catch { - return false; - } -} diff --git a/packages/@expo/metro-file-map/src/crawlers/node/index.ts b/packages/@expo/metro-file-map/src/crawlers/node/index.ts index fbb3b02c91d5f1..b5405e5888c01d 100644 --- a/packages/@expo/metro-file-map/src/crawlers/node/index.ts +++ b/packages/@expo/metro-file-map/src/crawlers/node/index.ts @@ -7,16 +7,18 @@ * @format */ -import { spawn } from 'child_process'; -import * as fs from 'graceful-fs'; -import { platform } from 'os'; +import * as fs from 'fs'; import * as path from 'path'; -import hasNativeFindSupport from './hasNativeFindSupport'; import { RootPathUtils } from '../../lib/RootPathUtils'; -import type { Console, CrawlerOptions, CrawlResult, FileData, IgnoreMatcher } from '../../types'; - -const debug = require('debug')('Metro:NodeCrawler'); +import type { + Console, + CrawlerOptions, + CrawlResult, + FileData, + FileSystem, + IgnoreMatcher, +} from '../../types'; type Callback = (result: FileData) => void; @@ -27,13 +29,22 @@ function find( includeSymlinks: boolean, rootDir: string, 
console: Console, + previousFileSystem: FileSystem | null, callback: Callback ): void { const result: FileData = new Map(); let activeCalls = 0; const pathUtils = new RootPathUtils(rootDir); - function search(directory: string): void { + const exts = extensions.reduce( + (acc, ext) => { + acc[ext] = true; + return acc; + }, + {} as Record + ); + + function search(directory: string, dirNormal: string, isWithinRoot: boolean): void { activeCalls++; fs.readdir(directory, { withFileTypes: true }, (err, entries) => { activeCalls--; @@ -42,46 +53,62 @@ function find( `Error "${(err as any).code ?? err.message}" reading contents of "${directory}", skipping. Add this directory to your ignore list to exclude it.` ); } else { - entries.forEach((entry: fs.Dirent) => { - const file = path.join(directory, entry.name.toString()); - - if (ignore(file)) { - return; + for (let idx = 0; idx < entries.length; idx++) { + const entry = entries[idx]!; + const name = entry.name.toString(); + const file = directory + path.sep + name; + + const isSymbolicLink = entry.isSymbolicLink(); + if (ignore(file) || (!includeSymlinks && isSymbolicLink)) { + continue; } - if (entry.isSymbolicLink() && !includeSymlinks) { - return; - } + // Deriving a normal path above the root dir requires slicing off an up-fragment + // then checking if the target matches the next segment of the root dir. It's therefore + // easier to fall back to `pathUtils.absoluteToNormal` + const childNormal = !isWithinRoot + ? pathUtils.absoluteToNormal(file) + : dirNormal === '' + ? 
name + : dirNormal + path.sep + name; if (entry.isDirectory()) { - search(file); - return; + search(file, childNormal, isWithinRoot || childNormal === ''); + continue; } - activeCalls++; - - fs.lstat(file, (err, stat) => { - activeCalls--; + const ext = path.extname(file).substr(1); + if (!isSymbolicLink && !exts[ext]) { + continue; + } - if (!err && stat) { - const ext = path.extname(file).substr(1); - if (stat.isSymbolicLink() || extensions.includes(ext)) { - result.set(pathUtils.absoluteToNormal(file), [ + const mtime = previousFileSystem?.getMtimeByNormalPath(childNormal); + if (mtime == null || mtime === 0) { + // When we're in a cold start or a previous file doesn't exist, we can skip + // the mtime/size lstat now and treat the file as new + result.set(childNormal, [null, 0, 0, null, isSymbolicLink ? 1 : 0, null]); + } else { + activeCalls++; + fs.lstat(file, (err, stat) => { + activeCalls--; + + if (!err && stat) { + result.set(childNormal, [ stat.mtime.getTime(), stat.size, 0, null, - stat.isSymbolicLink() ? 1 : 0, + isSymbolicLink ? 1 : 0, null, ]); } - } - if (activeCalls === 0) { - callback(result); - } - }); - }); + if (activeCalls === 0) { + callback(result); + } + }); + } + } } if (activeCalls === 0) { @@ -91,80 +118,21 @@ function find( } if (roots.length > 0) { - roots.forEach(search); + for (const root of roots) { + const rootNormal = pathUtils.absoluteToNormal(root); + const isWithinRoot = !rootNormal.startsWith('..' + path.sep); + search(root, rootNormal, isWithinRoot); + } } else { callback(result); } } -function findNative( - roots: readonly string[], - extensions: readonly string[], - ignore: IgnoreMatcher, - includeSymlinks: boolean, - rootDir: string, - console: Console, - callback: Callback -): void { - // Examples: - // ( ( -type f ( -iname *.js ) ) ) - // ( ( -type f ( -iname *.js -o -iname *.ts ) ) ) - // ( ( -type f ( -iname *.js ) ) -o -type l ) - // ( ( -type f ) -o -type l ) - const extensionClause = extensions.length - ? 
`( ${extensions.map((ext) => `-iname *.${ext}`).join(' -o ')} )` - : ''; // Empty inner expressions eg "( )" are not allowed - const expression = `( ( -type f ${extensionClause} ) ${includeSymlinks ? '-o -type l ' : ''})`; - - const pathUtils = new RootPathUtils(rootDir); - - const child = spawn('find', [...roots, ...expression.split(' ')]); - let stdout = ''; - if (child.stdout == null) { - throw new Error( - 'stdout is null - this should never happen. Please open up an issue at https://github.com/facebook/metro' - ); - } - child.stdout.setEncoding('utf-8'); - child.stdout.on('data', (data) => (stdout += data)); - - child.stdout.on('close', () => { - const lines = stdout - .trim() - .split('\n') - .filter((x) => !ignore(x)); - const result: FileData = new Map(); - let count = lines.length; - if (!count) { - callback(new Map()); - } else { - lines.forEach((filePath) => { - fs.lstat(filePath, (err, stat) => { - if (!err && stat) { - result.set(pathUtils.absoluteToNormal(filePath), [ - stat.mtime.getTime(), - stat.size, - 0, - null, - stat.isSymbolicLink() ? 
1 : 0, - null, - ]); - } - if (--count === 0) { - callback(result); - } - }); - }); - } - }); -} - export default async function nodeCrawl(options: CrawlerOptions): Promise { const { console, previousState, extensions, - forceNodeFilesystemAPI, ignore, rootDir, includeSymlinks, @@ -177,10 +145,6 @@ export default async function nodeCrawl(options: CrawlerOptions): Promise { const callback: Callback = (fileData) => { @@ -199,10 +163,15 @@ export default async function nodeCrawl(options: CrawlerOptions): Promise { - const result = this.#fileProcessor.processRegularFile(normalPath, metadata, { + const processFile: ProcessFileFunction = async (normalPath, metadata, opts) => { + const result = await this.#fileProcessor.processRegularFile(normalPath, metadata, { computeSha1: opts.computeSha1, maybeReturnContent: true, }); @@ -532,7 +533,9 @@ export default class FileMap extends EventEmitter { .readlink(this.#pathUtils.normalToAbsolute(normalPath)) .then((symlinkTarget) => { fileMetadata[H.VISITED] = 1; - fileMetadata[H.SYMLINK] = symlinkTarget; + fileMetadata[H.SYMLINK] = normalizePathSeparatorsToPosix( + this.#pathUtils.resolveSymlinkToNormal(normalPath, symlinkTarget) + ); }); } return null; @@ -575,7 +578,9 @@ export default class FileMap extends EventEmitter { if (fileData[H.SYMLINK] === 0) { filesToProcess.push([normalFilePath, fileData]); - } else { + } else if (fileData[H.MTIME] != null && fileData[H.MTIME] !== 0) { + // The symlink will only be updated, if it's been accessed before + // If this is a newly crawled entry, it's skipped const maybeReadLink = this.#maybeReadLink(normalFilePath, fileData); if (maybeReadLink) { readLinkPromises.push( diff --git a/packages/@expo/metro-file-map/src/lib/FileProcessor.ts b/packages/@expo/metro-file-map/src/lib/FileProcessor.ts index b5cd3fa1705a50..7c6ecd138db510 100644 --- a/packages/@expo/metro-file-map/src/lib/FileProcessor.ts +++ b/packages/@expo/metro-file-map/src/lib/FileProcessor.ts @@ -121,16 +121,16 @@ export class 
FileProcessor { return { errors }; } - processRegularFile( + async processRegularFile( normalPath: string, fileMetadata: FileMetadata, req: ProcessFileRequest - ): { content: Buffer | undefined | null } | null { + ): Promise<{ content: Buffer | undefined | null } | null> { const workerInput = this.#getWorkerInput(normalPath, fileMetadata, req); return workerInput ? { content: processWorkerReply( - this.#inBandWorker.processFile(workerInput), + await this.#inBandWorker.processFile(workerInput), workerInput.pluginsToRun, fileMetadata ), diff --git a/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts b/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts index 6b86965a28be07..fc287ff1254827 100644 --- a/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts +++ b/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts @@ -8,6 +8,8 @@ import invariant from 'invariant'; import path from 'path'; +import normalizePathSeparatorsToSystem from './normalizePathSeparatorsToSystem'; + /** * This module provides path utility functions - similar to `node:path` - * optimised for Metro's use case (many paths, few roots) under assumptions @@ -159,6 +161,24 @@ export class RootPathUtils { ); } + resolveSymlinkToNormal(symlinkNormalPath: string, readlinkResult: string): string { + let target = normalizePathSeparatorsToSystem(readlinkResult); + // WARN: This only applies to Windows + Node 20 case, where the value is completely + // unnormalized and a trailing slash may be returned + if (target[target.length - 1] === path.sep) { + target = target.slice(0, -1); + } + if (path.isAbsolute(target)) { + return this.absoluteToNormal(target); + } + // Resolve relative to the symlink's containing directory, expressed as + // a root-relative (possibly non-normal) path, then normalize + const sepIdx = symlinkNormalPath.lastIndexOf(path.sep); + const rootRelativeTarget = + sepIdx === -1 ? 
target : symlinkNormalPath.slice(0, sepIdx) + path.sep + target; + return this.relativeToNormal(rootRelativeTarget); + } + // If a path is a direct ancestor of the project root (or the root itself), // return a number with the degrees of separation, e.g. root=0, parent=1,.. // or null otherwise. diff --git a/packages/@expo/metro-file-map/src/lib/TreeFS.ts b/packages/@expo/metro-file-map/src/lib/TreeFS.ts index 2b2ee1855d5e73..d7e50dfeed2eff 100644 --- a/packages/@expo/metro-file-map/src/lib/TreeFS.ts +++ b/packages/@expo/metro-file-map/src/lib/TreeFS.ts @@ -5,10 +5,13 @@ * LICENSE file in the root directory of this source tree. */ +import fs from 'fs'; import invariant from 'invariant'; import path from 'path'; import H from '../constants'; +import normalizePathSeparatorsToPosix from './normalizePathSeparatorsToPosix'; +import normalizePathSeparatorsToSystem from './normalizePathSeparatorsToSystem'; import type { CacheData, FileData, @@ -34,12 +37,6 @@ function isRegularFile(node: FileNode): boolean { return node[H.SYMLINK] === 0; } -interface NormalizedSymlinkTarget { - ancestorOfRootIdx: number | null; - normalPath: string; - startOfBasenameIdx: number; -} - interface DeserializedSnapshotInput { rootDir: string; fileSystemData: DirectoryNode; @@ -122,7 +119,6 @@ interface MetadataIteratorOptions { * a trailing slash */ export default class TreeFS implements MutableFileSystem { - readonly #cachedNormalSymlinkTargets: WeakMap = new WeakMap(); readonly #pathUtils: RootPathUtils; readonly #processFile: ProcessFileFunction; readonly #rootDir: Path; @@ -203,12 +199,17 @@ export default class TreeFS implements MutableFileSystem { } if ( newMetadata[H.MTIME] != null && - // TODO: Remove when mtime is null if not populated newMetadata[H.MTIME] !== 0 && newMetadata[H.MTIME] === metadata[H.MTIME] ) { // Types and modified time match - not changed. 
changedFiles.delete(canonicalPath); + } else if ( + (newMetadata[H.MTIME] == null || newMetadata[H.MTIME] === 0) && + (metadata[H.MTIME] == null || metadata[H.MTIME] === 0) + ) { + // If file is still untouched then mark it as unchanged + changedFiles.delete(canonicalPath); } else if ( newMetadata[H.SHA1] != null && newMetadata[H.SHA1] === metadata[H.SHA1] && @@ -229,6 +230,13 @@ export default class TreeFS implements MutableFileSystem { }; } + getMtimeByNormalPath(normalPath: Path): number | null { + const result = this.#lookupByNormalPath(normalPath, { + followLeaf: false, + }); + return result.exists && !isDirectory(result.node) ? result.node[H.MTIME] : null; + } + getSha1(mixedPath: Path): string | null { const fileMetadata = this.#getFileData(mixedPath); return (fileMetadata && fileMetadata[H.SHA1]) ?? null; @@ -246,6 +254,18 @@ export default class TreeFS implements MutableFileSystem { } const { canonicalPath, node: fileMetadata } = result; + // Populate mtime and size on demand + if (fileMetadata[H.MTIME] == null || fileMetadata[H.MTIME] === 0) { + fileMetadata[H.SHA1] = null; + const absolutePath = this.#pathUtils.normalToAbsolute(canonicalPath); + try { + const stat = await fs.promises.lstat(absolutePath); + const diskMtime = stat.mtime.getTime(); + fileMetadata[H.MTIME] = diskMtime; + fileMetadata[H.SIZE] = stat.size; + } catch {} + } + // Empty strings const existing = fileMetadata[H.SHA1]; if (existing != null && existing.length > 0) { @@ -703,6 +723,13 @@ export default class TreeFS implements MutableFileSystem { segmentNode, currentPath ); + if (normalSymlinkTarget == null) { + return { + canonicalMissingPath: currentPath, + exists: false, + missingSegmentName: segmentName, + }; + } if (opts.collectLinkPaths) { opts.collectLinkPaths.add(this.#pathUtils.normalToAbsolute(currentPath)); } @@ -712,7 +739,7 @@ export default class TreeFS implements MutableFileSystem { // Append any subsequent path segments to the symlink target, and reset // with our new 
target. const joinedResult = this.#pathUtils.joinNormalToRelative( - normalSymlinkTarget.normalPath, + normalSymlinkTarget, remainingTargetPath ); @@ -732,7 +759,8 @@ export default class TreeFS implements MutableFileSystem { collectAncestors && !isLastSegment && // No-op optimisation to bail out the common case of nothing to do. - (normalSymlinkTarget.ancestorOfRootIdx === 0 || joinedResult.collapsedSegments > 0) + ((ancestorOfRootIdx = this.#pathUtils.getAncestorOfRootIdx(normalSymlinkTarget)) === 0 || + joinedResult.collapsedSegments > 0) ) { let node: MixedNode = this.#rootNode; let collapsedPath = ''; @@ -742,7 +770,7 @@ export default class TreeFS implements MutableFileSystem { // Add the root only if the target is the root or we have // collapsed segments. i > 0 || - normalSymlinkTarget.ancestorOfRootIdx === 0 || + ancestorOfRootIdx === 0 || joinedResult.collapsedSegments > 0 ) { reverseAncestors.push({ @@ -762,7 +790,7 @@ export default class TreeFS implements MutableFileSystem { // the symlink target, and start collecting ancestors only // from the target itself (ie, the basename of the normal target path) // onwards. 
- unseenPathFromIdx = normalSymlinkTarget.startOfBasenameIdx; + unseenPathFromIdx = normalSymlinkTarget.lastIndexOf(path.sep) + 1; if (seen == null) { // Optimisation: set this lazily only when we've encountered a symlink @@ -1162,28 +1190,32 @@ export default class TreeFS implements MutableFileSystem { #resolveSymlinkTargetToNormalPath( symlinkNode: FileMetadata, canonicalPathOfSymlink: Path - ): NormalizedSymlinkTarget { - const cachedResult = this.#cachedNormalSymlinkTargets.get(symlinkNode); - if (cachedResult != null) { - return cachedResult; + ): Path | null { + const symlinkTarget = symlinkNode[H.SYMLINK]; + if (symlinkTarget === 1) { + // Symlink target not yet resolved — read it lazily on first traversal + const absoluteSymlink = this.#pathUtils.normalToAbsolute(canonicalPathOfSymlink); + try { + const literalSymlinkTarget = fs.readlinkSync(absoluteSymlink); + const normalTarget = this.#pathUtils.resolveSymlinkToNormal( + canonicalPathOfSymlink, + literalSymlinkTarget + ); + symlinkNode[H.SYMLINK] = normalizePathSeparatorsToPosix(normalTarget); + symlinkNode[H.VISITED] = 1; + return normalTarget; + } catch { + return null; + } + } else if (symlinkTarget === 0 || symlinkTarget == null) { + // WARN: We shouldn't call this method on non-symlinks. Outside of tests + // this condition shouldn't trigger. It's fine not to resolve a symlink if + // it does trigger however + return null; + } else { + invariant(typeof symlinkTarget === 'string', 'Expected symlink target to be populated.'); + return normalizePathSeparatorsToSystem(symlinkTarget); } - - const literalSymlinkTarget = symlinkNode[H.SYMLINK]; - invariant(typeof literalSymlinkTarget === 'string', 'Expected symlink target to be populated.'); - const absoluteSymlinkTarget = path.resolve( - this.#rootDir, - canonicalPathOfSymlink, - '..', // Symlink target is relative to its containing directory. 
- literalSymlinkTarget // May be absolute, in which case the above are ignored - ); - const normalSymlinkTarget = path.relative(this.#rootDir, absoluteSymlinkTarget); - const result = { - ancestorOfRootIdx: this.#pathUtils.getAncestorOfRootIdx(normalSymlinkTarget), - normalPath: normalSymlinkTarget, - startOfBasenameIdx: normalSymlinkTarget.lastIndexOf(path.sep) + 1, - }; - this.#cachedNormalSymlinkTargets.set(symlinkNode, result); - return result; } #getFileData( diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/FileProcessor.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/FileProcessor.test.ts index 02e5405bb2247f..83b950a1aae1b7 100644 --- a/packages/@expo/metro-file-map/src/lib/__tests__/FileProcessor.test.ts +++ b/packages/@expo/metro-file-map/src/lib/__tests__/FileProcessor.test.ts @@ -41,7 +41,7 @@ describe('processBatch', () => { setup: () => {}, processFile: mockWorkerFn, Worker: class { - processFile: (msg: WorkerMessage) => WorkerMetadata = mockWorkerFn; + processFile: (msg: WorkerMessage) => Promise = mockWorkerFn; }, })); FileProcessor = require('../FileProcessor').FileProcessor; @@ -299,34 +299,34 @@ describe('processBatch', () => { describe('processRegularFile', () => { let FileProcessor: typeof import('../FileProcessor').FileProcessor; - const mockReadFileSync = jest.fn(); + const mockReadFile = jest.fn(); beforeEach(() => { jest.resetModules(); jest.clearAllMocks(); jest.unmock('../../worker'); - jest.mock('graceful-fs', () => ({ - readFileSync: mockReadFileSync, + jest.mock('fs', () => ({ + promises: { + readFile: mockReadFile, + }, })); FileProcessor = require('../FileProcessor').FileProcessor; }); - test('synchronously populates metadata', () => { + test('asynchronously populates metadata', async () => { const processor = new FileProcessor(defaultOptions); const [normalFilePath, metadata] = getNMockFiles(1)[0]!; expect(metadata[H.SHA1]).toBeFalsy(); const fileContent = Buffer.from('hello world'); - 
mockReadFileSync.mockReturnValue(fileContent); + mockReadFile.mockResolvedValue(fileContent); - const result = processor.processRegularFile(normalFilePath, metadata, { + const result = await processor.processRegularFile(normalFilePath, metadata, { computeSha1: true, maybeReturnContent: true, }); - expect(mockReadFileSync).toHaveBeenCalledWith( - path.resolve(defaultOptions.rootDir, normalFilePath) - ); + expect(mockReadFile).toHaveBeenCalledWith(path.resolve(defaultOptions.rootDir, normalFilePath)); expect(result).toEqual({ content: fileContent, diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts index e4f5349f758270..f8f1b1e2d30351 100644 --- a/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts +++ b/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts @@ -140,4 +140,33 @@ describe.each([['win32'], ['posix']] as const)('RootPathUtils on %s', (platform) ] as const)('getAncestorOfRootIdx (%s => %s)', (input, expected) => { expect(pathUtils.getAncestorOfRootIdx(input)).toEqual(expected); }); + + describe('resolveSymlinkToNormal', () => { + beforeEach(() => { + pathUtils = new RootPathUtils(p('/project/root')); + }); + + test.each([ + ['foo/link', './target.js', p('foo/target.js')], + ['foo/link', '../bar.js', 'bar.js'], + ['link', 'target.js', 'target.js'], + [p('a/b/link'), p('../../c.js'), 'c.js'], + [p('a/b/link'), p('../../../outside/f.js'), p('../outside/f.js')], + ])('resolves relative target (%s -> %s) to %s', (symlinkPath, readlinkResult, expected) => { + expect(pathUtils.resolveSymlinkToNormal(p(symlinkPath), readlinkResult)).toEqual(expected); + }); + + test.each([ + ['link', p('/project/root/target.js'), 'target.js'], + ['link', p('/project/root/a/b.js'), p('a/b.js')], + ['link', p('/outside/foo.js'), p('../../outside/foo.js')], + [p('a/link'), p('/project/root'), ''], + ])('resolves absolute target (%s -> %s) to %s', 
(symlinkPath, readlinkResult, expected) => { + expect(pathUtils.resolveSymlinkToNormal(p(symlinkPath), readlinkResult)).toEqual(expected); + }); + + test('strips trailing separator from target', () => { + expect(pathUtils.resolveSymlinkToNormal('link', p('/project/root/dir/'))).toEqual('dir'); + }); + }); }); diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts index 851cdef7d9b21a..1daa88c6f1737d 100644 --- a/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts +++ b/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts @@ -12,6 +12,16 @@ import type TreeFSType from '../TreeFS'; let mockPathModule: typeof import('path'); jest.mock('path', () => mockPathModule); +const mockLstat = jest.fn(); +const mockReadlinkSync = jest.fn(); +jest.mock('fs', () => ({ + ...jest.requireActual('fs'), + readlinkSync: mockReadlinkSync, + promises: { + lstat: mockLstat, + }, +})); + describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { // Convenience function to write paths with posix separators but convert them // to system separators @@ -29,22 +39,22 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { rootDir: p('/project'), files: new Map([ [p('foo/another.js'), [123, 2, 0, null, 0, 'another']], - [p('foo/owndir'), [0, 0, 0, null, '.', null]], - [p('foo/link-to-bar.js'), [0, 0, 0, null, p('../bar.js'), null]], - [p('foo/link-to-another.js'), [0, 0, 0, null, p('another.js'), null]], + [p('foo/owndir'), [0, 0, 0, null, 'foo', null]], + [p('foo/link-to-bar.js'), [0, 0, 0, null, 'bar.js', null]], + [p('foo/link-to-another.js'), [0, 0, 0, null, 'foo/another.js', null]], [p('../outside/external.js'), [0, 0, 0, null, 0, null]], [p('bar.js'), [234, 3, 0, null, 0, 'bar']], - [p('link-to-foo'), [456, 0, 0, null, p('./../project/foo'), null]], - [p('abs-link-out'), [456, 0, 0, null, p('/outside/./baz/..'), null]], + 
[p('link-to-foo'), [456, 0, 0, null, 'foo', null]], + [p('abs-link-out'), [456, 0, 0, null, '../outside', null]], [p('root'), [0, 0, 0, null, '..', null]], - [p('link-to-nowhere'), [123, 0, 0, null, p('./nowhere'), null]], - [p('link-to-self'), [123, 0, 0, null, p('./link-to-self'), null]], - [p('link-cycle-1'), [123, 0, 0, null, p('./link-cycle-2'), null]], - [p('link-cycle-2'), [123, 0, 0, null, p('./link-cycle-1'), null]], + [p('link-to-nowhere'), [123, 0, 0, null, 'nowhere', null]], + [p('link-to-self'), [123, 0, 0, null, 'link-to-self', null]], + [p('link-cycle-1'), [123, 0, 0, null, 'link-cycle-2', null]], + [p('link-cycle-2'), [123, 0, 0, null, 'link-cycle-1', null]], [p('node_modules/pkg/a.js'), [123, 0, 0, null, 0, 'a']], [p('node_modules/pkg/package.json'), [123, 0, 0, null, 0, 'pkg']], ]), - processFile: () => { + processFile: async () => { throw new Error('Not implemented'); }, }); @@ -166,9 +176,9 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { rootDir: p('/deep/project/root'), files: new Map([ [p('foo/index.js'), [123, 0, 0, null, 0, null]], - [p('link-up'), [123, 0, 0, null, p('..'), null]], + [p('link-up'), [123, 0, 0, null, '..', null]], ]), - processFile: () => { + processFile: async () => { throw new Error('Not implemented'); }, }); @@ -193,7 +203,7 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { describe('symlinks to an ancestor of the project root', () => { beforeEach(() => { - tfs.addOrModify(p('foo/link-up-2'), [0, 0, 0, null, p('../..'), null]); + tfs.addOrModify(p('foo/link-up-2'), [0, 0, 0, null, '..', null]); }); test.each([ @@ -237,18 +247,212 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { }); }); + describe('lazy symlink resolution', () => { + let lazyTfs: TreeFSType; + + beforeEach(() => { + mockReadlinkSync.mockReset(); + lazyTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('target.js'), [123, 10, 0, null, 0, 
null]], + [p('unresolved-link'), [0, 0, 0, null, 1, null]], + [p('dir/nested.js'), [123, 10, 0, null, 0, null]], + [p('unresolved-dir-link'), [0, 0, 0, null, 1, null]], + [p('sub/deep.js'), [123, 10, 0, null, 0, null]], + [p('link-to-nested-file'), [0, 0, 0, null, 1, null]], + ]), + processFile: async () => { + throw new Error('Not implemented'); + }, + }); + }); + + test('resolves unresolved symlink via readlinkSync on lookup', () => { + mockReadlinkSync.mockReturnValue(p('./target.js')); + + expect(lazyTfs.lookup(p('/project/unresolved-link'))).toMatchObject({ + exists: true, + realPath: p('/project/target.js'), + type: 'f', + }); + expect(mockReadlinkSync).toHaveBeenCalledTimes(1); + expect(mockReadlinkSync).toHaveBeenCalledWith(p('/project/unresolved-link')); + }); + + test('resolves unresolved symlink to a directory', () => { + mockReadlinkSync.mockReturnValue(p('./dir')); + + expect(lazyTfs.lookup(p('/project/unresolved-dir-link/nested.js'))).toMatchObject({ + exists: true, + realPath: p('/project/dir/nested.js'), + type: 'f', + }); + }); + + test('updates metadata after lazy resolution', () => { + mockReadlinkSync.mockReturnValue(p('./target.js')); + + lazyTfs.lookup(p('/project/unresolved-link')); + + const metadata = [ + ...lazyTfs.metadataIterator({ + includeSymlinks: true, + includeNodeModules: true, + }), + ].find((entry) => entry.canonicalPath === p('unresolved-link')); + + expect(metadata?.metadata[H.SYMLINK]).toBe('target.js'); + expect(metadata?.metadata[H.VISITED]).toBe(1); + }); + + test('stores resolved symlink target with posix separators', () => { + // Use a target with a directory separator to exercise the + // normalizePathSeparatorsToPosix storage (matters on win32). 
+ mockReadlinkSync.mockReturnValue(p('./sub/deep.js')); + + lazyTfs.lookup(p('/project/link-to-nested-file')); + + const metadata = [ + ...lazyTfs.metadataIterator({ + includeSymlinks: true, + includeNodeModules: true, + }), + ].find((entry) => entry.canonicalPath === p('link-to-nested-file')); + + // Stored value must always use posix separators, even on win32 + expect(metadata?.metadata[H.SYMLINK]).toBe('sub/deep.js'); + expect(metadata?.metadata[H.VISITED]).toBe(1); + }); + + test('second lookup of lazily-resolved symlink with nested target works', () => { + // Resolves to a target containing a directory separator + mockReadlinkSync.mockReturnValue(p('./sub/deep.js')); + + // First lookup: lazy resolution, populates H.SYMLINK + expect(lazyTfs.lookup(p('/project/link-to-nested-file'))).toMatchObject({ + exists: true, + realPath: p('/project/sub/deep.js'), + type: 'f', + }); + + // Second lookup: uses cached H.SYMLINK (the else branch) + expect(lazyTfs.lookup(p('/project/link-to-nested-file'))).toMatchObject({ + exists: true, + realPath: p('/project/sub/deep.js'), + type: 'f', + }); + expect(mockReadlinkSync).toHaveBeenCalledTimes(1); + }); + + test('caches resolved symlink and does not re-read', () => { + mockReadlinkSync.mockReturnValue(p('./target.js')); + + lazyTfs.lookup(p('/project/unresolved-link')); + lazyTfs.lookup(p('/project/unresolved-link')); + + expect(mockReadlinkSync).toHaveBeenCalledTimes(1); + }); + + test('returns exists:false for broken unresolved symlink', () => { + mockReadlinkSync.mockImplementation(() => { + throw new Error('ENOENT'); + }); + + expect(lazyTfs.lookup(p('/project/unresolved-link'))).toMatchObject({ + exists: false, + }); + }); + + test('does not call readlinkSync for already-resolved symlinks', () => { + expect(tfs.lookup(p('/project/foo/link-to-bar.js'))).toMatchObject({ + exists: true, + realPath: p('/project/bar.js'), + }); + expect(mockReadlinkSync).not.toHaveBeenCalled(); + }); + + test('lazily resolves symlink pointing 
above root', () => { + const aboveTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('../outside/external.js'), [123, 10, 0, null, 0, null]], + [p('link-out'), [0, 0, 0, null, 1, null]], + ]), + processFile: async () => { + throw new Error('Not implemented'); + }, + }); + mockReadlinkSync.mockReturnValue(p('../outside')); + + expect(aboveTfs.lookup(p('/project/link-out/external.js'))).toMatchObject({ + exists: true, + realPath: p('/outside/external.js'), + type: 'f', + }); + }); + + test('lazily resolves a chain of symlinks', () => { + const chainTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('real.js'), [123, 10, 0, null, 0, null]], + [p('link-a'), [0, 0, 0, null, 'link-b', null]], + [p('link-b'), [0, 0, 0, null, 1, null]], + ]), + processFile: async () => { + throw new Error('Not implemented'); + }, + }); + mockReadlinkSync.mockReturnValue(p('./real.js')); + + expect(chainTfs.lookup(p('/project/link-a'))).toMatchObject({ + exists: true, + realPath: p('/project/real.js'), + type: 'f', + }); + // Only link-b needed readlinkSync; link-a was already resolved + expect(mockReadlinkSync).toHaveBeenCalledTimes(1); + expect(mockReadlinkSync).toHaveBeenCalledWith(p('/project/link-b')); + }); + + test('getAllFiles resolves lazy symlinks that point to files', () => { + mockReadlinkSync.mockReturnValue(p('./target.js')); + + // getAllFiles iterates all paths including through symlinks; + // a lazy file-symlink should resolve and be included + const files = lazyTfs.getAllFiles().sort(); + expect(files).toContain(p('/project/target.js')); + expect(files).toContain(p('/project/dir/nested.js')); + expect(files).toContain(p('/project/sub/deep.js')); + }); + + test('matchFiles with follow resolves lazy dir symlinks', () => { + mockReadlinkSync.mockReturnValue(p('./dir')); + + const matches = [ + ...lazyTfs.matchFiles({ + rootDir: p('/project'), + follow: true, + recursive: true, + }), + ]; + 
expect(matches).toContain(p('/project/unresolved-dir-link/nested.js')); + }); + }); + describe('getDifference', () => { test('returns changed (inc. new) and removed files in given FileData', () => { const newFiles: FileData = new Map([ [p('new-file'), [789, 0, 0, null, 0, null]], - [p('link-to-foo'), [456, 0, 0, null, p('./foo'), null]], + [p('link-to-foo'), [456, 0, 0, null, 'foo', null]], // Different modified time, expect new mtime in changedFiles [p('foo/another.js'), [124, 0, 0, null, 0, null]], - [p('link-cycle-1'), [123, 0, 0, null, p('./link-cycle-2'), null]], - [p('link-cycle-2'), [123, 0, 0, null, p('./link-cycle-1'), null]], + [p('link-cycle-1'), [123, 0, 0, null, 'link-cycle-2', null]], + [p('link-cycle-2'), [123, 0, 0, null, 'link-cycle-1', null]], // Was a symlink, now a regular file [p('link-to-self'), [123, 0, 0, null, 0, null]], - [p('link-to-nowhere'), [123, 0, 0, null, p('./nowhere'), null]], + [p('link-to-nowhere'), [123, 0, 0, null, 'nowhere', null]], [p('node_modules/pkg/a.js'), [123, 0, 0, null, 0, 'a']], [p('node_modules/pkg/package.json'), [123, 0, 0, null, 0, 'pkg']], ]); @@ -326,6 +530,162 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { expect(withEmpty).toEqual(withUndefined); }); + + test('treats files as unchanged when both old and new mtime are null', () => { + const nullMtimeTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([[p('a.js'), [null, 0, 0, null, 0, null]]]), + processFile: async () => { + throw new Error('Not implemented'); + }, + }); + + const newFiles: FileData = new Map([ + [p('a.js'), [null, 0, 0, null, 0, null]], + ]); + + expect(nullMtimeTfs.getDifference(newFiles)).toEqual({ + changedFiles: new Map(), + removedFiles: new Set(), + }); + }); + + test('treats files as unchanged when both old and new mtime are 0', () => { + const zeroMtimeTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([[p('a.js'), [0, 0, 0, null, 0, null]]]), + processFile: async () => { + 
throw new Error('Not implemented'); + }, + }); + + const newFiles: FileData = new Map([ + [p('a.js'), [0, 0, 0, null, 0, null]], + ]); + + expect(zeroMtimeTfs.getDifference(newFiles)).toEqual({ + changedFiles: new Map(), + removedFiles: new Set(), + }); + }); + + test('treats file as changed when old has mtime but new does not', () => { + const newFiles: FileData = new Map([ + [p('bar.js'), [null, 0, 0, null, 0, null]], + ]); + + const result = tfs.getDifference(newFiles); + expect(result.changedFiles.has(p('bar.js'))).toBe(true); + }); + + test('treats file as changed when new has mtime but old does not', () => { + const nullMtimeTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([[p('a.js'), [null, 0, 0, null, 0, null]]]), + processFile: async () => { + throw new Error('Not implemented'); + }, + }); + + const newFiles: FileData = new Map([ + [p('a.js'), [500, 10, 0, null, 0, null]], + ]); + + expect(nullMtimeTfs.getDifference(newFiles)).toEqual({ + changedFiles: new Map([ + [p('a.js'), [500, 10, 0, null, 0, null]], + ]), + removedFiles: new Set(), + }); + }); + + test('detects type change even when both mtimes are null', () => { + const symlinkTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('a.js'), [null, 0, 0, null, 'b.js', null]], + ]), + processFile: async () => { + throw new Error('Not implemented'); + }, + }); + + const newFiles: FileData = new Map([ + [p('a.js'), [null, 0, 0, null, 0, null]], + ]); + + const result = symlinkTfs.getDifference(newFiles); + expect(result.changedFiles.has(p('a.js'))).toBe(true); + }); + + test('treats unresolved symlink (1) as unchanged vs resolved symlink with null mtime', () => { + // Simulates re-crawl: old state has a resolved symlink target, new crawl + // produces an unresolved lazy marker (1) with null mtime + const resolvedTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('my-link'), [0, 0, 0, null, 'target.js', null]], + ]), + processFile: async () => { + throw new 
Error('Not implemented'); + }, + }); + + const newFiles: FileData = new Map([ + // Re-crawl produces unresolved symlink with null mtime + [p('my-link'), [null, 0, 0, null, 1, null]], + ]); + + const result = resolvedTfs.getDifference(newFiles); + // Both are symlinks (not regular files), both have null/0 mtime → unchanged + expect(result.changedFiles.has(p('my-link'))).toBe(false); + expect(result.removedFiles.size).toBe(0); + }); + + test('treats re-crawled file with null mtime as unchanged when old mtime was also null', () => { + // Simulates warm re-crawl where a file was previously lazy and is still lazy + const lazyTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([[p('file.js'), [null, 0, 0, null, 0, null]]]), + processFile: async () => { + throw new Error('Not implemented'); + }, + }); + + const newFiles: FileData = new Map([ + [p('file.js'), [null, 0, 0, null, 0, null]], + ]); + + expect(lazyTfs.getDifference(newFiles)).toEqual({ + changedFiles: new Map(), + removedFiles: new Set(), + }); + }); + }); + + describe('getMtimeByNormalPath', () => { + test('returns mtime for an existing file', () => { + expect(tfs.getMtimeByNormalPath(p('bar.js'))).toBe(234); + }); + + test('returns null for a non-existent file', () => { + expect(tfs.getMtimeByNormalPath(p('nonexistent.js'))).toBeNull(); + }); + + test('returns null for a directory', () => { + expect(tfs.getMtimeByNormalPath(p('foo'))).toBeNull(); + }); + + test('returns null for a file with null mtime', () => { + const nullMtimeTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([[p('a.js'), [null, 0, 0, null, 0, null]]]), + processFile: async () => { + throw new Error('Not implemented'); + }, + }); + expect(nullMtimeTfs.getMtimeByNormalPath(p('a.js'))).toBeNull(); + }); }); describe('hierarchicalLookup', () => { @@ -337,12 +697,12 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { files: new Map( ( [ - [p('a/1/package.json'), [0, 0, 0, null, 
'./real-package.json', null]], - [p('a/2/package.json'), [0, 0, 0, null, './notexist-package.json', null]], - [p('a/b/c/d/link-to-C'), [0, 0, 0, null, p('../../../..'), null]], - [p('a/b/c/d/link-to-B'), [0, 0, 0, null, p('../../../../..'), null]], - [p('a/b/c/d/link-to-A'), [0, 0, 0, null, p('../../../../../..'), null]], - [p('n_m/workspace/link-to-pkg'), [0, 0, 0, null, p('../../../workspace-pkg'), null]], + [p('a/1/package.json'), [0, 0, 0, null, 'a/1/real-package.json', null]], + [p('a/2/package.json'), [0, 0, 0, null, 'a/2/notexist-package.json', null]], + [p('a/b/c/d/link-to-C'), [0, 0, 0, null, '', null]], + [p('a/b/c/d/link-to-B'), [0, 0, 0, null, '..', null]], + [p('a/b/c/d/link-to-A'), [0, 0, 0, null, '../..', null]], + [p('n_m/workspace/link-to-pkg'), [0, 0, 0, null, '../workspace-pkg', null]], ] as [CanonicalPath, FileMetadata][] ).concat( [ @@ -367,7 +727,7 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { ) ) ), - processFile: () => { + processFile: async () => { throw new Error('Not implemented'); }, }); @@ -708,7 +1068,7 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { test('adds new files and modifies existing, new symlinks work', () => { tfs.bulkAddOrModify( new Map([ - [p('newdir/link-to-link-to-bar.js'), [0, 0, 0, null, p('../foo/link-to-bar.js'), null]], + [p('newdir/link-to-link-to-bar.js'), [0, 0, 0, null, 'foo/link-to-bar.js', null]], [p('foo/baz.js'), [0, 0, 0, null, 0, null]], [p('bar.js'), [999, 1, 0, null, 0, null]], ]) @@ -847,7 +1207,7 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { { baseName: 'link-to-bar.js', canonicalPath: p('foo/link-to-bar.js'), - metadata: [0, 0, 0, null, p('../bar.js'), null], + metadata: [0, 0, 0, null, 'bar.js', null], }, ]) ); @@ -863,7 +1223,7 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { files: new Map([ [p('foo.js'), [123, 0, 0, 'def456', 0, null]], [p('bar.js'), [123, 0, 
0, null, 0, null]], - [p('link-to-bar'), [456, 0, 0, null, p('./bar.js'), null]], + [p('link-to-bar'), [456, 0, 0, null, 'bar.js', null]], ]), processFile: mockProcessFile, }); @@ -872,6 +1232,7 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { return; }); mockProcessFile.mockClear(); + mockLstat.mockClear(); }); test('returns the precomputed SHA-1 of a file if set', async () => { @@ -934,6 +1295,153 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { expect(await tfs.getOrComputeSha1(p('bar.js'))).toEqual({ sha1: 'abc123' }); expect(mockProcessFile).toHaveBeenCalledTimes(2); }); + + test('lazily resolves unresolved symlink and computes SHA1', async () => { + tfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('target.js'), [123, 10, 0, null, 0, null]], + [p('lazy-link'), [0, 0, 0, null, 1, null]], + ]), + processFile: mockProcessFile, + }); + + mockReadlinkSync.mockReturnValue(p('./target.js')); + + expect(await tfs.getOrComputeSha1(p('lazy-link'))).toEqual({ + sha1: 'abc123', + }); + expect(mockReadlinkSync).toHaveBeenCalledTimes(1); + expect(mockProcessFile).toHaveBeenCalledWith(p('target.js'), expect.any(Array), { + computeSha1: true, + }); + }); + + test('lazily resolves symlink to nested path and computes SHA1 on second call', async () => { + tfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('sub/target.js'), [123, 10, 0, null, 0, null]], + [p('lazy-nested-link'), [0, 0, 0, null, 1, null]], + ]), + processFile: mockProcessFile, + }); + + mockReadlinkSync.mockReturnValue(p('./sub/target.js')); + + // First call: resolves the symlink lazily, stores posix path + expect(await tfs.getOrComputeSha1(p('lazy-nested-link'))).toEqual({ + sha1: 'abc123', + }); + expect(mockReadlinkSync).toHaveBeenCalledTimes(1); + expect(mockProcessFile).toHaveBeenCalledWith(p('sub/target.js'), expect.any(Array), { + computeSha1: true, + }); + + // Second call: uses cached H.SYMLINK 
(posix), should still resolve + mockProcessFile.mockClear(); + expect(await tfs.getOrComputeSha1(p('lazy-nested-link'))).toEqual({ + sha1: 'abc123', + }); + // No re-read needed + expect(mockReadlinkSync).toHaveBeenCalledTimes(1); + // SHA1 was cached on target, no reprocessing + expect(mockProcessFile).not.toHaveBeenCalled(); + }); + + test('lazily resolves symlink and stats target with null mtime', async () => { + tfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + // Target file has null mtime (also lazily crawled) + [p('sub/target.js'), [null, 0, 0, null, 0, null]], + [p('lazy-link'), [0, 0, 0, null, 1, null]], + ]), + processFile: mockProcessFile, + }); + + mockReadlinkSync.mockReturnValue(p('./sub/target.js')); + mockLstat.mockResolvedValueOnce({ + mtime: { getTime: () => 555 }, + size: 42, + }); + + expect(await tfs.getOrComputeSha1(p('lazy-link'))).toEqual({ + sha1: 'abc123', + }); + // Symlink was resolved + expect(mockReadlinkSync).toHaveBeenCalledTimes(1); + // Target was stat'd because its mtime was null + expect(mockLstat).toHaveBeenCalledTimes(1); + expect(mockLstat).toHaveBeenCalledWith(p('/project/sub/target.js')); + // Target was processed for SHA1 + expect(mockProcessFile).toHaveBeenCalledTimes(1); + // Target's mtime is now populated + expect(tfs.getMtimeByNormalPath(p('sub/target.js'))).toBe(555); + }); + + test('lazily stats file and clears SHA1 when mtime is null', async () => { + tfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('unstated.js'), [null, 0, 0, 'stale', 0, null]], + ]), + processFile: mockProcessFile, + }); + + mockLstat.mockResolvedValueOnce({ + mtime: { getTime: () => 999 }, + size: 50, + }); + + await tfs.getOrComputeSha1(p('unstated.js')); + + expect(mockLstat).toHaveBeenCalledTimes(1); + expect(mockProcessFile).toHaveBeenCalledTimes(1); + expect(tfs.getMtimeByNormalPath(p('unstated.js'))).toBe(999); + }); + + test('lazily stats file when mtime is 0', async () => { + tfs = new TreeFS({ + 
rootDir: p('/project'), + files: new Map([[p('zero.js'), [0, 0, 0, null, 0, null]]]), + processFile: mockProcessFile, + }); + + mockLstat.mockResolvedValueOnce({ + mtime: { getTime: () => 888 }, + size: 30, + }); + + await tfs.getOrComputeSha1(p('zero.js')); + + expect(mockLstat).toHaveBeenCalledTimes(1); + expect(mockProcessFile).toHaveBeenCalledTimes(1); + expect(tfs.getMtimeByNormalPath(p('zero.js'))).toBe(888); + }); + + test('does not stat file when mtime is already populated', async () => { + mockLstat.mockClear(); + await tfs.getOrComputeSha1(p('bar.js')); + + expect(mockLstat).not.toHaveBeenCalled(); + }); + + test('handles lstat failure gracefully when mtime is null', async () => { + tfs = new TreeFS({ + rootDir: p('/project'), + files: new Map([ + [p('missing.js'), [null, 0, 0, null, 0, null]], + ]), + processFile: mockProcessFile, + }); + + mockLstat.mockRejectedValueOnce(new Error('ENOENT')); + + const result = await tfs.getOrComputeSha1(p('missing.js')); + expect(result).toEqual({ sha1: 'abc123' }); + expect(mockProcessFile).toHaveBeenCalledTimes(1); + }); }); describe('change listener', () => { @@ -954,9 +1462,9 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { files: new Map([ [p('existing.js'), [123, 0, 0, '', 0]], [p('dir/nested.js'), [456, 0, 0, '', 0]], - [p('mylink'), [0, 0, 0, '', p('./dir')]], + [p('mylink'), [0, 0, 0, '', 'dir']], ]), - processFile: () => { + processFile: async () => { throw new Error('Not implemented'); }, }); @@ -1059,18 +1567,16 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { describe('symlinks with listener', () => { test('tracks added files when adding a symlink', () => { - simpleTfs.addOrModify(p('link-to-existing'), [0, 0, 0, '', p('./existing.js')], listener); + simpleTfs.addOrModify(p('link-to-existing'), [0, 0, 0, '', 'existing.js'], listener); expect(logChange.mock.calls).toEqual([ - ['fileAdded', p('link-to-existing'), [0, 0, 0, '', 
p('./existing.js')]], + ['fileAdded', p('link-to-existing'), [0, 0, 0, '', 'existing.js']], ]); }); test('tracks removed symlinks with their metadata', () => { simpleTfs.remove(p('mylink'), listener); - expect(logChange.mock.calls).toEqual([ - ['fileRemoved', p('mylink'), [0, 0, 0, '', p('./dir')]], - ]); + expect(logChange.mock.calls).toEqual([['fileRemoved', p('mylink'), [0, 0, 0, '', 'dir']]]); }); }); }); diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/removeOverlappingRoots.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/removeOverlappingRoots.test.ts new file mode 100644 index 00000000000000..4953cf02c5dd8e --- /dev/null +++ b/packages/@expo/metro-file-map/src/lib/__tests__/removeOverlappingRoots.test.ts @@ -0,0 +1,92 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + * @flow strict + * @oncall react_native + */ + +let mockPathModule; +jest.mock('path', () => mockPathModule); + +describe.each([['win32'], ['posix']])('removeOverlappingRoots on %s', (platform) => { + // Convenience function to write paths with posix separators but convert them + // to system separators + const p = (filePath: string): string => + platform === 'win32' ? 
filePath.replace(/\//g, '\\').replace(/^\\/, 'C:\\') : filePath; + + let removeOverlappingRoots; + + beforeEach(() => { + jest.resetModules(); + mockPathModule = jest.requireActual('path')[platform]; + removeOverlappingRoots = require('../removeOverlappingRoots').default; + }); + + test('returns empty array for empty input', () => { + expect(removeOverlappingRoots([])).toEqual([]); + }); + + test('returns single root unchanged', () => { + expect(removeOverlappingRoots([p('/a/b')])).toEqual([p('/a/b')]); + }); + + test('sorts roots', () => { + expect(removeOverlappingRoots([p('/b'), p('/a')])).toEqual([p('/a'), p('/b')]); + }); + + test('removes exact duplicates', () => { + expect(removeOverlappingRoots([p('/a'), p('/b'), p('/a')])).toEqual([p('/a'), p('/b')]); + }); + + test('removes a subdirectory of another root', () => { + expect(removeOverlappingRoots([p('/a/b'), p('/a/b/c')])).toEqual([p('/a/b')]); + }); + + test('removes deeply nested subdirectories', () => { + expect(removeOverlappingRoots([p('/a'), p('/a/b'), p('/a/b/c')])).toEqual([p('/a')]); + }); + + test('keeps sibling directories', () => { + expect(removeOverlappingRoots([p('/a/b'), p('/a/c')])).toEqual([p('/a/b'), p('/a/c')]); + }); + + test('does not treat a path-prefix as a parent (e.g. 
/a/b vs /a/b-foo)', () => { + expect(removeOverlappingRoots([p('/a/b'), p('/a/b-foo')])).toEqual([p('/a/b-foo'), p('/a/b')]); + }); + + test('filters subdirectories even when interleaved with non-children', () => { + expect(removeOverlappingRoots([p('/a/b/c'), p('/a/b-foo'), p('/a/b')])).toEqual([ + p('/a/b-foo'), + p('/a/b'), + ]); + }); + + test('shorter parent always sorts before longer child', () => { + expect(removeOverlappingRoots([p('/a/long/nested/path'), p('/a')])).toEqual([p('/a')]); + }); + + test('handles a mix of duplicates, subdirectories, and siblings', () => { + expect( + removeOverlappingRoots([ + p('/project/src'), + p('/project/lib'), + p('/project/src/utils'), + p('/project/src'), + p('/project/lib/internal'), + p('/other'), + ]) + ).toEqual([p('/other'), p('/project/lib'), p('/project/src')]); + }); + + test('resolves paths (normalizes trailing slashes and ..)', () => { + expect(removeOverlappingRoots([p('/a/b/'), p('/a/c/../d')])).toEqual([p('/a/b'), p('/a/d')]); + }); + + test('resolves paths before deduplicating', () => { + expect(removeOverlappingRoots([p('/a/b'), p('/a/b/')])).toEqual([p('/a/b')]); + }); +}); diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/rootRelativeCacheKeys.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/rootRelativeCacheKeys.test.ts index 998ecc6b0ff843..85769324b1dd8b 100644 --- a/packages/@expo/metro-file-map/src/lib/__tests__/rootRelativeCacheKeys.test.ts +++ b/packages/@expo/metro-file-map/src/lib/__tests__/rootRelativeCacheKeys.test.ts @@ -17,7 +17,6 @@ const buildParameters: BuildParameters = { computeSha1: false, enableSymlinks: false, extensions: ['a'], - forceNodeFilesystemAPI: false, ignorePattern: /a/, plugins: [getMockPlugin('1')], retainAllFiles: false, diff --git a/packages/@expo/metro-file-map/src/lib/removeOverlappingRoots.ts b/packages/@expo/metro-file-map/src/lib/removeOverlappingRoots.ts new file mode 100644 index 00000000000000..6fe6bd3c472830 --- /dev/null +++ 
b/packages/@expo/metro-file-map/src/lib/removeOverlappingRoots.ts @@ -0,0 +1,33 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict + * @format + */ + +import path from 'path'; + +export default function removeOverlappingRoots(roots: readonly string[]): readonly string[] { + const sorted = roots + .map((r) => path.resolve(r)) + .sort((a, b) => { + const aRoot = a + path.sep; + const bRoot = b + path.sep; + return aRoot < bRoot ? -1 : aRoot > bRoot ? 1 : 0; + }); + if (sorted.length === 0) { + return sorted; + } + const result: string[] = [sorted[0]!]; + for (let i = 1; i < sorted.length; i++) { + const rootPath = sorted[i] + path.sep; + const prevPath = result[result.length - 1] + path.sep; + if (!rootPath.startsWith(prevPath)) { + result.push(sorted[i]!); + } + } + return result; +} diff --git a/packages/@expo/metro-file-map/src/lib/rootRelativeCacheKeys.ts b/packages/@expo/metro-file-map/src/lib/rootRelativeCacheKeys.ts index 09ceae5e11f56b..fceeeaf6bda850 100644 --- a/packages/@expo/metro-file-map/src/lib/rootRelativeCacheKeys.ts +++ b/packages/@expo/metro-file-map/src/lib/rootRelativeCacheKeys.ts @@ -33,11 +33,12 @@ export default function rootRelativeCacheKeys(buildParameters: BuildParameters): case 'extensions': case 'computeSha1': case 'enableSymlinks': - case 'forceNodeFilesystemAPI': case 'retainAllFiles': return buildParameters[key] ?? 
null; case 'ignorePattern': return buildParameters[key].toString(); + case 'forceNodeFilesystemAPI': + return null; default: key satisfies never; throw new Error('Unrecognised key in build parameters: ' + key); diff --git a/packages/@expo/metro-file-map/src/plugins/dependencies/worker.ts b/packages/@expo/metro-file-map/src/plugins/dependencies/worker.ts index 553fcf5aa9182a..03d7a0a79f0063 100644 --- a/packages/@expo/metro-file-map/src/plugins/dependencies/worker.ts +++ b/packages/@expo/metro-file-map/src/plugins/dependencies/worker.ts @@ -27,8 +27,11 @@ export default class DependencyExtractorWorker implements MetadataWorker { } } - processFile(data: WorkerMessage, utils: Readonly<{ getContent: () => Buffer }>): V8Serializable { - const content = utils.getContent().toString(); + async processFile( + data: WorkerMessage, + utils: { readonly getContent: () => Promise } + ): Promise { + const content = (await utils.getContent()).toString(); const { filePath } = data; const dependencies = diff --git a/packages/@expo/metro-file-map/src/plugins/haste/worker.ts b/packages/@expo/metro-file-map/src/plugins/haste/worker.ts index b5a3f1a8495080..c436f27abf5917 100644 --- a/packages/@expo/metro-file-map/src/plugins/haste/worker.ts +++ b/packages/@expo/metro-file-map/src/plugins/haste/worker.ts @@ -23,13 +23,16 @@ export default class Worker implements MetadataWorker { } } - processFile(data: WorkerMessage, utils: Readonly<{ getContent: () => Buffer }>): V8Serializable { + async processFile( + data: WorkerMessage, + utils: { readonly getContent: () => Promise } + ): Promise { let hasteName: string | null = null; const { filePath } = data; if (filePath.endsWith(PACKAGE_JSON)) { // Process a package.json that is returned as a PACKAGE type with its name. 
try { - const fileData = JSON.parse(utils.getContent().toString()); + const fileData = JSON.parse((await utils.getContent()).toString()); if (fileData.name) { hasteName = fileData.name; } diff --git a/packages/@expo/metro-file-map/src/types.ts b/packages/@expo/metro-file-map/src/types.ts index 2ef10914c13dd2..3ce348f65fc654 100644 --- a/packages/@expo/metro-file-map/src/types.ts +++ b/packages/@expo/metro-file-map/src/types.ts @@ -19,7 +19,8 @@ export interface BuildParameters { readonly computeSha1: boolean; readonly enableSymlinks: boolean; readonly extensions: readonly string[]; - readonly forceNodeFilesystemAPI: boolean; + /** @deprecated */ + readonly forceNodeFilesystemAPI?: boolean; readonly ignorePattern: RegExp; readonly plugins: readonly InputFileMapPlugin[]; readonly retainAllFiles: boolean; @@ -113,7 +114,8 @@ export interface CrawlerOptions { computeSha1: boolean; console: Console; extensions: readonly string[]; - forceNodeFilesystemAPI: boolean; + /** @deprecated */ + forceNodeFilesystemAPI?: boolean; ignore: IgnoreMatcher; includeSymlinks: boolean; perfLogger?: PerfLogger | null | undefined; @@ -226,11 +228,14 @@ export interface FileMapPlugin< export type InputFileMapPlugin = FileMapPlugin; export interface MetadataWorkerParams { - getContent(): Buffer; + getContent(): Promise; } export interface MetadataWorker { - processFile(message: WorkerMessage, params: MetadataWorkerParams): V8Serializable; + processFile( + message: WorkerMessage, + params: MetadataWorkerParams + ): V8Serializable | Promise; } export type IgnoreMatcher = (item: string) => boolean; @@ -278,6 +283,7 @@ export interface FileSystem { }; getSerializableSnapshot(): CacheData['fileSystemData']; + getMtimeByNormalPath(file: Path): number | undefined | null; getSha1(file: Path): string | undefined | null; getOrComputeSha1(file: Path): Promise<{ sha1: string; content?: Buffer } | undefined | null>; @@ -463,7 +469,7 @@ export type ProcessFileFunction = ( normalPath: string, metadata: 
FileMetadata, request: Readonly<{ computeSha1: boolean }> -) => Buffer | undefined | null; +) => Promise; export type RawMockMap = { /** posix-separated mock name to posix-separated project-relative paths */ diff --git a/packages/@expo/metro-file-map/src/worker.ts b/packages/@expo/metro-file-map/src/worker.ts index a6d99a8845cadc..0e4a6e487ee648 100644 --- a/packages/@expo/metro-file-map/src/worker.ts +++ b/packages/@expo/metro-file-map/src/worker.ts @@ -6,7 +6,7 @@ */ import { createHash } from 'crypto'; -import fs from 'graceful-fs'; +import fs from 'fs'; import type { MetadataWorker, WorkerMessage, WorkerMetadata, WorkerSetupArgs } from './types'; @@ -28,33 +28,39 @@ export class Worker { }); } - processFile(data: WorkerMessage): WorkerMetadata { - let content: Buffer | undefined; - let sha1: WorkerMetadata['sha1']; + async processFile(data: WorkerMessage): Promise { + let contentPromise: Promise | undefined; + let sha1Promise: Promise | undefined; const { computeSha1, filePath, pluginsToRun } = data; - const getContent = (): Buffer => { - if (content == null) { - content = fs.readFileSync(filePath) as Buffer; + const getContent = (): Promise => { + if (contentPromise == null) { + contentPromise = fs.promises.readFile(filePath); } - - return content!; + return contentPromise; }; const workerUtils = { getContent }; - const pluginData = pluginsToRun.map((pluginIdx) => - this.#plugins[pluginIdx]!.processFile(data, workerUtils) + const pluginDataPromise = Promise.all( + pluginsToRun.map((pluginIdx) => this.#plugins[pluginIdx]!.processFile(data, workerUtils)) ); // If a SHA-1 is requested on update, compute it. if (computeSha1) { - sha1 = sha1hex(getContent()); + sha1Promise = getContent().then(sha1hex); } - return content && data.maybeReturnContent - ? { content, pluginData, sha1 } - : { pluginData, sha1 }; + return contentPromise != null && data.maybeReturnContent + ? 
{ + content: await contentPromise, + pluginData: await pluginDataPromise, + sha1: await sha1Promise, + } + : { + pluginData: await pluginDataPromise, + sha1: await sha1Promise, + }; } } @@ -74,7 +80,7 @@ export function setup(args: WorkerSetupArgs): void { /** * Called by jest-worker with each workload */ -export function processFile(data: WorkerMessage): WorkerMetadata { +export async function processFile(data: WorkerMessage): Promise { if (!singletonWorker) { throw new Error('metro-file-map: setup() must be called before processFile()'); } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c4ddc766b66e47..ceb23194e497a3 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -2614,9 +2614,6 @@ importers: fb-watchman: specifier: ^2.0.2 version: 2.0.2 - graceful-fs: - specifier: ^4.2.4 - version: 4.2.11 invariant: specifier: ^2.2.4 version: 2.2.4 @@ -2639,9 +2636,6 @@ importers: '@types/fb-watchman': specifier: ^2.0.6 version: 2.0.6 - '@types/graceful-fs': - specifier: ^4.1.9 - version: 4.1.9 '@types/invariant': specifier: ^2.2.37 version: 2.2.37 From b7739a3500ccda81a68ca3577e28c2f39007081d Mon Sep 17 00:00:00 2001 From: Phil Pluckthun Date: Tue, 5 May 2026 17:17:39 +0100 Subject: [PATCH 03/26] feat(cli,metro-file-map): Implement on-demand filesystem access (#45391) # Why Stacked on #45378 This adds `enableFallback` and `scopeFallback` flags to `@expo/metro-file-map` that control the **on-demand filesystem**. These file map additions decouple the crawling from `watchFolders` and allow the file map to lazily access and crawl more files. When `scopeFallback` is enabled (the default) it's only allowed to access files in the server root, that aren't ignored (extensions / blocklist), to prevent existing projects from pulling in more files than before. 
This is disabled for our `expo/expo` out-of-tree debugging (CLI in monorepo run against projects outside of monorepo) This has been implemented to: - allow `watchFolders` to be scoped down to watch and crawl fewer files, allowing a trade-off of having fewer watchers (if files are known not to change frequently), for higher startup performance - allow crawling/access outside of the monorepo root (must be implemented separately), for example, to reliably support pnpm/bun global stores without attaching a file watcher to them - allow the crawler to skip sub-folders in the project (e.g. native folders) that may (in very rare cases) still contain JS project source files (must be implemented separately) # How - Implement fallback filesystem changes (Picked from #44567) - Add unit tests for on-demand access logic - Add `experiments.onDemandFilesystem` flag (`boolean | 'UNSTABLE_ALLOW_ALL'`; untyped until JSON schema update) passed onto `resolver.unstable_onDemandFilesystem` - Remove internal `watchFolders` modification meant for CI that adds additional `watchFolders` - **Note:** This means we can't disable the above flag in E2E CI, since E2E relies on the on-demand filesystem instead now - Empty out `watchFolders = [projectRoot]` if we're exporting and the flag is enabled - **Note:** Pre-crawling a full server root is generally never faster for an export. The cost of doing a few blocking IO calls during bundling is negligible compared to doing a full crawl ahead of bundling # Test Plan - Unit tests added for `@expo/metro-file-map` changes - E2E CI should pass unchanged # Checklist - [x] I added a `changelog.md` entry and rebuilt the package sources according to [this short guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting) - [ ] This diff will work correctly for `npx expo prebuild` & EAS Build (eg: updated a module plugin). 
- [ ] Conforms with the [Documentation Writing Style Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md) --------- Co-authored-by: Hassan Khan --- packages/@expo/cli/CHANGELOG.md | 1 + .../start/server/metro/createFileMap-fork.ts | 26 +- .../start/server/metro/instantiateMetro.ts | 5 + .../server/metro/withMetroMultiPlatform.ts | 44 +- .../cli/src/utils/resolveWatchFolders.ts | 42 -- packages/@expo/metro-file-map/CHANGELOG.md | 1 + .../build/crawlers/node/fallback.d.ts | 20 + .../build/crawlers/node/fallback.js | 148 +++++ .../@expo/metro-file-map/build/index.d.ts | 3 + packages/@expo/metro-file-map/build/index.js | 33 +- .../build/lib/RootPathUtils.d.ts | 2 + .../metro-file-map/build/lib/RootPathUtils.js | 35 ++ .../metro-file-map/build/lib/TreeFS.d.ts | 10 +- .../@expo/metro-file-map/build/lib/TreeFS.js | 178 +++++- .../@expo/metro-file-map/build/types.d.ts | 6 + .../crawlers/node/__tests__/fallback.test.ts | 271 ++++++++ .../src/crawlers/node/fallback.ts | 174 ++++++ packages/@expo/metro-file-map/src/index.ts | 41 +- .../metro-file-map/src/lib/RootPathUtils.ts | 37 ++ .../@expo/metro-file-map/src/lib/TreeFS.ts | 221 ++++++- .../src/lib/__tests__/RootPathUtils.test.ts | 89 +++ .../src/lib/__tests__/TreeFS.test.ts | 580 ++++++++++++++++++ packages/@expo/metro-file-map/src/types.ts | 16 + .../types/expo-metro-augmentations.d.ts | 6 + 24 files changed, 1843 insertions(+), 146 deletions(-) delete mode 100644 packages/@expo/cli/src/utils/resolveWatchFolders.ts create mode 100644 packages/@expo/metro-file-map/build/crawlers/node/fallback.d.ts create mode 100644 packages/@expo/metro-file-map/build/crawlers/node/fallback.js create mode 100644 packages/@expo/metro-file-map/src/crawlers/node/__tests__/fallback.test.ts create mode 100644 packages/@expo/metro-file-map/src/crawlers/node/fallback.ts diff --git a/packages/@expo/cli/CHANGELOG.md b/packages/@expo/cli/CHANGELOG.md index f37e82202635c0..481a7b0780c791 100644 --- 
a/packages/@expo/cli/CHANGELOG.md +++ b/packages/@expo/cli/CHANGELOG.md @@ -18,6 +18,7 @@ - Use stream rendering in SSR ([#43963](https://github.com/expo/expo/pull/43963) by [@hassankhan](https://github.com/hassankhan)) - Add support for metadata in streaming SSR ([#44731](https://github.com/expo/expo/pull/44731) by [@hassankhan](https://github.com/hassankhan)) - Support streaming SSR in development ([#45362](https://github.com/expo/expo/pull/45362) by [@hassankhan](https://github.com/hassankhan)) +- Add `experiments.onDemandFilesystem` (enabled by default) to allow files in the server root outside of `watchFolders` to be accessed and crawled lazily ([#45391](https://github.com/expo/expo/pull/45391) by [@kitten](https://github.com/kitten)) ### 🐛 Bug fixes diff --git a/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts b/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts index ae8fc80854c573..aad6b183532095 100644 --- a/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts +++ b/packages/@expo/cli/src/start/server/metro/createFileMap-fork.ts @@ -8,6 +8,7 @@ import type MetroServer from '@expo/metro/metro/Server'; import type { ConfigT } from '@expo/metro/metro-config'; import FileMap, { DependencyPlugin, DiskCacheManager, HastePlugin } from '@expo/metro-file-map'; import ciInfo from 'ci-info'; +import path from 'node:path'; function getIgnorePattern(config: ConfigT): RegExp { const { blockList, blacklistRE } = config.resolver; @@ -77,6 +78,18 @@ export default function createFileMap(config: ConfigT, options?: CreateFileMapOp }); plugins.push(hasteMap); + const projectRoot = config.projectRoot; + const serverRoot = config.server.unstable_serverRoot; + const enableFallback = !!config.resolver.unstable_onDemandFilesystem; + + // NOTE(@kitten): We allow the on-demand filesystem to escape the server root and access any file, + // - if we're using the CLI from `expo/expo` on an external project (e.g. 
in CI) + // - if the user explicitly sets the experimental flag to 'UNSTABLE_ALLOW_ALL' + const scopeFallback = + enableFallback && + config.resolver.unstable_onDemandFilesystem !== 'UNSTABLE_ALLOW_ALL' && + isDirectoryIn(__dirname, serverRoot ?? projectRoot); + + const fileMap = new FileMap({ // NOTE(@kitten): Dropped `config.unstable_fileMapCacheManagerFactory` cacheManagerFactory: (factoryParams: any) => { @@ -89,6 +102,9 @@ export default function createFileMap(config: ConfigT, options?: CreateFileMapOp perfLoggerFactory: config.unstable_perfLoggerFactory, computeSha1: !config.watcher.unstable_lazySha1, enableSymlinks: true, + // NOTE(@kitten): @expo/metro-file-map fork adds `enableFallback` and `scopeFallback` + enableFallback, + scopeFallback, extensions: Array.from( new Set([ ...config.resolver.sourceExts, @@ -96,19 +112,19 @@ export default function createFileMap(config: ConfigT, options?: CreateFileMapOp ...config.watcher.additionalExts, ]) ), - // NOTE(@kitten): Native find crawler support has been dropped - forceNodeFilesystemAPI: true, healthCheck: config.watcher.healthCheck, ignorePattern: getIgnorePattern(config), maxWorkers: config.maxWorkers, plugins, retainAllFiles: true, resetCache: config.resetCache, - rootDir: config.projectRoot, + rootDir: projectRoot, roots: config.watchFolders, useWatchman: config.resolver.useWatchman ??
false, watch, watchmanDeferStates: config.watcher.watchman.deferStates, + // NOTE: (@expo/metro-file-map fork) New option is required for `scopeFallback: true` checks + serverRoot, }); return { @@ -118,6 +134,10 @@ export default function createFileMap(config: ConfigT, options?: CreateFileMapOp }; } +function isDirectoryIn(targetPath: string, rootPath: string) { + return targetPath === rootPath || targetPath.startsWith(rootPath + path.sep); +} + function assertMetroFileMapPatched(metro: { getBundler(): any }): void { const depGraph = metro.getBundler().getBundler()?._depGraph; const fileMap = depGraph?._haste; diff --git a/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts b/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts index a56713c9f13ea3..041211d213b84e 100644 --- a/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts +++ b/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts @@ -193,6 +193,11 @@ export async function loadMetroConfigAsync( asWritable(config.transformer).extendsBabelConfigPath = config.transformer.enableBabelRCLookup !== false ? resolveBabelrcName(projectRoot) : undefined; + // On-Demand Filesystem is enabled by default + // TODO(@kitten): Add to config-types JSON schema + const onDemandFilesystem = (exp.experiments as any)?.onDemandFilesystem ?? true; + asWritable(config.resolver).unstable_onDemandFilesystem = onDemandFilesystem; + // NOTE(@kitten): `useWatchman` is currently enabled by default, but it also disables `forceNodeFilesystemAPI`. 
// If we instead set it to the special value `null`, it gets enables but also bypasses the "native find" codepath, // which is slower than just using the Node filesystem API diff --git a/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts b/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts index c2818b8e84ee62..d47ee9af4e4ac6 100644 --- a/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts +++ b/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts @@ -36,8 +36,6 @@ import { withMetroMutatedResolverContext, withMetroResolvers } from './withMetro import { withMetroSupervisingTransformWorker } from './withMetroSupervisingTransformWorker'; import { Log } from '../../../log'; import { env } from '../../../utils/env'; -import { CommandError } from '../../../utils/errors'; -import { resolveWatchFolders } from '../../../utils/resolveWatchFolders'; import { isServerEnvironment } from '../middleware/metroOptions'; import type { PlatformBundlers } from '../platformBundlers'; import { createTypescriptResolver } from './createTypescriptResolver'; @@ -869,7 +867,6 @@ export async function withMetroMultiPlatformAsync( config, exp, platformBundlers, - serverRoot, isTsconfigPathsEnabled, isAutolinkingResolverEnabled, @@ -895,46 +892,23 @@ export async function withMetroMultiPlatformAsync( const watchFolders = (config.watchFolders as string[]) || []; asWritable(config).watchFolders = watchFolders; + // NOTE(@kitten): If the on-demand filesystem is enabled, we can aggressively cut down the `watchFolders` + // to a minimum, since the files will be read lazily. This almost always speeds up exports + if (isExporting && !!config.resolver.unstable_onDemandFilesystem) { + watchFolders.length = 0; + watchFolders.push(projectRoot); + } + // Change the default metro-runtime to a custom one that supports bundle splitting. 
// NOTE(@kitten): This is now always active and EXPO_USE_METRO_REQUIRE / isNamedRequiresEnabled is disregarded const metroDefaults: typeof import('@expo/metro/metro-config/defaults/defaults') = require('@expo/metro/metro-config/defaults/defaults'); const metroRequirePolyfill = require.resolve('@expo/cli/build/metro-require/require'); - const metroOriginalModuleSystem = metroDefaults.moduleSystem; asWritable(metroDefaults).moduleSystem = metroRequirePolyfill; watchFolders.push(path.dirname(metroRequirePolyfill)); // Required for @expo/metro-runtime to format paths in the web LogBox. process.env.EXPO_PUBLIC_PROJECT_ROOT = process.env.EXPO_PUBLIC_PROJECT_ROOT ?? projectRoot; - // This is used for running Expo CLI in development against projects outside the monorepo. - // NOTE(@kitten): If `projectRoot` is used without `serverRoot` being available this can mistrigger for user monorepos! - if (!isDirectoryIn(__dirname, serverRoot ?? projectRoot)) { - let reactNativePolyfills: string[] = []; - - // Support web-only `expo start` - if (exp.platforms?.includes('ios') || exp.platforms?.includes('android')) { - try { - reactNativePolyfills = require('react-native/rn-get-polyfills')(); - watchFolders.push(...resolveWatchFolders('react-native', { deep: false })); - } catch (error) { - // If the project targets native platforms, react-native is required. - throw new CommandError( - 'REACT_NATIVE_NOT_FOUND', - 'Failed to resolve react-native. Make sure it is installed in the project dependencies. Remove native platforms from the Expo config if you do not intend to target native platforms.' - ); - } - } - - watchFolders.push( - ...resolveWatchFolders('expo', { deep: true }), - ...resolveWatchFolders('@expo/metro', { deep: true }), - ...resolveWatchFolders('@expo/metro-runtime', { deep: true }), - ...[config.resolver.emptyModulePath, metroOriginalModuleSystem, ...reactNativePolyfills] - .map((targetPath) => (fs.existsSync(targetPath) ? 
path.dirname(targetPath) : null)) - .filter((targetPath) => targetPath != null) - ); - } - let expoConfigPlatforms = Object.entries(platformBundlers) .filter( ([platform, bundler]) => bundler === 'metro' && exp.platforms?.includes(platform as Platform) @@ -966,10 +940,6 @@ export async function withMetroMultiPlatformAsync( }); } -function isDirectoryIn(targetPath: string, rootPath: string) { - return targetPath.startsWith(rootPath) && targetPath.length >= rootPath.length; -} - function hasExpoRouterModule( projectRoot: string, autolinkingModuleResolverInput: AutolinkingModuleResolverInput | undefined diff --git a/packages/@expo/cli/src/utils/resolveWatchFolders.ts b/packages/@expo/cli/src/utils/resolveWatchFolders.ts deleted file mode 100644 index fcc2149a8e2c79..00000000000000 --- a/packages/@expo/cli/src/utils/resolveWatchFolders.ts +++ /dev/null @@ -1,42 +0,0 @@ -import path from 'node:path'; - -// NOTE(@kitten): This is a heuristic and shouldn't trigger. However, if we erroneously start the watch folders -// traversal, we never want to create a situation where (for whatever reason) it gets stuck, -// or slows the startup down by an unreasonable amount -const MAX_DEPTH = 6; - -export function resolveWatchFolders(pkgName: string, { deep }: { deep: boolean }): string[] { - const seen = new Set(); - const folders = new Set(); - const recurse = (pkgName: string, fromPath: string | undefined = undefined, depth = 0) => { - if (seen.has(pkgName) || depth > MAX_DEPTH) { - return; - } else { - seen.add(pkgName); - } - let target: string; - try { - target = require.resolve(`${pkgName}/package.json`, { - paths: fromPath ? 
[fromPath] : undefined, - }); - } catch { - return; - } - let folder = path.dirname(path.dirname(target)); - if (pkgName[0] === '@') { - folder = path.dirname(folder); - } - folders.add(folder); - if (deep) { - const pkg = require(target); - if (pkg.dependencies != null && typeof pkg.dependencies === 'object') { - for (const pkgName in pkg.dependencies) recurse(pkgName, target, depth + 1); - } - if (pkg.peerDependencies != null && typeof pkg.peerDependencies === 'object') { - for (const pkgName in pkg.peerDependencies) recurse(pkgName, target, depth + 1); - } - } - }; - recurse(pkgName); - return [...folders]; -} diff --git a/packages/@expo/metro-file-map/CHANGELOG.md b/packages/@expo/metro-file-map/CHANGELOG.md index 932357fcee3406..1ae82013d25dfe 100644 --- a/packages/@expo/metro-file-map/CHANGELOG.md +++ b/packages/@expo/metro-file-map/CHANGELOG.md @@ -11,6 +11,7 @@ - Lazily stat files and populate symlinks for Node crawled file trees ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) - Pre-resolve symlink targets and store normal POSIX paths ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) +- Implement on-demand filesystem access controlled by `enableFallback` and `scopeFallback` ([#45391](https://github.com/expo/expo/pull/45391) by [@kitten](https://github.com/kitten)) ### 🐛 Bug fixes diff --git a/packages/@expo/metro-file-map/build/crawlers/node/fallback.d.ts b/packages/@expo/metro-file-map/build/crawlers/node/fallback.d.ts new file mode 100644 index 00000000000000..6763d31bd2f2b2 --- /dev/null +++ b/packages/@expo/metro-file-map/build/crawlers/node/fallback.d.ts @@ -0,0 +1,20 @@ +import { RootPathUtils } from '../../lib/RootPathUtils'; +import type { FallbackFilesystem, IgnoreMatcher } from '../../types'; +type FallbackFilesystemOptions = { + rootPathUtils: RootPathUtils; + extensions: readonly string[]; + ignore: IgnoreMatcher; + includeSymlinks: boolean; +}; +/** + * Create 
a FallbackFilesystem that synchronously queries the real filesystem. + * + * - `lookup` uses lstatSync to check a single path (for traversal). + * - `readdir` uses readdirSync to list directory contents (for enumeration). + * + * Both methods apply the same filtering as the node crawler: ignore patterns, + * extension filtering, and symlink inclusion. + */ +export default function createFallbackFilesystem(opts: FallbackFilesystemOptions): FallbackFilesystem; +export declare function shouldFallbackCrawlDir(canonicalPath: string): boolean; +export {}; diff --git a/packages/@expo/metro-file-map/build/crawlers/node/fallback.js b/packages/@expo/metro-file-map/build/crawlers/node/fallback.js new file mode 100644 index 00000000000000..6969e4aa9f3c4c --- /dev/null +++ b/packages/@expo/metro-file-map/build/crawlers/node/fallback.js @@ -0,0 +1,148 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = createFallbackFilesystem; +exports.shouldFallbackCrawlDir = shouldFallbackCrawlDir; +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const normalizePathSeparatorsToPosix_1 = __importDefault(require("../../lib/normalizePathSeparatorsToPosix")); +const readdirMarker = Symbol.for('fallbackDir'); +function markDir(dirNode) { + dirNode[readdirMarker] = true; +} +function isMarkedDir(dirNode) { + return !!dirNode[readdirMarker]; +} +function isDirectory(node) { + return node instanceof Map; +} +/** + * Create a FallbackFilesystem that synchronously queries the real filesystem. + * + * - `lookup` uses lstatSync to check a single path (for traversal). + * - `readdir` uses readdirSync to list directory contents (for enumeration). 
+ * + * Both methods apply the same filtering as the node crawler: ignore patterns, + * extension filtering, and symlink inclusion. + */ +function createFallbackFilesystem(opts) { + const { rootPathUtils, extensions, ignore, includeSymlinks } = opts; + const exts = extensions.reduce((acc, ext) => { + acc[ext] = true; + return acc; + }, {}); + function readdir(_normalPath, absolutePath, dirNode) { + if (dirNode != null && isMarkedDir(dirNode)) { + return dirNode; + } + let dirEntries; + try { + dirEntries = fs_1.default.readdirSync(absolutePath, { withFileTypes: true }); + } + catch { + return null; + } + const result = dirNode ?? new Map(); + for (const entry of dirEntries) { + const name = entry.name.toString(); + const childAbsolutePath = absolutePath + path_1.default.sep + name; + if (ignore(childAbsolutePath)) { + continue; + } + if (entry.isDirectory()) { + if (!result.has(name)) { + result.set(name, new Map()); + } + } + else if (entry.isSymbolicLink()) { + // We can skip reading the symlink target here, since it'll be read lazily + if (includeSymlinks && !result.has(name)) { + result.set(name, [null, 0, 0, null, 1, null]); + } + } + else if (entry.isFile()) { + const ext = path_1.default.extname(name).slice(1); + if (exts[ext] && !result.has(name)) { + result.set(name, [null, 0, 0, null, 0, null]); + } + } + } + markDir(result); + return result; + } + return { + readdir, + lookup(normalPath, absolutePath, prevNode) { + if (ignore(absolutePath)) { + return null; + } + let stat; + try { + stat = fs_1.default.lstatSync(absolutePath); + } + catch { + return null; + } + if (stat.isDirectory()) { + const dirNode = isDirectory(prevNode) ? prevNode : null; + return shouldFallbackCrawlDir(absolutePath) + ? readdir(normalPath, absolutePath, dirNode) + : (dirNode ?? 
new Map()); + } + else if (stat.isSymbolicLink()) { + if (!includeSymlinks) { + return null; + } + try { + // We might as well read the symlink target here and assume it'll be used + const symlinkTarget = fs_1.default.readlinkSync(absolutePath); + // Cached value should be in posix format + const target = (0, normalizePathSeparatorsToPosix_1.default)(rootPathUtils.resolveSymlinkToNormal(normalPath, symlinkTarget)); + return [stat.mtime.getTime(), stat.size, 0, null, target, null]; + } + catch { + return null; + } + } + else if (stat.isFile()) { + // Check extension — symlinks bypass this check (same as node crawler) + const ext = path_1.default.extname(absolutePath).slice(1); + if (!exts[ext]) { + return null; + } + else { + return [stat.mtime.getTime(), stat.size, 0, null, 0, null]; + } + } + else { + return null; + } + }, + }; +} +// Whether a directory at the given canonical path should be eagerly +// populated via readdir. Returns false for directories that are typically +// too large (node_modules) or not useful (.git, .hg, etc.) to enumerate. +function shouldFallbackCrawlDir(canonicalPath) { + const lastSepIdx = canonicalPath.lastIndexOf(path_1.default.sep); + const baseStart = lastSepIdx + 1; + const baseLen = canonicalPath.length - baseStart; + if (baseLen === 2 && + canonicalPath.charCodeAt(baseStart) === 46 /*'.'*/ && + canonicalPath.charCodeAt(baseStart + 1) === 46 /*'.'*/) { + // '..' is the parent-of-rootDir indirection, not a hidden directory. + return true; + } + else if (canonicalPath.charCodeAt(baseStart) === 46 /*'.'*/) { + // starts with '.' 
+ return false; + } + else if (baseLen === 12 && canonicalPath.startsWith('node_modules', baseStart)) { + return false; + } + else { + return true; + } +} diff --git a/packages/@expo/metro-file-map/build/index.d.ts b/packages/@expo/metro-file-map/build/index.d.ts index 1f8edd42dc6825..debfd71731019a 100644 --- a/packages/@expo/metro-file-map/build/index.d.ts +++ b/packages/@expo/metro-file-map/build/index.d.ts @@ -9,6 +9,7 @@ import type { BuildParameters, BuildResult, CacheData, CacheManagerFactory, Chan export type { BuildParameters, BuildResult, CacheData, ChangeEventMetadata, FileData, FileMap, FileSystem, HasteMapData, HasteMapItem, InputFileMapPlugin, }; export interface InputOptions { readonly computeSha1?: boolean | undefined | null; + readonly enableFallback?: boolean | undefined | null; readonly enableSymlinks?: boolean | undefined | null; readonly extensions: readonly string[]; readonly forceNodeFilesystemAPI?: boolean | undefined | null; @@ -17,6 +18,8 @@ export interface InputOptions { readonly retainAllFiles: boolean; readonly rootDir: string; readonly roots: readonly string[]; + readonly scopeFallback?: boolean | undefined | null; + readonly serverRoot?: string | undefined | null; readonly cacheManagerFactory?: CacheManagerFactory | undefined | null; readonly console?: Console; readonly healthCheck: HealthCheckOptions; diff --git a/packages/@expo/metro-file-map/build/index.js b/packages/@expo/metro-file-map/build/index.js index f4a7abb9cc08bb..912978d405f05b 100644 --- a/packages/@expo/metro-file-map/build/index.js +++ b/packages/@expo/metro-file-map/build/index.js @@ -51,6 +51,7 @@ const perf_hooks_1 = require("perf_hooks"); const Watcher_1 = require("./Watcher"); const DiskCacheManager_1 = require("./cache/DiskCacheManager"); const constants_1 = __importDefault(require("./constants")); +const fallback_1 = __importDefault(require("./crawlers/node/fallback")); const FileProcessor_1 = require("./lib/FileProcessor"); const FileSystemChangeAggregator_1 
= require("./lib/FileSystemChangeAggregator"); const RootPathUtils_1 = require("./lib/RootPathUtils"); @@ -218,6 +219,8 @@ class FileMap extends events_1.default { } } this.#plugins = indexedPlugins; + const enableFallback = options.enableFallback ?? true; + const scopeFallback = options.scopeFallback ?? true; const buildParameters = { cacheBreaker: CACHE_BREAKER, computeSha1: options.computeSha1 || false, @@ -238,6 +241,9 @@ class FileMap extends events_1.default { useWatchman: options.useWatchman ?? false, watch: !!options.watch, watchmanDeferStates: options.watchmanDeferStates ?? [], + enableFallback, + scopeFallback: enableFallback && scopeFallback, + serverRoot: options.serverRoot, }; const cacheFactoryOptions = { buildParameters, @@ -284,6 +290,16 @@ class FileMap extends events_1.default { this.emit('metadata'); return result?.content; }; + const fallbackFilesystem = this.#options.enableFallback + ? (0, fallback_1.default)({ + rootPathUtils: this.#pathUtils, + extensions: this.#options.extensions, + ignore: (filePath) => this.#options.ignorePattern.test(filePath), + includeSymlinks: this.#options.enableSymlinks, + }) + : null; + const { roots } = this.#options; + const serverRoot = this.#options.scopeFallback ? this.#options.serverRoot : null; const fileSystem = initialData != null ? 
TreeFS_1.default.fromDeserializedSnapshot({ // Typed `mixed` because we've read this from an external @@ -292,8 +308,17 @@ class FileMap extends events_1.default { fileSystemData: initialData.fileSystemData, processFile, rootDir, + fallbackFilesystem, + roots, + serverRoot, }) - : new TreeFS_1.default({ processFile, rootDir }); + : new TreeFS_1.default({ + processFile, + rootDir, + fallbackFilesystem, + roots, + serverRoot, + }); this.#startupPerfLogger?.point('constructFileSystem_end'); const plugins = this.#plugins; // Initialize plugins from cached file system and plugin state while @@ -436,8 +461,10 @@ class FileMap extends events_1.default { filesToProcess.push([normalFilePath, fileData]); } else if (fileData[constants_1.default.MTIME] != null && fileData[constants_1.default.MTIME] !== 0) { - // The symlink will only be updated, if it's been accessed before - // If this is a newly crawled entry, it's skipped + // Symlink was previously resolved and its mtime changed — resolve + // eagerly to update the cached target. Symlinks with null mtime + // (cold start or never accessed) are deferred to lazy resolution + // in TreeFS.#resolveSymlinkTargetToNormalPath. 
const maybeReadLink = this.#maybeReadLink(normalFilePath, fileData); if (maybeReadLink) { readLinkPromises.push(maybeReadLink.catch((error) => { diff --git a/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts b/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts index bd6eb7cdca906f..22e8884de37698 100644 --- a/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts +++ b/packages/@expo/metro-file-map/build/lib/RootPathUtils.d.ts @@ -20,3 +20,5 @@ export declare class RootPathUtils { }; relative(from: string, to: string): string; } +export declare function getAncestorOfRootIdx(normalPath: string): number; +export declare function pathsToPattern(paths: readonly string[], pathUtils: RootPathUtils): RegExp | null; diff --git a/packages/@expo/metro-file-map/build/lib/RootPathUtils.js b/packages/@expo/metro-file-map/build/lib/RootPathUtils.js index fee7dfdcf8a04c..ad76b0330b39bf 100644 --- a/packages/@expo/metro-file-map/build/lib/RootPathUtils.js +++ b/packages/@expo/metro-file-map/build/lib/RootPathUtils.js @@ -10,6 +10,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); exports.RootPathUtils = void 0; +exports.getAncestorOfRootIdx = getAncestorOfRootIdx; +exports.pathsToPattern = pathsToPattern; const invariant_1 = __importDefault(require("invariant")); const path_1 = __importDefault(require("path")); const normalizePathSeparatorsToSystem_1 = __importDefault(require("./normalizePathSeparatorsToSystem")); @@ -274,3 +276,36 @@ class RootPathUtils { } } exports.RootPathUtils = RootPathUtils; +function getAncestorOfRootIdx(normalPath) { + let pos = 0; + while (normalPath.startsWith(UP_FRAGMENT_SEP, pos)) { + pos += UP_FRAGMENT_SEP_LENGTH; + } + if (normalPath.length === pos + 2 && + normalPath.charCodeAt(pos) === 46 && + normalPath.charCodeAt(pos + 1) === 46) { + return pos / UP_FRAGMENT_SEP_LENGTH + 1; + } + return pos / UP_FRAGMENT_SEP_LENGTH; +} +function 
pathsToPattern(paths, pathUtils) { + if (paths.length === 0) { + return null; + } + const pathsPatterns = paths.map((input) => { + let pattern = pathUtils.absoluteToNormal(input); + // When pattern is '' (root === rootDir), match any normal path that + // doesn't escape the root via '..' indirections. + if (pattern === '') { + return `(?!\\.\\.(?:\\${path_1.default.sep}|$))`; + } + // Append separator so that 'src' matches 'src/foo' but not 'src2'. + if (!pattern.endsWith(path_1.default.sep)) { + pattern += path_1.default.sep; + } + // Escape all regex-special characters. + // eslint-disable-next-line no-useless-escape + return pattern.replace(/[\-\[\]\{\}\(\)\*\+\?\.\\\^\$\|\/]/g, '\\$&'); + }); + return new RegExp(`^(?:${pathsPatterns.join('|')})`); +} diff --git a/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts b/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts index a42b73e39ba5a3..b8839a51ba2157 100644 --- a/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts +++ b/packages/@expo/metro-file-map/build/lib/TreeFS.d.ts @@ -4,19 +4,25 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*/ -import type { CacheData, FileData, FileMetadata, FileStats, FileSystemListener, LookupResult, MutableFileSystem, Path, ProcessFileFunction } from '../types'; -type DirectoryNode = Map; +import type { CacheData, FallbackFilesystem, FileData, FileMetadata, FileStats, FileSystemListener, LookupResult, MutableFileSystem, Path, ProcessFileFunction } from '../types'; +type DirectoryNode = Map; type FileNode = FileMetadata; type MixedNode = FileNode | DirectoryNode; interface DeserializedSnapshotInput { rootDir: string; fileSystemData: DirectoryNode; processFile: ProcessFileFunction; + fallbackFilesystem?: FallbackFilesystem | null | undefined; + roots?: readonly string[]; + serverRoot?: string | null | undefined; } interface TreeFSOptions { rootDir: Path; files?: FileData; processFile: ProcessFileFunction; + fallbackFilesystem?: FallbackFilesystem | null | undefined; + roots?: readonly string[]; + serverRoot?: string | null | undefined; } interface MatchFilesOptions { readonly filter?: RegExp | null; diff --git a/packages/@expo/metro-file-map/build/lib/TreeFS.js b/packages/@expo/metro-file-map/build/lib/TreeFS.js index 7fa238df8b2e52..703ef4681af243 100644 --- a/packages/@expo/metro-file-map/build/lib/TreeFS.js +++ b/packages/@expo/metro-file-map/build/lib/TreeFS.js @@ -15,12 +15,13 @@ const path_1 = __importDefault(require("path")); const constants_1 = __importDefault(require("../constants")); const normalizePathSeparatorsToPosix_1 = __importDefault(require("./normalizePathSeparatorsToPosix")); const normalizePathSeparatorsToSystem_1 = __importDefault(require("./normalizePathSeparatorsToSystem")); +const fallback_1 = require("../crawlers/node/fallback"); const RootPathUtils_1 = require("./RootPathUtils"); function isDirectory(node) { return node instanceof Map; } function isRegularFile(node) { - return node[constants_1.default.SYMLINK] === 0; + return node != null && node[constants_1.default.SYMLINK] === 0; } /** * OVERVIEW: @@ -72,25 +73,42 @@ function 
isRegularFile(node) { * a trailing slash */ class TreeFS { + #fallbackBoundaryDepth; + #fallbackFilesystem; #pathUtils; #processFile; #rootDir; + #rootPattern; #rootNode = new Map(); constructor(opts) { - const { rootDir, files, processFile } = opts; + const { rootDir, files, processFile, fallbackFilesystem, roots, serverRoot } = opts; this.#rootDir = rootDir; this.#pathUtils = new RootPathUtils_1.RootPathUtils(rootDir); this.#processFile = processFile; + this.#fallbackFilesystem = fallbackFilesystem ?? null; + if (serverRoot != null) { + this.#fallbackBoundaryDepth = (0, RootPathUtils_1.getAncestorOfRootIdx)(this.#pathUtils.absoluteToNormal(serverRoot)); + } + else { + this.#fallbackBoundaryDepth = null; + } + this.#rootPattern = (0, RootPathUtils_1.pathsToPattern)(roots ?? [], this.#pathUtils); if (files != null) { this.bulkAddOrModify(files); } } getSerializableSnapshot() { - return this.#cloneTree(this.#rootNode); + return this.#cloneTree(this.#rootNode, ''); } static fromDeserializedSnapshot(args) { - const { rootDir, fileSystemData, processFile } = args; - const tfs = new TreeFS({ processFile, rootDir }); + const { rootDir, fileSystemData, processFile, fallbackFilesystem, roots, serverRoot } = args; + const tfs = new TreeFS({ + processFile, + rootDir, + fallbackFilesystem, + roots, + serverRoot, + }); tfs.#rootNode = fileSystemData; return tfs; } @@ -158,6 +176,7 @@ class TreeFS { getMtimeByNormalPath(normalPath) { const result = this.#lookupByNormalPath(normalPath, { followLeaf: false, + skipFallback: true, }); return result.exists && !isDirectory(result.node) ? result.node[constants_1.default.MTIME] : null; } @@ -446,23 +465,41 @@ class TreeFS { } if (segmentNode == null) { if (opts.makeDirectories !== true && segmentName !== '..') { - return { - canonicalMissingPath: isLastSegment - ? 
targetNormalPath - : targetNormalPath.slice(0, fromIdx - 1), - exists: false, - missingSegmentName: segmentName, - }; + if (!opts.skipFallback && this.#fallbackFilesystem != null) { + const parentEnd = isLastSegment + ? fromIdx - segmentName.length - 1 + : fromIdx - segmentName.length - 2; + const parentCanonicalPath = parentEnd > 0 ? targetNormalPath.slice(0, parentEnd) : ''; + segmentNode = this.#populateFromFilesystem(parentNode, segmentName, parentCanonicalPath); + if (segmentNode != null) { + ancestorOfRootIdx = null; + } + } + if (segmentNode == null) { + return { + canonicalMissingPath: isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1), + exists: false, + missingSegmentName: segmentName, + }; + } } - segmentNode = new Map(); - if (opts.makeDirectories === true) { - if (changeListener != null) { - const canonicalPath = isLastSegment - ? targetNormalPath - : targetNormalPath.slice(0, fromIdx - 1); - changeListener.directoryAdded(canonicalPath); + if (segmentNode == null) { + segmentNode = new Map(); + if (opts.makeDirectories === true) { + if (changeListener != null) { + const canonicalPath = isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1); + changeListener.directoryAdded(canonicalPath); + } + parentNode.set(segmentName, segmentNode); + } + else if (!opts.skipFallback && + this.#fallbackFilesystem != null) { + parentNode.set(segmentName, segmentNode); } - parentNode.set(segmentName, segmentNode); } } // We are done if... @@ -749,7 +786,10 @@ class TreeFS { } *#metadataIterator(rootNode, opts, prefix = '') { for (const [name, node] of rootNode) { - if (!opts.includeNodeModules && isDirectory(node) && name === 'node_modules') { + if (node == null) { + continue; + } + else if (!opts.includeNodeModules && isDirectory(node) && name === 'node_modules') { continue; } const prefixedName = prefix === '' ? 
name : prefix + path_1.default.sep + name; @@ -779,8 +819,21 @@ *#pathIterator(iterationRootNode, iterationRootParentNode, ancestorOfRootIdx, opts, pathPrefix = '', followedLinks = new Set()) { const pathSep = opts.alwaysYieldPosix ? '/' : path_1.default.sep; const prefixWithSep = pathPrefix === '' ? pathPrefix : pathPrefix + pathSep; + // Optimization: We can attempt to eagerly populate directories we're visiting + // if they're missing and not accessing a parent ('..') + if (this.#fallbackFilesystem != null && + iterationRootNode.size === 0 && + pathPrefix !== '..' && + !pathPrefix.endsWith(pathSep + '..')) { + const canonicalRoot = opts.canonicalPathOfRoot; + const rootCanonical = pathPrefix === '' ? canonicalRoot : canonicalRoot + path_1.default.sep + pathPrefix; + this.#populateDirFromFilesystem(iterationRootNode, rootCanonical, false); + } for (const [name, node] of this.#directoryNodeIterator(iterationRootNode, iterationRootParentNode, ancestorOfRootIdx)) { - if (opts.subtreeOnly && name === '..') { + if (node == null) { + continue; + } + else if (opts.subtreeOnly && name === '..') { continue; } const nodePath = prefixWithSep + name; @@ -821,6 +874,15 @@ class TreeFS { } } else if (opts.recursive) { + // Optimization: We can attempt to eagerly populate directories we're visiting + // if they're missing and not accessing a parent ('..') + if (this.#fallbackFilesystem != null && node.size === 0 && name !== '..') { + const nodePathWithSystemSeparators = pathSep === path_1.default.sep ? nodePath : nodePath.replaceAll(pathSep, path_1.default.sep); + const canonicalPath = opts.canonicalPathOfRoot === '' + ? nodePathWithSystemSeparators + : opts.canonicalPathOfRoot + path_1.default.sep + nodePathWithSystemSeparators; + this.#populateDirFromFilesystem(node, canonicalPath, false); + } yield* this.#pathIterator(node, iterationRootParentNode, ancestorOfRootIdx != null && ancestorOfRootIdx > 0 ?
ancestorOfRootIdx - 1 : null, opts, nodePath, followedLinks); } } @@ -862,11 +924,17 @@ class TreeFS { } return result.node; } - #cloneTree(root) { + #cloneTree(root, prefix) { const clone = new Map(); for (const [name, node] of root) { - if (isDirectory(node)) { - clone.set(name, this.#cloneTree(node)); + if (node == null) { + continue; + } + else if (isDirectory(node)) { + const childPath = prefix === '' ? name : prefix + path_1.default.sep + name; + if (this.#rootPattern == null || this.#rootPattern.test(childPath + path_1.default.sep)) { + clone.set(name, this.#cloneTree(node, childPath)); + } } else { clone.set(name, [...node]); @@ -874,5 +942,65 @@ class TreeFS { } return clone; } + #isOutsideFallbackBoundary(canonicalPath) { + const maxDepth = this.#fallbackBoundaryDepth; + return maxDepth != null && (0, RootPathUtils_1.getAncestorOfRootIdx)(canonicalPath) > maxDepth; + } + /** + * Synchronously populate a missing tree node by querying the injected + * fallback filesystem. The fallback returns tree-compatible nodes + * (FileMetadata tuples or directory Maps) that are inserted directly. + * + * Returns the newly created node, or null if the path doesn't exist on disk. + */ + #populateFromFilesystem(parentNode, segmentName, parentCanonicalPath) { + const fallback = this.#fallbackFilesystem; + if (fallback == null) { + return null; + } + const childCanonicalPath = parentCanonicalPath === '' ? segmentName : parentCanonicalPath + path_1.default.sep + segmentName; + if (this.#rootPattern?.test(childCanonicalPath + path_1.default.sep) || + this.#isOutsideFallbackBoundary(childCanonicalPath)) { + return null; + } + else if (parentCanonicalPath !== '' && (0, fallback_1.shouldFallbackCrawlDir)(parentCanonicalPath)) { + this.#populateDirFromFilesystem(parentNode, parentCanonicalPath, true); + return parentNode.get(segmentName) ?? null; + } + else if (parentNode.has(segmentName)) { + return parentNode.get(segmentName) ?? 
null; + } + else { + const parentAbsolute = this.#pathUtils.normalToAbsolute(parentCanonicalPath); + const absolutePath = parentAbsolute + path_1.default.sep + segmentName; + const node = fallback.lookup(childCanonicalPath, absolutePath, parentNode.get(segmentName)); + parentNode.set(segmentName, node); + return node; + } + } + /** + * Populate an existing (potentially empty sentinel) directory node from + * the filesystem. Used by #pathIterator to fill lazy directories before + * iteration, and by #populateFromFilesystem for optimistic parent + * population. + */ + #populateDirFromFilesystem(dirNode, canonicalPath, skipCheck) { + const fallback = this.#fallbackFilesystem; + if (fallback == null || + (!skipCheck && + (this.#rootPattern?.test(canonicalPath + path_1.default.sep) || + this.#isOutsideFallbackBoundary(canonicalPath)))) { + return; + } + const absolutePath = this.#pathUtils.normalToAbsolute(canonicalPath); + const entries = fallback.readdir(canonicalPath, absolutePath, dirNode); + if (entries != null && entries !== dirNode) { + for (const [name, entry] of entries) { + if (!dirNode.has(name)) { + dirNode.set(name, entry); + } + } + } + } } exports.default = TreeFS; diff --git a/packages/@expo/metro-file-map/build/types.d.ts b/packages/@expo/metro-file-map/build/types.d.ts index ff61f2372bb77b..f2ac3a051fd489 100644 --- a/packages/@expo/metro-file-map/build/types.d.ts +++ b/packages/@expo/metro-file-map/build/types.d.ts @@ -330,6 +330,12 @@ export interface MutableFileSystem extends FileSystem { bulkAddOrModify(addedOrModifiedFiles: FileData, listener?: FileSystemListener | undefined): void; } export type Path = string; +type DirectoryNode = Map; +type MixedNode = FileMetadata | DirectoryNode; +export interface FallbackFilesystem { + lookup(normalPath: Path, absolutePath: string, prevNode: MixedNode | null | undefined): MixedNode | null; + readdir(normalPath: Path, absolutePath: string, dirNode: DirectoryNode | null | undefined): DirectoryNode | null; +} 
export type ProcessFileFunction = (normalPath: string, metadata: FileMetadata, request: Readonly<{ computeSha1: boolean; }>) => Promise; diff --git a/packages/@expo/metro-file-map/src/crawlers/node/__tests__/fallback.test.ts b/packages/@expo/metro-file-map/src/crawlers/node/__tests__/fallback.test.ts new file mode 100644 index 00000000000000..2c9b641894c816 --- /dev/null +++ b/packages/@expo/metro-file-map/src/crawlers/node/__tests__/fallback.test.ts @@ -0,0 +1,271 @@ +import { vol } from 'memfs'; + +import { RootPathUtils } from '../../../lib/RootPathUtils'; +import type { FallbackFilesystem } from '../../../types'; +import createFallbackFilesystem from '../fallback'; + +const rootDir = '/project'; + +function createFallback( + overrides: { + extensions?: string[]; + ignore?: (path: string) => boolean; + includeSymlinks?: boolean; + } = {} +): FallbackFilesystem { + return createFallbackFilesystem({ + rootPathUtils: new RootPathUtils(rootDir), + extensions: overrides.extensions ?? ['js', 'ts'], + ignore: overrides.ignore ?? (() => false), + includeSymlinks: overrides.includeSymlinks ?? 
true, + }); +} + +describe('createFallbackFilesystem', () => { + beforeEach(() => { + vol.reset(); + }); + + describe('lookup', () => { + test('returns file metadata for a regular file with matching extension', () => { + vol.fromJSON({ '/project/foo.js': 'content' }); + const fallback = createFallback(); + const node = fallback.lookup('foo.js', '/project/foo.js', undefined); + + expect(node).not.toBeNull(); + expect(Array.isArray(node)).toBe(true); + const meta = node as any[]; + expect(meta[0]).toBeGreaterThan(0); // mtime + expect(meta[4]).toBe(0); // SYMLINK = 0 for regular files + }); + + test('returns null for a file with non-matching extension', () => { + vol.fromJSON({ '/project/foo.txt': 'content' }); + const fallback = createFallback({ extensions: ['js'] }); + const node = fallback.lookup('foo.txt', '/project/foo.txt', undefined); + + expect(node).toBeNull(); + }); + + test('returns null for ignored paths', () => { + vol.fromJSON({ '/project/foo.js': 'content' }); + const fallback = createFallback({ + ignore: (p) => p.includes('foo'), + }); + const node = fallback.lookup('foo.js', '/project/foo.js', undefined); + + expect(node).toBeNull(); + }); + + test('returns null for nonexistent paths', () => { + const fallback = createFallback(); + const node = fallback.lookup('missing.js', '/project/missing.js', undefined); + + expect(node).toBeNull(); + }); + + test('returns a directory Map for a directory', () => { + vol.fromJSON({ '/project/dir/file.js': 'content' }); + const fallback = createFallback(); + const node = fallback.lookup('dir', '/project/dir', undefined); + + expect(node).toBeInstanceOf(Map); + // Should be populated (shouldFallbackCrawlDir returns true for 'dir') + expect((node as Map).has('file.js')).toBe(true); + }); + + test('returns empty Map for directories that should not be crawled', () => { + vol.fromJSON({ '/project/node_modules/pkg/index.js': 'content' }); + const fallback = createFallback(); + const node = fallback.lookup('node_modules', 
'/project/node_modules', undefined); + + expect(node).toBeInstanceOf(Map); + expect((node as Map).size).toBe(0); + }); + + test('returns symlink metadata with resolved target', () => { + vol.fromJSON({ '/project/target.js': 'content' }); + vol.symlinkSync('/project/target.js', '/project/link.js'); + const fallback = createFallback(); + const node = fallback.lookup('link.js', '/project/link.js', undefined); + + expect(node).not.toBeNull(); + const meta = node as any[]; + expect(typeof meta[4]).toBe('string'); // SYMLINK = target path + expect(meta[4]).toBe('target.js'); + }); + + test('returns null for symlinks when includeSymlinks is false', () => { + vol.fromJSON({ '/project/target.js': 'content' }); + vol.symlinkSync('/project/target.js', '/project/link.js'); + const fallback = createFallback({ includeSymlinks: false }); + const node = fallback.lookup('link.js', '/project/link.js', undefined); + + expect(node).toBeNull(); + }); + + test('preserves existing directory Map entries', () => { + vol.fromJSON({ + '/project/dir/new.js': 'new', + '/project/dir/existing.js': 'existing', + }); + const fallback = createFallback(); + const existing = new Map([['existing.js', [999, 5, 0, null, 0, null] as any]]); + const node = fallback.lookup('dir', '/project/dir', existing); + + expect(node).toBeInstanceOf(Map); + const dir = node as Map; + // Existing entry preserved (not overwritten) + expect(dir.get('existing.js')?.[0]).toBe(999); + // New entry added + expect(dir.has('new.js')).toBe(true); + }); + }); + + describe('readdir', () => { + test('returns directory entries filtered by extension', () => { + vol.fromJSON({ + '/project/src/a.js': 'a', + '/project/src/b.ts': 'b', + '/project/src/c.txt': 'c', + }); + const fallback = createFallback({ extensions: ['js', 'ts'] }); + const result = fallback.readdir('src', '/project/src', undefined); + + expect(result).toBeInstanceOf(Map); + expect(result!.has('a.js')).toBe(true); + expect(result!.has('b.ts')).toBe(true); + 
expect(result!.has('c.txt')).toBe(false); + }); + + test('includes subdirectories as empty Maps', () => { + vol.fromJSON({ + '/project/src/sub/file.js': 'content', + }); + const fallback = createFallback(); + const result = fallback.readdir('src', '/project/src', undefined); + + expect(result).toBeInstanceOf(Map); + expect(result!.get('sub')).toBeInstanceOf(Map); + }); + + test('includes symlinks with unresolved marker when includeSymlinks is true', () => { + vol.fromJSON({ '/project/src/target.js': 'target' }); + vol.symlinkSync('/project/src/target.js', '/project/src/link.js'); + const fallback = createFallback({ includeSymlinks: true }); + const result = fallback.readdir('src', '/project/src', undefined); + + expect(result).toBeInstanceOf(Map); + const linkMeta = result!.get('link.js') as any[]; + expect(linkMeta[4]).toBe(1); // unresolved symlink marker + }); + + test('skips symlinks when includeSymlinks is false', () => { + vol.fromJSON({ '/project/src/target.js': 'target' }); + vol.symlinkSync('/project/src/target.js', '/project/src/link.js'); + const fallback = createFallback({ includeSymlinks: false }); + const result = fallback.readdir('src', '/project/src', undefined); + + expect(result).toBeInstanceOf(Map); + expect(result!.has('link.js')).toBe(false); + }); + + test('applies ignore filter to children', () => { + vol.fromJSON({ + '/project/src/keep.js': 'keep', + '/project/src/skip.js': 'skip', + }); + const fallback = createFallback({ + ignore: (p) => p.includes('skip'), + }); + const result = fallback.readdir('src', '/project/src', undefined); + + expect(result!.has('keep.js')).toBe(true); + expect(result!.has('skip.js')).toBe(false); + }); + + test('returns null for nonexistent directory', () => { + const fallback = createFallback(); + const result = fallback.readdir('missing', '/project/missing', undefined); + + expect(result).toBeNull(); + }); + + test('does not re-read an already marked directory', () => { + vol.fromJSON({ '/project/src/a.js': 'a' 
}); + const fallback = createFallback(); + + // First call populates and marks + const first = fallback.readdir('src', '/project/src', undefined); + expect(first).toBeInstanceOf(Map); + + // Mutate the vol — but the marked dir should be returned as-is + vol.fromJSON({ '/project/src/a.js': 'a', '/project/src/b.js': 'b' }); + const second = fallback.readdir('src', '/project/src', first); + expect(second).toBe(first); // same reference + expect(second!.has('b.js')).toBe(false); // not re-read + }); + + test('does not overwrite existing entries in provided dirNode', () => { + vol.fromJSON({ + '/project/src/file.js': 'new', + }); + const fallback = createFallback(); + const existing = new Map([['file.js', [999, 5, 0, null, 0, null] as any]]); + const result = fallback.readdir('src', '/project/src', existing); + + expect(result!.get('file.js')?.[0]).toBe(999); // preserved + }); + }); +}); + +// shouldFallbackCrawlDir is the only platform-sensitive function (uses path.sep). +// Test it with both win32 and posix separators. +describe.each([['win32'], ['posix']] as const)('shouldFallbackCrawlDir on %s', (platform) => { + let mockPathModule: typeof import('path'); + let shouldFallbackCrawlDir: typeof import('../fallback').shouldFallbackCrawlDir; + + const p = (filePath: string): string => + platform === 'win32' ? filePath.replace(/\//g, '\\').replace(/^\\/, 'C:\\') : filePath; + + beforeAll(() => { + mockPathModule = jest.requireActual('path')[platform]; + }); + + beforeEach(() => { + jest.resetModules(); + jest.doMock('path', () => mockPathModule); + shouldFallbackCrawlDir = require('../fallback').shouldFallbackCrawlDir; + }); + + afterEach(() => { + jest.dontMock('path'); + }); + + test('returns true for regular directories', () => { + expect(shouldFallbackCrawlDir(p('/project/src'))).toBe(true); + expect(shouldFallbackCrawlDir(p('/project/lib'))).toBe(true); + }); + + test('returns true for ".." 
(parent indirection)', () => { + expect(shouldFallbackCrawlDir(p('/project/..'))).toBe(true); + expect(shouldFallbackCrawlDir('..')).toBe(true); + }); + + test('returns false for dot-prefixed directories', () => { + expect(shouldFallbackCrawlDir(p('/project/.git'))).toBe(false); + expect(shouldFallbackCrawlDir(p('/project/.hg'))).toBe(false); + expect(shouldFallbackCrawlDir(p('/project/.cache'))).toBe(false); + expect(shouldFallbackCrawlDir('.hidden')).toBe(false); + }); + + test('returns false for node_modules', () => { + expect(shouldFallbackCrawlDir(p('/project/node_modules'))).toBe(false); + expect(shouldFallbackCrawlDir('node_modules')).toBe(false); + }); + + test('returns true for directories containing "node_modules" as a substring', () => { + expect(shouldFallbackCrawlDir(p('/project/not_node_modules'))).toBe(true); + expect(shouldFallbackCrawlDir(p('/project/node_modules_extra'))).toBe(true); + }); +}); diff --git a/packages/@expo/metro-file-map/src/crawlers/node/fallback.ts b/packages/@expo/metro-file-map/src/crawlers/node/fallback.ts new file mode 100644 index 00000000000000..198cea043b26a0 --- /dev/null +++ b/packages/@expo/metro-file-map/src/crawlers/node/fallback.ts @@ -0,0 +1,174 @@ +import fs from 'fs'; +import path from 'path'; + +import { RootPathUtils } from '../../lib/RootPathUtils'; +import normalizePathSeparatorsToPosix from '../../lib/normalizePathSeparatorsToPosix'; +import type { FallbackFilesystem, FileMetadata, IgnoreMatcher } from '../../types'; + +type DirectoryNode = Map; +type FileNode = FileMetadata; +type MixedNode = FileNode | DirectoryNode; + +type FallbackFilesystemOptions = { + rootPathUtils: RootPathUtils; + extensions: readonly string[]; + ignore: IgnoreMatcher; + includeSymlinks: boolean; +}; + +const readdirMarker = Symbol.for('fallbackDir'); + +function markDir(dirNode: any) { + dirNode[readdirMarker] = true; +} + +function isMarkedDir(dirNode: any) { + return !!dirNode[readdirMarker]; +} + +function isDirectory(node: 
MixedNode | null | undefined): node is DirectoryNode { + return node instanceof Map; +} + +/** + * Create a FallbackFilesystem that synchronously queries the real filesystem. + * + * - `lookup` uses lstatSync to check a single path (for traversal). + * - `readdir` uses readdirSync to list directory contents (for enumeration). + * + * Both methods apply the same filtering as the node crawler: ignore patterns, + * extension filtering, and symlink inclusion. + */ +export default function createFallbackFilesystem( + opts: FallbackFilesystemOptions +): FallbackFilesystem { + const { rootPathUtils, extensions, ignore, includeSymlinks } = opts; + + const exts = extensions.reduce( + (acc, ext) => { + acc[ext] = true; + return acc; + }, + {} as Record + ); + + function readdir( + _normalPath: string, + absolutePath: string, + dirNode: DirectoryNode | null | undefined + ): DirectoryNode | null { + if (dirNode != null && isMarkedDir(dirNode)) { + return dirNode; + } + let dirEntries; + try { + dirEntries = fs.readdirSync(absolutePath, { withFileTypes: true }); + } catch { + return null; + } + const result = dirNode ?? 
new Map(); + for (const entry of dirEntries) { + const name = entry.name.toString(); + const childAbsolutePath = absolutePath + path.sep + name; + + if (ignore(childAbsolutePath)) { + continue; + } + + if (entry.isDirectory()) { + if (!result.has(name)) { + result.set(name, new Map()); + } + } else if (entry.isSymbolicLink()) { + // We can skip reading the symlink target here, since it'll be read lazily + if (includeSymlinks && !result.has(name)) { + result.set(name, [null, 0, 0, null, 1, null]); + } + } else if (entry.isFile()) { + const ext = path.extname(name).slice(1); + if (exts[ext] && !result.has(name)) { + result.set(name, [null, 0, 0, null, 0, null]); + } + } + } + markDir(result); + return result; + } + + return { + readdir, + + lookup( + normalPath: string, + absolutePath: string, + prevNode: MixedNode | null | undefined + ): MixedNode | null { + if (ignore(absolutePath)) { + return null; + } + + let stat; + try { + stat = fs.lstatSync(absolutePath); + } catch { + return null; + } + + if (stat.isDirectory()) { + const dirNode = isDirectory(prevNode) ? prevNode : null; + return shouldFallbackCrawlDir(absolutePath) + ? readdir(normalPath, absolutePath, dirNode) + : (dirNode ?? 
new Map()); + } else if (stat.isSymbolicLink()) { + if (!includeSymlinks) { + return null; + } + try { + // We might as well read the symlink target here and assume it'll be used + const symlinkTarget = fs.readlinkSync(absolutePath); + // Cached value should be in posix format + const target = normalizePathSeparatorsToPosix( + rootPathUtils.resolveSymlinkToNormal(normalPath, symlinkTarget) + ); + return [stat.mtime.getTime(), stat.size, 0, null, target, null]; + } catch { + return null; + } + } else if (stat.isFile()) { + // Check extension — symlinks bypass this check (same as node crawler) + const ext = path.extname(absolutePath).slice(1); + if (!exts[ext]) { + return null; + } else { + return [stat.mtime.getTime(), stat.size, 0, null, 0, null]; + } + } else { + return null; + } + }, + }; +} + +// Whether a directory at the given canonical path should be eagerly +// populated via readdir. Returns false for directories that are typically +// too large (node_modules) or not useful (.git, .hg, etc.) to enumerate. +export function shouldFallbackCrawlDir(canonicalPath: string): boolean { + const lastSepIdx = canonicalPath.lastIndexOf(path.sep); + const baseStart = lastSepIdx + 1; + const baseLen = canonicalPath.length - baseStart; + if ( + baseLen === 2 && + canonicalPath.charCodeAt(baseStart) === 46 /*'.'*/ && + canonicalPath.charCodeAt(baseStart + 1) === 46 /*'.'*/ + ) { + // '..' is the parent-of-rootDir indirection, not a hidden directory. + return true; + } else if (canonicalPath.charCodeAt(baseStart) === 46 /*'.'*/) { + // starts with '.' 
+ return false; + } else if (baseLen === 12 && canonicalPath.startsWith('node_modules', baseStart)) { + return false; + } else { + return true; + } +} diff --git a/packages/@expo/metro-file-map/src/index.ts b/packages/@expo/metro-file-map/src/index.ts index 45699a2a56bd03..0674742627e739 100644 --- a/packages/@expo/metro-file-map/src/index.ts +++ b/packages/@expo/metro-file-map/src/index.ts @@ -14,6 +14,7 @@ import { performance } from 'perf_hooks'; import { Watcher } from './Watcher'; import { DiskCacheManager } from './cache/DiskCacheManager'; import H from './constants'; +import createFallbackFilesystem from './crawlers/node/fallback'; import { FileProcessor } from './lib/FileProcessor'; import { FileSystemChangeAggregator } from './lib/FileSystemChangeAggregator'; import { RootPathUtils } from './lib/RootPathUtils'; @@ -72,6 +73,7 @@ export type { export interface InputOptions { readonly computeSha1?: boolean | undefined | null; + readonly enableFallback?: boolean | undefined | null; readonly enableSymlinks?: boolean | undefined | null; readonly extensions: readonly string[]; readonly forceNodeFilesystemAPI?: boolean | undefined | null; @@ -80,7 +82,8 @@ export interface InputOptions { readonly retainAllFiles: boolean; readonly rootDir: string; readonly roots: readonly string[]; - + readonly scopeFallback?: boolean | undefined | null; + readonly serverRoot?: string | undefined | null; readonly cacheManagerFactory?: CacheManagerFactory | undefined | null; readonly console?: Console; readonly healthCheck: HealthCheckOptions; @@ -101,6 +104,9 @@ interface HealthCheckOptions { } interface InternalOptions extends BuildParameters { + readonly enableFallback: boolean; + readonly scopeFallback: boolean; + readonly serverRoot: string | undefined | null; readonly healthCheck: HealthCheckOptions; readonly perfLoggerFactory: PerfLoggerFactory | undefined | null; readonly resetCache: boolean | undefined | null; @@ -307,6 +313,9 @@ export default class FileMap extends 
EventEmitter { } this.#plugins = indexedPlugins; + const enableFallback = options.enableFallback ?? true; + const scopeFallback = options.scopeFallback ?? true; + const buildParameters: BuildParameters = { cacheBreaker: CACHE_BREAKER, computeSha1: options.computeSha1 || false, @@ -328,6 +337,9 @@ export default class FileMap extends EventEmitter { useWatchman: options.useWatchman ?? false, watch: !!options.watch, watchmanDeferStates: options.watchmanDeferStates ?? [], + enableFallback, + scopeFallback: enableFallback && scopeFallback, + serverRoot: options.serverRoot, }; const cacheFactoryOptions: CacheManagerFactoryOptions = { @@ -378,6 +390,16 @@ export default class FileMap extends EventEmitter { this.emit('metadata'); return result?.content; }; + const fallbackFilesystem = this.#options.enableFallback + ? createFallbackFilesystem({ + rootPathUtils: this.#pathUtils, + extensions: this.#options.extensions, + ignore: (filePath) => this.#options.ignorePattern.test(filePath), + includeSymlinks: this.#options.enableSymlinks, + }) + : null; + const { roots } = this.#options; + const serverRoot = this.#options.scopeFallback ? this.#options.serverRoot : null; const fileSystem = initialData != null ? 
TreeFS.fromDeserializedSnapshot({ @@ -387,8 +409,17 @@ export default class FileMap extends EventEmitter { fileSystemData: initialData.fileSystemData as any, processFile, rootDir, + fallbackFilesystem, + roots, + serverRoot, }) - : new TreeFS({ processFile, rootDir }); + : new TreeFS({ + processFile, + rootDir, + fallbackFilesystem, + roots, + serverRoot, + }); this.#startupPerfLogger?.point('constructFileSystem_end'); const plugins = this.#plugins; @@ -579,8 +610,10 @@ export default class FileMap extends EventEmitter { if (fileData[H.SYMLINK] === 0) { filesToProcess.push([normalFilePath, fileData]); } else if (fileData[H.MTIME] != null && fileData[H.MTIME] !== 0) { - // The symlink will only be updated, if it's been accessed before - // If this is a newly crawled entry, it's skipped + // Symlink was previously resolved and its mtime changed — resolve + // eagerly to update the cached target. Symlinks with null mtime + // (cold start or never accessed) are deferred to lazy resolution + // in TreeFS.#resolveSymlinkTargetToNormalPath. 
const maybeReadLink = this.#maybeReadLink(normalFilePath, fileData); if (maybeReadLink) { readLinkPromises.push( diff --git a/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts b/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts index fc287ff1254827..a1edbe58a6c183 100644 --- a/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts +++ b/packages/@expo/metro-file-map/src/lib/RootPathUtils.ts @@ -319,3 +319,40 @@ export class RootPathUtils { } } } + +export function getAncestorOfRootIdx(normalPath: string): number { + let pos = 0; + while (normalPath.startsWith(UP_FRAGMENT_SEP, pos)) { + pos += UP_FRAGMENT_SEP_LENGTH; + } + if ( + normalPath.length === pos + 2 && + normalPath.charCodeAt(pos) === 46 && + normalPath.charCodeAt(pos + 1) === 46 + ) { + return pos / UP_FRAGMENT_SEP_LENGTH + 1; + } + return pos / UP_FRAGMENT_SEP_LENGTH; +} + +export function pathsToPattern(paths: readonly string[], pathUtils: RootPathUtils): RegExp | null { + if (paths.length === 0) { + return null; + } + const pathsPatterns = paths.map((input) => { + let pattern = pathUtils.absoluteToNormal(input); + // When pattern is '' (root === rootDir), match any normal path that + // doesn't escape the root via '..' indirections. + if (pattern === '') { + return `(?!\\.\\.(?:\\${path.sep}|$))`; + } + // Append separator so that 'src' matches 'src/foo' but not 'src2'. + if (!pattern.endsWith(path.sep)) { + pattern += path.sep; + } + // Escape all regex-special characters. 
+ // eslint-disable-next-line no-useless-escape + return pattern.replace(/[\-\[\]\{\}\(\)\*\+\?\.\\\^\$\|\/]/g, '\\$&'); + }); + return new RegExp(`^(?:${pathsPatterns.join('|')})`); +} diff --git a/packages/@expo/metro-file-map/src/lib/TreeFS.ts b/packages/@expo/metro-file-map/src/lib/TreeFS.ts index d7e50dfeed2eff..e283049a424201 100644 --- a/packages/@expo/metro-file-map/src/lib/TreeFS.ts +++ b/packages/@expo/metro-file-map/src/lib/TreeFS.ts @@ -12,8 +12,10 @@ import path from 'path'; import H from '../constants'; import normalizePathSeparatorsToPosix from './normalizePathSeparatorsToPosix'; import normalizePathSeparatorsToSystem from './normalizePathSeparatorsToSystem'; +import { shouldFallbackCrawlDir } from '../crawlers/node/fallback'; import type { CacheData, + FallbackFilesystem, FileData, FileMetadata, FileStats, @@ -23,9 +25,9 @@ import type { Path, ProcessFileFunction, } from '../types'; -import { RootPathUtils } from './RootPathUtils'; +import { RootPathUtils, getAncestorOfRootIdx, pathsToPattern } from './RootPathUtils'; -type DirectoryNode = Map; +type DirectoryNode = Map; type FileNode = FileMetadata; type MixedNode = FileNode | DirectoryNode; @@ -33,20 +35,26 @@ function isDirectory(node: MixedNode | null | undefined): node is DirectoryNode return node instanceof Map; } -function isRegularFile(node: FileNode): boolean { - return node[H.SYMLINK] === 0; +function isRegularFile(node: FileNode | null | undefined): boolean { + return node != null && node[H.SYMLINK] === 0; } interface DeserializedSnapshotInput { rootDir: string; fileSystemData: DirectoryNode; processFile: ProcessFileFunction; + fallbackFilesystem?: FallbackFilesystem | null | undefined; + roots?: readonly string[]; + serverRoot?: string | null | undefined; } interface TreeFSOptions { rootDir: Path; files?: FileData; processFile: ProcessFileFunction; + fallbackFilesystem?: FallbackFilesystem | null | undefined; + roots?: readonly string[]; + serverRoot?: string | null | undefined; } 
interface MatchFilesOptions { @@ -119,28 +127,46 @@ interface MetadataIteratorOptions { * a trailing slash */ export default class TreeFS implements MutableFileSystem { + readonly #fallbackBoundaryDepth: number | null; + readonly #fallbackFilesystem: FallbackFilesystem | null; readonly #pathUtils: RootPathUtils; readonly #processFile: ProcessFileFunction; readonly #rootDir: Path; + readonly #rootPattern: RegExp | null; #rootNode: DirectoryNode = new Map(); constructor(opts: TreeFSOptions) { - const { rootDir, files, processFile } = opts; + const { rootDir, files, processFile, fallbackFilesystem, roots, serverRoot } = opts; this.#rootDir = rootDir; this.#pathUtils = new RootPathUtils(rootDir); this.#processFile = processFile; + this.#fallbackFilesystem = fallbackFilesystem ?? null; + if (serverRoot != null) { + this.#fallbackBoundaryDepth = getAncestorOfRootIdx( + this.#pathUtils.absoluteToNormal(serverRoot) + ); + } else { + this.#fallbackBoundaryDepth = null; + } + this.#rootPattern = pathsToPattern(roots ?? [], this.#pathUtils); if (files != null) { this.bulkAddOrModify(files); } } getSerializableSnapshot(): CacheData['fileSystemData'] { - return this.#cloneTree(this.#rootNode); + return this.#cloneTree(this.#rootNode, ''); } static fromDeserializedSnapshot(args: DeserializedSnapshotInput): TreeFS { - const { rootDir, fileSystemData, processFile } = args; - const tfs = new TreeFS({ processFile, rootDir }); + const { rootDir, fileSystemData, processFile, fallbackFilesystem, roots, serverRoot } = args; + const tfs = new TreeFS({ + processFile, + rootDir, + fallbackFilesystem, + roots, + serverRoot, + }); tfs.#rootNode = fileSystemData; return tfs; } @@ -233,6 +259,7 @@ export default class TreeFS implements MutableFileSystem { getMtimeByNormalPath(normalPath: Path): number | null { const result = this.#lookupByNormalPath(normalPath, { followLeaf: false, + skipFallback: true, }); return result.exists && !isDirectory(result.node) ? 
result.node[H.MTIME] : null; } @@ -577,6 +604,8 @@ export default class TreeFS implements MutableFileSystem { * directory is already present as a file. */ makeDirectories?: boolean; + /** Whether to skip the fallback filesystem during discovery */ + skipFallback?: boolean; startPathIdx?: number; startNode?: DirectoryNode; start?: { @@ -640,7 +669,7 @@ export default class TreeFS implements MutableFileSystem { continue; } - let segmentNode = parentNode.get(segmentName); + let segmentNode: MixedNode | null | undefined = parentNode.get(segmentName); // In normal paths all indirections are at the prefix, so we are at the // nth ancestor of the root iff the path so far is n '..' segments. @@ -652,23 +681,47 @@ if (segmentNode == null) { if (opts.makeDirectories !== true && segmentName !== '..') { - return { - canonicalMissingPath: isLastSegment - ? targetNormalPath - : targetNormalPath.slice(0, fromIdx - 1), - exists: false, - missingSegmentName: segmentName, - }; + if (!opts.skipFallback && this.#fallbackFilesystem != null) { + const parentEnd = isLastSegment + ? fromIdx - segmentName.length - 1 + : fromIdx - segmentName.length - 2; + const parentCanonicalPath = parentEnd > 0 ? targetNormalPath.slice(0, parentEnd) : ''; + segmentNode = this.#populateFromFilesystem( + parentNode, + segmentName, + parentCanonicalPath + ); + if (segmentNode != null) { + ancestorOfRootIdx = null; + } + } + + if (segmentNode == null) { + return { + canonicalMissingPath: isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1), + exists: false, + missingSegmentName: segmentName, + }; + } } - segmentNode = new Map(); - if (opts.makeDirectories === true) { - if (changeListener != null) { - const canonicalPath = isLastSegment - ? 
targetNormalPath - : targetNormalPath.slice(0, fromIdx - 1); - changeListener.directoryAdded(canonicalPath); + if (segmentNode == null) { + segmentNode = new Map(); + if (opts.makeDirectories === true) { + if (changeListener != null) { + const canonicalPath = isLastSegment + ? targetNormalPath + : targetNormalPath.slice(0, fromIdx - 1); + changeListener.directoryAdded(canonicalPath); + } + parentNode.set(segmentName, segmentNode); + } else if ( + !opts.skipFallback && + this.#fallbackFilesystem != null + ) { + parentNode.set(segmentName, segmentNode); } - parentNode.set(segmentName, segmentNode); } } @@ -1069,7 +1122,9 @@ export default class TreeFS implements MutableFileSystem { metadata: FileMetadata; }> { for (const [name, node] of rootNode) { - if (!opts.includeNodeModules && isDirectory(node) && name === 'node_modules') { + if (node == null) { + continue; + } else if (!opts.includeNodeModules && isDirectory(node) && name === 'node_modules') { continue; } const prefixedName = prefix === '' ? name : prefix + path.sep + name; @@ -1091,7 +1146,7 @@ export default class TreeFS implements MutableFileSystem { node: DirectoryNode, parent: DirectoryNode | null | undefined, ancestorOfRootIdx: number | null | undefined - ): Generator<[string, MixedNode]> { + ): Generator<[string, MixedNode | null]> { if (ancestorOfRootIdx != null && ancestorOfRootIdx > 0 && parent) { yield [this.#pathUtils.getBasenameOfNthAncestor(ancestorOfRootIdx - 1), parent]; } @@ -1118,12 +1173,29 @@ export default class TreeFS implements MutableFileSystem { ): Iterable { const pathSep = opts.alwaysYieldPosix ? '/' : path.sep; const prefixWithSep = pathPrefix === '' ? pathPrefix : pathPrefix + pathSep; + + // Optimization: We can attempt to eagerly populate directories we're visiting + // if they're missing and not accessing a parent ('..') + if ( + this.#fallbackFilesystem != null && + iterationRootNode.size === 0 && + pathPrefix !== '..' 
&& + !pathPrefix.endsWith(pathSep + '..')) { + const canonicalRoot = opts.canonicalPathOfRoot; + const rootCanonical = + pathPrefix === '' ? canonicalRoot : canonicalRoot + path.sep + pathPrefix; + this.#populateDirFromFilesystem(iterationRootNode, rootCanonical, false); + } + for (const [name, node] of this.#directoryNodeIterator( iterationRootNode, iterationRootParentNode, ancestorOfRootIdx )) { - if (opts.subtreeOnly && name === '..') { + if (node == null) { + continue; + } else if (opts.subtreeOnly && name === '..') { continue; } @@ -1175,6 +1247,17 @@ } } } else if (opts.recursive) { + // Optimization: We can attempt to eagerly populate directories we're visiting + // if they're missing and not accessing a parent ('..') + if (this.#fallbackFilesystem != null && node.size === 0 && name !== '..') { + const nodePathWithSystemSeparators = + pathSep === path.sep ? nodePath : nodePath.replaceAll(pathSep, path.sep); + const canonicalPath = + opts.canonicalPathOfRoot === '' + ? nodePathWithSystemSeparators + : opts.canonicalPathOfRoot + path.sep + nodePathWithSystemSeparators; + this.#populateDirFromFilesystem(node, canonicalPath, false); + } yield* this.#pathIterator( node, iterationRootParentNode, @@ -1232,15 +1315,93 @@ return result.node; } - #cloneTree(root: DirectoryNode): DirectoryNode { + #cloneTree(root: DirectoryNode, prefix: string): DirectoryNode { const clone: DirectoryNode = new Map(); for (const [name, node] of root) { - if (isDirectory(node)) { - clone.set(name, this.#cloneTree(node)); + if (node == null) { + continue; + } else if (isDirectory(node)) { + const childPath = prefix === '' ? 
name : prefix + path.sep + name; + if (this.#rootPattern == null || this.#rootPattern.test(childPath + path.sep)) { + clone.set(name, this.#cloneTree(node, childPath)); + } } else { clone.set(name, [...node]); } } return clone; } + + #isOutsideFallbackBoundary(canonicalPath: string): boolean { + const maxDepth = this.#fallbackBoundaryDepth; + return maxDepth != null && getAncestorOfRootIdx(canonicalPath) > maxDepth; + } + + /** + * Synchronously populate a missing tree node by querying the injected + * fallback filesystem. The fallback returns tree-compatible nodes + * (FileMetadata tuples or directory Maps) that are inserted directly. + * + * Returns the newly created node, or null if the path doesn't exist on disk. + */ + #populateFromFilesystem( + parentNode: DirectoryNode, + segmentName: string, + parentCanonicalPath: string + ): MixedNode | null { + const fallback = this.#fallbackFilesystem; + if (fallback == null) { + return null; + } + const childCanonicalPath = + parentCanonicalPath === '' ? segmentName : parentCanonicalPath + path.sep + segmentName; + if ( + this.#rootPattern?.test(childCanonicalPath + path.sep) || + this.#isOutsideFallbackBoundary(childCanonicalPath) + ) { + return null; + } else if (parentCanonicalPath !== '' && shouldFallbackCrawlDir(parentCanonicalPath)) { + this.#populateDirFromFilesystem(parentNode, parentCanonicalPath, true); + return parentNode.get(segmentName) ?? null; + } else if (parentNode.has(segmentName)) { + return parentNode.get(segmentName) ?? null; + } else { + const parentAbsolute = this.#pathUtils.normalToAbsolute(parentCanonicalPath); + const absolutePath = parentAbsolute + path.sep + segmentName; + const node = fallback.lookup(childCanonicalPath, absolutePath, parentNode.get(segmentName)); + parentNode.set(segmentName, node); + return node; + } + } + + /** + * Populate an existing (potentially empty sentinel) directory node from + * the filesystem. 
Used by #pathIterator to fill lazy directories before + * iteration, and by #populateFromFilesystem for optimistic parent + * population. + */ + #populateDirFromFilesystem( + dirNode: DirectoryNode, + canonicalPath: string, + skipCheck: boolean + ): void { + const fallback = this.#fallbackFilesystem; + if ( + fallback == null || + (!skipCheck && + (this.#rootPattern?.test(canonicalPath + path.sep) || + this.#isOutsideFallbackBoundary(canonicalPath))) + ) { + return; + } + const absolutePath = this.#pathUtils.normalToAbsolute(canonicalPath); + const entries = fallback.readdir(canonicalPath, absolutePath, dirNode); + if (entries != null && entries !== dirNode) { + for (const [name, entry] of entries) { + if (!dirNode.has(name)) { + dirNode.set(name, entry); + } + } + } + } } diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts index f8f1b1e2d30351..ee5664af861ba5 100644 --- a/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts +++ b/packages/@expo/metro-file-map/src/lib/__tests__/RootPathUtils.test.ts @@ -141,6 +141,26 @@ describe.each([['win32'], ['posix']] as const)('RootPathUtils on %s', (platform) expect(pathUtils.getAncestorOfRootIdx(input)).toEqual(expected); }); + describe('standalone getAncestorOfRootIdx (free function)', () => { + let getAncestorOfRootIdx: typeof import('../RootPathUtils').getAncestorOfRootIdx; + + beforeEach(() => { + getAncestorOfRootIdx = require('../RootPathUtils').getAncestorOfRootIdx; + }); + + test.each([ + ['', 0], + ['..', 1], + [p('../..'), 2], + [p('../../..'), 3], + ['foo', 0], + [p('../foo'), 1], + [p('../../foo'), 2], + ] as const)('getAncestorOfRootIdx(%s) => %s', (input, expected) => { + expect(getAncestorOfRootIdx(input)).toEqual(expected); + }); + }); + describe('resolveSymlinkToNormal', () => { beforeEach(() => { pathUtils = new RootPathUtils(p('/project/root')); @@ -169,4 +189,73 @@ 
describe.each([['win32'], ['posix']] as const)('RootPathUtils on %s', (platform) expect(pathUtils.resolveSymlinkToNormal('link', p('/project/root/dir/'))).toEqual('dir'); }); }); + + describe('pathsToPattern', () => { + let pathsToPattern: typeof import('../RootPathUtils').pathsToPattern; + + beforeEach(() => { + pathsToPattern = require('../RootPathUtils').pathsToPattern; + pathUtils = new RootPathUtils(p('/project')); + }); + + test('returns null for empty paths array', () => { + expect(pathsToPattern([], pathUtils)).toBeNull(); + }); + + test('creates pattern that matches paths inside a root', () => { + const pattern = pathsToPattern([p('/project/src')], pathUtils)!; + expect(pattern).not.toBeNull(); + expect(pattern.test(p('src/foo.js'))).toBe(true); + expect(pattern.test(p('src/sub/bar.js'))).toBe(true); + }); + + test('pattern does not match paths outside the root', () => { + const pattern = pathsToPattern([p('/project/src')], pathUtils)!; + expect(pattern.test(p('lib/foo.js'))).toBe(false); + expect(pattern.test(p('src2/foo.js'))).toBe(false); + }); + + test('pattern matches root directory with trailing separator', () => { + const pattern = pathsToPattern([p('/project/src')], pathUtils)!; + // The root itself + separator should match + expect(pattern.test('src' + sep)).toBe(true); + }); + + test('handles rootDir as a watched root (empty normal path)', () => { + const pattern = pathsToPattern([p('/project')], pathUtils)!; + expect(pattern).not.toBeNull(); + // Paths within root should match + expect(pattern.test(p('foo/bar.js'))).toBe(true); + // Paths above root should not match + expect(pattern.test(p('../outside/foo.js'))).toBe(false); + expect(pattern.test('..')).toBe(false); + }); + + test('handles multiple roots', () => { + const pattern = pathsToPattern([p('/project/src'), p('/project/lib')], pathUtils)!; + expect(pattern.test(p('src/foo.js'))).toBe(true); + expect(pattern.test(p('lib/bar.js'))).toBe(true); + 
expect(pattern.test(p('other/baz.js'))).toBe(false); + }); + + test('escapes regex-special characters in paths', () => { + const pattern = pathsToPattern([p('/project/src+lib')], pathUtils)!; + // Should match literally, not as regex + + expect(pattern.test(p('src+lib/foo.js'))).toBe(true); + expect(pattern.test(p('srcXlib/foo.js'))).toBe(false); + }); + + test('handles root above rootDir (produces ..-relative pattern)', () => { + // pathUtils has rootDir = /project, so root '/' produces '../' pattern + const pattern = pathsToPattern([p('/')], pathUtils)!; + expect(pattern).not.toBeNull(); + // '../foo' is inside '/' (one level above /project) + expect(pattern.test(p('../foo'))).toBe(true); + // '../../foo' would be above '/' — but since '/' is the filesystem root, + // the pattern for '/' relative to '/project' is '../' which matches '../anything' + expect(pattern.test(p('../nested/bar.js'))).toBe(true); + // Paths inside /project (no '..' prefix) should not match the '../' pattern + expect(pattern.test(p('src/foo.js'))).toBe(false); + }); + }); }); diff --git a/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts b/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts index 1daa88c6f1737d..2d64d1631249d7 100644 --- a/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts +++ b/packages/@expo/metro-file-map/src/lib/__tests__/TreeFS.test.ts @@ -1580,4 +1580,584 @@ describe.each([['win32'], ['posix']] as const)('TreeFS on %s', (platform) => { }); }); }); + + describe('fallback filesystem', () => { + let mockFallback: { + lookup: jest.Mock; + readdir: jest.Mock; + }; + + function makeFallbackTfs( + opts: { + files?: FileData; + roots?: string[]; + serverRoot?: string | null; + } = {} + ): TreeFSType { + mockFallback = { + lookup: jest.fn().mockReturnValue(null), + readdir: jest.fn().mockReturnValue(null), + }; + return new TreeFS({ + rootDir: p('/project'), + files: opts.files ?? 
new Map(), + processFile: async () => { + throw new Error('Not implemented'); + }, + fallbackFilesystem: mockFallback, + roots: opts.roots ?? [p('/project/src')], + serverRoot: opts.serverRoot, + }); + } + + describe('lookup triggers fallback for missing paths', () => { + test('calls fallback.lookup for a missing file outside watched roots', () => { + const fbTfs = makeFallbackTfs(); + // The first missing segment is 'outside' (a directory), then 'file.js' inside it. + // #populateFromFilesystem at root level calls fallback.lookup for 'outside'. + // Return a directory Map with the file already in it. + const outsideDir = new Map([['file.js', [100, 5, 0, null, 0, null] as any]]); + mockFallback.lookup.mockReturnValue(outsideDir); + + const result = fbTfs.lookup(p('/project/outside/file.js')); + expect(result).toMatchObject({ exists: true, type: 'f' }); + expect(mockFallback.lookup).toHaveBeenCalled(); + }); + + test('does not call fallback for paths inside watched roots', () => { + const fbTfs = makeFallbackTfs({ + files: new Map([[p('src/existing.js'), [100, 5, 0, null, 0, null]]]), + }); + + // 'src' directory is populated by the file above. + // Looking up a missing file inside 'src' should not trigger fallback + // because rootPattern blocks it. + fbTfs.lookup(p('/project/src/missing.js')); + expect(mockFallback.lookup).not.toHaveBeenCalled(); + expect(mockFallback.readdir).not.toHaveBeenCalled(); + }); + + test('does not call fallback when skipFallback is true (getMtimeByNormalPath)', () => { + const fbTfs = makeFallbackTfs(); + fbTfs.getMtimeByNormalPath(p('outside/file.js')); + expect(mockFallback.lookup).not.toHaveBeenCalled(); + }); + + test('populates parent directory via readdir for crawlable parents', () => { + const fbTfs = makeFallbackTfs(); + // For 'outside/file.js': + // 1. 'outside' is missing at root → fallback.lookup('outside', ...) returns a directory + // 2. 
'file.js' is missing inside 'outside' → shouldFallbackCrawlDir('outside') = true + // → fallback.readdir('outside', ...) populates the dir + const outsideDir = new Map(); + mockFallback.lookup.mockReturnValue(outsideDir); + mockFallback.readdir.mockImplementation( + (_normalPath: string, _absolutePath: string, dirNode: any) => { + const result = dirNode ?? new Map(); + result.set('file.js', [100, 5, 0, null, 0, null]); + result.set('other.js', [200, 3, 0, null, 0, null]); + return result; + } + ); + + const result = fbTfs.lookup(p('/project/outside/file.js')); + expect(result).toMatchObject({ exists: true, type: 'f' }); + expect(mockFallback.readdir).toHaveBeenCalled(); + }); + + test('traverses ".." and populates sibling via fallback', () => { + const fbTfs = makeFallbackTfs(); + // For '../sibling/file.js': + // 1. '..' is missing → creates empty Map, sets in tree + // 2. 'sibling' is missing inside '..' → shouldFallbackCrawlDir('..') = true + // → fallback.readdir('..', ...) populates the parent directory + mockFallback.readdir.mockImplementation( + (_normalPath: string, _absolutePath: string, dirNode: any) => { + const result = dirNode ?? new Map(); + if (!result.has('sibling')) { + const siblingDir = new Map([['file.js', [100, 5, 0, null, 0, null] as any]]); + result.set('sibling', siblingDir); + } + return result; + } + ); + + const result = fbTfs.lookup(p('/project/../sibling/file.js')); + expect(result).toMatchObject({ exists: true }); + }); + }); + + describe('fallback boundary (scopeFallback/serverRoot)', () => { + test('blocks fallback beyond serverRoot boundary depth', () => { + // serverRoot is /project itself → boundary depth = 0 + // Paths above root (../) should be blocked + const fbTfs = makeFallbackTfs({ serverRoot: p('/project') }); + mockFallback.readdir.mockImplementation( + (_normalPath: string, _absolutePath: string, dirNode: any) => { + const result = dirNode ?? 
new Map(); + result.set('outside', new Map([['file.js', [100, 5, 0, null, 0, null] as any]])); + return result; + } + ); + + // Looking up ../outside/file.js — the '..' traversal is within the tree, + // but 'outside' lookup inside '..' should be blocked by boundary + const result = fbTfs.lookup(p('/project/../outside/file.js')); + expect(result).toMatchObject({ exists: false }); + // Fallback should not have been called for paths beyond boundary + expect(mockFallback.readdir).not.toHaveBeenCalled(); + }); + + test('allows fallback within serverRoot boundary', () => { + // serverRoot is filesystem root → boundary includes all ancestors + const fbTfs = makeFallbackTfs({ serverRoot: p('/') }); + mockFallback.readdir.mockImplementation( + (_normalPath: string, _absolutePath: string, dirNode: any) => { + const result = dirNode ?? new Map(); + if (!result.has('sibling')) { + result.set('sibling', new Map([['file.js', [100, 5, 0, null, 0, null] as any]])); + } + return result; + } + ); + + const result = fbTfs.lookup(p('/project/../sibling/file.js')); + expect(result).toMatchObject({ exists: true }); + }); + + test('no boundary when serverRoot is null (scopeFallback disabled)', () => { + const fbTfs = makeFallbackTfs({ serverRoot: null }); + mockFallback.readdir.mockImplementation( + (_normalPath: string, _absolutePath: string, dirNode: any) => { + const result = dirNode ?? 
new Map(); + if (!result.has('deep')) { + result.set('deep', new Map([['file.js', [100, 5, 0, null, 0, null] as any]])); + } + return result; + } + ); + + // Even deeply nested parent access should work + const result = fbTfs.lookup(p('/project/../../deep/file.js')); + expect(result).toMatchObject({ exists: true }); + }); + }); + + describe('matchFiles with fallback', () => { + test('populates empty directories during recursive iteration', () => { + const files = new Map([ + [p('existing/placeholder.js'), [100, 5, 0, null, 0, null]], + ]); + const fbTfs = makeFallbackTfs({ files }); + // When matchFiles iterates into 'outside' (an empty directory), + // the fallback should populate it + mockFallback.readdir.mockImplementation( + (normalPath: string, _absolutePath: string, dirNode: any) => { + if (normalPath === p('outside') || normalPath.endsWith(p('/outside'))) { + const result = dirNode ?? new Map(); + result.set('discovered.js', [200, 3, 0, null, 0, null]); + return result; + } + return dirNode; + } + ); + + // First trigger fallback to create 'outside' directory + mockFallback.lookup.mockReturnValue(new Map()); + fbTfs.lookup(p('/project/outside')); + + const matches = [ + ...fbTfs.matchFiles({ + rootDir: p('/project/outside'), + recursive: true, + }), + ]; + expect(matches).toContain(p('/project/outside/discovered.js')); + }); + + test('does not populate ".." directories during iteration', () => { + const fbTfs = makeFallbackTfs(); + // Create a '..' node in the tree by looking up a path above root + mockFallback.lookup.mockReturnValue(null); + fbTfs.lookup(p('/project/../something/file.js')); + + // Now iterate — should not try to populate '..' directories + mockFallback.readdir.mockClear(); + [...fbTfs.matchFiles({ rootDir: p('/project'), recursive: true })]; + // readdir should not have been called with a '..' 
canonical path + for (const call of mockFallback.readdir.mock.calls) { + const canonicalPath = call[0] as string; + expect(canonicalPath).not.toContain('..'); + } + }); + }); + + describe('getSerializableSnapshot excludes fallback data', () => { + test('does not include fallback-populated directories in snapshot', () => { + const files = new Map([ + [p('src/real.js'), [100, 5, 0, null, 0, null]], + ]); + const fbTfs = makeFallbackTfs({ files, roots: [p('/project/src')] }); + + // Trigger fallback to populate a directory outside roots. + // 'outside' is the first missing segment → fallback.lookup returns a directory. + const outsideDir = new Map([['external.js', [200, 3, 0, null, 0, null] as any]]); + mockFallback.lookup.mockReturnValue(outsideDir); + fbTfs.lookup(p('/project/outside/external.js')); + + // Snapshot should only contain data within watched roots + const snapshot = fbTfs.getSerializableSnapshot() as Map; + // 'src' should be in the snapshot + expect(snapshot.has('src')).toBe(true); + // 'outside' should NOT be in the snapshot (directory outside roots) + expect(snapshot.has('outside')).toBe(false); + }); + + test('includes all watched root data in snapshot', () => { + const files = new Map([ + [p('src/a.js'), [100, 5, 0, null, 0, null]], + [p('src/b.js'), [200, 3, 0, null, 0, null]], + ]); + const fbTfs = makeFallbackTfs({ files, roots: [p('/project/src')] }); + + const snapshot = fbTfs.getSerializableSnapshot() as Map; + const srcDir = snapshot.get('src') as Map; + expect(srcDir).toBeInstanceOf(Map); + expect(srcDir.has('a.js')).toBe(true); + expect(srcDir.has('b.js')).toBe(true); + }); + }); + + describe('rootPattern consistency with trailing separator', () => { + test('blocks fallback for paths that exactly match a root name', () => { + const files = new Map([ + [p('src/existing.js'), [100, 5, 0, null, 0, null]], + ]); + // Root is 'src' — pattern should block 'src/' and 'src/foo' paths + const fbTfs = makeFallbackTfs({ files, roots: 
[p('/project/src')] }); + mockFallback.lookup.mockReturnValue([200, 3, 0, null, 0, null]); + + // Looking up a file directly inside 'src' — rootPattern should block + fbTfs.lookup(p('/project/src/new-file.js')); + + // Fallback should not have been called for paths within the root + for (const call of mockFallback.lookup.mock.calls) { + const childPath = call[0] as string; + expect(childPath.startsWith('src' + p('/'))).toBe(false); + } + }); + }); + + describe('interaction with lazy stat and symlink resolution', () => { + test('fallback-discovered file with null mtime is stat-ed by getOrComputeSha1', async () => { + const mockProcessFile = jest.fn((_path: string, metadata: FileMetadata) => { + metadata[H.SHA1] = 'computed'; + }); + mockFallback = { + lookup: jest.fn().mockReturnValue(null), + readdir: jest.fn().mockReturnValue(null), + }; + const fbTfs = new TreeFS({ + rootDir: p('/project'), + files: new Map(), + processFile: mockProcessFile, + fallbackFilesystem: mockFallback, + roots: [p('/project/src')], + }); + + // Fallback returns a directory with a file that has null mtime (lazy) + const outsideDir = new Map([['lazy.js', [null, 0, 0, null, 0, null] as any]]); + mockFallback.lookup.mockReturnValue(outsideDir); + + // First verify file is discoverable + const lookupResult = fbTfs.lookup(p('/project/outside/lazy.js')); + expect(lookupResult).toMatchObject({ exists: true, type: 'f' }); + + // Now getOrComputeSha1 should trigger lstat (null mtime path) + mockLstat.mockResolvedValueOnce({ + mtime: { getTime: () => 777 }, + size: 20, + }); + const sha1Result = await fbTfs.getOrComputeSha1(p('outside/lazy.js')); + expect(sha1Result).toEqual({ sha1: 'computed' }); + expect(mockLstat).toHaveBeenCalledWith(p('/project/outside/lazy.js')); + expect(mockProcessFile).toHaveBeenCalledTimes(1); + }); + + test('fallback-discovered symlink (readdir marker) resolves lazily on traversal', () => { + const fbTfs = makeFallbackTfs({ + files: new Map([[p('target.js'), [100, 5, 0, 
null, 0, null]]]), + }); + + // Fallback returns a directory with an unresolved symlink marker + const outsideDir = new Map([ + ['link.js', [null, 0, 0, null, 1, null]], // SYMLINK = 1 = unresolved + ]); + mockFallback.lookup.mockReturnValue(outsideDir); + + // readlinkSync will be called when the symlink is traversed + mockReadlinkSync.mockReturnValue(p('../target.js')); + + const result = fbTfs.lookup(p('/project/outside/link.js')); + expect(result).toMatchObject({ exists: true, type: 'f' }); + expect(mockReadlinkSync).toHaveBeenCalledWith(p('/project/outside/link.js')); + }); + + test('fallback lookup eagerly-resolved symlink does not call readlinkSync again', () => { + const fbTfs = makeFallbackTfs({ + files: new Map([[p('target.js'), [100, 5, 0, null, 0, null]]]), + }); + + // Fallback lookup returns a symlink that's already eagerly resolved (string target) + const outsideDir = new Map([['link.js', [50, 0, 0, null, 'target.js', null]]]); + mockFallback.lookup.mockReturnValue(outsideDir); + + const result = fbTfs.lookup(p('/project/outside/link.js')); + expect(result).toMatchObject({ exists: true, type: 'f' }); + // readlinkSync should NOT be called — target already resolved + expect(mockReadlinkSync).not.toHaveBeenCalled(); + }); + + test('addOrModify updates a path that was originally discovered via fallback', () => { + const fbTfs = makeFallbackTfs(); + const outsideDir = new Map([['file.js', [100, 5, 0, null, 0, null] as any]]); + mockFallback.lookup.mockReturnValue(outsideDir); + + // Discover via fallback + expect(fbTfs.lookup(p('/project/outside/file.js'))).toMatchObject({ exists: true }); + + // Simulate watcher update — mtime changed + fbTfs.addOrModify(p('outside/file.js'), [200, 8, 0, null, 0, null]); + + // Verify updated metadata is reflected + expect(fbTfs.getMtimeByNormalPath(p('outside/file.js'))).toBe(200); + }); + + test('remove deletes a path that was originally discovered via fallback', () => { + const fbTfs = makeFallbackTfs(); + const 
outsideDir = new Map([['file.js', [100, 5, 0, null, 0, null] as any]]); + mockFallback.lookup.mockReturnValue(outsideDir); + + // Discover via fallback + expect(fbTfs.lookup(p('/project/outside/file.js'))).toMatchObject({ exists: true }); + + // Remove it + fbTfs.remove(p('outside/file.js')); + + // Should no longer exist (fallback won't re-discover because parent dir is already populated) + expect(fbTfs.lookup(p('/project/outside/file.js'))).toMatchObject({ exists: false }); + }); + + test('metadataIterator includes fallback-discovered files', () => { + const fbTfs = makeFallbackTfs({ + files: new Map([[p('src/real.js'), [100, 5, 0, null, 0, null]]]), + roots: [p('/project/src')], + }); + + // Discover a file via fallback + const outsideDir = new Map([['found.js', [200, 3, 0, null, 0, null] as any]]); + mockFallback.lookup.mockReturnValue(outsideDir); + fbTfs.lookup(p('/project/outside/found.js')); + + const entries = [ + ...fbTfs.metadataIterator({ + includeSymlinks: false, + includeNodeModules: true, + }), + ]; + const paths = entries.map((e) => e.canonicalPath); + expect(paths).toContain(p('src/real.js')); + expect(paths).toContain(p('outside/found.js')); + }); + + test('matchFiles follows fallback-discovered directory symlink', () => { + const fbTfs = makeFallbackTfs({ + files: new Map([[p('real-dir/nested.js'), [100, 5, 0, null, 0, null]]]), + roots: [p('/project/src')], + }); + + // Fallback discovers a directory containing a symlink to 'real-dir' + const outsideDir = new Map([ + ['dir-link', [50, 0, 0, null, 1, null]], // unresolved symlink marker + ]); + mockFallback.lookup.mockReturnValue(outsideDir); + // When the symlink is resolved, it points to real-dir + mockReadlinkSync.mockReturnValue(p('../real-dir')); + + // First discover 'outside' directory via fallback + fbTfs.lookup(p('/project/outside')); + + const matches = [ + ...fbTfs.matchFiles({ + rootDir: p('/project/outside'), + follow: true, + recursive: true, + }), + ]; + // Should follow the 
symlink and find nested.js + expect(matches).toContain(p('/project/outside/dir-link/nested.js')); + }); + }); + + describe('#cloneTree excludes ".." when rootDir is a watched root', () => { + test('fallback-discovered ".." directories are excluded from snapshot', () => { + const fbTfs = makeFallbackTfs({ roots: [p('/project')] }); + + // Trigger fallback to create a '..' directory entry via lookup above root + mockFallback.readdir.mockImplementation( + (_normalPath: string, _absolutePath: string, dirNode: any) => { + const result = dirNode ?? new Map(); + if (!result.has('something')) { + result.set('something', new Map([['file.js', [100, 5, 0, null, 0, null] as any]])); + } + return result; + } + ); + fbTfs.lookup(p('/project/../something/file.js')); + + const snapshot = fbTfs.getSerializableSnapshot() as Map; + // '..' should NOT be in the snapshot (negative-lookahead pattern excludes it) + expect(snapshot.has('..')).toBe(false); + }); + }); + + describe('fromDeserializedSnapshot + fallback integration', () => { + test('fallback extends a deserialized tree', () => { + // Create initial tree data (simulating a deserialized snapshot) + const fileSystemData: Map = new Map([ + ['src', new Map([['existing.js', [100, 5, 0, null, 0, null]]])], + ]); + + mockFallback = { + lookup: jest.fn().mockReturnValue(null), + readdir: jest.fn().mockReturnValue(null), + }; + + const fbTfs = TreeFS.fromDeserializedSnapshot({ + rootDir: p('/project'), + fileSystemData, + processFile: async () => { + throw new Error('Not implemented'); + }, + fallbackFilesystem: mockFallback, + roots: [p('/project/src')], + }); + + // Verify snapshot data is intact + expect(fbTfs.lookup(p('/project/src/existing.js'))).toMatchObject({ + exists: true, + type: 'f', + }); + + // Now look up a path NOT in the snapshot — fallback should discover it + const outsideDir = new Map([['new-file.js', [200, 3, 0, null, 0, null] as any]]); + mockFallback.lookup.mockReturnValue(outsideDir); + + const result = 
fbTfs.lookup(p('/project/outside/new-file.js')); + expect(result).toMatchObject({ exists: true, type: 'f' }); + expect(mockFallback.lookup).toHaveBeenCalled(); + + // Original snapshot data should still be intact + expect(fbTfs.lookup(p('/project/src/existing.js'))).toMatchObject({ + exists: true, + type: 'f', + }); + }); + }); + + describe('negative caching behavior', () => { + test('caches null result from fallback.lookup and does not re-query', () => { + const fbTfs = makeFallbackTfs(); + mockFallback.lookup.mockReturnValue(null); + + // First lookup — fallback returns null + const result1 = fbTfs.lookup(p('/project/missing/file.js')); + expect(result1).toMatchObject({ exists: false }); + + // Clear call counts + mockFallback.lookup.mockClear(); + mockFallback.readdir.mockClear(); + + // Second lookup — fallback should NOT be called again (negative cache) + const result2 = fbTfs.lookup(p('/project/missing/file.js')); + expect(result2).toMatchObject({ exists: false }); + expect(mockFallback.lookup).not.toHaveBeenCalled(); + expect(mockFallback.readdir).not.toHaveBeenCalled(); + }); + }); + + describe('node_modules bypass via direct lookup', () => { + test('lookups inside node_modules use fallback.lookup, not readdir of node_modules', () => { + const fbTfs = makeFallbackTfs(); + + // For 'outside/node_modules/pkg/index.js': + // 1. 'outside' is missing → fallback.lookup returns a directory + // 2. 'node_modules' is missing inside 'outside' → shouldFallbackCrawlDir('outside') + // is true → readdir('outside') populates it with node_modules as a dir + // 3. 
'pkg' is missing inside 'node_modules' → shouldFallbackCrawlDir checks + // 'outside/node_modules' which returns false → uses individual lookup + const nodeModulesDir = new Map(); + const outsideDir = new Map([['node_modules', nodeModulesDir]]); + mockFallback.lookup.mockImplementation( + (normalPath: string, _absolutePath: string, _existing: any) => { + if (normalPath === 'outside') { + return outsideDir; + } + if (normalPath === p('outside/node_modules/pkg')) { + return new Map([['index.js', [100, 5, 0, null, 0, null] as any]]); + } + return null; + } + ); + // readdir is called for 'outside' (crawlable parent), returns the outsideDir + mockFallback.readdir.mockImplementation( + (_normalPath: string, _absolutePath: string, dirNode: any) => { + return dirNode; + } + ); + + const result = fbTfs.lookup(p('/project/outside/node_modules/pkg/index.js')); + expect(result).toMatchObject({ exists: true, type: 'f' }); + + // readdir should NOT have been called with a path containing 'node_modules' + for (const call of mockFallback.readdir.mock.calls) { + const normalPath = call[0] as string; + expect(normalPath).not.toMatch(/node_modules/); + } + }); + }); + + describe('remove + re-lookup after fallback discovery', () => { + test('re-lookup after remove does NOT re-discover the file', () => { + const fbTfs = makeFallbackTfs(); + + // Discover directory via fallback with multiple files (so parent isn't removed) + const outsideDir = new Map([ + ['file.js', [100, 5, 0, null, 0, null] as any], + ['other.js', [200, 3, 0, null, 0, null] as any], + ]); + mockFallback.lookup.mockReturnValue(outsideDir); + expect(fbTfs.lookup(p('/project/outside/file.js'))).toMatchObject({ exists: true }); + + // Remove only one file — parent 'outside' still has 'other.js' so it persists + fbTfs.remove(p('outside/file.js')); + + // Clear mock call counts + mockFallback.lookup.mockClear(); + mockFallback.readdir.mockClear(); + + // readdir may be called on the parent ('outside' is crawlable), but 
should + // return the existing dirNode as-is (already marked/populated) + mockFallback.readdir.mockImplementation( + (_normalPath: string, _absolutePath: string, dirNode: any) => dirNode + ); + + // Re-lookup — file should remain absent (not re-discovered) + const result = fbTfs.lookup(p('/project/outside/file.js')); + expect(result).toMatchObject({ exists: false }); + // fallback.lookup should NOT be called for the individual file + expect(mockFallback.lookup).not.toHaveBeenCalled(); + }); + }); + }); }); diff --git a/packages/@expo/metro-file-map/src/types.ts b/packages/@expo/metro-file-map/src/types.ts index 3ce348f65fc654..a6625bb46c5329 100644 --- a/packages/@expo/metro-file-map/src/types.ts +++ b/packages/@expo/metro-file-map/src/types.ts @@ -465,6 +465,22 @@ export interface MutableFileSystem extends FileSystem { export type Path = string; +type DirectoryNode = Map; +type MixedNode = FileMetadata | DirectoryNode; + +export interface FallbackFilesystem { + lookup( + normalPath: Path, + absolutePath: string, + prevNode: MixedNode | null | undefined + ): MixedNode | null; + readdir( + normalPath: Path, + absolutePath: string, + dirNode: DirectoryNode | null | undefined + ): DirectoryNode | null; +} + export type ProcessFileFunction = ( normalPath: string, metadata: FileMetadata, diff --git a/packages/expo-module-scripts/types/expo-metro-augmentations.d.ts b/packages/expo-module-scripts/types/expo-metro-augmentations.d.ts index 54c0da180f3bc4..10b4df1c81e8bf 100644 --- a/packages/expo-module-scripts/types/expo-metro-augmentations.d.ts +++ b/packages/expo-module-scripts/types/expo-metro-augmentations.d.ts @@ -97,4 +97,10 @@ declare module '@expo/metro/metro-source-map/source-map' { export function toSegmentTuple(mapping: BabelSourceMapSegment): MetroSourceMapSegmentTuple; } +import * as __metroConfigTypes from '@expo/metro/metro-config/types'; +declare module '@expo/metro/metro-config/types' { + export interface ResolverConfigT { + unstable_onDemandFilesystem?: 
unknown; + } +} From a3bcc42f86ac9502c2993482558698ccf3fd3425 Mon Sep 17 00:00:00 2001 From: Phil Pluckthun Date: Tue, 5 May 2026 17:28:51 +0100 Subject: [PATCH 04/26] feat(cli): Update `expo start`'s check version output, prefetch it, and defer it to the commands table (#45400) # Why Currently, when we run the versions check it blocks `expo start` and unnecessarily slows down the CLI's startup. Instead, we can prefetch it during the startup, and display it in the commands table, if it fetched in time. If it doesn't fetch in time to be displayed in the commands table, we instead rely on the result being cached, which means it'd be displayed on the next `expo start` run or when/if the commands table is re-printed. # How - Refactor version check output to be shorter and tell users to run `npx expo install --check` - Prefetch version check output and display it (if it's available) in the commands table # Test Plan - Tested manually against `apps/router-e2e` # Checklist - [x] I added a `changelog.md` entry and rebuilt the package sources according to [this short guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting) - [ ] This diff will work correctly for `npx expo prebuild` & EAS Build (eg: updated a module plugin). 
- [ ] Conforms with the [Documentation Writing Style Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md) --- packages/@expo/cli/CHANGELOG.md | 1 + .../cli/src/start/checkDependenciesOnStart.ts | 52 +++++++++++++++++++ .../cli/src/start/interface/commandsTable.ts | 2 + .../cli/src/start/interface/startInterface.ts | 38 ++++++++++++-- packages/@expo/cli/src/start/startAsync.ts | 27 ++++++---- 5 files changed, 106 insertions(+), 14 deletions(-) create mode 100644 packages/@expo/cli/src/start/checkDependenciesOnStart.ts diff --git a/packages/@expo/cli/CHANGELOG.md b/packages/@expo/cli/CHANGELOG.md index 481a7b0780c791..60a3eee75f911e 100644 --- a/packages/@expo/cli/CHANGELOG.md +++ b/packages/@expo/cli/CHANGELOG.md @@ -67,6 +67,7 @@ - Provide Babel config path hint to Expo Metro transformer ([#45260](https://github.com/expo/expo/pull/45260) by [@kitten](https://github.com/kitten)) - Add `@expo/metro-file-map` fork ([#45373](https://github.com/expo/expo/pull/45373) by [@kitten](https://github.com/kitten)) - Disable watchman by default ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) +- Defer version check output to command table, and prefetch on start, to prevent it blocking/slowing down startup ([#45400](https://github.com/expo/expo/pull/45400) by [@kitten](https://github.com/kitten)) ## 55.0.12 — 2026-02-25 diff --git a/packages/@expo/cli/src/start/checkDependenciesOnStart.ts b/packages/@expo/cli/src/start/checkDependenciesOnStart.ts new file mode 100644 index 00000000000000..cc60841b2051c4 --- /dev/null +++ b/packages/@expo/cli/src/start/checkDependenciesOnStart.ts @@ -0,0 +1,52 @@ +import type { ExpoConfig, PackageJSONConfig } from '@expo/config'; +import chalk from 'chalk'; + +import * as Log from '../log'; +import { getVersionedDependenciesAsync } from './doctor/dependencies/validateDependenciesVersions'; + +export type DependencyCheckResult = { + expo?: { actualVersion: 
string; expectedVersionOrRange: string }; + otherCount: number; +}; + +/** + * Fetch dependency version check results. + * Returns null if everything is up-to-date. + */ +export async function checkDependenciesAsync( + projectRoot: string, + exp: Pick, + pkg: PackageJSONConfig +): Promise { + const incorrectDeps = await getVersionedDependenciesAsync(projectRoot, exp, pkg); + if (incorrectDeps.length === 0) { + return null; + } + + const expoDep = incorrectDeps.find((dep) => dep.packageName === 'expo'); + const otherCount = incorrectDeps.filter((dep) => dep.packageName !== 'expo').length; + + return { + expo: expoDep + ? { + actualVersion: expoDep.actualVersion, + expectedVersionOrRange: expoDep.expectedVersionOrRange, + } + : undefined, + otherCount, + }; +} + +/** Print the condensed dependency check messages to the terminal. */ +export function printDependencyCheckResult(result: DependencyCheckResult): void { + if (result.expo) { + Log.warn( + chalk`An update for {bold expo} is available: {red ${result.expo.actualVersion}} {dim →} {green ${result.expo.expectedVersionOrRange}}` + ); + } + if (result.otherCount > 0) { + Log.warn( + chalk`${result.otherCount} other package${result.otherCount === 1 ? '' : 's'} may need updating. 
Run {bold npx expo install --check} for details.` + ); + } +} diff --git a/packages/@expo/cli/src/start/interface/commandsTable.ts b/packages/@expo/cli/src/start/interface/commandsTable.ts index b6a549ddcb1f7d..e0fbe121884d4f 100644 --- a/packages/@expo/cli/src/start/interface/commandsTable.ts +++ b/packages/@expo/cli/src/start/interface/commandsTable.ts @@ -3,6 +3,7 @@ import chalk from 'chalk'; import wrapAnsi from 'wrap-ansi'; import * as Log from '../../log'; +import type { DependencyCheckResult } from '../checkDependenciesOnStart'; import type { McpServer } from '../server/MCP'; // Approximately how many rows apart from the commands table (usage guide on `expo start`) @@ -20,6 +21,7 @@ export type StartOptions = { maxWorkers?: number; platforms?: ExpoConfig['platforms']; mcpServer?: McpServer; + dependencyCheckPromise?: Promise; }; export const printHelp = (): void => { diff --git a/packages/@expo/cli/src/start/interface/startInterface.ts b/packages/@expo/cli/src/start/interface/startInterface.ts index 206021c1decc97..196e1ef33f3300 100644 --- a/packages/@expo/cli/src/start/interface/startInterface.ts +++ b/packages/@expo/cli/src/start/interface/startInterface.ts @@ -10,6 +10,8 @@ import { AbortCommandError } from '../../utils/errors'; import { getAllSpinners, ora } from '../../utils/ora'; import { getProgressBar, setProgressBar } from '../../utils/progress'; import { addInteractionListener, pauseInteractions } from '../../utils/prompts'; +import type { DependencyCheckResult } from '../checkDependenciesOnStart'; +import { printDependencyCheckResult } from '../checkDependenciesOnStart'; import { WebSupportProjectPrerequisite } from '../doctor/web/WebSupportProjectPrerequisite'; import type { DevServerManager } from '../server/DevServerManager'; @@ -37,7 +39,7 @@ const PLATFORM_SETTINGS: Record< export async function startInterfaceAsync( devServerManager: DevServerManager, - options: Pick + options: Pick ) { const actions = new 
DevServerManagerActions(devServerManager, options); @@ -49,7 +51,33 @@ export async function startInterfaceAsync( ...options, }; + // Wait briefly for the dependency check to resolve (it runs in the background since early startup). + // With a warm fetch cache this resolves near-instantly; on cold starts it may not be ready, + // in which case it will appear on the next TUI re-print (e.g. pressing 'c'). + let dependencyCheckResult: DependencyCheckResult | null | undefined; + if (options.dependencyCheckPromise) { + dependencyCheckResult = await Promise.race([ + options.dependencyCheckPromise, + new Promise((resolve) => setTimeout(resolve, 100)), + ]); + if (!dependencyCheckResult) { + // Not ready yet — capture once resolved for display on next reprint + options.dependencyCheckPromise.then((result) => { + if (result) { + dependencyCheckResult = result; + } + }); + } + } + + const printDependencyCheckIfAvailable = () => { + if (dependencyCheckResult) { + printDependencyCheckResult(dependencyCheckResult); + } + }; + actions.printDevServerInfo(usageOptions); + printDependencyCheckIfAvailable(); const onPressAsync = async (key: string) => { // Auxillary commands all escape. 
@@ -144,7 +172,9 @@ export async function startInterfaceAsync( Log.clear(); if (await devServerManager.toggleRuntimeMode()) { usageOptions.devClient = devServerManager.options.devClient; - return actions.printDevServerInfo(usageOptions); + actions.printDevServerInfo(usageOptions); + printDependencyCheckIfAvailable(); + return; } break; } @@ -188,7 +218,9 @@ export async function startInterfaceAsync( } case 'c': Log.clear(); - return actions.printDevServerInfo(usageOptions); + actions.printDevServerInfo(usageOptions); + printDependencyCheckIfAvailable(); + return; case 'j': return actions.openJsInspectorAsync(); case 'r': diff --git a/packages/@expo/cli/src/start/startAsync.ts b/packages/@expo/cli/src/start/startAsync.ts index 816d0058a16af0..8a48cd691ae6e8 100644 --- a/packages/@expo/cli/src/start/startAsync.ts +++ b/packages/@expo/cli/src/start/startAsync.ts @@ -2,9 +2,10 @@ import { getConfig } from '@expo/config'; import chalk from 'chalk'; import { getLogFile, shouldReduceLogs } from '../events'; +import type { DependencyCheckResult } from './checkDependenciesOnStart'; +import { checkDependenciesAsync, printDependencyCheckResult } from './checkDependenciesOnStart'; import { SimulatorAppPrerequisite } from './doctor/apple/SimulatorAppPrerequisite'; import { getXcodeVersionAsync } from './doctor/apple/XcodePrerequisite'; -import { validateDependenciesVersionsAsync } from './doctor/dependencies/validateDependenciesVersions'; import { WebSupportProjectPrerequisite } from './doctor/web/WebSupportProjectPrerequisite'; import { startInterfaceAsync } from './interface/startInterface'; import type { Options } from './resolveOptions'; @@ -13,13 +14,13 @@ import * as Log from '../log'; import type { BundlerStartOptions } from './server/BundlerDevServer'; import type { MultiBundlerStartOptions } from './server/DevServerManager'; import { DevServerManager } from './server/DevServerManager'; +import { maybeCreateMCPServerAsync } from './server/MCP'; import { 
openPlatformsAsync } from './server/openPlatforms'; import type { PlatformBundlers } from './server/platformBundlers'; import { getPlatformBundlers } from './server/platformBundlers'; import { env } from '../utils/env'; import { isInteractive } from '../utils/interactive'; import { profile } from '../utils/profile'; -import { maybeCreateMCPServerAsync } from './server/MCP'; import { addMcpCapabilities } from './server/MCPDevToolsPluginCLIExtensions'; async function getMultiBundlerStartOptions( @@ -81,6 +82,13 @@ export async function startAsync( const { exp, pkg } = profile(getConfig)(projectRoot); + // Start dependency version check in the background as early as possible (non-blocking). + // The result will be displayed in the TUI once it resolves. + let dependencyCheckPromise: Promise | undefined; + if (!env.EXPO_OFFLINE && !env.EXPO_NO_DEPENDENCY_VALIDATION && !settings.webOnly) { + dependencyCheckPromise = checkDependenciesAsync(projectRoot, exp, pkg).catch(() => null); + } + if (exp.platforms?.includes('ios') && process.platform !== 'win32') { // If Xcode could potentially be used, then we should eagerly perform the // assertions since they can take a while on cold boots. @@ -117,15 +125,6 @@ export async function startAsync( await devServerManager.bootstrapTypeScriptAsync(); } - if (!env.EXPO_NO_DEPENDENCY_VALIDATION && !settings.webOnly && !options.devClient) { - try { - await profile(validateDependenciesVersionsAsync)(projectRoot, exp, pkg); - } catch { - // We don't show the dependency validation error, since it's non-essential - // for the user to know it ran or failed - } - } - // Open project on devices. await profile(openPlatformsAsync)(devServerManager, options); @@ -141,6 +140,7 @@ export async function startAsync( await profile(startInterfaceAsync)(devServerManager, { platforms: exp.platforms ?? ['ios', 'android', 'web'], mcpServer, + dependencyCheckPromise, }); } else { // Display the server location in CI... 
@@ -151,6 +151,11 @@ export async function startAsync( } Log.log(chalk`Waiting on {underline ${defaultServerUrl}}`); } + // In non-interactive mode, await the check and print if available. + const result = await dependencyCheckPromise; + if (result) { + printDependencyCheckResult(result); + } } if (mcpServer) { From 41902b9bc32bd8a6fa91f48940dbae0fa42427d3 Mon Sep 17 00:00:00 2001 From: Hassan Khan Date: Tue, 5 May 2026 17:31:11 +0100 Subject: [PATCH 05/26] feat(expo-server): add `createStaticLoader()` and `createServerLoader()` helpers (#45401) # Why Writing loaders with the `LoaderFunction` signature is awkward for two common cases: - Static loaders always receive `undefined` as `request`, but the type forces you to accept it and leave it unused - Server loaders need a guaranteed `ImmutableRequest`, but the type allows `undefined`, requiring a manual check # How Added `createStaticLoader()` and `createServerLoader()` in `packages/expo-server/src/loaders/helpers.ts`: - `createStaticLoader(fn)`: the `fn` callback receives only `params`. Safe for both SSG and SSR. - `createServerLoader(fn)`: the `fn` callback receives an `ImmutableRequest` and `params`. Throws with an actionable error if called during SSG where no request is available. Both return `LoaderFunction`, so `useLoaderData()` type inference works unchanged. Additionally, updated the Data loaders guide documenting both helpers, and regenerated the `expo-server` API reference. # Test Plan - CI # Checklist - [x] I added a `changelog.md` entry and rebuilt the package sources according to [this short guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting) - [x] This diff will work correctly for `npx expo prebuild` & EAS Build (eg: updated a module plugin). 
- [x] Conforms with the [Documentation Writing Style Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md) --- .../__e2e__/server-loader/app/index.tsx | 2 + .../server-loader/app/server-helper.tsx | 44 ++++++++++ .../server-loader/app/static-helper.tsx | 36 +++++++++ .../server-loader/workerd/config.capnp | 3 + docs/pages/router/web/data-loaders.mdx | 71 +++++++++++++++- .../static/data/unversioned/expo-server.json | 2 +- packages/@expo/cli/CHANGELOG.md | 1 + .../__tests__/export/server-loader.test.ts | 16 ++++ .../__tests__/export/static-loader.test.ts | 14 ++++ packages/expo-router/CHANGELOG.md | 1 + packages/expo-router/server.d.ts | 2 + packages/expo-router/server.js | 2 +- packages/expo-server/CHANGELOG.md | 1 + packages/expo-server/build/cjs/index.d.ts | 1 + packages/expo-server/build/cjs/index.js | 4 + packages/expo-server/build/cjs/index.js.map | 2 +- .../build/cjs/loaders/helpers.d.ts | 34 ++++++++ .../expo-server/build/cjs/loaders/helpers.js | 49 ++++++++++++ .../build/cjs/loaders/helpers.js.map | 1 + packages/expo-server/build/mjs/index.d.ts | 1 + packages/expo-server/build/mjs/index.js | 1 + packages/expo-server/build/mjs/index.js.map | 2 +- .../build/mjs/loaders/helpers.d.ts | 34 ++++++++ .../expo-server/build/mjs/loaders/helpers.js | 45 +++++++++++ .../build/mjs/loaders/helpers.js.map | 1 + packages/expo-server/src/index.ts | 1 + .../src/loaders/__tests__/helpers.test.ts | 80 +++++++++++++++++++ packages/expo-server/src/loaders/helpers.ts | 53 ++++++++++++ 28 files changed, 499 insertions(+), 5 deletions(-) create mode 100644 apps/router-e2e/__e2e__/server-loader/app/server-helper.tsx create mode 100644 apps/router-e2e/__e2e__/server-loader/app/static-helper.tsx create mode 100644 packages/expo-server/build/cjs/loaders/helpers.d.ts create mode 100644 packages/expo-server/build/cjs/loaders/helpers.js create mode 100644 packages/expo-server/build/cjs/loaders/helpers.js.map create mode 100644 
packages/expo-server/build/mjs/loaders/helpers.d.ts create mode 100644 packages/expo-server/build/mjs/loaders/helpers.js create mode 100644 packages/expo-server/build/mjs/loaders/helpers.js.map create mode 100644 packages/expo-server/src/loaders/__tests__/helpers.test.ts create mode 100644 packages/expo-server/src/loaders/helpers.ts diff --git a/apps/router-e2e/__e2e__/server-loader/app/index.tsx b/apps/router-e2e/__e2e__/server-loader/app/index.tsx index 7abb312f8d73f7..22ab0626499aef 100644 --- a/apps/router-e2e/__e2e__/server-loader/app/index.tsx +++ b/apps/router-e2e/__e2e__/server-loader/app/index.tsx @@ -42,6 +42,8 @@ const IndexScreen = () => { Go to static Post 2 Go to Error Go to Grouped Index + Go to Static Helper + Go to Server Helper ); diff --git a/apps/router-e2e/__e2e__/server-loader/app/server-helper.tsx b/apps/router-e2e/__e2e__/server-loader/app/server-helper.tsx new file mode 100644 index 00000000000000..2e914b6ab3a3de --- /dev/null +++ b/apps/router-e2e/__e2e__/server-loader/app/server-helper.tsx @@ -0,0 +1,44 @@ +import { useLoaderData } from 'expo-router'; +import { createServerLoader } from 'expo-router/server'; +import { Suspense } from 'react'; + +import { Loading } from '../components/Loading'; +import { SiteLinks, SiteLink } from '../components/SiteLink'; +import { Table, TableRow } from '../components/Table'; + +const _serverLoader = createServerLoader(async (request, _params) => { + return { + source: 'server-helper', + url: request.url, + method: request.method, + }; +}); + +// Only export the loader in SSR mode. In SSG mode, createServerLoader throws because there is no +// request object at build time. +export const loader = process.env.E2E_ROUTER_SERVER_RENDERING ? _serverLoader : undefined; + +export default function ServerHelperRoute() { + return ( + }> + + + ); +} + +const ServerHelperScreen = () => { + const data = useLoaderData(); + + return ( + <> + + +
+ + + Go to Index + Go to Static Helper + + + ); +}; diff --git a/apps/router-e2e/__e2e__/server-loader/app/static-helper.tsx b/apps/router-e2e/__e2e__/server-loader/app/static-helper.tsx new file mode 100644 index 00000000000000..48270a5787e0b9 --- /dev/null +++ b/apps/router-e2e/__e2e__/server-loader/app/static-helper.tsx @@ -0,0 +1,36 @@ +import { useLoaderData } from 'expo-router'; +import { createStaticLoader } from 'expo-router/server'; +import { Suspense } from 'react'; + +import { Loading } from '../components/Loading'; +import { SiteLinks, SiteLink } from '../components/SiteLink'; +import { Table, TableRow } from '../components/Table'; + +export const loader = createStaticLoader(async (_params) => { + return { source: 'static-helper' }; +}); + +export default function StaticHelperRoute() { + return ( + }> + + + ); +} + +const StaticHelperScreen = () => { + const data = useLoaderData(); + + return ( + <> + + +
+ + + Go to Index + Go to Server Helper + + + ); +}; diff --git a/apps/router-e2e/__e2e__/server-loader/workerd/config.capnp b/apps/router-e2e/__e2e__/server-loader/workerd/config.capnp index e32f5a50490099..1853ef48e6a27c 100644 --- a/apps/router-e2e/__e2e__/server-loader/workerd/config.capnp +++ b/apps/router-e2e/__e2e__/server-loader/workerd/config.capnp @@ -22,9 +22,12 @@ const server :Workerd.Worker = ( (name = "_expo/loaders/request.js", commonJsModule = embed "_expo/loaders/request.js"), (name = "_expo/loaders/response.js", commonJsModule = embed "_expo/loaders/response.js"), (name = "_expo/loaders/(group)/index.js", commonJsModule = embed "_expo/loaders/(group)/index.js"), + (name = "_expo/loaders/static-helper.js", commonJsModule = embed "_expo/loaders/static-helper.js"), + (name = "_expo/loaders/server-helper.js", commonJsModule = embed "_expo/loaders/server-helper.js"), ], bindings = [ (name = "TEST_SECRET_RUNTIME_KEY", text = "runtime-secret-value"), + (name = "E2E_ROUTER_SERVER_RENDERING", text = "true"), ], compatibilityDate = "2025-05-05", compatibilityFlags = [ diff --git a/docs/pages/router/web/data-loaders.mdx b/docs/pages/router/web/data-loaders.mdx index 5541f0cb5ad94b..abe16b6bc719cc 100644 --- a/docs/pages/router/web/data-loaders.mdx +++ b/docs/pages/router/web/data-loaders.mdx @@ -331,7 +331,76 @@ With server rendering, loaders execute on every request. This means: ## Typed loader functions -For improved type safety, you can import the `LoaderFunction` type from `expo-router`: +`expo-server` provides two helper functions that create loaders with improved type safety. They narrow the callback signature so you only receive the parameters relevant to your rendering mode. + +### `createStaticLoader` + +Use [`createStaticLoader`](/versions/unversioned/sdk/server/#createstaticloaderfn) for routes that only need route parameters. 
The callback only receives the route params, and is safe to use with both static and server rendering: + +```tsx src/app/posts/[postId].tsx +import { Text, View } from 'react-native'; +import { useLoaderData } from 'expo-router'; +import { createStaticLoader } from 'expo-router/server'; + +export const loader = createStaticLoader(async params => { + const response = await fetch(`https://api.example.com/posts/${params.postId}`); + return response.json(); +}); + +export default function Post() { + const data = useLoaderData(); + + return ( + + {data.title} + + ); +} +``` + +### `createServerLoader` + +Use [`createServerLoader`](/versions/unversioned/sdk/server/#createserverloaderfn) for routes that need access to the incoming HTTP request. The callback receives an [`ImmutableRequest`](/versions/latest/sdk/server/#immutablerequest) and the route params as arguments: + +```tsx src/app/profile.tsx +import { Text, View } from 'react-native'; +import { useLoaderData } from 'expo-router'; +import { createServerLoader } from 'expo-router/server'; + +export const loader = createServerLoader(async (request, params) => { + const authToken = request.headers.get('Authorization'); + + if (!authToken) { + return { user: null }; + } + + const response = await fetch('https://api.example.com/user', { + headers: { Authorization: authToken }, + }); + + return { user: await response.json() }; +}); + +export default function Profile() { + const { user } = useLoaderData(); + + if (!user) { + return Please log in; + } + + return ( + + Welcome, {user.name} + + ); +} +``` + +> **warning** `createServerLoader` will throw an error if called during static site generation (SSG) because there is no HTTP request at build time. Use `createStaticLoader` when using static rendering. + +### Using `LoaderFunction` directly + +You can also type loaders directly using the [`LoaderFunction`](/versions/latest/sdk/server/#loaderfunction) type from `expo-router/server`. 
This gives you full control over the function signature, including both `request` and `params`: ```tsx src/app/posts/[postId].tsx import { Text, View } from 'react-native'; diff --git a/docs/public/static/data/unversioned/expo-server.json b/docs/public/static/data/unversioned/expo-server.json index 0e670ba494438f..ff7d6321659beb 100644 --- a/docs/public/static/data/unversioned/expo-server.json +++ b/docs/public/static/data/unversioned/expo-server.json @@ -1 +1 @@ -{"schemaVersion":"2.0","name":"expo-server","variant":"project","kind":1,"children":[{"name":"StatusError","variant":"declaration","kind":128,"comment":{"summary":[{"kind":"text","text":"An error response representation which can be thrown anywhere in server-side code.\n\nA "},{"kind":"code","text":"`StatusError`"},{"kind":"text","text":" can be thrown by a request handler and will be caught by the "},{"kind":"code","text":"`expo-server`"},{"kind":"text","text":"\nruntime and replaced by a "},{"kind":"code","text":"`Response`"},{"kind":"text","text":" with the "},{"kind":"code","text":"`status`"},{"kind":"text","text":" and "},{"kind":"code","text":"`body`"},{"kind":"text","text":" that's been passed to\nthe "},{"kind":"code","text":"`StatusError`"},{"kind":"text","text":"."}],"blockTags":[{"tag":"@example","content":[{"kind":"code","text":"```ts\nimport { StatusError } from 'expo-server';\n\nexport function GET(request, { postId }) {\n if (!postId) {\n throw new StatusError(400, 'postId parameter is required');\n 
}\n}\n```"}]}]},"children":[{"name":"constructor","variant":"declaration","kind":512,"signatures":[{"name":"StatusError","variant":"signature","kind":16384,"parameters":[{"name":"status","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"number"}},{"name":"body","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Error"},"name":"Error","package":"typescript"},{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"error","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}}],"indexSignatures":[{"name":"__index","variant":"signature","kind":8192,"parameters":[{"name":"key","variant":"param","kind":32768,"type":{"type":"intrinsic","name":"string"}}],"type":{"type":"intrinsic","name":"any"}}]}}]}}],"type":{"type":"reference","name":"StatusError","package":"expo-server"},"overwrites":{"type":"reference","name":"Error.constructor","package":"typescript"}},{"name":"StatusError","variant":"signature","kind":16384,"parameters":[{"name":"status","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"number"}},{"name":"errorOptions","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"cause","variant":"declaration","kind":1024,"type":{"type":"intrinsic","name":"unknown"}},{"name":"error","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}}]}}}],"type":{"type":"reference","name":"StatusError","package":"expo-server"},"overwrites":{"type":"reference","name":"Error.constructor","package":"typescript"}},{"name":"StatusError","variant":"s
ignature","kind":16384,"parameters":[{"name":"status","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"number"}},{"name":"body","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Error"},"name":"Error","package":"typescript"},{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"error","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}}],"indexSignatures":[{"name":"__index","variant":"signature","kind":8192,"parameters":[{"name":"key","variant":"param","kind":32768,"type":{"type":"intrinsic","name":"string"}}],"type":{"type":"intrinsic","name":"any"}}]}}]}},{"name":"errorOptions","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"cause","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"unknown"}}]}}}],"type":{"type":"reference","name":"StatusError","package":"expo-server"},"overwrites":{"type":"reference","name":"Error.constructor","package":"typescript"}}],"overwrites":{"type":"reference","name":"Error.constructor","package":"typescript"}},{"name":"body","variant":"declaration","kind":1024,"type":{"type":"intrinsic","name":"string"}},{"name":"status","variant":"declaration","kind":1024,"type":{"type":"intrinsic","name":"number"}}],"extendedTypes":[{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Error"},"name":"Error","package":"typescript"}]},{"name":"ImmutableHeaders","variant":"declaration","kind":256,"comment":{"summary":[{"kind":"text","text":"An immutable version of the Fetch API's 
"},{"kind":"code","text":"`Headers`"},{"kind":"text","text":" object. It cannot be mutated or modified."}]},"extendedTypes":[{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/ImmutableRequest.ts","qualifiedName":"_ImmutableHeaders"},"name":"_ImmutableHeaders","package":"expo-server"}]},{"name":"ImmutableRequest","variant":"declaration","kind":256,"comment":{"summary":[{"kind":"text","text":"An immutable version of the Fetch API's "},{"kind":"code","text":"`Request`"},{"kind":"text","text":" object. It cannot be mutated or modified, its\nheaders are immutable, and you won't have access to the request body."}]},"children":[{"name":"duplex","variant":"declaration","kind":1024,"flags":{"isOptional":true,"isInherited":true},"type":{"type":"literal","value":"half"},"inheritedFrom":{"type":"reference","name":"_ImmutableRequest.duplex","package":"expo-server"}},{"name":"headers","variant":"declaration","kind":1024,"flags":{"isInherited":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/ImmutableRequest.ts","qualifiedName":"ImmutableHeaders"},"name":"ImmutableHeaders","package":"expo-server"},"inheritedFrom":{"type":"reference","name":"_ImmutableRequest.headers"}},{"name":"method","variant":"declaration","kind":1024,"flags":{"isReadonly":true},"comment":{"summary":[{"kind":"text","text":"The **"},{"kind":"code","text":"`method`"},{"kind":"text","text":"** read-only property of the Request interface contains the request's method (GET, POST, etc.)\n\n[MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/method)"}]},"type":{"type":"intrinsic","name":"string"},"overwrites":{"type":"reference","name":"_ImmutableRequest.method","package":"typescript"}},{"name":"url","variant":"declaration","kind":1024,"flags":{"isReadonly":true},"comment":{"summary":[{"kind":"text","text":"The **"},{"kind":"code","text":"`url`"},{"kind":"text","text":"** read-only property of the Request interface contains the URL of 
the request.\n\n[MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/url)"}]},"type":{"type":"intrinsic","name":"string"},"overwrites":{"type":"reference","name":"_ImmutableRequest.url","package":"typescript"}}],"extendedTypes":[{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/ImmutableRequest.ts","qualifiedName":"_ImmutableRequest"},"name":"_ImmutableRequest","package":"expo-server"}]},{"name":"MiddlewareMatcher","variant":"declaration","kind":256,"comment":{"summary":[{"kind":"text","text":"Middleware matcher settings that restricts the middleware to run conditionally."}]},"children":[{"name":"methods","variant":"declaration","kind":1024,"flags":{"isOptional":true},"comment":{"summary":[{"kind":"text","text":"Set this to a list of HTTP methods to conditionally run middleware on. By default, middleware will\nmatch all HTTP methods."}],"blockTags":[{"tag":"@example","name":"['POST', 'PUT', 'DELETE']","content":[]}]},"type":{"type":"array","elementType":{"type":"intrinsic","name":"string"}}},{"name":"patterns","variant":"declaration","kind":1024,"flags":{"isOptional":true},"comment":{"summary":[{"kind":"text","text":"Set this to a list of path patterns to conditionally run middleware on. 
This may be exact paths,\npaths containing parameter or catch-all segments ("},{"kind":"code","text":"`'/posts/[postId]'`"},{"kind":"text","text":" or "},{"kind":"code","text":"`'/blog/[...slug]'`"},{"kind":"text","text":"), or\nregular expressions matching paths."}],"blockTags":[{"tag":"@example","name":"['/api', '/posts/[id]', '/blog/[...slug]']","content":[]}]},"type":{"type":"array","elementType":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"RegExp"},"name":"RegExp","package":"typescript"}]}}}]},{"name":"MiddlewareSettings","variant":"declaration","kind":256,"comment":{"summary":[{"kind":"text","text":"Exported from a "},{"kind":"code","text":"`+middleware.ts`"},{"kind":"text","text":" file to configure the server-side middleware function."}],"blockTags":[{"tag":"@example","content":[{"kind":"code","text":"```ts\nimport type { MiddlewareSettings } from 'expo-server';\n\nexport const unstable_settings: MiddlewareSettings = {\n matcher: {\n methods: ['GET'],\n patterns: ['/api', '/admin/[...path]'],\n },\n};\n```"}]},{"tag":"@see","content":[{"kind":"text","text":"https://docs.expo.dev/router/web/middleware/"}]}]},"children":[{"name":"matcher","variant":"declaration","kind":1024,"flags":{"isOptional":true},"comment":{"summary":[{"kind":"text","text":"Matcher definition that restricts the middleware to run 
conditionally."}]},"type":{"type":"reference","name":"MiddlewareMatcher","package":"expo-server"}}]},{"name":"GenerateMetadataFunction","variant":"declaration","kind":2097152,"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"parameters":[{"name":"request","variant":"param","kind":32768,"type":{"type":"reference","name":"ImmutableRequest","package":"expo-server"}},{"name":"params","variant":"param","kind":32768,"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Record"},"typeArguments":[{"type":"intrinsic","name":"string"},{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"array","elementType":{"type":"intrinsic","name":"string"}}]}],"name":"Record","package":"typescript"}}],"type":{"type":"union","types":[{"type":"reference","name":"Metadata","package":"expo-server"},{"type":"literal","value":null},{"type":"intrinsic","name":"undefined"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"union","types":[{"type":"reference","name":"Metadata","package":"expo-server"},{"type":"literal","value":null},{"type":"intrinsic","name":"undefined"}]}],"name":"Promise","package":"typescript"}]}}]}}},{"name":"LoaderFunction","variant":"declaration","kind":2097152,"comment":{"summary":[{"kind":"text","text":"Function type for route loaders. 
Loaders are executed on the server during\nSSR/SSG to fetch data required by a route.\n\nDuring SSG (Static Site Generation), the "},{"kind":"code","text":"`request`"},{"kind":"text","text":" parameter will be "},{"kind":"code","text":"`undefined`"},{"kind":"text","text":"\nas there is no HTTP request at build time."}],"blockTags":[{"tag":"@example","content":[{"kind":"code","text":"```ts\nimport type { LoaderFunction } from 'expo-server';\n\nexport const loader: LoaderFunction = async (request, params) => {\n const data = await fetchData(params.id);\n return { data };\n};\n```"}]},{"tag":"@see","content":[{"kind":"text","text":"[Data loaders](/router/web/data-loaders) for more information."}]}]},"typeParameters":[{"name":"T","variant":"typeParam","kind":131072,"default":{"type":"intrinsic","name":"any"}}],"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"parameters":[{"name":"request","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"An "},{"kind":"code","text":"`ImmutableRequest`"},{"kind":"text","text":" with read-only headers and no body access. 
In SSG, this is "},{"kind":"code","text":"`undefined`"}]},"type":{"type":"union","types":[{"type":"reference","name":"ImmutableRequest","package":"expo-server"},{"type":"intrinsic","name":"undefined"}]}},{"name":"params","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"Route parameters extracted from the URL path"}]},"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Record"},"typeArguments":[{"type":"intrinsic","name":"string"},{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"array","elementType":{"type":"intrinsic","name":"string"}}]}],"name":"Record","package":"typescript"}}],"type":{"type":"union","types":[{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"reference","name":"T","package":"expo-server","refersToTypeParameter":true}],"name":"Promise","package":"typescript"},{"type":"reference","name":"T","package":"expo-server","refersToTypeParameter":true}]}}]}}},{"name":"Metadata","variant":"declaration","kind":2097152,"children":[{"name":"alternates","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataAlternates"},"name":"MetadataAlternates","package":"expo-server"}},{"name":"appleWebApp","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataAppleWebApp"},"name":"MetadataAppleWebApp","package":"expo-server"}},{"name":"applicationName","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"appLinks","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath"
:"src/metadata.ts","qualifiedName":"MetadataAppLinks"},"name":"MetadataAppLinks","package":"expo-server"}},{"name":"archives","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"array","elementType":{"type":"intrinsic","name":"string"}}},{"name":"assets","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"array","elementType":{"type":"intrinsic","name":"string"}}},{"name":"authors","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"union","types":[{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataAuthor"},"name":"MetadataAuthor","package":"expo-server"},{"type":"array","elementType":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataAuthor"},"name":"MetadataAuthor","package":"expo-server"}}]}},{"name":"bookmarks","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"array","elementType":{"type":"intrinsic","name":"string"}}},{"name":"category","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"creator","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"description","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"facebook","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataFacebook"},"name":"MetadataFacebook","package":"expo-server"}},{"name":"formatDetection","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataFormatDetection"},"name":"MetadataFormatDetection","packa
ge":"expo-server"}},{"name":"generator","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"icons","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataIcons"},"name":"MetadataIcons","package":"expo-server"}},{"name":"itunes","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataItunes"},"name":"MetadataItunes","package":"expo-server"}},{"name":"keywords","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"array","elementType":{"type":"intrinsic","name":"string"}}]}},{"name":"manifest","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"openGraph","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataOpenGraph"},"name":"MetadataOpenGraph","package":"expo-server"}},{"name":"other","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Record"},"typeArguments":[{"type":"intrinsic","name":"string"},{"type":"union","types":[{"type":"reference","name":"MetadataValue","package":"expo-server"},{"type":"reference","name":"MetadataValueArray","package":"expo-server"},{"type":"literal","value":null},{"type":"intrinsic","name":"undefined"}]}],"name":"Record","package":"typescript"}},{"name":"pinterest","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/m
etadata.ts","qualifiedName":"MetadataPinterest"},"name":"MetadataPinterest","package":"expo-server"}},{"name":"publisher","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"referrer","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"union","types":[{"type":"literal","value":"no-referrer"},{"type":"literal","value":"no-referrer-when-downgrade"},{"type":"literal","value":"origin"},{"type":"literal","value":"origin-when-cross-origin"},{"type":"literal","value":"same-origin"},{"type":"literal","value":"strict-origin"},{"type":"literal","value":"strict-origin-when-cross-origin"},{"type":"literal","value":"unsafe-url"}]}},{"name":"robots","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataRobots"},"name":"MetadataRobots","package":"expo-server"}},{"name":"title","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"twitter","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataTwitter"},"name":"MetadataTwitter","package":"expo-server"}},{"name":"verification","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataVerification"},"name":"MetadataVerification","package":"expo-server"}}]},{"name":"MetadataIconDescriptor","variant":"declaration","kind":2097152,"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"media","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrins
ic","name":"string"}},{"name":"rel","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"sizes","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"type","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"url","variant":"declaration","kind":1024,"type":{"type":"intrinsic","name":"string"}}]}}]}},{"name":"MetadataImage","variant":"declaration","kind":2097152,"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"alt","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"height","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"number"}},{"name":"secureUrl","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"type","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"url","variant":"declaration","kind":1024,"type":{"type":"intrinsic","name":"string"}},{"name":"width","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"number"}}]}}]}},{"name":"MetadataValue","variant":"declaration","kind":2097152,"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"intrinsic","name":"number"},{"type":"intrinsic","name":"boolean"}]}},{"name":"MetadataValueArray","variant":"declaration","kind":2097152,"type":{"type":"array","elementType":{"type":"reference","name":"MetadataValue","package":"expo-server"}}},{"name":"MiddlewareFunction","variant":"declaration","kind":2097152,"comment":{"summary":[{"kind":"text","text":"Middleware function type. 
Middleware run for every request in your app, or on\nspecified conditionally matched methods and path patterns, as per "},{"kind":"inline-tag","tag":"@link","text":"MiddlewareMatcher"},{"kind":"text","text":"."}],"blockTags":[{"tag":"@example","content":[{"kind":"code","text":"```ts\nimport type { MiddlewareFunction } from 'expo-server';\n\nconst middleware: MiddlewareFunction = async (request) => {\n console.log(`Middleware executed for: ${request.url}`);\n};\n\nexport default middleware;\n```"}]},{"tag":"@see","content":[{"kind":"text","text":"[Server middleware](https://docs.expo.dev/router/web/middleware/) for more information."}]}]},"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"parameters":[{"name":"request","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"An "},{"kind":"code","text":"`ImmutableRequest`"},{"kind":"text","text":" with read-only headers and no body access"}]},"type":{"type":"reference","name":"ImmutableRequest","package":"expo-server"}}],"type":{"type":"union","types":[{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"union","types":[{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.dom.d.ts","qualifiedName":"Response"},"name":"Response","package":"typescript"},{"type":"intrinsic","name":"void"}]}],"name":"Promise","package":"typescript"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.dom.d.ts","qualifiedName":"Response"},"name":"Response","package":"typescript"},{"type":"intrinsic","name":"void"}]}}]}}},{"name":"deferTask","variant":"declaration","kind":64,"signatures":[{"name":"deferTask","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Defers a task until after a response has been sent.\n\nThis only calls the task 
function once the request handler has finished resolving a "},{"kind":"code","text":"`Response`"},{"kind":"text","text":"\nand keeps the request handler alive until the task is completed. This is useful to run non-critical\ntasks after the request handler, for example to log analytics datapoints. If the request handler\nrejects with an error, deferred tasks won't be executed."}]},"parameters":[{"name":"fn","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"A task function to execute after the request handler has finished."}]},"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"type":{"type":"union","types":[{"type":"intrinsic","name":"void"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"intrinsic","name":"unknown"}],"name":"Promise","package":"typescript"}]}}]}}}],"type":{"type":"intrinsic","name":"void"}}]},{"name":"environment","variant":"declaration","kind":64,"signatures":[{"name":"environment","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Returns the request's environment, if the server runtime supports this.\n\nIn EAS Hosting, the returned environment name is the\n[alias or deployment identifier](https://docs.expo.dev/eas/hosting/deployments-and-aliases/),\nbut the value may differ for other providers."}],"blockTags":[{"tag":"@returns","content":[{"kind":"text","text":"A request environment name, or "},{"kind":"code","text":"`null`"},{"kind":"text","text":" for production."}]}]},"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"literal","value":null}]}}]},{"name":"origin","variant":"declaration","kind":64,"signatures":[{"name":"origin","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Returns the current request's URL.\n\nThis typically 
returns the request's URL, or on certain platform,\nthe origin of the request. This does not use the "},{"kind":"code","text":"`Origin`"},{"kind":"text","text":" header\nin development as it may contain an untrusted value."}],"blockTags":[{"tag":"@returns","content":[{"kind":"text","text":"A request origin"}]}]},"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"literal","value":null}]}}]},{"name":"requestHeaders","variant":"declaration","kind":64,"signatures":[{"name":"requestHeaders","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Returns an immutable copy of the current request's headers."}]},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/ImmutableRequest.ts","qualifiedName":"ImmutableHeaders"},"name":"ImmutableHeaders","package":"expo-server"}}]},{"name":"runTask","variant":"declaration","kind":64,"signatures":[{"name":"runTask","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Runs a task immediately and instructs the runtime to complete the task.\n\nA request handler may be terminated as soon as the client has finished the full "},{"kind":"code","text":"`Response`"},{"kind":"text","text":"\nand unhandled promise rejections may not be logged properly. To run tasks concurrently to\na request handler and keep the request alive until the task is completed, pass a task\nfunction to "},{"kind":"code","text":"`runTask`"},{"kind":"text","text":" instead. The request handler will be kept alive until the task\ncompletes."}]},"parameters":[{"name":"fn","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"A task function to execute. 
The request handler will be kept alive until this task finishes."}]},"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"intrinsic","name":"unknown"}],"name":"Promise","package":"typescript"}}]}}}],"type":{"type":"intrinsic","name":"void"}}]},{"name":"setResponseHeaders","variant":"declaration","kind":64,"signatures":[{"name":"setResponseHeaders","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Sets headers on the "},{"kind":"code","text":"`Response`"},{"kind":"text","text":" the current request handler will return.\n\nThis only updates the headers once the request handler has finished and resolved a "},{"kind":"code","text":"`Response`"},{"kind":"text","text":".\nIt will either receive a set of "},{"kind":"code","text":"`Headers`"},{"kind":"text","text":" or an equivalent object containing headers, which will\nbe merged into the response's headers once it's returned."}]},"parameters":[{"name":"updateHeaders","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"A "},{"kind":"code","text":"`Headers`"},{"kind":"text","text":" object, a record of headers, or a function that receives "},{"kind":"code","text":"`Headers`"},{"kind":"text","text":" to be updated or can return a "},{"kind":"code","text":"`Headers`"},{"kind":"text","text":" object that will be merged into the response 
headers."}]},"type":{"type":"union","types":[{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.dom.d.ts","qualifiedName":"Headers"},"name":"Headers","package":"typescript"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Record"},"typeArguments":[{"type":"intrinsic","name":"string"},{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"array","elementType":{"type":"intrinsic","name":"string"}}]}],"name":"Record","package":"typescript"},{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"parameters":[{"name":"headers","variant":"param","kind":32768,"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.dom.d.ts","qualifiedName":"Headers"},"name":"Headers","package":"typescript"}}],"type":{"type":"union","types":[{"type":"intrinsic","name":"void"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.dom.d.ts","qualifiedName":"Headers"},"name":"Headers","package":"typescript"}]}}]}}]}}],"type":{"type":"intrinsic","name":"void"}}]}],"packageName":"expo-server"} \ No newline at end of file +{"schemaVersion":"2.0","name":"expo-server","variant":"project","kind":1,"children":[{"name":"StatusError","variant":"declaration","kind":128,"comment":{"summary":[{"kind":"text","text":"An error response representation which can be thrown anywhere in server-side code.\n\nA "},{"kind":"code","text":"`StatusError`"},{"kind":"text","text":" can be thrown by a request handler and will be caught by the "},{"kind":"code","text":"`expo-server`"},{"kind":"text","text":"\nruntime and replaced by a "},{"kind":"code","text":"`Response`"},{"kind":"text","text":" with the "},{"kind":"code","text":"`status`"},{"kind":"text","text":" and "},{"kind":"code","text":"`body`"},{"kind":"text","text":" that's been passed to\nthe 
"},{"kind":"code","text":"`StatusError`"},{"kind":"text","text":"."}],"blockTags":[{"tag":"@example","content":[{"kind":"code","text":"```ts\nimport { StatusError } from 'expo-server';\n\nexport function GET(request, { postId }) {\n if (!postId) {\n throw new StatusError(400, 'postId parameter is required');\n }\n}\n```"}]}]},"children":[{"name":"constructor","variant":"declaration","kind":512,"signatures":[{"name":"StatusError","variant":"signature","kind":16384,"parameters":[{"name":"status","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"number"}},{"name":"body","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Error"},"name":"Error","package":"typescript"},{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"error","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}}],"indexSignatures":[{"name":"__index","variant":"signature","kind":8192,"parameters":[{"name":"key","variant":"param","kind":32768,"type":{"type":"intrinsic","name":"string"}}],"type":{"type":"intrinsic","name":"any"}}]}}]}}],"type":{"type":"reference","name":"StatusError","package":"expo-server"},"overwrites":{"type":"reference","name":"Error.constructor","package":"typescript"}},{"name":"StatusError","variant":"signature","kind":16384,"parameters":[{"name":"status","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"number"}},{"name":"errorOptions","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"cause","variant":"declaration","kind":1024,"type":{"type":"intrinsic","name":"unknown"}},{"name":"e
rror","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}}]}}}],"type":{"type":"reference","name":"StatusError","package":"expo-server"},"overwrites":{"type":"reference","name":"Error.constructor","package":"typescript"}},{"name":"StatusError","variant":"signature","kind":16384,"parameters":[{"name":"status","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"number"}},{"name":"body","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Error"},"name":"Error","package":"typescript"},{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"error","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}}],"indexSignatures":[{"name":"__index","variant":"signature","kind":8192,"parameters":[{"name":"key","variant":"param","kind":32768,"type":{"type":"intrinsic","name":"string"}}],"type":{"type":"intrinsic","name":"any"}}]}}]}},{"name":"errorOptions","variant":"param","kind":32768,"flags":{"isOptional":true},"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"cause","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"unknown"}}]}}}],"type":{"type":"reference","name":"StatusError","package":"expo-server"},"overwrites":{"type":"reference","name":"Error.constructor","package":"typescript"}}],"overwrites":{"type":"reference","name":"Error.constructor","package":"typescript"}},{"name":"body","variant":"declaration","kind":1024,"type":{"type":"intrinsic","name":"string"}},{"name":"status","variant":"declaration","kind":1024,"type":{"type":"intrinsic","name":"number"}}],"extendedTypes":[{"typ
e":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Error"},"name":"Error","package":"typescript"}]},{"name":"ImmutableHeaders","variant":"declaration","kind":256,"comment":{"summary":[{"kind":"text","text":"An immutable version of the Fetch API's "},{"kind":"code","text":"`Headers`"},{"kind":"text","text":" object. It cannot be mutated or modified."}]},"extendedTypes":[{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/ImmutableRequest.ts","qualifiedName":"_ImmutableHeaders"},"name":"_ImmutableHeaders","package":"expo-server"}]},{"name":"ImmutableRequest","variant":"declaration","kind":256,"comment":{"summary":[{"kind":"text","text":"An immutable version of the Fetch API's "},{"kind":"code","text":"`Request`"},{"kind":"text","text":" object. It cannot be mutated or modified, its\nheaders are immutable, and you won't have access to the request body."}]},"children":[{"name":"duplex","variant":"declaration","kind":1024,"flags":{"isOptional":true,"isInherited":true},"type":{"type":"literal","value":"half"},"inheritedFrom":{"type":"reference","name":"_ImmutableRequest.duplex","package":"expo-server"}},{"name":"headers","variant":"declaration","kind":1024,"flags":{"isInherited":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/ImmutableRequest.ts","qualifiedName":"ImmutableHeaders"},"name":"ImmutableHeaders","package":"expo-server"},"inheritedFrom":{"type":"reference","name":"_ImmutableRequest.headers"}},{"name":"method","variant":"declaration","kind":1024,"flags":{"isReadonly":true},"comment":{"summary":[{"kind":"text","text":"The **"},{"kind":"code","text":"`method`"},{"kind":"text","text":"** read-only property of the Request interface contains the request's method (GET, POST, etc.)\n\n[MDN 
Reference](https://developer.mozilla.org/docs/Web/API/Request/method)"}]},"type":{"type":"intrinsic","name":"string"},"overwrites":{"type":"reference","name":"_ImmutableRequest.method","package":"typescript"}},{"name":"url","variant":"declaration","kind":1024,"flags":{"isReadonly":true},"comment":{"summary":[{"kind":"text","text":"The **"},{"kind":"code","text":"`url`"},{"kind":"text","text":"** read-only property of the Request interface contains the URL of the request.\n\n[MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/url)"}]},"type":{"type":"intrinsic","name":"string"},"overwrites":{"type":"reference","name":"_ImmutableRequest.url","package":"typescript"}}],"extendedTypes":[{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/ImmutableRequest.ts","qualifiedName":"_ImmutableRequest"},"name":"_ImmutableRequest","package":"expo-server"}]},{"name":"MiddlewareMatcher","variant":"declaration","kind":256,"comment":{"summary":[{"kind":"text","text":"Middleware matcher settings that restricts the middleware to run conditionally."}]},"children":[{"name":"methods","variant":"declaration","kind":1024,"flags":{"isOptional":true},"comment":{"summary":[{"kind":"text","text":"Set this to a list of HTTP methods to conditionally run middleware on. By default, middleware will\nmatch all HTTP methods."}],"blockTags":[{"tag":"@example","name":"['POST', 'PUT', 'DELETE']","content":[]}]},"type":{"type":"array","elementType":{"type":"intrinsic","name":"string"}}},{"name":"patterns","variant":"declaration","kind":1024,"flags":{"isOptional":true},"comment":{"summary":[{"kind":"text","text":"Set this to a list of path patterns to conditionally run middleware on. 
This may be exact paths,\npaths containing parameter or catch-all segments ("},{"kind":"code","text":"`'/posts/[postId]'`"},{"kind":"text","text":" or "},{"kind":"code","text":"`'/blog/[...slug]'`"},{"kind":"text","text":"), or\nregular expressions matching paths."}],"blockTags":[{"tag":"@example","name":"['/api', '/posts/[id]', '/blog/[...slug]']","content":[]}]},"type":{"type":"array","elementType":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"RegExp"},"name":"RegExp","package":"typescript"}]}}}]},{"name":"MiddlewareSettings","variant":"declaration","kind":256,"comment":{"summary":[{"kind":"text","text":"Exported from a "},{"kind":"code","text":"`+middleware.ts`"},{"kind":"text","text":" file to configure the server-side middleware function."}],"blockTags":[{"tag":"@example","content":[{"kind":"code","text":"```ts\nimport type { MiddlewareSettings } from 'expo-server';\n\nexport const unstable_settings: MiddlewareSettings = {\n matcher: {\n methods: ['GET'],\n patterns: ['/api', '/admin/[...path]'],\n },\n};\n```"}]},{"tag":"@see","content":[{"kind":"text","text":"https://docs.expo.dev/router/web/middleware/"}]}]},"children":[{"name":"matcher","variant":"declaration","kind":1024,"flags":{"isOptional":true},"comment":{"summary":[{"kind":"text","text":"Matcher definition that restricts the middleware to run 
conditionally."}]},"type":{"type":"reference","name":"MiddlewareMatcher","package":"expo-server"}}]},{"name":"GenerateMetadataFunction","variant":"declaration","kind":2097152,"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"parameters":[{"name":"request","variant":"param","kind":32768,"type":{"type":"reference","name":"ImmutableRequest","package":"expo-server"}},{"name":"params","variant":"param","kind":32768,"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Record"},"typeArguments":[{"type":"intrinsic","name":"string"},{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"array","elementType":{"type":"intrinsic","name":"string"}}]}],"name":"Record","package":"typescript"}}],"type":{"type":"union","types":[{"type":"reference","name":"Metadata","package":"expo-server"},{"type":"literal","value":null},{"type":"intrinsic","name":"undefined"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"union","types":[{"type":"reference","name":"Metadata","package":"expo-server"},{"type":"literal","value":null},{"type":"intrinsic","name":"undefined"}]}],"name":"Promise","package":"typescript"}]}}]}}},{"name":"LoaderFunction","variant":"declaration","kind":2097152,"comment":{"summary":[{"kind":"text","text":"Function type for route loaders. 
Loaders are executed on the server during\nSSR/SSG to fetch data required by a route.\n\nDuring SSG (Static Site Generation), the "},{"kind":"code","text":"`request`"},{"kind":"text","text":" parameter will be "},{"kind":"code","text":"`undefined`"},{"kind":"text","text":"\nas there is no HTTP request at build time."}],"blockTags":[{"tag":"@example","content":[{"kind":"code","text":"```ts\nimport type { LoaderFunction } from 'expo-server';\n\nexport const loader: LoaderFunction = async (request, params) => {\n const data = await fetchData(params.id);\n return { data };\n};\n```"}]},{"tag":"@see","content":[{"kind":"text","text":"[Data loaders](/router/web/data-loaders) for more information."}]}]},"typeParameters":[{"name":"T","variant":"typeParam","kind":131072,"default":{"type":"intrinsic","name":"any"}}],"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"parameters":[{"name":"request","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"An "},{"kind":"code","text":"`ImmutableRequest`"},{"kind":"text","text":" with read-only headers and no body access. 
In SSG, this is "},{"kind":"code","text":"`undefined`"}]},"type":{"type":"union","types":[{"type":"reference","name":"ImmutableRequest","package":"expo-server"},{"type":"intrinsic","name":"undefined"}]}},{"name":"params","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"Route parameters extracted from the URL path"}]},"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Record"},"typeArguments":[{"type":"intrinsic","name":"string"},{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"array","elementType":{"type":"intrinsic","name":"string"}}]}],"name":"Record","package":"typescript"}}],"type":{"type":"union","types":[{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"reference","name":"T","package":"expo-server","refersToTypeParameter":true}],"name":"Promise","package":"typescript"},{"type":"reference","name":"T","package":"expo-server","refersToTypeParameter":true}]}}]}}},{"name":"Metadata","variant":"declaration","kind":2097152,"children":[{"name":"alternates","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataAlternates"},"name":"MetadataAlternates","package":"expo-server"}},{"name":"appleWebApp","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataAppleWebApp"},"name":"MetadataAppleWebApp","package":"expo-server"}},{"name":"applicationName","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"appLinks","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath"
:"src/metadata.ts","qualifiedName":"MetadataAppLinks"},"name":"MetadataAppLinks","package":"expo-server"}},{"name":"archives","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"array","elementType":{"type":"intrinsic","name":"string"}}},{"name":"assets","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"array","elementType":{"type":"intrinsic","name":"string"}}},{"name":"authors","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"union","types":[{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataAuthor"},"name":"MetadataAuthor","package":"expo-server"},{"type":"array","elementType":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataAuthor"},"name":"MetadataAuthor","package":"expo-server"}}]}},{"name":"bookmarks","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"array","elementType":{"type":"intrinsic","name":"string"}}},{"name":"category","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"creator","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"description","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"facebook","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataFacebook"},"name":"MetadataFacebook","package":"expo-server"}},{"name":"formatDetection","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataFormatDetection"},"name":"MetadataFormatDetection","packa
ge":"expo-server"}},{"name":"generator","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"icons","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataIcons"},"name":"MetadataIcons","package":"expo-server"}},{"name":"itunes","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataItunes"},"name":"MetadataItunes","package":"expo-server"}},{"name":"keywords","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"array","elementType":{"type":"intrinsic","name":"string"}}]}},{"name":"manifest","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"openGraph","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataOpenGraph"},"name":"MetadataOpenGraph","package":"expo-server"}},{"name":"other","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Record"},"typeArguments":[{"type":"intrinsic","name":"string"},{"type":"union","types":[{"type":"reference","name":"MetadataValue","package":"expo-server"},{"type":"reference","name":"MetadataValueArray","package":"expo-server"},{"type":"literal","value":null},{"type":"intrinsic","name":"undefined"}]}],"name":"Record","package":"typescript"}},{"name":"pinterest","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/m
etadata.ts","qualifiedName":"MetadataPinterest"},"name":"MetadataPinterest","package":"expo-server"}},{"name":"publisher","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"referrer","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"union","types":[{"type":"literal","value":"no-referrer"},{"type":"literal","value":"no-referrer-when-downgrade"},{"type":"literal","value":"origin"},{"type":"literal","value":"origin-when-cross-origin"},{"type":"literal","value":"same-origin"},{"type":"literal","value":"strict-origin"},{"type":"literal","value":"strict-origin-when-cross-origin"},{"type":"literal","value":"unsafe-url"}]}},{"name":"robots","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataRobots"},"name":"MetadataRobots","package":"expo-server"}},{"name":"title","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"twitter","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataTwitter"},"name":"MetadataTwitter","package":"expo-server"}},{"name":"verification","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/metadata.ts","qualifiedName":"MetadataVerification"},"name":"MetadataVerification","package":"expo-server"}}]},{"name":"MetadataIconDescriptor","variant":"declaration","kind":2097152,"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"media","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrins
ic","name":"string"}},{"name":"rel","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"sizes","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"type","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"url","variant":"declaration","kind":1024,"type":{"type":"intrinsic","name":"string"}}]}}]}},{"name":"MetadataImage","variant":"declaration","kind":2097152,"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"children":[{"name":"alt","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"height","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"number"}},{"name":"secureUrl","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"type","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"string"}},{"name":"url","variant":"declaration","kind":1024,"type":{"type":"intrinsic","name":"string"}},{"name":"width","variant":"declaration","kind":1024,"flags":{"isOptional":true},"type":{"type":"intrinsic","name":"number"}}]}}]}},{"name":"MetadataValue","variant":"declaration","kind":2097152,"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"intrinsic","name":"number"},{"type":"intrinsic","name":"boolean"}]}},{"name":"MetadataValueArray","variant":"declaration","kind":2097152,"type":{"type":"array","elementType":{"type":"reference","name":"MetadataValue","package":"expo-server"}}},{"name":"MiddlewareFunction","variant":"declaration","kind":2097152,"comment":{"summary":[{"kind":"text","text":"Middleware function type. 
Middleware run for every request in your app, or on\nspecified conditionally matched methods and path patterns, as per "},{"kind":"inline-tag","tag":"@link","text":"MiddlewareMatcher"},{"kind":"text","text":"."}],"blockTags":[{"tag":"@example","content":[{"kind":"code","text":"```ts\nimport type { MiddlewareFunction } from 'expo-server';\n\nconst middleware: MiddlewareFunction = async (request) => {\n console.log(`Middleware executed for: ${request.url}`);\n};\n\nexport default middleware;\n```"}]},{"tag":"@see","content":[{"kind":"text","text":"[Server middleware](https://docs.expo.dev/router/web/middleware/) for more information."}]}]},"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"parameters":[{"name":"request","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"An "},{"kind":"code","text":"`ImmutableRequest`"},{"kind":"text","text":" with read-only headers and no body access"}]},"type":{"type":"reference","name":"ImmutableRequest","package":"expo-server"}}],"type":{"type":"union","types":[{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"union","types":[{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.dom.d.ts","qualifiedName":"Response"},"name":"Response","package":"typescript"},{"type":"intrinsic","name":"void"}]}],"name":"Promise","package":"typescript"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.dom.d.ts","qualifiedName":"Response"},"name":"Response","package":"typescript"},{"type":"intrinsic","name":"void"}]}}]}}},{"name":"createServerLoader","variant":"declaration","kind":64,"signatures":[{"name":"createServerLoader","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Creates a loader function for routes that need access to the 
incoming HTTP request.\nServer loaders run on every request during SSR. If called during SSG where no request is\navailable, this throws an error."}],"blockTags":[{"tag":"@example","content":[{"kind":"code","text":"```ts\nimport { createServerLoader } from 'expo-server';\n\nexport const loader = createServerLoader(async (request, params) => {\n const authHeader = request.headers.get('Authorization');\n return { authenticated: !!authHeader };\n});\n```"}]},{"tag":"@see","content":[{"kind":"text","text":"[Data loaders](/router/web/data-loaders) for more information."}]}]},"typeParameters":[{"name":"T","variant":"typeParam","kind":131072}],"parameters":[{"name":"fn","variant":"param","kind":32768,"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"parameters":[{"name":"request","variant":"param","kind":32768,"type":{"type":"reference","name":"ImmutableRequest","package":"expo-server"}},{"name":"params","variant":"param","kind":32768,"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Record"},"typeArguments":[{"type":"intrinsic","name":"string"},{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"array","elementType":{"type":"intrinsic","name":"string"}}]}],"name":"Record","package":"typescript"}}],"type":{"type":"union","types":[{"type":"reference","name":"T","package":"expo-server","refersToTypeParameter":true},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"reference","name":"T","package":"expo-server","refersToTypeParameter":true}],"name":"Promise","package":"typescript"}]}}]}}}],"type":{"type":"reference","typeArguments":[{"type":"reference","name":"T","package":"expo-server","refersToTypeParameter":true}],"name":"LoaderFunction","package":"expo-server"}}]},{"name":"createStaticLoade
r","variant":"declaration","kind":64,"signatures":[{"name":"createStaticLoader","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Creates a loader function for routes that only need route parameters to load data.\nThe callback receives no request object, making it safe to use for both SSG and SSR."}],"blockTags":[{"tag":"@example","content":[{"kind":"code","text":"```ts\nimport { createStaticLoader } from 'expo-server';\n\nexport const loader = createStaticLoader(async (params) => {\n const post = await fetchPost(params.id);\n return { post };\n});\n```"}]},{"tag":"@see","content":[{"kind":"text","text":"[Data loaders](/router/web/data-loaders) for more information."}]}]},"typeParameters":[{"name":"T","variant":"typeParam","kind":131072}],"parameters":[{"name":"fn","variant":"param","kind":32768,"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"parameters":[{"name":"params","variant":"param","kind":32768,"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Record"},"typeArguments":[{"type":"intrinsic","name":"string"},{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"array","elementType":{"type":"intrinsic","name":"string"}}]}],"name":"Record","package":"typescript"}}],"type":{"type":"union","types":[{"type":"reference","name":"T","package":"expo-server","refersToTypeParameter":true},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"reference","name":"T","package":"expo-server","refersToTypeParameter":true}],"name":"Promise","package":"typescript"}]}}]}}}],"type":{"type":"reference","typeArguments":[{"type":"reference","name":"T","package":"expo-server","refersToTypeParameter":true}],"name":"LoaderFunction","package":"expo-server"}}]},{"name":"deferTask","
variant":"declaration","kind":64,"signatures":[{"name":"deferTask","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Defers a task until after a response has been sent.\n\nThis only calls the task function once the request handler has finished resolving a "},{"kind":"code","text":"`Response`"},{"kind":"text","text":"\nand keeps the request handler alive until the task is completed. This is useful to run non-critical\ntasks after the request handler, for example to log analytics datapoints. If the request handler\nrejects with an error, deferred tasks won't be executed."}]},"parameters":[{"name":"fn","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"A task function to execute after the request handler has finished."}]},"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"type":{"type":"union","types":[{"type":"intrinsic","name":"void"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"intrinsic","name":"unknown"}],"name":"Promise","package":"typescript"}]}}]}}}],"type":{"type":"intrinsic","name":"void"}}]},{"name":"environment","variant":"declaration","kind":64,"signatures":[{"name":"environment","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Returns the request's environment, if the server runtime supports this.\n\nIn EAS Hosting, the returned environment name is the\n[alias or deployment identifier](https://docs.expo.dev/eas/hosting/deployments-and-aliases/),\nbut the value may differ for other providers."}],"blockTags":[{"tag":"@returns","content":[{"kind":"text","text":"A request environment name, or "},{"kind":"code","text":"`null`"},{"kind":"text","text":" for 
production."}]}]},"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"literal","value":null}]}}]},{"name":"origin","variant":"declaration","kind":64,"signatures":[{"name":"origin","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Returns the current request's URL.\n\nThis typically returns the request's URL, or on certain platform,\nthe origin of the request. This does not use the "},{"kind":"code","text":"`Origin`"},{"kind":"text","text":" header\nin development as it may contain an untrusted value."}],"blockTags":[{"tag":"@returns","content":[{"kind":"text","text":"A request origin"}]}]},"type":{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"literal","value":null}]}}]},{"name":"requestHeaders","variant":"declaration","kind":64,"signatures":[{"name":"requestHeaders","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Returns an immutable copy of the current request's headers."}]},"type":{"type":"reference","target":{"packageName":"expo-server","packagePath":"src/ImmutableRequest.ts","qualifiedName":"ImmutableHeaders"},"name":"ImmutableHeaders","package":"expo-server"}}]},{"name":"runTask","variant":"declaration","kind":64,"signatures":[{"name":"runTask","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Runs a task immediately and instructs the runtime to complete the task.\n\nA request handler may be terminated as soon as the client has finished the full "},{"kind":"code","text":"`Response`"},{"kind":"text","text":"\nand unhandled promise rejections may not be logged properly. To run tasks concurrently to\na request handler and keep the request alive until the task is completed, pass a task\nfunction to "},{"kind":"code","text":"`runTask`"},{"kind":"text","text":" instead. 
The request handler will be kept alive until the task\ncompletes."}]},"parameters":[{"name":"fn","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"A task function to execute. The request handler will be kept alive until this task finishes."}]},"type":{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Promise"},"typeArguments":[{"type":"intrinsic","name":"unknown"}],"name":"Promise","package":"typescript"}}]}}}],"type":{"type":"intrinsic","name":"void"}}]},{"name":"setResponseHeaders","variant":"declaration","kind":64,"signatures":[{"name":"setResponseHeaders","variant":"signature","kind":4096,"comment":{"summary":[{"kind":"text","text":"Sets headers on the "},{"kind":"code","text":"`Response`"},{"kind":"text","text":" the current request handler will return.\n\nThis only updates the headers once the request handler has finished and resolved a "},{"kind":"code","text":"`Response`"},{"kind":"text","text":".\nIt will either receive a set of "},{"kind":"code","text":"`Headers`"},{"kind":"text","text":" or an equivalent object containing headers, which will\nbe merged into the response's headers once it's returned."}]},"parameters":[{"name":"updateHeaders","variant":"param","kind":32768,"comment":{"summary":[{"kind":"text","text":"A "},{"kind":"code","text":"`Headers`"},{"kind":"text","text":" object, a record of headers, or a function that receives "},{"kind":"code","text":"`Headers`"},{"kind":"text","text":" to be updated or can return a "},{"kind":"code","text":"`Headers`"},{"kind":"text","text":" object that will be merged into the response 
headers."}]},"type":{"type":"union","types":[{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.dom.d.ts","qualifiedName":"Headers"},"name":"Headers","package":"typescript"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.es5.d.ts","qualifiedName":"Record"},"typeArguments":[{"type":"intrinsic","name":"string"},{"type":"union","types":[{"type":"intrinsic","name":"string"},{"type":"array","elementType":{"type":"intrinsic","name":"string"}}]}],"name":"Record","package":"typescript"},{"type":"reflection","declaration":{"name":"__type","variant":"declaration","kind":65536,"signatures":[{"name":"__type","variant":"signature","kind":4096,"parameters":[{"name":"headers","variant":"param","kind":32768,"type":{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.dom.d.ts","qualifiedName":"Headers"},"name":"Headers","package":"typescript"}}],"type":{"type":"union","types":[{"type":"intrinsic","name":"void"},{"type":"reference","target":{"packageName":"typescript","packagePath":"lib/lib.dom.d.ts","qualifiedName":"Headers"},"name":"Headers","package":"typescript"}]}}]}}]}}],"type":{"type":"intrinsic","name":"void"}}]}],"packageName":"expo-server"} \ No newline at end of file diff --git a/packages/@expo/cli/CHANGELOG.md b/packages/@expo/cli/CHANGELOG.md index 60a3eee75f911e..92646c910f25ea 100644 --- a/packages/@expo/cli/CHANGELOG.md +++ b/packages/@expo/cli/CHANGELOG.md @@ -19,6 +19,7 @@ - Add support for metadata in streaming SSR ([#44731](https://github.com/expo/expo/pull/44731) by [@hassankhan](https://github.com/hassankhan)) - Support streaming SSR in development ([#45362](https://github.com/expo/expo/pull/45362) by [@hassankhan](https://github.com/hassankhan)) - Add `experiments.onDemandFilesystem` (enabled by default) to allow files in the server root outside of `watchFolders` to be accessed and crawled lazily ([#45391](https://github.com/expo/expo/pull/45391) by 
[@kitten](https://github.com/kitten)) +- Add `createStaticLoader()` and `createServerLoader()` helpers ([#45401](https://github.com/expo/expo/pull/45401) by [@hassankhan](https://github.com/hassankhan)) ### 🐛 Bug fixes diff --git a/packages/@expo/cli/e2e/__tests__/export/server-loader.test.ts b/packages/@expo/cli/e2e/__tests__/export/server-loader.test.ts index 61be42398defa2..c6aaab8e2b2b25 100644 --- a/packages/@expo/cli/e2e/__tests__/export/server-loader.test.ts +++ b/packages/@expo/cli/e2e/__tests__/export/server-loader.test.ts @@ -30,6 +30,7 @@ describe.each( env: { TEST_SECRET_RUNTIME_KEY: 'runtime-secret-value', TEST_THROW_ERROR: 'true', + E2E_ROUTER_SERVER_RENDERING: 'true', }, }, }) @@ -69,6 +70,8 @@ describe.each( expect(files).toContain('_expo/loaders/nullish/[value].js'); expect(files).toContain('_expo/loaders/posts/[postId].js'); expect(files).toContain('_expo/loaders/(group)/index.js'); + expect(files).toContain('_expo/loaders/static-helper.js'); + expect(files).toContain('_expo/loaders/server-helper.js'); }); (server.isExpoStart ? 
it.skip : it)('routes.json has loader paths', async () => { @@ -282,4 +285,17 @@ describe.each( ); expect(html.querySelector('meta[name="author"]')?.getAttribute('content')).toBe('Expo'); }); + + it.each(getPageAndLoaderData('/server-helper'))( + 'can access data from `createServerLoader()` for $url ($name)', + async ({ getData, url }) => { + const response = await server.fetchAsync(url); + expect(response.status).toBe(200); + const data = await getData(response); + + expect(data.source).toBe('server-helper'); + expect(new URL(data.url).pathname).toBe('/server-helper'); + expect(data.method).toBe('GET'); + } + ); }); diff --git a/packages/@expo/cli/e2e/__tests__/export/static-loader.test.ts b/packages/@expo/cli/e2e/__tests__/export/static-loader.test.ts index d8c32825848865..addecfa51d3258 100644 --- a/packages/@expo/cli/e2e/__tests__/export/static-loader.test.ts +++ b/packages/@expo/cli/e2e/__tests__/export/static-loader.test.ts @@ -55,6 +55,8 @@ describe.each( expect(files).toContain('posts/[postId].html'); expect(files).toContain('posts/static-post-1.html'); expect(files).toContain('posts/static-post-2.html'); + expect(files).toContain('static-helper.html'); + expect(files).toContain('server-helper.html'); // Loader outputs are pre-generated JSON files expect(files).toContain('_expo/loaders/index'); @@ -71,6 +73,7 @@ describe.each( expect(files).toContain('_expo/loaders/posts/static-post-1'); expect(files).toContain('_expo/loaders/posts/static-post-2'); expect(files).toContain('_expo/loaders/(group)/index'); + expect(files).toContain('_expo/loaders/static-helper'); }); it('returns 404 for loader endpoint when route has no loader', async () => { @@ -232,4 +235,15 @@ describe.each( ); expect(html.querySelector('meta[name="author"]')?.getAttribute('content')).toBe('Expo'); }); + + it.each(getPageAndLoaderData('/static-helper'))( + 'can access data from `createStaticLoader()` for $url ($name)', + async ({ getData, url }) => { + const response = await 
server.fetchAsync(url); + expect(response.status).toBe(200); + const data = await getData(response); + + expect(data).toEqual({ source: 'static-helper' }); + } + ); }); diff --git a/packages/expo-router/CHANGELOG.md b/packages/expo-router/CHANGELOG.md index e9d3a1f2505d9c..39fcd1eeefb787 100644 --- a/packages/expo-router/CHANGELOG.md +++ b/packages/expo-router/CHANGELOG.md @@ -23,6 +23,7 @@ - [web] Use stream rendering in SSR ([#43963](https://github.com/expo/expo/pull/43963) by [@hassankhan](https://github.com/hassankhan)) - [web] Add support for metadata in streaming SSR ([#44731](https://github.com/expo/expo/pull/44731) by [@hassankhan](https://github.com/hassankhan)) - [web] Support streaming SSR in development ([#45362](https://github.com/expo/expo/pull/45362) by [@hassankhan](https://github.com/hassankhan)) +- [web] Add `createStaticLoader()` and `createServerLoader()` helpers ([#45401](https://github.com/expo/expo/pull/45401) by [@hassankhan](https://github.com/hassankhan)) ### 🐛 Bug fixes diff --git a/packages/expo-router/server.d.ts b/packages/expo-router/server.d.ts index 57d211ccb76a9a..652898e9577d18 100644 --- a/packages/expo-router/server.d.ts +++ b/packages/expo-router/server.d.ts @@ -6,6 +6,8 @@ export type { MiddlewareFunction, } from 'expo-server'; +export { createStaticLoader, createServerLoader } from 'expo-server'; + export type RequestHandler = ( request: Request, params: Record diff --git a/packages/expo-router/server.js b/packages/expo-router/server.js index 7436537e333d8b..36f2b77503fce8 100644 --- a/packages/expo-router/server.js +++ b/packages/expo-router/server.js @@ -1 +1 @@ -// Use `expo-server` directly instead +export { createStaticLoader, createServerLoader } from 'expo-server'; diff --git a/packages/expo-server/CHANGELOG.md b/packages/expo-server/CHANGELOG.md index 56ed4e8d62fcb7..4693713cd7ac15 100644 --- a/packages/expo-server/CHANGELOG.md +++ b/packages/expo-server/CHANGELOG.md @@ -8,6 +8,7 @@ - Use stream rendering in SSR 
([#43963](https://github.com/expo/expo/pull/43963) by [@hassankhan](https://github.com/hassankhan)) - Add support for metadata in streaming SSR ([#44731](https://github.com/expo/expo/pull/44731) by [@hassankhan](https://github.com/hassankhan)) +- Add `createStaticLoader()` and `createServerLoader()` helpers ([#45401](https://github.com/expo/expo/pull/45401) by [@hassankhan](https://github.com/hassankhan)) ### 🐛 Bug fixes diff --git a/packages/expo-server/build/cjs/index.d.ts b/packages/expo-server/build/cjs/index.d.ts index 0354360191d004..5615d1ffb400c1 100644 --- a/packages/expo-server/build/cjs/index.d.ts +++ b/packages/expo-server/build/cjs/index.d.ts @@ -1,2 +1,3 @@ export * from './runtime/api'; +export { createStaticLoader, createServerLoader } from './loaders/helpers'; export type * from './types'; diff --git a/packages/expo-server/build/cjs/index.js b/packages/expo-server/build/cjs/index.js index 97da55c07645ae..e16d4263f9641d 100644 --- a/packages/expo-server/build/cjs/index.js +++ b/packages/expo-server/build/cjs/index.js @@ -14,5 +14,9 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", { value: true }); +exports.createServerLoader = exports.createStaticLoader = void 0; __exportStar(require("./runtime/api"), exports); +var helpers_1 = require("./loaders/helpers"); +Object.defineProperty(exports, "createStaticLoader", { enumerable: true, get: function () { return helpers_1.createStaticLoader; } }); +Object.defineProperty(exports, "createServerLoader", { enumerable: true, get: function () { return helpers_1.createServerLoader; } }); //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/packages/expo-server/build/cjs/index.js.map b/packages/expo-server/build/cjs/index.js.map index 2273a6ecd1eb31..e9d13e504c7eaa 100644 --- 
a/packages/expo-server/build/cjs/index.js.map +++ b/packages/expo-server/build/cjs/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,gDAA8B"} \ No newline at end of file +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;AAAA,gDAA8B;AAC9B,6CAA2E;AAAlE,6GAAA,kBAAkB,OAAA;AAAE,6GAAA,kBAAkB,OAAA"} \ No newline at end of file diff --git a/packages/expo-server/build/cjs/loaders/helpers.d.ts b/packages/expo-server/build/cjs/loaders/helpers.d.ts new file mode 100644 index 00000000000000..1c93abfa17e4a1 --- /dev/null +++ b/packages/expo-server/build/cjs/loaders/helpers.d.ts @@ -0,0 +1,34 @@ +import type { ImmutableRequest, LoaderFunction } from '../types'; +/** + * Creates a loader function for routes that only need route parameters to load data. + * The callback receives no request object, making it safe to use for both SSG and SSR. + * + * @example + * ```ts + * import { createStaticLoader } from 'expo-server'; + * + * export const loader = createStaticLoader(async (params) => { + * const post = await fetchPost(params.id); + * return { post }; + * }); + * ``` + * @see [Data loaders](/router/web/data-loaders) for more information. + */ +export declare function createStaticLoader(fn: (params: Record) => Promise | T): LoaderFunction; +/** + * Creates a loader function for routes that need access to the incoming HTTP request. + * Server loaders run on every request during SSR. If called during SSG where no request is + * available, this throws an error. + * + * @example + * ```ts + * import { createServerLoader } from 'expo-server'; + * + * export const loader = createServerLoader(async (request, params) => { + * const authHeader = request.headers.get('Authorization'); + * return { authenticated: !!authHeader }; + * }); + * ``` + * @see [Data loaders](/router/web/data-loaders) for more information. 
+ */ +export declare function createServerLoader(fn: (request: ImmutableRequest, params: Record) => Promise | T): LoaderFunction; diff --git a/packages/expo-server/build/cjs/loaders/helpers.js b/packages/expo-server/build/cjs/loaders/helpers.js new file mode 100644 index 00000000000000..5dd28266fcf34b --- /dev/null +++ b/packages/expo-server/build/cjs/loaders/helpers.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createStaticLoader = createStaticLoader; +exports.createServerLoader = createServerLoader; +/** + * Creates a loader function for routes that only need route parameters to load data. + * The callback receives no request object, making it safe to use for both SSG and SSR. + * + * @example + * ```ts + * import { createStaticLoader } from 'expo-server'; + * + * export const loader = createStaticLoader(async (params) => { + * const post = await fetchPost(params.id); + * return { post }; + * }); + * ``` + * @see [Data loaders](/router/web/data-loaders) for more information. + */ +function createStaticLoader(fn) { + return (_request, params) => fn(params); +} +/** + * Creates a loader function for routes that need access to the incoming HTTP request. + * Server loaders run on every request during SSR. If called during SSG where no request is + * available, this throws an error. + * + * @example + * ```ts + * import { createServerLoader } from 'expo-server'; + * + * export const loader = createServerLoader(async (request, params) => { + * const authHeader = request.headers.get('Authorization'); + * return { authenticated: !!authHeader }; + * }); + * ``` + * @see [Data loaders](/router/web/data-loaders) for more information. + */ +function createServerLoader(fn) { + return (request, params) => { + if (!request) { + throw new Error('Server loader was called without a request. Server loaders require SSR and cannot be ' + + 'used during static site generation (SSG). 
To create a loader that works with SSG, use ' + + 'createStaticLoader instead.'); + } + return fn(request, params); + }; +} +//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/packages/expo-server/build/cjs/loaders/helpers.js.map b/packages/expo-server/build/cjs/loaders/helpers.js.map new file mode 100644 index 00000000000000..874997f7f07142 --- /dev/null +++ b/packages/expo-server/build/cjs/loaders/helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"helpers.js","sourceRoot":"","sources":["../../../src/loaders/helpers.ts"],"names":[],"mappings":";;AAiBA,gDAIC;AAkBD,gDAaC;AAlDD;;;;;;;;;;;;;;GAcG;AACH,SAAgB,kBAAkB,CAChC,EAAiE;IAEjE,OAAO,CAAC,QAAQ,EAAE,MAAM,EAAE,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC;AAC1C,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,SAAgB,kBAAkB,CAChC,EAA4F;IAE5F,OAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACzB,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,MAAM,IAAI,KAAK,CACb,uFAAuF;gBACrF,wFAAwF;gBACxF,6BAA6B,CAChC,CAAC;QACJ,CAAC;QACD,OAAO,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAC7B,CAAC,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/packages/expo-server/build/mjs/index.d.ts b/packages/expo-server/build/mjs/index.d.ts index 0354360191d004..5615d1ffb400c1 100644 --- a/packages/expo-server/build/mjs/index.d.ts +++ b/packages/expo-server/build/mjs/index.d.ts @@ -1,2 +1,3 @@ export * from './runtime/api'; +export { createStaticLoader, createServerLoader } from './loaders/helpers'; export type * from './types'; diff --git a/packages/expo-server/build/mjs/index.js b/packages/expo-server/build/mjs/index.js index e7a605bc9d9ecf..6a92f1bfe3a18a 100644 --- a/packages/expo-server/build/mjs/index.js +++ b/packages/expo-server/build/mjs/index.js @@ -1,2 +1,3 @@ export * from './runtime/api'; +export { createStaticLoader, createServerLoader } from './loaders/helpers'; //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/packages/expo-server/build/mjs/index.js.map b/packages/expo-server/build/mjs/index.js.map index e3098d9a794376..21c38641af96b4 
100644 --- a/packages/expo-server/build/mjs/index.js.map +++ b/packages/expo-server/build/mjs/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC"} \ No newline at end of file +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,OAAO,EAAE,kBAAkB,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC"} \ No newline at end of file diff --git a/packages/expo-server/build/mjs/loaders/helpers.d.ts b/packages/expo-server/build/mjs/loaders/helpers.d.ts new file mode 100644 index 00000000000000..1c93abfa17e4a1 --- /dev/null +++ b/packages/expo-server/build/mjs/loaders/helpers.d.ts @@ -0,0 +1,34 @@ +import type { ImmutableRequest, LoaderFunction } from '../types'; +/** + * Creates a loader function for routes that only need route parameters to load data. + * The callback receives no request object, making it safe to use for both SSG and SSR. + * + * @example + * ```ts + * import { createStaticLoader } from 'expo-server'; + * + * export const loader = createStaticLoader(async (params) => { + * const post = await fetchPost(params.id); + * return { post }; + * }); + * ``` + * @see [Data loaders](/router/web/data-loaders) for more information. + */ +export declare function createStaticLoader(fn: (params: Record) => Promise | T): LoaderFunction; +/** + * Creates a loader function for routes that need access to the incoming HTTP request. + * Server loaders run on every request during SSR. If called during SSG where no request is + * available, this throws an error. + * + * @example + * ```ts + * import { createServerLoader } from 'expo-server'; + * + * export const loader = createServerLoader(async (request, params) => { + * const authHeader = request.headers.get('Authorization'); + * return { authenticated: !!authHeader }; + * }); + * ``` + * @see [Data loaders](/router/web/data-loaders) for more information. 
+ */ +export declare function createServerLoader(fn: (request: ImmutableRequest, params: Record) => Promise | T): LoaderFunction; diff --git a/packages/expo-server/build/mjs/loaders/helpers.js b/packages/expo-server/build/mjs/loaders/helpers.js new file mode 100644 index 00000000000000..ab6ac9e92cb47d --- /dev/null +++ b/packages/expo-server/build/mjs/loaders/helpers.js @@ -0,0 +1,45 @@ +/** + * Creates a loader function for routes that only need route parameters to load data. + * The callback receives no request object, making it safe to use for both SSG and SSR. + * + * @example + * ```ts + * import { createStaticLoader } from 'expo-server'; + * + * export const loader = createStaticLoader(async (params) => { + * const post = await fetchPost(params.id); + * return { post }; + * }); + * ``` + * @see [Data loaders](/router/web/data-loaders) for more information. + */ +export function createStaticLoader(fn) { + return (_request, params) => fn(params); +} +/** + * Creates a loader function for routes that need access to the incoming HTTP request. + * Server loaders run on every request during SSR. If called during SSG where no request is + * available, this throws an error. + * + * @example + * ```ts + * import { createServerLoader } from 'expo-server'; + * + * export const loader = createServerLoader(async (request, params) => { + * const authHeader = request.headers.get('Authorization'); + * return { authenticated: !!authHeader }; + * }); + * ``` + * @see [Data loaders](/router/web/data-loaders) for more information. + */ +export function createServerLoader(fn) { + return (request, params) => { + if (!request) { + throw new Error('Server loader was called without a request. Server loaders require SSR and cannot be ' + + 'used during static site generation (SSG). 
To create a loader that works with SSG, use ' + + 'createStaticLoader instead.'); + } + return fn(request, params); + }; +} +//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/packages/expo-server/build/mjs/loaders/helpers.js.map b/packages/expo-server/build/mjs/loaders/helpers.js.map new file mode 100644 index 00000000000000..1a908812286d82 --- /dev/null +++ b/packages/expo-server/build/mjs/loaders/helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"helpers.js","sourceRoot":"","sources":["../../../src/loaders/helpers.ts"],"names":[],"mappings":"AAEA;;;;;;;;;;;;;;GAcG;AACH,MAAM,UAAU,kBAAkB,CAChC,EAAiE;IAEjE,OAAO,CAAC,QAAQ,EAAE,MAAM,EAAE,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC;AAC1C,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,MAAM,UAAU,kBAAkB,CAChC,EAA4F;IAE5F,OAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACzB,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,MAAM,IAAI,KAAK,CACb,uFAAuF;gBACrF,wFAAwF;gBACxF,6BAA6B,CAChC,CAAC;QACJ,CAAC;QACD,OAAO,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAC7B,CAAC,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/packages/expo-server/src/index.ts b/packages/expo-server/src/index.ts index 0354360191d004..5615d1ffb400c1 100644 --- a/packages/expo-server/src/index.ts +++ b/packages/expo-server/src/index.ts @@ -1,2 +1,3 @@ export * from './runtime/api'; +export { createStaticLoader, createServerLoader } from './loaders/helpers'; export type * from './types'; diff --git a/packages/expo-server/src/loaders/__tests__/helpers.test.ts b/packages/expo-server/src/loaders/__tests__/helpers.test.ts new file mode 100644 index 00000000000000..174a9a17a55703 --- /dev/null +++ b/packages/expo-server/src/loaders/__tests__/helpers.test.ts @@ -0,0 +1,80 @@ +import { ImmutableRequest } from '../../ImmutableRequest'; +import { createStaticLoader, createServerLoader } from '../helpers'; + +describe(createStaticLoader, () => { + it('passes only params to the callback, ignoring request', () => { + const fn = jest.fn((params: Record) => ({ id: params.id })); + const loader = 
createStaticLoader(fn); + + const request = new ImmutableRequest(new Request('https://example.com')); + const params = { id: '123' }; + + const result = loader(request, params); + + expect(fn).toHaveBeenCalledWith(params); + expect(fn).toHaveBeenCalledTimes(1); + expect(result).toEqual({ id: '123' }); + }); + + it('works when request is undefined (SSG context)', () => { + const fn = jest.fn((params: Record) => ({ id: params.id })); + const loader = createStaticLoader(fn); + + const result = loader(undefined, { id: '456' }); + + expect(fn).toHaveBeenCalledWith({ id: '456' }); + expect(result).toEqual({ id: '456' }); + }); + + it('supports async callbacks', async () => { + const loader = createStaticLoader(async (params) => { + return { id: params.id }; + }); + + const result = await loader(undefined, { id: '789' }); + expect(result).toEqual({ id: '789' }); + }); +}); + +describe(createServerLoader, () => { + it('passes request and params to the callback', () => { + const fn = jest.fn( + ( + request: InstanceType, + params: Record + ) => ({ + url: request.url, + id: params.id, + }) + ); + const loader = createServerLoader(fn); + + const request = new ImmutableRequest(new Request('https://example.com/test')); + const params = { id: '123' }; + + const result = loader(request, params); + + expect(fn).toHaveBeenCalledWith(request, params); + expect(result).toEqual({ url: 'https://example.com/test', id: '123' }); + }); + + it('throws when request is undefined (SSG context)', () => { + const fn = jest.fn(); + const loader = createServerLoader(fn); + + expect(() => loader(undefined, { id: '123' })).toThrow( + 'Server loader was called without a request' + ); + expect(fn).not.toHaveBeenCalled(); + }); + + it('supports async callbacks', async () => { + const loader = createServerLoader(async (request, _params) => { + return { method: request.method }; + }); + + const request = new ImmutableRequest(new Request('https://example.com', { method: 'POST' })); + const result = await 
loader(request, {}); + expect(result).toEqual({ method: 'POST' }); + }); +}); diff --git a/packages/expo-server/src/loaders/helpers.ts b/packages/expo-server/src/loaders/helpers.ts new file mode 100644 index 00000000000000..b1ab99a772979a --- /dev/null +++ b/packages/expo-server/src/loaders/helpers.ts @@ -0,0 +1,53 @@ +import type { ImmutableRequest, LoaderFunction } from '../types'; + +/** + * Creates a loader function for routes that only need route parameters to load data. + * The callback receives no request object, making it safe to use for both SSG and SSR. + * + * @example + * ```ts + * import { createStaticLoader } from 'expo-server'; + * + * export const loader = createStaticLoader(async (params) => { + * const post = await fetchPost(params.id); + * return { post }; + * }); + * ``` + * @see [Data loaders](/router/web/data-loaders) for more information. + */ +export function createStaticLoader( + fn: (params: Record) => Promise | T +): LoaderFunction { + return (_request, params) => fn(params); +} + +/** + * Creates a loader function for routes that need access to the incoming HTTP request. + * Server loaders run on every request during SSR. If called during SSG where no request is + * available, this throws an error. + * + * @example + * ```ts + * import { createServerLoader } from 'expo-server'; + * + * export const loader = createServerLoader(async (request, params) => { + * const authHeader = request.headers.get('Authorization'); + * return { authenticated: !!authHeader }; + * }); + * ``` + * @see [Data loaders](/router/web/data-loaders) for more information. + */ +export function createServerLoader( + fn: (request: ImmutableRequest, params: Record) => Promise | T +): LoaderFunction { + return (request, params) => { + if (!request) { + throw new Error( + 'Server loader was called without a request. Server loaders require SSR and cannot be ' + + 'used during static site generation (SSG). 
To create a loader that works with SSG, use ' + + 'createStaticLoader instead.' + ); + } + return fn(request, params); + }; +} From d78f400a1da6390d3f00c37267358e9987e999bf Mon Sep 17 00:00:00 2001 From: Phil Pluckthun Date: Tue, 5 May 2026 17:50:05 +0100 Subject: [PATCH 06/26] chore: Upgrade to metro@0.84.4 (`@expo/metro@~56.0.0` bump) (#45404) # Summary **No changes in the actual transitive dependencies. This just bumps us to the release version (just published) and switches the prerelease ranges back.** Changes within range (compared to sdk-55 w/o metro patch releases that were backported): https://github.com/facebook/metro/compare/v0.83.4...v0.84.4 # Checklist - [x] I added a `changelog.md` entry and rebuilt the package sources according to [this short guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting) - [ ] This diff will work correctly for `npx expo prebuild` & EAS Build (eg: updated a module plugin). - [ ] Conforms with the [Documentation Writing Style Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md) --- packages/@expo/cli/CHANGELOG.md | 1 + packages/@expo/cli/package.json | 2 +- packages/@expo/metro-config/CHANGELOG.md | 1 + packages/@expo/metro-config/package.json | 2 +- packages/@expo/metro-file-map/package.json | 2 +- packages/babel-preset-expo/package.json | 2 +- packages/expo-doctor/package.json | 2 +- packages/expo-module-scripts/package.json | 2 +- packages/expo/CHANGELOG.md | 1 + packages/expo/package.json | 2 +- pnpm-lock.yaml | 34 +++++++++++----------- 11 files changed, 27 insertions(+), 24 deletions(-) diff --git a/packages/@expo/cli/CHANGELOG.md b/packages/@expo/cli/CHANGELOG.md index 92646c910f25ea..fb1f9da8bf5b0e 100644 --- a/packages/@expo/cli/CHANGELOG.md +++ b/packages/@expo/cli/CHANGELOG.md @@ -69,6 +69,7 @@ - Add `@expo/metro-file-map` fork ([#45373](https://github.com/expo/expo/pull/45373) by [@kitten](https://github.com/kitten)) - Disable watchman by 
default ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) - Defer version check output to command table, and prefetch on start, to prevent it blocking/slowing down startup ([#45400](https://github.com/expo/expo/pull/45400) by [@kitten](https://github.com/kitten)) +- Bump to `@expo/metro@56.0.0` and `metro@0.84.4` ([#45404](https://github.com/expo/expo/pull/45404) by [@kitten](https://github.com/kitten)) ## 55.0.12 — 2026-02-25 diff --git a/packages/@expo/cli/package.json b/packages/@expo/cli/package.json index 599ef6ad3150e2..aff219a09aafcf 100644 --- a/packages/@expo/cli/package.json +++ b/packages/@expo/cli/package.json @@ -58,7 +58,7 @@ "@expo/inline-modules": "workspace:0.0.1", "@expo/json-file": "workspace:^10.0.12", "@expo/log-box": "workspace:55.0.7", - "@expo/metro": "56.0.0-rc.2", + "@expo/metro": "~56.0.0", "@expo/metro-config": "workspace:~55.0.9", "@expo/metro-file-map": "workspace:55.0.0-0", "@expo/osascript": "workspace:^2.4.2", diff --git a/packages/@expo/metro-config/CHANGELOG.md b/packages/@expo/metro-config/CHANGELOG.md index 13f9af2ab3b77e..65ff4b13ee99f1 100644 --- a/packages/@expo/metro-config/CHANGELOG.md +++ b/packages/@expo/metro-config/CHANGELOG.md @@ -22,6 +22,7 @@ - Add more stringent Babel config detection that disables redundant Babel config/rc file crawling, and support more Babel config filenames by default ([#45254](https://github.com/expo/expo/pull/45254) by [@kitten](https://github.com/kitten)) - Use Babel config path hint to Expo Metro transformer and add `loadPartialConfigSync` cache key to invalidate Babel transform cache more granularly ([#45260](https://github.com/expo/expo/pull/45260) by [@kitten](https://github.com/kitten)) - Skip `generateImportNames` traversal/phase when live bindings import/export support is enabled ([#45349](https://github.com/expo/expo/pull/45349) by [@kitten](https://github.com/kitten)) +- Bump to `@expo/metro@56.0.0` and `metro@0.84.4` 
([#45404](https://github.com/expo/expo/pull/45404) by [@kitten](https://github.com/kitten)) ## 55.0.9 — 2026-02-25 diff --git a/packages/@expo/metro-config/package.json b/packages/@expo/metro-config/package.json index 6b9f28df221627..d990e664860463 100644 --- a/packages/@expo/metro-config/package.json +++ b/packages/@expo/metro-config/package.json @@ -68,7 +68,7 @@ "@expo/config": "workspace:~55.0.8", "@expo/env": "workspace:~2.1.1", "@expo/json-file": "workspace:~10.0.12", - "@expo/metro": "56.0.0-rc.2", + "@expo/metro": "~56.0.0", "@expo/spawn-async": "^1.7.2", "browserslist": "^4.25.0", "chalk": "^4.1.0", diff --git a/packages/@expo/metro-file-map/package.json b/packages/@expo/metro-file-map/package.json index 290dc68849010e..da8118eb8246f7 100644 --- a/packages/@expo/metro-file-map/package.json +++ b/packages/@expo/metro-file-map/package.json @@ -39,7 +39,7 @@ "walker": "^1.0.8" }, "devDependencies": { - "@expo/metro": "56.0.0-rc.2", + "@expo/metro": "~56.0.0", "@types/debug": "^4.1.7", "@types/fb-watchman": "^2.0.6", "@types/invariant": "^2.2.37", diff --git a/packages/babel-preset-expo/package.json b/packages/babel-preset-expo/package.json index 0b29fe7329080a..c2d64acce568d9 100644 --- a/packages/babel-preset-expo/package.json +++ b/packages/babel-preset-expo/package.json @@ -101,7 +101,7 @@ }, "devDependencies": { "@babel/core": "^7.26.0", - "@expo/metro": "56.0.0-rc.2", + "@expo/metro": "~56.0.0", "@expo/metro-config": "workspace:*", "@types/babel__core": "^7.20.5", "@types/babel__generator": "^7.27.0", diff --git a/packages/expo-doctor/package.json b/packages/expo-doctor/package.json index 864c37965d61ae..148e7ef73773a6 100644 --- a/packages/expo-doctor/package.json +++ b/packages/expo-doctor/package.json @@ -39,7 +39,7 @@ "@expo/config": "workspace:*", "@expo/env": "workspace:*", "@expo/json-file": "workspace:*", - "@expo/metro": "56.0.0-rc.2", + "@expo/metro": "~56.0.0", "@expo/schemer": "workspace:*", "@expo/spawn-async": "^1.7.2", "@types/debug": 
"^4.1.8", diff --git a/packages/expo-module-scripts/package.json b/packages/expo-module-scripts/package.json index 580a3c5b2964bc..c0b39a3dca5802 100644 --- a/packages/expo-module-scripts/package.json +++ b/packages/expo-module-scripts/package.json @@ -88,7 +88,7 @@ "ts-jest": "~29.4.7" }, "devDependencies": { - "@expo/metro": "56.0.0-rc.2", + "@expo/metro": "~56.0.0", "@babel/core": "^7.26.0" } } diff --git a/packages/expo/CHANGELOG.md b/packages/expo/CHANGELOG.md index bfb3e8b7697592..fd0b9c8f6adc57 100644 --- a/packages/expo/CHANGELOG.md +++ b/packages/expo/CHANGELOG.md @@ -28,6 +28,7 @@ - [iOS] Remove `RCTHostRuntimeDelegate` usage now that it's merged into `RCTHostDelegate`. ([#43838](https://github.com/expo/expo/pull/43838) by [@zoontek](https://github.com/zoontek)) - Bumped project templates to TypeScript v6 ([#45091](https://github.com/expo/expo/pull/45091) by [@hassankhan](https://github.com/hassankhan)) - [dom] Added opt-out `unstable_useExpoModulesBridge` flag. ([#45223](https://github.com/expo/expo/pull/45223) by [@kudo](https://github.com/kudo)) +- Bump to `@expo/metro@56.0.0` and `metro@0.84.4` ([#45404](https://github.com/expo/expo/pull/45404) by [@kitten](https://github.com/kitten)) ### ⚠️ Notices diff --git a/packages/expo/package.json b/packages/expo/package.json index a2cb288f0ab8bf..a07166b6d216ca 100644 --- a/packages/expo/package.json +++ b/packages/expo/package.json @@ -83,7 +83,7 @@ "@expo/fingerprint": "workspace:0.16.5", "@expo/local-build-cache-provider": "workspace:55.0.6", "@expo/log-box": "workspace:55.0.7", - "@expo/metro": "56.0.0-rc.2", + "@expo/metro": "~56.0.0", "@expo/metro-config": "workspace:55.0.9", "@expo/vector-icons": "^15.0.2", "@ungap/structured-clone": "^1.3.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ceb23194e497a3..6489e906f59541 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1804,8 +1804,8 @@ importers: specifier: workspace:55.0.7 version: link:../log-box '@expo/metro': - specifier: 56.0.0-rc.2 - 
version: 56.0.0-rc.2 + specifier: ~56.0.0 + version: 56.0.0 '@expo/metro-config': specifier: workspace:~55.0.9 version: link:../metro-config @@ -2533,8 +2533,8 @@ importers: specifier: workspace:~10.0.12 version: link:../json-file '@expo/metro': - specifier: 56.0.0-rc.2 - version: 56.0.0-rc.2 + specifier: ~56.0.0 + version: 56.0.0 '@expo/spawn-async': specifier: ^1.7.2 version: 1.7.2 @@ -2628,8 +2628,8 @@ importers: version: 1.0.8 devDependencies: '@expo/metro': - specifier: 56.0.0-rc.2 - version: 56.0.0-rc.2 + specifier: ~56.0.0 + version: 56.0.0 '@types/debug': specifier: ^4.1.7 version: 4.1.12 @@ -3097,8 +3097,8 @@ importers: specifier: ^7.26.0 version: 7.29.0 '@expo/metro': - specifier: 56.0.0-rc.2 - version: 56.0.0-rc.2 + specifier: ~56.0.0 + version: 56.0.0 '@expo/metro-config': specifier: workspace:* version: link:../@expo/metro-config @@ -3452,8 +3452,8 @@ importers: specifier: workspace:55.0.7 version: link:../@expo/log-box '@expo/metro': - specifier: 56.0.0-rc.2 - version: 56.0.0-rc.2 + specifier: ~56.0.0 + version: 56.0.0 '@expo/metro-config': specifier: workspace:55.0.9 version: link:../@expo/metro-config @@ -4208,8 +4208,8 @@ importers: specifier: workspace:* version: link:../@expo/json-file '@expo/metro': - specifier: 56.0.0-rc.2 - version: 56.0.0-rc.2 + specifier: ~56.0.0 + version: 56.0.0 '@expo/schemer': specifier: workspace:* version: link:../@expo/schemer @@ -4835,8 +4835,8 @@ importers: specifier: ^7.26.0 version: 7.29.0 '@expo/metro': - specifier: 56.0.0-rc.2 - version: 56.0.0-rc.2 + specifier: ~56.0.0 + version: 56.0.0 packages/expo-module-template: devDependencies: @@ -7494,8 +7494,8 @@ packages: peerDependencies: '@modelcontextprotocol/sdk': ^1.26.0 - '@expo/metro@56.0.0-rc.2': - resolution: {integrity: sha512-VgEkAshU/uVkOXux199M1i5in5rXw3PMOEHlTkl0Zi1BLP1zOL6mOIRXHG/8ZQ/6qDTJtpYgComI4/v42JOqFg==} + '@expo/metro@56.0.0': + resolution: {integrity: 
# How Use `toolbarContainerColor` prop
- [ ] Conforms with the [Documentation Writing Style Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md) --- .../build/toolbar/native.android.d.ts.map | 2 +- .../build/toolbar/native.android.js | 13 +++---------- .../build/toolbar/native.android.js.map | 2 +- .../expo-router/src/toolbar/native.android.tsx | 18 +++++------------- 4 files changed, 10 insertions(+), 25 deletions(-) diff --git a/packages/expo-router/build/toolbar/native.android.d.ts.map b/packages/expo-router/build/toolbar/native.android.d.ts.map index fdfcce9fe972e8..966bf16ddeff53 100644 --- a/packages/expo-router/build/toolbar/native.android.d.ts.map +++ b/packages/expo-router/build/toolbar/native.android.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"native.android.d.ts","sourceRoot":"","sources":["../../src/toolbar/native.android.tsx"],"names":[],"mappings":"AAYA,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,gBAAgB,CAAC;AAE7D,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,sBAAsB,2CA4B9D"} \ No newline at end of file +{"version":3,"file":"native.android.d.ts","sourceRoot":"","sources":["../../src/toolbar/native.android.tsx"],"names":[],"mappings":"AAMA,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,gBAAgB,CAAC;AAE7D,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,sBAAsB,2CA0B9D"} \ No newline at end of file diff --git a/packages/expo-router/build/toolbar/native.android.js b/packages/expo-router/build/toolbar/native.android.js index 4f11b24ab1f103..778569aa07b511 100644 --- a/packages/expo-router/build/toolbar/native.android.js +++ b/packages/expo-router/build/toolbar/native.android.js @@ -16,16 +16,9 @@ function RouterToolbarHost(props) { } return baseModifiers; }, [insets.bottom, props.withImePadding]); - return ((0, jsx_runtime_1.jsx)(react_native_1.View, { style: [react_native_1.StyleSheet.absoluteFill], pointerEvents: "box-none", children: (0, jsx_runtime_1.jsx)(jetpack_compose_1.Host, { style: styles.host, children: (0, jsx_runtime_1.jsx)(jetpack_compose_1.Box, { modifiers: modifiers, contentAlignment: 
"bottomCenter", children: (0, jsx_runtime_1.jsx)(jetpack_compose_1.HorizontalFloatingToolbar - // TODO: use toolbarContainerColor - // TODO: expose toolbarContainerColor from expo-ui - , { - // TODO: use toolbarContainerColor - // TODO: expose toolbarContainerColor from expo-ui - modifiers: [ - (0, modifiers_1.height)(64), - ...(props.backgroundColor ? [(0, modifiers_1.background)(props.backgroundColor)] : []), - ], children: props.children }) }) }) })); + return ((0, jsx_runtime_1.jsx)(react_native_1.View, { style: [react_native_1.StyleSheet.absoluteFill], pointerEvents: "box-none", children: (0, jsx_runtime_1.jsx)(jetpack_compose_1.Host, { style: styles.host, children: (0, jsx_runtime_1.jsx)(jetpack_compose_1.Box, { modifiers: modifiers, contentAlignment: "bottomCenter", children: (0, jsx_runtime_1.jsx)(jetpack_compose_1.HorizontalFloatingToolbar, { colors: { + ...(props.backgroundColor ? { toolbarContainerColor: props.backgroundColor } : {}), + }, modifiers: [(0, modifiers_1.height)(64)], children: props.children }) }) }) })); } const styles = react_native_1.StyleSheet.create({ host: { width: '100%', height: '100%', paddingHorizontal: 24 }, diff --git a/packages/expo-router/build/toolbar/native.android.js.map b/packages/expo-router/build/toolbar/native.android.js.map index 67c015804ac7b3..6ffd1e80bcf47b 100644 --- a/packages/expo-router/build/toolbar/native.android.js.map +++ b/packages/expo-router/build/toolbar/native.android.js.map @@ -1 +1 @@ 
-{"version":3,"file":"native.android.js","sourceRoot":"","sources":["../../src/toolbar/native.android.tsx"],"names":[],"mappings":";;AAcA,8CA4BC;;AA1CD,8DAAgF;AAChF,kEAM4C;AAC5C,iCAAgC;AAChC,+CAAgD;AAChD,mFAAmE;AAInE,SAAgB,iBAAiB,CAAC,KAA6B;IAC7D,MAAM,MAAM,GAAG,IAAA,kDAAiB,GAAE,CAAC;IAEnC,MAAM,SAAS,GAAG,IAAA,eAAO,EAAC,GAAG,EAAE;QAC7B,MAAM,aAAa,GAAG,CAAC,IAAA,wBAAY,GAAE,EAAE,IAAA,mBAAO,EAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;QACxE,IAAI,KAAK,CAAC,cAAc,EAAE,CAAC;YACzB,aAAa,CAAC,IAAI,CAAC,IAAA,sBAAU,GAAE,CAAC,CAAC;QACnC,CAAC;QACD,OAAO,aAAa,CAAC;IACvB,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC;IAE1C,OAAO,CACL,uBAAC,mBAAI,IAAC,KAAK,EAAE,CAAC,yBAAU,CAAC,YAAY,CAAC,EAAE,aAAa,EAAC,UAAU,YAC9D,uBAAC,sBAAI,IAAC,KAAK,EAAE,MAAM,CAAC,IAAI,YACtB,uBAAC,qBAAG,IAAC,SAAS,EAAE,SAAS,EAAE,gBAAgB,EAAC,cAAc,YACxD,uBAAC,2CAAyB;gBACxB,kCAAkC;gBAClC,kDAAkD;;oBADlD,kCAAkC;oBAClC,kDAAkD;oBAClD,SAAS,EAAE;wBACT,IAAA,kBAAM,EAAC,EAAE,CAAC;wBACV,GAAG,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,IAAA,sBAAU,EAAC,KAAK,CAAC,eAAyB,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;qBAChF,YACA,KAAK,CAAC,QAAQ,GACW,GACxB,GACD,GACF,CACR,CAAC;AACJ,CAAC;AAED,MAAM,MAAM,GAAG,yBAAU,CAAC,MAAM,CAAC;IAC/B,IAAI,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,iBAAiB,EAAE,EAAE,EAAE;CAC/D,CAAC,CAAC","sourcesContent":["import { Host, HorizontalFloatingToolbar, Box } from '@expo/ui/jetpack-compose';\nimport {\n background,\n fillMaxWidth,\n height,\n padding,\n imePadding,\n} from '@expo/ui/jetpack-compose/modifiers';\nimport { useMemo } from 'react';\nimport { StyleSheet, View } from 'react-native';\nimport { useSafeAreaInsets } from 'react-native-safe-area-context';\n\nimport type { RouterToolbarHostProps } from './native.types';\n\nexport function RouterToolbarHost(props: RouterToolbarHostProps) {\n const insets = useSafeAreaInsets();\n\n const modifiers = useMemo(() => {\n const baseModifiers = [fillMaxWidth(), padding(0, 0, 0, insets.bottom)];\n if (props.withImePadding) {\n 
baseModifiers.push(imePadding());\n }\n return baseModifiers;\n }, [insets.bottom, props.withImePadding]);\n\n return (\n \n \n \n \n {props.children}\n \n \n \n \n );\n}\n\nconst styles = StyleSheet.create({\n host: { width: '100%', height: '100%', paddingHorizontal: 24 },\n});\n"]} \ No newline at end of file +{"version":3,"file":"native.android.js","sourceRoot":"","sources":["../../src/toolbar/native.android.tsx"],"names":[],"mappings":";;AAQA,8CA0BC;;AAlCD,8DAAgF;AAChF,kEAA+F;AAC/F,iCAAgC;AAChC,+CAAgD;AAChD,mFAAmE;AAInE,SAAgB,iBAAiB,CAAC,KAA6B;IAC7D,MAAM,MAAM,GAAG,IAAA,kDAAiB,GAAE,CAAC;IAEnC,MAAM,SAAS,GAAG,IAAA,eAAO,EAAC,GAAG,EAAE;QAC7B,MAAM,aAAa,GAAG,CAAC,IAAA,wBAAY,GAAE,EAAE,IAAA,mBAAO,EAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;QACxE,IAAI,KAAK,CAAC,cAAc,EAAE,CAAC;YACzB,aAAa,CAAC,IAAI,CAAC,IAAA,sBAAU,GAAE,CAAC,CAAC;QACnC,CAAC;QACD,OAAO,aAAa,CAAC;IACvB,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC;IAE1C,OAAO,CACL,uBAAC,mBAAI,IAAC,KAAK,EAAE,CAAC,yBAAU,CAAC,YAAY,CAAC,EAAE,aAAa,EAAC,UAAU,YAC9D,uBAAC,sBAAI,IAAC,KAAK,EAAE,MAAM,CAAC,IAAI,YACtB,uBAAC,qBAAG,IAAC,SAAS,EAAE,SAAS,EAAE,gBAAgB,EAAC,cAAc,YACxD,uBAAC,2CAAyB,IACxB,MAAM,EAAE;wBACN,GAAG,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,qBAAqB,EAAE,KAAK,CAAC,eAAe,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;qBACnF,EACD,SAAS,EAAE,CAAC,IAAA,kBAAM,EAAC,EAAE,CAAC,CAAC,YACtB,KAAK,CAAC,QAAQ,GACW,GACxB,GACD,GACF,CACR,CAAC;AACJ,CAAC;AAED,MAAM,MAAM,GAAG,yBAAU,CAAC,MAAM,CAAC;IAC/B,IAAI,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,iBAAiB,EAAE,EAAE,EAAE;CAC/D,CAAC,CAAC","sourcesContent":["import { Host, HorizontalFloatingToolbar, Box } from '@expo/ui/jetpack-compose';\nimport { fillMaxWidth, height, padding, imePadding } from '@expo/ui/jetpack-compose/modifiers';\nimport { useMemo } from 'react';\nimport { StyleSheet, View } from 'react-native';\nimport { useSafeAreaInsets } from 'react-native-safe-area-context';\n\nimport type { RouterToolbarHostProps } from './native.types';\n\nexport function 
RouterToolbarHost(props: RouterToolbarHostProps) {\n const insets = useSafeAreaInsets();\n\n const modifiers = useMemo(() => {\n const baseModifiers = [fillMaxWidth(), padding(0, 0, 0, insets.bottom)];\n if (props.withImePadding) {\n baseModifiers.push(imePadding());\n }\n return baseModifiers;\n }, [insets.bottom, props.withImePadding]);\n\n return (\n \n \n \n \n {props.children}\n \n \n \n \n );\n}\n\nconst styles = StyleSheet.create({\n host: { width: '100%', height: '100%', paddingHorizontal: 24 },\n});\n"]} \ No newline at end of file diff --git a/packages/expo-router/src/toolbar/native.android.tsx b/packages/expo-router/src/toolbar/native.android.tsx index af6d5925697911..0e2a6407868387 100644 --- a/packages/expo-router/src/toolbar/native.android.tsx +++ b/packages/expo-router/src/toolbar/native.android.tsx @@ -1,11 +1,5 @@ import { Host, HorizontalFloatingToolbar, Box } from '@expo/ui/jetpack-compose'; -import { - background, - fillMaxWidth, - height, - padding, - imePadding, -} from '@expo/ui/jetpack-compose/modifiers'; +import { fillMaxWidth, height, padding, imePadding } from '@expo/ui/jetpack-compose/modifiers'; import { useMemo } from 'react'; import { StyleSheet, View } from 'react-native'; import { useSafeAreaInsets } from 'react-native-safe-area-context'; @@ -28,12 +22,10 @@ export function RouterToolbarHost(props: RouterToolbarHostProps) { + colors={{ + ...(props.backgroundColor ? { toolbarContainerColor: props.backgroundColor } : {}), + }} + modifiers={[height(64)]}> {props.children} From b247c01ea6aca248b62352f522d8a214eb74adb3 Mon Sep 17 00:00:00 2001 From: Gabriel Donadel Dall'Agnol Date: Tue, 5 May 2026 14:07:10 -0300 Subject: [PATCH 08/26] [brownfield][iOS] Add multiple frameworks support (#45347) # Why To allow users to use multiple Expo Apps in one (outer) brownfield app, we must prefix/rename the class names of each internal app so that they won't clash. 
and add the frameworks of two different inner apps to brownfield tester
packages/expo-brownfield/cli/src/utils/ios.ts | 183 ++++++++- .../expo-brownfield/cli/src/utils/mangle.ts | 363 ++++++++++++++++++ .../build/ios/plugins/withPodfilePlugin.js | 3 + .../ios/plugins/withXcodeProjectPlugin.js | 11 +- .../plugin/build/ios/types.d.ts | 1 + .../plugin/build/ios/utils/podfile.d.ts | 14 + .../plugin/build/ios/utils/podfile.js | 109 +++++- .../plugin/build/ios/utils/project.js | 1 + .../plugin/build/ios/utils/props.js | 1 + .../src/ios/plugins/withPodfilePlugin.ts | 8 +- .../src/ios/plugins/withXcodeProjectPlugin.ts | 13 +- .../expo-brownfield/plugin/src/ios/types.ts | 1 + .../plugin/src/ios/utils/podfile.ts | 122 ++++++ .../plugin/src/ios/utils/project.ts | 1 + .../plugin/src/ios/utils/props.ts | 1 + .../expo-brownfield/scripts/ios/mangle.rb | 140 +++++++ 35 files changed, 1611 insertions(+), 23 deletions(-) create mode 100644 packages/expo-brownfield/cli/build/commands/mangle.d.ts create mode 100644 packages/expo-brownfield/cli/build/commands/mangle.js create mode 100644 packages/expo-brownfield/cli/build/commands/mangle.js.map create mode 100644 packages/expo-brownfield/cli/build/utils/mangle.d.ts create mode 100644 packages/expo-brownfield/cli/build/utils/mangle.js create mode 100644 packages/expo-brownfield/cli/build/utils/mangle.js.map create mode 100644 packages/expo-brownfield/cli/src/commands/mangle.ts create mode 100644 packages/expo-brownfield/cli/src/utils/mangle.ts create mode 100644 packages/expo-brownfield/scripts/ios/mangle.rb diff --git a/packages/expo-brownfield/CHANGELOG.md b/packages/expo-brownfield/CHANGELOG.md index 460d55856c5a24..27426dc33c84c7 100644 --- a/packages/expo-brownfield/CHANGELOG.md +++ b/packages/expo-brownfield/CHANGELOG.md @@ -22,6 +22,7 @@ - [iOS] Support rendering multiple ReactNativeView simultaneously ([#44891](https://github.com/expo/expo/pull/44891) by [@gabrieldonadel](https://github.com/gabrieldonadel)) - Support registering custom turbo modules from the hosting app 
([#44929](https://github.com/expo/expo/pull/44929) by [@gabrieldonadel](https://github.com/gabrieldonadel)) - [iOS] Add support for iOS prebuilds. ([#45148](https://github.com/expo/expo/pull/45148) by [@gabrieldonadel](https://github.com/gabrieldonadel)) +- [iOS] Add support for using multiple inner app frameworks in one host app via the new `multipleFrameworks` property. ([#45347](https://github.com/expo/expo/pull/45347) by [@gabrieldonadel](https://github.com/gabrieldonadel)) ### 🐛 Bug fixes diff --git a/packages/expo-brownfield/README.md b/packages/expo-brownfield/README.md index 95b3f3c9f6e63c..fdf099d65ac197 100644 --- a/packages/expo-brownfield/README.md +++ b/packages/expo-brownfield/README.md @@ -80,6 +80,30 @@ Several Expo modules link against shared Swift Package dependencies (e.g. `expo- If a declared SPM dependency can't be found in any of these locations, `build:ios` fails with an actionable error rather than shipping a Swift Package that would crash at runtime. +## Embedding multiple brownfield frameworks in one host app (iOS) + +When two or more Expo brownfield frameworks need to coexist inside the same host iOS app, set `multipleFrameworks: true` on the iOS plugin config: + +```json +{ + "plugins": [ + [ + "expo-brownfield", + { + "ios": { + "targetName": "MyBrownfield", + "multipleFrameworks": true + } + } + ] + ] +} +``` + +Each brownfield framework gets a unique Swift module name (the framework target name), so its public Swift types are already isolated per-app. Every ObjC class registered by the framework gets a `_` prefix via `@objc(...)`, and every ObjC symbol in the pod dependency graph is rewritten with the same prefix at compile time — so two frameworks can ship overlapping pods without duplicate-symbol errors at link time. + +The mangling runs as part of `pod install` via a small Ruby shim and a Node worker bundled with `expo-brownfield`. 
The first run after a dependency change rebuilds the symbol set; subsequent `pod install` invocations are skipped via a dependency-graph checksum. + ## Contributing Contributions are very welcome! Please refer to guidelines described in the [contributing guide](https://github.com/expo/expo#contributing). diff --git a/packages/expo-brownfield/cli/build/commands/index.d.ts b/packages/expo-brownfield/cli/build/commands/index.d.ts index 3ad397d285bfe7..bb9cff05b1172c 100644 --- a/packages/expo-brownfield/cli/build/commands/index.d.ts +++ b/packages/expo-brownfield/cli/build/commands/index.d.ts @@ -1,3 +1,4 @@ export { default as buildAndroid } from './build-android'; export { default as buildIos } from './build-ios'; +export { default as mangle } from './mangle'; export { default as tasksAndroid } from './tasks-android'; diff --git a/packages/expo-brownfield/cli/build/commands/index.js b/packages/expo-brownfield/cli/build/commands/index.js index 92b1c0809f7534..80dbe110330a1f 100644 --- a/packages/expo-brownfield/cli/build/commands/index.js +++ b/packages/expo-brownfield/cli/build/commands/index.js @@ -3,11 +3,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.tasksAndroid = exports.buildIos = exports.buildAndroid = void 0; +exports.tasksAndroid = exports.mangle = exports.buildIos = exports.buildAndroid = void 0; var build_android_1 = require("./build-android"); Object.defineProperty(exports, "buildAndroid", { enumerable: true, get: function () { return __importDefault(build_android_1).default; } }); var build_ios_1 = require("./build-ios"); Object.defineProperty(exports, "buildIos", { enumerable: true, get: function () { return __importDefault(build_ios_1).default; } }); +var mangle_1 = require("./mangle"); +Object.defineProperty(exports, "mangle", { enumerable: true, get: function () { return __importDefault(mangle_1).default; } }); var tasks_android_1 = require("./tasks-android"); Object.defineProperty(exports, "tasksAndroid", { enumerable: true, get: function () { return __importDefault(tasks_android_1).default; } }); //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/packages/expo-brownfield/cli/build/commands/index.js.map b/packages/expo-brownfield/cli/build/commands/index.js.map index a36c00f463c71a..2179e26d3bf927 100644 --- a/packages/expo-brownfield/cli/build/commands/index.js.map +++ b/packages/expo-brownfield/cli/build/commands/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/commands/index.ts"],"names":[],"mappings":";;;;;;AAAA,iDAA0D;AAAjD,8HAAA,OAAO,OAAgB;AAChC,yCAAkD;AAAzC,sHAAA,OAAO,OAAY;AAC5B,iDAA0D;AAAjD,8HAAA,OAAO,OAAgB"} \ No newline at end of file +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/commands/index.ts"],"names":[],"mappings":";;;;;;AAAA,iDAA0D;AAAjD,8HAAA,OAAO,OAAgB;AAChC,yCAAkD;AAAzC,sHAAA,OAAO,OAAY;AAC5B,mCAA6C;AAApC,iHAAA,OAAO,OAAU;AAC1B,iDAA0D;AAAjD,8HAAA,OAAO,OAAgB"} \ No newline at end of file diff --git a/packages/expo-brownfield/cli/build/commands/mangle.d.ts 
b/packages/expo-brownfield/cli/build/commands/mangle.d.ts new file mode 100644 index 00000000000000..567d7530b9f532 --- /dev/null +++ b/packages/expo-brownfield/cli/build/commands/mangle.d.ts @@ -0,0 +1,13 @@ +import type { Command } from 'commander'; +/** + * Internal command spawned by `scripts/ios/mangle.rb` from a Podfile's + * `post_install` block when the `multipleFrameworks` plugin option is set. + * Not intended for direct user invocation. + * + * Exits with code 1 on any failure with a single-line error message — the + * Ruby shim surfaces this back to CocoaPods. Without this catch the rejected + * promise bubbles up to Node's unhandled-rejection handler and prints a noisy + * stack trace that obscures the actual build failure. + */ +declare const mangle: (command: Command) => Promise; +export default mangle; diff --git a/packages/expo-brownfield/cli/build/commands/mangle.js b/packages/expo-brownfield/cli/build/commands/mangle.js new file mode 100644 index 00000000000000..7644494d15de95 --- /dev/null +++ b/packages/expo-brownfield/cli/build/commands/mangle.js @@ -0,0 +1,45 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const node_fs_1 = __importDefault(require("node:fs")); +const mangle_1 = require("../utils/mangle"); +const readContext = (options) => { + let raw; + if (options.contextFile) { + raw = node_fs_1.default.readFileSync(options.contextFile, 'utf8'); + } + else if (options.contextJson) { + raw = options.contextJson; + } + else { + throw new Error('expo-brownfield mangle: missing --context-json or --context-file. 
' + + 'This command is normally invoked from the Ruby shim during `pod install`.'); + } + return JSON.parse(raw); +}; +/** + * Internal command spawned by `scripts/ios/mangle.rb` from a Podfile's + * `post_install` block when the `multipleFrameworks` plugin option is set. + * Not intended for direct user invocation. + * + * Exits with code 1 on any failure with a single-line error message — the + * Ruby shim surfaces this back to CocoaPods. Without this catch the rejected + * promise bubbles up to Node's unhandled-rejection handler and prints a noisy + * stack trace that obscures the actual build failure. + */ +const mangle = async (command) => { + try { + const options = command.opts(); + const context = readContext(options); + await (0, mangle_1.runMangle)(context, { verbose: options.verbose ?? false }); + } + catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`expo-brownfield mangle: ${message}`); + process.exit(1); + } +}; +exports.default = mangle; +//# sourceMappingURL=mangle.js.map \ No newline at end of file diff --git a/packages/expo-brownfield/cli/build/commands/mangle.js.map b/packages/expo-brownfield/cli/build/commands/mangle.js.map new file mode 100644 index 00000000000000..fed19191f709ca --- /dev/null +++ b/packages/expo-brownfield/cli/build/commands/mangle.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"mangle.js","sourceRoot":"","sources":["../../src/commands/mangle.ts"],"names":[],"mappings":";;;;;AACA,sDAAyB;AAEzB,4CAAgE;AAQhE,MAAM,WAAW,GAAG,CAAC,OAAsB,EAAiB,EAAE;IAC5D,IAAI,GAAW,CAAC;IAChB,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC;QACxB,GAAG,GAAG,iBAAE,CAAC,YAAY,CAAC,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;IACrD,CAAC;SAAM,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC;QAC/B,GAAG,GAAG,OAAO,CAAC,WAAW,CAAC;IAC5B,CAAC;SAAM,CAAC;QACN,MAAM,IAAI,KAAK,CACb,oEAAoE;YAClE,2EAA2E,CAC9E,CAAC;IACJ,CAAC;IACD,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,CAAkB,CAAC;AAC1C,CAAC,CAAC;AAEF;;;;;;;;;GASG;AACH,MAAM,MAAM,GAAG,KAAK,EAAE,OAAgB,EAAE,EAAE;IACxC,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,OAAO,CAAC,IAAI,EAAiB,CAAC;QAC9C,MAAM,OAAO,GAAG,WAAW,CAAC,OAAO,CAAC,CAAC;QACrC,MAAM,IAAA,kBAAS,EAAC,OAAO,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,KAAK,EAAE,CAAC,CAAC;IAClE,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO,CAAC,KAAK,CAAC,2BAA2B,OAAO,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;AACH,CAAC,CAAC;AAEF,kBAAe,MAAM,CAAC"} \ No newline at end of file diff --git a/packages/expo-brownfield/cli/build/index.js b/packages/expo-brownfield/cli/build/index.js index 7a29b0c96acf66..2a7712f1a6e637 100644 --- a/packages/expo-brownfield/cli/build/index.js +++ b/packages/expo-brownfield/cli/build/index.js @@ -40,6 +40,16 @@ program .action(async function () { await (0, commands_1.buildIos)(this); }); +// mangle (internal: invoked by scripts/ios/mangle.rb during pod install) +program + .command('mangle', { hidden: true }) + .description('Internal: regenerate brownfield mangling xcconfig') + .option('--context-json ', 'inline JSON describing the mangling context') + .option('--context-file ', 'path to a JSON file describing the mangling context') + .option('--verbose', 'forward all output to the terminal') + .action(async function () { + await (0, commands_1.mangle)(this); +}); // tasks:android program 
.command('tasks:android') diff --git a/packages/expo-brownfield/cli/build/index.js.map b/packages/expo-brownfield/cli/build/index.js.map index c1c8c4407e0afb..1fc1e1e6c26178 100644 --- a/packages/expo-brownfield/cli/build/index.js.map +++ b/packages/expo-brownfield/cli/build/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;AACA,yCAAoC;AAEpC,yCAAkE;AAClE,sEAA6C;AAE7C,MAAM,OAAO,GAAG,IAAI,mBAAO,EAAE,CAAC;AAE9B,eAAe;AACf,OAAO,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,sBAAW,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;AAE9E,gBAAgB;AAChB,OAAO;KACJ,OAAO,CAAC,eAAe,CAAC;KACxB,WAAW,CAAC,gDAAgD,CAAC;KAC7D,MAAM,CAAC,aAAa,EAAE,qBAAqB,CAAC;KAC5C,MAAM,CAAC,eAAe,EAAE,uBAAuB,CAAC;KAChD,MAAM,CAAC,WAAW,EAAE,uCAAuC,CAAC;KAC5D,MAAM,CAAC,WAAW,EAAE,oCAAoC,CAAC;KACzD,MAAM,CAAC,yBAAyB,EAAE,gCAAgC,CAAC;KACnE,MAAM,CAAC,sBAAsB,EAAE,oDAAoD,CAAC;KACpF,MAAM,CACL,sCAAsC,EACtC,mDAAmD,CACpD;KACA,MAAM,CAAC,WAAW,EAAE,gDAAgD,CAAC;KACrE,MAAM,CAAC,KAAK;IACX,MAAM,IAAA,uBAAY,EAAC,IAAI,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEL,YAAY;AACZ,OAAO;KACJ,OAAO,CAAC,WAAW,CAAC;KACpB,WAAW,CAAC,4CAA4C,CAAC;KACzD,MAAM,CAAC,aAAa,EAAE,2BAA2B,CAAC;KAClD,MAAM,CAAC,eAAe,EAAE,6BAA6B,CAAC;KACtD,MAAM,CAAC,WAAW,EAAE,oCAAoC,CAAC;KACzD,MAAM,CAAC,uBAAuB,EAAE,wBAAwB,CAAC;KACzD,MAAM,CAAC,iCAAiC,EAAE,4CAA4C,CAAC;KACvF,MAAM,CAAC,6BAA6B,EAAE,iCAAiC,CAAC;KACxE,MAAM,CAAC,WAAW,EAAE,gDAAgD,CAAC;KACrE,MAAM,CACL,yBAAyB,EACzB,0EAA0E,CAC3E;KACA,MAAM,CAAC,KAAK;IACX,MAAM,IAAA,mBAAQ,EAAC,IAAI,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEL,gBAAgB;AAChB,OAAO;KACJ,OAAO,CAAC,eAAe,CAAC;KACxB,WAAW,CAAC,8DAA8D,CAAC;KAC3E,MAAM,CAAC,WAAW,EAAE,oCAAoC,CAAC;KACzD,MAAM,CAAC,yBAAyB,EAAE,gCAAgC,CAAC;KACnE,MAAM,CAAC,WAAW,EAAE,gDAAgD,CAAC;KACrE,MAAM,CAAC,KAAK;IACX,MAAM,IAAA,uBAAY,EAAC,IAAI,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEL,OAAO,CAAC,KAAK,EAAE,CAAC"} \ No newline at end of file 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;AACA,yCAAoC;AAEpC,yCAA0E;AAC1E,sEAA6C;AAE7C,MAAM,OAAO,GAAG,IAAI,mBAAO,EAAE,CAAC;AAE9B,eAAe;AACf,OAAO,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,sBAAW,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;AAE9E,gBAAgB;AAChB,OAAO;KACJ,OAAO,CAAC,eAAe,CAAC;KACxB,WAAW,CAAC,gDAAgD,CAAC;KAC7D,MAAM,CAAC,aAAa,EAAE,qBAAqB,CAAC;KAC5C,MAAM,CAAC,eAAe,EAAE,uBAAuB,CAAC;KAChD,MAAM,CAAC,WAAW,EAAE,uCAAuC,CAAC;KAC5D,MAAM,CAAC,WAAW,EAAE,oCAAoC,CAAC;KACzD,MAAM,CAAC,yBAAyB,EAAE,gCAAgC,CAAC;KACnE,MAAM,CAAC,sBAAsB,EAAE,oDAAoD,CAAC;KACpF,MAAM,CACL,sCAAsC,EACtC,mDAAmD,CACpD;KACA,MAAM,CAAC,WAAW,EAAE,gDAAgD,CAAC;KACrE,MAAM,CAAC,KAAK;IACX,MAAM,IAAA,uBAAY,EAAC,IAAI,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEL,YAAY;AACZ,OAAO;KACJ,OAAO,CAAC,WAAW,CAAC;KACpB,WAAW,CAAC,4CAA4C,CAAC;KACzD,MAAM,CAAC,aAAa,EAAE,2BAA2B,CAAC;KAClD,MAAM,CAAC,eAAe,EAAE,6BAA6B,CAAC;KACtD,MAAM,CAAC,WAAW,EAAE,oCAAoC,CAAC;KACzD,MAAM,CAAC,uBAAuB,EAAE,wBAAwB,CAAC;KACzD,MAAM,CAAC,iCAAiC,EAAE,4CAA4C,CAAC;KACvF,MAAM,CAAC,6BAA6B,EAAE,iCAAiC,CAAC;KACxE,MAAM,CAAC,WAAW,EAAE,gDAAgD,CAAC;KACrE,MAAM,CACL,yBAAyB,EACzB,0EAA0E,CAC3E;KACA,MAAM,CAAC,KAAK;IACX,MAAM,IAAA,mBAAQ,EAAC,IAAI,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEL,yEAAyE;AACzE,OAAO;KACJ,OAAO,CAAC,QAAQ,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;KACnC,WAAW,CAAC,mDAAmD,CAAC;KAChE,MAAM,CAAC,uBAAuB,EAAE,6CAA6C,CAAC;KAC9E,MAAM,CAAC,uBAAuB,EAAE,qDAAqD,CAAC;KACtF,MAAM,CAAC,WAAW,EAAE,oCAAoC,CAAC;KACzD,MAAM,CAAC,KAAK;IACX,MAAM,IAAA,iBAAM,EAAC,IAAI,CAAC,CAAC;AACrB,CAAC,CAAC,CAAC;AAEL,gBAAgB;AAChB,OAAO;KACJ,OAAO,CAAC,eAAe,CAAC;KACxB,WAAW,CAAC,8DAA8D,CAAC;KAC3E,MAAM,CAAC,WAAW,EAAE,oCAAoC,CAAC;KACzD,MAAM,CAAC,yBAAyB,EAAE,gCAAgC,CAAC;KACnE,MAAM,CAAC,WAAW,EAAE,gDAAgD,CAAC;KACrE,MAAM,CAAC,KAAK;IACX,MAAM,IAAA,uBAAY,EAAC,IAAI,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEL,OAAO,CAAC,KAAK,EAAE,CAAC"} \ No newline at end of file diff --git a/packages/expo-brownfield/cli/build/utils/ios.d.ts b/packages/expo-brownfield/cli/build/utils/ios.d.ts index 
63621757477d9b..56b8da5fafe9af 100644 --- a/packages/expo-brownfield/cli/build/utils/ios.d.ts +++ b/packages/expo-brownfield/cli/build/utils/ios.d.ts @@ -1,4 +1,17 @@ import type { IosConfig } from './types'; +/** + * Inspect the built brownfield framework binary and return the names of `@rpath`-linked + * dynamic frameworks that are NOT already covered by the fixed XCFramework set, the + * brownfield target itself, or precompiled-module enumeration. + * + * Source-built pods (e.g. `ExpoModulesJSI` from a local podspec) are produced as dynamic + * `.framework`s alongside the brownfield framework, and the brownfield binary holds an + * `@rpath/.framework/` reference to each. Without shipping these as standalone + * xcframeworks the host app crashes at runtime with `dyld: Library not loaded: @rpath/…`. + * + * Returns names without the `.framework` suffix, deduped, in `otool -L` order. + */ +export declare const enumerateSourceBuiltDeps: (config: IosConfig, alreadyCovered: Set) => string[]; export declare const cleanUpArtifacts: (config: IosConfig) => Promise; export declare const buildFramework: (config: IosConfig) => Promise; export declare const copyXCFrameworks: (config: IosConfig, dest: string) => Promise; diff --git a/packages/expo-brownfield/cli/build/utils/ios.js b/packages/expo-brownfield/cli/build/utils/ios.js index aabcd10c9b7381..24fd2540eeb5c9 100644 --- a/packages/expo-brownfield/cli/build/utils/ios.js +++ b/packages/expo-brownfield/cli/build/utils/ios.js @@ -3,8 +3,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.shipSwiftPackage = exports.shipFrameworks = exports.printIosConfig = exports.makeArtifactsDirectory = exports.binaryTarget = exports.libraryProduct = exports.getSupportedPlatforms = exports.generatePackageMetadataFile = exports.findWorkspace = exports.findScheme = exports.createXCframework = exports.createSwiftPackage = exports.copyXCFrameworks = exports.buildFramework = exports.cleanUpArtifacts = void 0; +exports.shipSwiftPackage = exports.shipFrameworks = exports.printIosConfig = exports.makeArtifactsDirectory = exports.binaryTarget = exports.libraryProduct = exports.getSupportedPlatforms = exports.generatePackageMetadataFile = exports.findWorkspace = exports.findScheme = exports.createXCframework = exports.createSwiftPackage = exports.copyXCFrameworks = exports.buildFramework = exports.cleanUpArtifacts = exports.enumerateSourceBuiltDeps = void 0; const chalk_1 = __importDefault(require("chalk")); +const node_child_process_1 = require("node:child_process"); const node_fs_1 = __importDefault(require("node:fs")); const node_path_1 = __importDefault(require("node:path")); const commands_1 = require("./commands"); @@ -12,6 +13,108 @@ const constants_1 = require("./constants"); const error_1 = __importDefault(require("./error")); const precompiled_1 = require("./precompiled"); const spinner_1 = require("./spinner"); +/** + * Inspect the built brownfield framework binary and return the names of `@rpath`-linked + * dynamic frameworks that are NOT already covered by the fixed XCFramework set, the + * brownfield target itself, or precompiled-module enumeration. + * + * Source-built pods (e.g. `ExpoModulesJSI` from a local podspec) are produced as dynamic + * `.framework`s alongside the brownfield framework, and the brownfield binary holds an + * `@rpath/.framework/` reference to each. 
Without shipping these as standalone + * xcframeworks the host app crashes at runtime with `dyld: Library not loaded: @rpath/…`. + * + * Returns names without the `.framework` suffix, deduped, in `otool -L` order. + */ +const enumerateSourceBuiltDeps = (config, alreadyCovered) => { + const frameworkBinary = node_path_1.default.join(config.simulator, `${config.scheme}.framework`, config.scheme); + if (!node_fs_1.default.existsSync(frameworkBinary)) { + return []; + } + let stdout; + try { + stdout = (0, node_child_process_1.execSync)(`otool -L "${frameworkBinary}"`, { encoding: 'utf8' }); + } + catch { + // otool failure is non-fatal — degrade gracefully and let the user catch the missing dep + // at runtime rather than blocking the whole build. + return []; + } + const names = new Set(); + for (const line of stdout.split('\n')) { + const match = line.trim().match(/^@rpath\/([^/]+)\.framework\//); + if (match?.[1]) { + names.add(match[1]); + } + } + return Array.from(names).filter((name) => name !== config.scheme && !alreadyCovered.has(name)); +}; +exports.enumerateSourceBuiltDeps = enumerateSourceBuiltDeps; +/** + * Build the `-framework ` (+ optional `-debug-symbols `) arg + * sequence for one slice of a `xcodebuild -create-xcframework` invocation. + * + * `-debug-symbols` is strict — pointing it at a non-existent path fails the + * whole create step — so we only attach the flag when the dSYM has actually + * been produced for that slice. dSYMs land at `.dSYM` next to the + * `.framework` in the products dir whenever + * `DEBUG_INFORMATION_FORMAT=dwarf-with-dsym` is in effect (forced for + * brownfield builds in `buildFramework`, but not guaranteed for transitive + * source-built deps that build under their own pod build settings). 
+ */ +const xcframeworkSliceArgs = (frameworkPath) => { + const args = ['-framework', frameworkPath]; + const dsymPath = `${frameworkPath}.dSYM`; + if (node_fs_1.default.existsSync(dsymPath)) { + args.push('-debug-symbols', dsymPath); + } + return args; +}; +/** + * Locate a source-built `.framework` for `name` inside one of the brownfield build product + * slices. Pods that set `FRAMEWORK_SEARCH_PATHS` to `${PODS_CONFIGURATION_BUILD_DIR}/XCFrameworkIntermediates/` + * (e.g. `ExpoModulesJSI`) land in `XCFrameworkIntermediates//.framework` rather + * than at the slice root, so we check both locations. + */ +const findSourceBuiltFramework = (slicePath, name) => { + const candidates = [ + node_path_1.default.join(slicePath, `${name}.framework`), + node_path_1.default.join(slicePath, 'XCFrameworkIntermediates', name, `${name}.framework`), + ]; + return candidates.find((candidate) => node_fs_1.default.existsSync(candidate)) ?? null; +}; +/** + * Build an xcframework from the device + simulator slices of a source-built `.framework` + * sitting in the brownfield build products dir, and copy it into `dest`. Returns whether + * the xcframework was produced (false when one or both slices are missing — typically a + * harmless skip for a system framework or a transitive dep that isn't actually built). + */ +const bundleSourceBuiltFramework = async (config, name, dest) => { + const deviceFramework = findSourceBuiltFramework(config.device, name); + const simulatorFramework = findSourceBuiltFramework(config.simulator, name); + if (!deviceFramework || !simulatorFramework) { + console.warn(`expo-brownfield: source-built dependency '${name}' is linked by ${config.scheme}.framework ` + + `but its device/simulator slices were not found under the brownfield build products dir. ` + + `Skipping. 
The host app may fail at runtime with 'Library not loaded: @rpath/${name}.framework/${name}'.`); + return false; + } + const outputPath = node_path_1.default.join(dest, `${name}.xcframework`); + if (node_fs_1.default.existsSync(outputPath)) { + node_fs_1.default.rmSync(outputPath, { recursive: true, force: true }); + } + const args = [ + '-create-xcframework', + ...xcframeworkSliceArgs(deviceFramework), + ...xcframeworkSliceArgs(simulatorFramework), + '-output', + outputPath, + ]; + if (config.dryRun) { + console.log(`xcodebuild ${args.join(' ')}`); + return true; + } + await (0, commands_1.runCommand)('xcodebuild', args, { verbose: config.verbose }); + return true; +}; const cleanUpArtifacts = async (config) => { if (config.dryRun) { console.log('Cleaning up previous artifacts'); @@ -47,6 +150,11 @@ const buildFramework = async (config) => { 'generic/platform=iphonesimulator', '-configuration', config.buildConfiguration, + // Ensure dSYMs are produced for both Debug and Release so they can be + // bundled into the resulting xcframework via `-create-xcframework + // -debug-symbols`. Release defaults to `dwarf-with-dsym`; Debug defaults + // to plain `dwarf` and would otherwise leave us with no dSYM to ship. + 'DEBUG_INFORMATION_FORMAT=dwarf-with-dsym', ]; if (config.dryRun) { console.log(`xcodebuild ${args.join(' ')}`); @@ -114,8 +222,36 @@ const copyXCFrameworks = async (config, dest) => { }); } } + // Bundle any source-built dynamic frameworks the brownfield binary links against + // (e.g. `ExpoModulesJSI` from a local podspec). Without this the host app crashes at + // runtime with `dyld: Library not loaded: @rpath/.framework/`. 
+ const alreadyCovered = collectCoveredFrameworkNames(config); + const sourceBuiltDeps = (0, exports.enumerateSourceBuiltDeps)(config, alreadyCovered); + for (const depName of sourceBuiltDeps) { + await (0, spinner_1.withSpinner)({ + operation: () => bundleSourceBuiltFramework(config, depName, dest), + loaderMessage: `Bundling source-built ${depName} as xcframework...`, + successMessage: `Bundling source-built ${depName} as xcframework succeeded`, + errorMessage: `Bundling source-built ${depName} as xcframework failed`, + verbose: config.verbose, + }); + } }; exports.copyXCFrameworks = copyXCFrameworks; +/** + * Set of xcframework names the brownfield CLI already plans to ship (fixed XCFrameworks + + * prebuilt modules when enabled). Used to dedupe against `enumerateSourceBuiltDeps` so a + * dep that's already covered by a prebuilt artifact isn't re-built from source. + */ +const collectCoveredFrameworkNames = (config) => { + const covered = new Set([config.scheme, ...(0, precompiled_1.resolvedFixedXCFrameworks)()]); + if (config.usePrebuilds) { + for (const module of (0, precompiled_1.enumerateAllPrebuildModules)(process.cwd(), config.buildConfiguration)) { + covered.add(module.name); + } + } + return covered; +}; const createSwiftPackage = async (config) => { if (config.dryRun && config.output !== 'frameworks') { console.log(`Creating Swift package with name: ${config.output.packageName} at path: ${config.artifacts}`); @@ -146,10 +282,8 @@ const createXCframework = async (config, at) => { const outputPath = node_path_1.default.join(at, frameworkName); const args = [ '-create-xcframework', - '-framework', - `${config.device}/${config.scheme}.framework`, - '-framework', - `${config.simulator}/${config.scheme}.framework`, + ...xcframeworkSliceArgs(`${config.device}/${config.scheme}.framework`), + ...xcframeworkSliceArgs(`${config.simulator}/${config.scheme}.framework`), '-output', outputPath, ]; @@ -236,7 +370,16 @@ const generatePackageMetadataFile = async (config, 
packagePath) => { targets: [name], })) : []; - const xcframeworks = [...baseFrameworks, ...precompiledModules]; + // Source-built dynamic deps the brownfield framework links against (e.g. ExpoModulesJSI). + // `copyXCFrameworks` writes their xcframeworks to disk; we need to declare matching + // `.binaryTarget`s here so SPM consumers actually link them. + const sourceBuiltDepNames = (0, exports.enumerateSourceBuiltDeps)(config, new Set([ + config.scheme, + ...baseFrameworks.map(({ name }) => name), + ...precompiledModules.map(({ name }) => name), + ])); + const sourceBuiltDeps = sourceBuiltDepNames.map((name) => ({ name, targets: [name] })); + const xcframeworks = [...baseFrameworks, ...precompiledModules, ...sourceBuiltDeps]; // With prebuilds the module graph is large; expose a single aggregate library so consumers // `import ` once and Xcode links every underlying binary target automatically. // Without prebuilds keep one `.library` per framework for backwards compatibility. diff --git a/packages/expo-brownfield/cli/build/utils/ios.js.map b/packages/expo-brownfield/cli/build/utils/ios.js.map index 317db241703dde..d115474d166d81 100644 --- a/packages/expo-brownfield/cli/build/utils/ios.js.map +++ b/packages/expo-brownfield/cli/build/utils/ios.js.map @@ -1 +1 @@ 
-{"version":3,"file":"ios.js","sourceRoot":"","sources":["../../src/utils/ios.ts"],"names":[],"mappings":";;;;;;AAAA,kDAA0B;AAC1B,sDAAyB;AACzB,0DAA6B;AAE7B,yCAAwC;AACxC,2CAA0C;AAC1C,oDAA+B;AAC/B,+CAAiF;AACjF,uCAAwC;AAGjC,MAAM,gBAAgB,GAAG,KAAK,EAAE,MAAiB,EAAE,EAAE;IAC1D,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAC;QAC9C,OAAO;IACT,CAAC;IAED,OAAO,IAAA,qBAAW,EAAC;QACjB,SAAS,EAAE,KAAK,IAAI,EAAE;YACpB,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;gBACrC,OAAO;YACT,CAAC;YAED,iBAAE,CAAC,WAAW,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE;gBAChD,MAAM,QAAQ,GAAG,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,EAAE,CAAC;gBAC/C,iBAAE,CAAC,MAAM,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;YACxD,CAAC,CAAC,CAAC;QACL,CAAC;QACD,aAAa,EAAE,mCAAmC;QAClD,cAAc,EAAE,0CAA0C;QAC1D,YAAY,EAAE,uCAAuC;KACtD,CAAC,CAAC;AACL,CAAC,CAAC;AArBW,QAAA,gBAAgB,oBAqB3B;AAEK,MAAM,cAAc,GAAG,KAAK,EAAE,MAAiB,EAAE,EAAE;IACxD,MAAM,IAAI,GAAG;QACX,YAAY;QACZ,MAAM,CAAC,SAAS;QAChB,SAAS;QACT,MAAM,CAAC,MAAM;QACb,kBAAkB;QAClB,MAAM,CAAC,eAAe;QACtB,cAAc;QACd,2BAA2B;QAC3B,cAAc;QACd,kCAAkC;QAClC,gBAAgB;QAChB,MAAM,CAAC,kBAAkB;KAC1B,CAAC;IAEF,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;QAC5C,OAAO;IACT,CAAC;IAED,OAAO,IAAA,qBAAW,EAAC;QACjB,SAAS,EAAE,GAAG,EAAE,CAAC,IAAA,qBAAU,EAAC,YAAY,EAAE,IAAI,EAAE,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC;QAC5E,aAAa,EAAE,wBAAwB;QACvC,cAAc,EAAE,+BAA+B;QAC/C,YAAY,EAAE,4BAA4B;QAC1C,OAAO,EAAE,MAAM,CAAC,OAAO;KACxB,CAAC,CAAC;AACL,CAAC,CAAC;AA5BW,QAAA,cAAc,kBA4BzB;AAEK,MAAM,gBAAgB,GAAG,KAAK,EAAE,MAAiB,EAAE,IAAY,EAAE,EAAE;IACxE,OAAO,CAAC,GAAG,CAAC,0BAA0B,EAAE,IAAI,CAAC,CAAC;IAE9C,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO;IACT,CAAC;IAED,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,CAAC,uBAAW,CAAC,CAAC;IAChD,KAAK,MAAM,WAAW,IAAI,YAAY,EAAE,CAAC;QACvC,IAAI,iBAAE,CAAC,UAAU,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC;YACpC,MAAM,IAAA,qBAAW,EAAC;gBAChB,SAAS,EAAE,KAAK,IAAI,EAAE,CACpB,iBAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,W
AAW,CAAC,IAAI,EAAE,mBAAI,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,WAAW,CAAC,IAAI,cAAc,CAAC,EAAE;oBACnF,KAAK,EAAE,IAAI;oBACX,SAAS,EAAE,IAAI;iBAChB,CAAC;gBACJ,aAAa,EAAE,WAAW,WAAW,CAAC,IAAI,gCAAgC;gBAC1E,cAAc,EAAE,WAAW,WAAW,CAAC,IAAI,uCAAuC;gBAClF,YAAY,EAAE,WAAW,WAAW,CAAC,IAAI,oCAAoC;gBAC7E,OAAO,EAAE,MAAM,CAAC,OAAO;aACxB,CAAC,CAAC;QACL,CAAC;aAAM,IAAI,WAAW,CAAC,IAAI,KAAK,uBAAW,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;YACxD,eAAQ,CAAC,MAAM,CAAC,gCAAgC,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;QACtE,CAAC;aAAM,CAAC;YACN,OAAO,CAAC,IAAI,CACV,GAAG,WAAW,CAAC,IAAI,8BAA8B,WAAW,CAAC,IAAI,oCAAoC,CACtG,CAAC;QACJ,CAAC;IACH,CAAC;IAED,IAAI,MAAM,CAAC,YAAY,EAAE,CAAC;QACxB,kFAAkF;QAClF,sFAAsF;QACtF,uFAAuF;QACvF,MAAM,OAAO,GAAG,IAAA,yCAA2B,EAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,kBAAkB,CAAC,CAAC;QAEtF,oFAAoF;QACpF,yFAAyF;QACzF,4FAA4F;QAC5F,2DAA2D;QAC3D,MAAM,cAAc,GAAG,IAAI,GAAG,EAAU,CAAC;QACzC,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;YAC7B,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC;gBACvC,cAAc,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;gBAClC,MAAM,IAAA,iCAAmB,EAAC,MAAM,EAAE,MAAM,CAAC,kBAAkB,EAAE,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC;YAC5F,CAAC;YACD,MAAM,IAAA,qBAAW,EAAC;gBAChB,SAAS,EAAE,KAAK,IAAI,EAAE,CACpB,iBAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,eAAe,EAAE,mBAAI,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,MAAM,CAAC,IAAI,cAAc,CAAC,EAAE;oBACpF,KAAK,EAAE,IAAI;oBACX,SAAS,EAAE,IAAI;iBAChB,CAAC;gBACJ,aAAa,EAAE,WAAW,MAAM,CAAC,IAAI,gCAAgC;gBACrE,cAAc,EAAE,WAAW,MAAM,CAAC,IAAI,uCAAuC;gBAC7E,YAAY,EAAE,WAAW,MAAM,CAAC,IAAI,oCAAoC;gBACxE,OAAO,EAAE,MAAM,CAAC,OAAO;aACxB,CAAC,CAAC;QACL,CAAC;IACH,CAAC;AACH,CAAC,CAAC;AA3DW,QAAA,gBAAgB,oBA2D3B;AAEK,MAAM,kBAAkB,GAAG,KAAK,EAAE,MAAiB,EAAmB,EAAE;IAC7E,IAAI,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,KAAK,YAAY,EAAE,CAAC;QACpD,OAAO,CAAC,GAAG,CACT,qCAAqC,MAAM,CAAC,MAAM,CAAC,WAAW,aAAa,MAAM,CAAC,SAAS,EAAE,CAC9F,CAAC;QACF,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,OAAO,MAAM,IAAA,qBAAW,EAAC;QACvB,SAAS,EAAE,KAAK,IAAI,EAAE;YACpB,IAAI,MAAM,CAAC,MAAM,KAAK,YAAY,EAAE,CAAC;gBACnC,OAAO,EAAE,CAAC;YACZ,CAAC;YAED,MAAM,WAAW,GAAG,mBAAI,CA
AC,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;YAC3E,MAAM,iBAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,WAAW,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YAE1D,8BAA8B;YAC9B,MAAM,eAAe,GAAG,mBAAI,CAAC,IAAI,CAAC,WAAW,EAAE,cAAc,CAAC,CAAC;YAC/D,MAAM,iBAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,eAAe,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YAE9D,MAAM,IAAA,mCAA2B,EAAC,MAAM,EAAE,WAAW,CAAC,CAAC;YAEvD,OAAO,WAAW,CAAC;QACrB,CAAC;QACD,aAAa,EAAE,2BAA2B;QAC1C,cAAc,EAAE,kCAAkC;QAClD,YAAY,EAAE,+BAA+B;QAC7C,OAAO,EAAE,MAAM,CAAC,OAAO;KACxB,CAAC,CAAC;AACL,CAAC,CAAC;AA9BW,QAAA,kBAAkB,sBA8B7B;AAEK,MAAM,iBAAiB,GAAG,KAAK,EAAE,MAAiB,EAAE,EAAU,EAAE,EAAE;IACvE,MAAM,aAAa,GAAG,GAAG,MAAM,CAAC,MAAM,cAAc,CAAC;IACrD,MAAM,UAAU,GAAG,mBAAI,CAAC,IAAI,CAAC,EAAE,EAAE,aAAa,CAAC,CAAC;IAEhD,MAAM,IAAI,GAAG;QACX,qBAAqB;QACrB,YAAY;QACZ,GAAG,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,YAAY;QAC7C,YAAY;QACZ,GAAG,MAAM,CAAC,SAAS,IAAI,MAAM,CAAC,MAAM,YAAY;QAChD,SAAS;QACT,UAAU;KACX,CAAC;IAEF,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;QAC5C,OAAO;IACT,CAAC;IAED,OAAO,IAAA,qBAAW,EAAC;QACjB,SAAS,EAAE,GAAG,EAAE,CAAC,IAAA,qBAAU,EAAC,YAAY,EAAE,IAAI,EAAE,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC;QAC5E,aAAa,EAAE,4CAA4C;QAC3D,cAAc,EAAE,mDAAmD;QACnE,YAAY,EAAE,gDAAgD;QAC9D,OAAO,EAAE,MAAM,CAAC,OAAO;KACxB,CAAC,CAAC;AACL,CAAC,CAAC;AA1BW,QAAA,iBAAiB,qBA0B5B;AAEK,MAAM,UAAU,GAAG,GAAuB,EAAE;IACjD,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,mBAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,KAAK,CAAC,CAAC;QAChD,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;YAC5B,eAAQ,CAAC,MAAM,CAAC,yBAAyB,CAAC,CAAC;QAC7C,CAAC;QAED,MAAM,cAAc,GAAG,iBAAE;aACtB,WAAW,CAAC,OAAO,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC;aAC7C,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QACxC,MAAM,MAAM,GAAG,cAAc,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE;YAC/C,MAAM,aAAa,GAAG,mBAAI,CAAC,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC;YACzD,MAAM,KAAK,GAAG,iBAAE,CAAC,WAAW,CAAC,aAAa,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YACjE,OAAO,KAAK,CAAC,IAAI,CACf,CAAC,
IAAI,EAAE,EAAE,CAAC,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,CAAC,8BAA8B,CAAC,CACpF,CAAC;QACJ,CAAC,CAAC,CAAC;QAEH,IAAI,MAAM,EAAE,CAAC;YACX,OAAO,MAAM,CAAC,IAAI,CAAC;QACrB,CAAC;QAED,eAAQ,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;IAC1C,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,eAAQ,CAAC,MAAM,CAAC,6BAA6B,EAAE,YAAY,CAAC,CAAC;IAC/D,CAAC;IAED,OAAO;AACT,CAAC,CAAC;AA7BW,QAAA,UAAU,cA6BrB;AAEK,MAAM,aAAa,GAAG,CAAC,MAAe,EAAsB,EAAE;IACnE,mDAAmD;IACnD,8BAA8B;IAC9B,IAAI,MAAM,EAAE,CAAC;QACX,OAAO,mBAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,mCAAmC,CAAC,CAAC;IACvE,CAAC;IAED,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,mBAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,KAAK,CAAC,CAAC;QAChD,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;YAC5B,eAAQ,CAAC,MAAM,CAAC,yBAAyB,CAAC,CAAC;QAC7C,CAAC;QAED,MAAM,KAAK,GAAG,iBAAE,CAAC,WAAW,CAAC,OAAO,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;QAC/D,MAAM,SAAS,GAAG,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC;QAC3E,IAAI,SAAS,EAAE,CAAC;YACd,OAAO,mBAAI,CAAC,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC;QAC5C,CAAC;QAED,eAAQ,CAAC,MAAM,CAAC,yBAAyB,CAAC,CAAC;IAC7C,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,eAAQ,CAAC,MAAM,CAAC,6BAA6B,EAAE,YAAY,CAAC,CAAC;IAC/D,CAAC;IAED,OAAO;AACT,CAAC,CAAC;AA1BW,QAAA,aAAa,iBA0BxB;AAEK,MAAM,2BAA2B,GAAG,KAAK,EAAE,MAAiB,EAAE,WAAmB,EAAE,EAAE;IAC1F,IAAI,MAAM,CAAC,MAAM,KAAK,YAAY,EAAE,CAAC;QACnC,OAAO;IACT,CAAC;IAED,MAAM,kBAAkB,GAAG,iBAAE,CAAC,UAAU,CAAC,uBAAW,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IACjE,MAAM,cAAc,GAAG;QACrB,EAAE,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE;QACjD,EAAE,IAAI,EAAE,UAAU,EAAE,OAAO,EAAE,CAAC,UAAU,CAAC,EAAE;QAC3C,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,uBAAW,CAAC,KAAK,EAAE,uBAAW,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;KAClF,CAAC;IAEF,uFAAuF;IACvF,8FAA8F;IAC9F,4FAA4F;IAC5F,MAAM,kBAAkB,GAAG,MAAM,CAAC,YAAY;QAC5C,CAA
C,CAAC,IAAA,yCAA2B,EAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,kBAAkB,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;YACvF,IAAI;YACJ,OAAO,EAAE,CAAC,IAAI,CAAC;SAChB,CAAC,CAAC;QACL,CAAC,CAAC,EAAE,CAAC;IAEP,MAAM,YAAY,GAAG,CAAC,GAAG,cAAc,EAAE,GAAG,kBAAkB,CAAC,CAAC;IAEhE,2FAA2F;IAC3F,4FAA4F;IAC5F,mFAAmF;IACnF,MAAM,QAAQ,GAAG,MAAM,CAAC,YAAY;QAClC,CAAC,CAAC;YACE,IAAA,sBAAc,EACZ,MAAM,CAAC,MAAM,CAAC,WAAW,EACzB,YAAY,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,IAAI,CAAC,CACrC;SACF;QACH,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC,IAAA,sBAAc,EAAC,IAAI,EAAE,OAAO,CAAC,CAAC,CAAC;IAE3E,MAAM,QAAQ,GAAG;;;;aAIN,MAAM,CAAC,MAAM,CAAC,WAAW;kBACpB,CAAC,MAAM,IAAA,6BAAqB,EAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;;EAE/D,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC;;;EAGnB,YAAY,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,IAAA,oBAAY,EAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;;;CAG9D,CAAC;IAEA,MAAM,iBAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,mBAAI,CAAC,IAAI,CAAC,WAAW,EAAE,eAAe,CAAC,EAAE,QAAQ,CAAC,CAAC;AACjF,CAAC,CAAC;AApDW,QAAA,2BAA2B,+BAoDtC;AAEK,MAAM,qBAAqB,GAAG,KAAK,EAAE,MAAiB,EAAqB,EAAE;IAClF,6DAA6D;IAC7D,MAAM,IAAI,GAAG,CAAC,YAAY,EAAE,MAAM,CAAC,SAAS,EAAE,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,oBAAoB,CAAC,CAAC;IAE9F,IAAI,CAAC;QACH,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,IAAA,qBAAU,EAAC,YAAY,EAAE,IAAI,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC;QAC5E,MAAM,KAAK,GAAG,yCAAyC,CAAC;QACxD,MAAM,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC;QAC9C,IAAI,KAAK,EAAE,CAAC;YACV,OAAO,CAAC,SAAS,KAAK,IAAI,CAAC,CAAC;QAC9B,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,EAAE,CAAC;QACpB,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,OAAO,CAAC,IAAI,CACV,sFAAsF,CACvF,CAAC;IACJ,CAAC;IAED,wCAAwC;IACxC,OAAO,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC,CAAC;AArBW,QAAA,qBAAqB,yBAqBhC;AAEK,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,OAAiB,EAAE,EAAE;IAChE,OAAO;iBACQ,IAAI;qBACA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC;SAChC,CAAC;AACV,CAAC,CAAC;AALW,QAAA,cAAc,kBAKzB;AAEK,MAAM,YAAY,GAAG,CAAC,IAAY,EAAE,EAAE;IAC3C,OAAO;iBACQ,IAAI;8BACS,IAAI;SA
CzB,CAAC;AACV,CAAC,CAAC;AALW,QAAA,YAAY,gBAKvB;AAEK,MAAM,sBAAsB,GAAG,CAAC,MAAiB,EAAE,EAAE;IAC1D,IAAI,CAAC;QACH,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;YACrC,iBAAE,CAAC,SAAS,CAAC,MAAM,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QACtD,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,eAAQ,CAAC,MAAM,CAAC,uCAAuC,EAAE,YAAY,CAAC,CAAC;IACzE,CAAC;AACH,CAAC,CAAC;AATW,QAAA,sBAAsB,0BASjC;AAEK,MAAM,cAAc,GAAG,CAAC,MAAiB,EAAE,EAAE;IAClD,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,2BAA2B,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC,EAAE,CAAC,CAAC;IAChF,OAAO,CAAC,GAAG,CAAC,cAAc,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;IACvD,OAAO,CAAC,GAAG,CAAC,iBAAiB,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;IAC7D,OAAO,CAAC,GAAG,CAAC,eAAe,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,eAAe,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;IACzD,OAAO,CAAC,GAAG,CAAC,sBAAsB,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;IAElE,IAAI,MAAM,CAAC,MAAM,KAAK,YAAY,EAAE,CAAC;QACnC,OAAO,CAAC,GAAG,CAAC,oBAAoB,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC;IAC3E,CAAC;IACD,OAAO,CAAC,GAAG,CAAC,kCAAkC,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC;IAEjF,OAAO,CAAC,GAAG,EAAE,CAAC;AAChB,CAAC,CAAC;AAfW,QAAA,cAAc,kBAezB;AAEK,MAAM,cAAc,GAAG,KAAK,EAAE,MAAiB,EAAE,EAAE;IACxD,6BAA6B;IAC7B,MAAM,IAAA,wBAAgB,EAAC,MAAM,CAAC,CAAC;IAC/B,IAAA,8BAAsB,EAAC,MAAM,CAAC,CAAC;IAE/B,4CAA4C;IAC5C,MAAM,IAAA,yBAAiB,EAAC,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;IAClD,MAAM,IAAA,wBAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;AACnD,CAAC,CAAC;AARW,QAAA,cAAc,kBAQzB;AAEK,MAAM,gBAAgB,GAAG,KAAK,EAAE,MAAiB,EAAE,EAAE;IAC1D,+CAA+C;IAC/C,MAAM,IAAA,wBAAgB,EAAC,MAAM,CAAC,CAAC;IAC/B,IAAA,8BAAsB,EAAC,MAAM,CAAC,CAAC;IAC/B,MAAM,WAAW,GAAG,MAAM,IAAA,0BAAkB,EAAC,MAAM,CAAC,CAAC;IACrD,MAAM,gBAAgB,GAAG,mBAAI,CAAC,IAAI,CAAC,WAA
W,EAAE,cAAc,CAAC,CAAC;IAEhE,4CAA4C;IAC5C,MAAM,IAAA,yBAAiB,EAAC,MAAM,EAAE,gBAAgB,CAAC,CAAC;IAClD,MAAM,IAAA,wBAAgB,EAAC,MAAM,EAAE,gBAAgB,CAAC,CAAC;AACnD,CAAC,CAAC;AAVW,QAAA,gBAAgB,oBAU3B"} \ No newline at end of file +{"version":3,"file":"ios.js","sourceRoot":"","sources":["../../src/utils/ios.ts"],"names":[],"mappings":";;;;;;AAAA,kDAA0B;AAC1B,2DAA8C;AAC9C,sDAAyB;AACzB,0DAA6B;AAE7B,yCAAwC;AACxC,2CAA0C;AAC1C,oDAA+B;AAC/B,+CAIuB;AACvB,uCAAwC;AAGxC;;;;;;;;;;;GAWG;AACI,MAAM,wBAAwB,GAAG,CACtC,MAAiB,EACjB,cAA2B,EACjB,EAAE;IACZ,MAAM,eAAe,GAAG,mBAAI,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,GAAG,MAAM,CAAC,MAAM,YAAY,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC;IACjG,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC;QACpC,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,IAAI,MAAc,CAAC;IACnB,IAAI,CAAC;QACH,MAAM,GAAG,IAAA,6BAAQ,EAAC,aAAa,eAAe,GAAG,EAAE,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC,CAAC;IAC3E,CAAC;IAAC,MAAM,CAAC;QACP,yFAAyF;QACzF,mDAAmD;QACnD,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,MAAM,KAAK,GAAG,IAAI,GAAG,EAAU,CAAC;IAChC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;QACtC,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,+BAA+B,CAAC,CAAC;QACjE,IAAI,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;YACf,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QACtB,CAAC;IACH,CAAC;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,KAAK,MAAM,CAAC,MAAM,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AACjG,CAAC,CAAC;AA1BW,QAAA,wBAAwB,4BA0BnC;AAEF;;;;;;;;;;;GAWG;AACH,MAAM,oBAAoB,GAAG,CAAC,aAAqB,EAAY,EAAE;IAC/D,MAAM,IAAI,GAAG,CAAC,YAAY,EAAE,aAAa,CAAC,CAAC;IAC3C,MAAM,QAAQ,GAAG,GAAG,aAAa,OAAO,CAAC;IACzC,IAAI,iBAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC5B,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,QAAQ,CAAC,CAAC;IACxC,CAAC;IACD,OAAO,IAAI,CAAC;AACd,CAAC,CAAC;AAEF;;;;;GAKG;AACH,MAAM,wBAAwB,GAAG,CAAC,SAAiB,EAAE,IAAY,EAAiB,EAAE;IAClF,MAAM,UAAU,GAAG;QACjB,mBAAI,CAAC,IAAI,CAAC,SAAS,EAAE,GAAG,IAAI,YAAY,CAAC;QACzC,mBAAI,CAAC,IAAI,CAAC,SAAS,EAAE,0BAA0B,EAAE,IAAI,EAAE,GAAG,IAAI,YAAY,CAAC;KAC5E,CAAC;IACF,OAAO,UAAU,CAAC,IAAI,CAAC,C
AAC,SAAS,EAAE,EAAE,CAAC,iBAAE,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,IAAI,IAAI,CAAC;AAC1E,CAAC,CAAC;AAEF;;;;;GAKG;AACH,MAAM,0BAA0B,GAAG,KAAK,EACtC,MAAiB,EACjB,IAAY,EACZ,IAAY,EACM,EAAE;IACpB,MAAM,eAAe,GAAG,wBAAwB,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;IACtE,MAAM,kBAAkB,GAAG,wBAAwB,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;IAE5E,IAAI,CAAC,eAAe,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAC5C,OAAO,CAAC,IAAI,CACV,6CAA6C,IAAI,kBAAkB,MAAM,CAAC,MAAM,aAAa;YAC3F,0FAA0F;YAC1F,+EAA+E,IAAI,cAAc,IAAI,IAAI,CAC5G,CAAC;QACF,OAAO,KAAK,CAAC;IACf,CAAC;IAED,MAAM,UAAU,GAAG,mBAAI,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,IAAI,cAAc,CAAC,CAAC;IAC1D,IAAI,iBAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;QAC9B,iBAAE,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;IAC1D,CAAC;IAED,MAAM,IAAI,GAAG;QACX,qBAAqB;QACrB,GAAG,oBAAoB,CAAC,eAAe,CAAC;QACxC,GAAG,oBAAoB,CAAC,kBAAkB,CAAC;QAC3C,SAAS;QACT,UAAU;KACX,CAAC;IAEF,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;QAC5C,OAAO,IAAI,CAAC;IACd,CAAC;IAED,MAAM,IAAA,qBAAU,EAAC,YAAY,EAAE,IAAI,EAAE,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC;IAClE,OAAO,IAAI,CAAC;AACd,CAAC,CAAC;AAEK,MAAM,gBAAgB,GAAG,KAAK,EAAE,MAAiB,EAAE,EAAE;IAC1D,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAC;QAC9C,OAAO;IACT,CAAC;IAED,OAAO,IAAA,qBAAW,EAAC;QACjB,SAAS,EAAE,KAAK,IAAI,EAAE;YACpB,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;gBACrC,OAAO;YACT,CAAC;YAED,iBAAE,CAAC,WAAW,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE;gBAChD,MAAM,QAAQ,GAAG,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,EAAE,CAAC;gBAC/C,iBAAE,CAAC,MAAM,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;YACxD,CAAC,CAAC,CAAC;QACL,CAAC;QACD,aAAa,EAAE,mCAAmC;QAClD,cAAc,EAAE,0CAA0C;QAC1D,YAAY,EAAE,uCAAuC;KACtD,CAAC,CAAC;AACL,CAAC,CAAC;AArBW,QAAA,gBAAgB,oBAqB3B;AAEK,MAAM,cAAc,GAAG,KAAK,EAAE,MAAiB,EAAE,EAAE;IACxD,MAAM,IAAI,GAAG;QACX,YAAY;QACZ,MAAM,CAAC,SAAS;QAChB,SAAS;QACT,MAAM,CAAC,MAAM;QACb,kBAAkB;QAClB,MAAM,CAAC,eAAe;QACtB,cAA
c;QACd,2BAA2B;QAC3B,cAAc;QACd,kCAAkC;QAClC,gBAAgB;QAChB,MAAM,CAAC,kBAAkB;QACzB,sEAAsE;QACtE,kEAAkE;QAClE,yEAAyE;QACzE,sEAAsE;QACtE,0CAA0C;KAC3C,CAAC;IAEF,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;QAC5C,OAAO;IACT,CAAC;IAED,OAAO,IAAA,qBAAW,EAAC;QACjB,SAAS,EAAE,GAAG,EAAE,CAAC,IAAA,qBAAU,EAAC,YAAY,EAAE,IAAI,EAAE,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC;QAC5E,aAAa,EAAE,wBAAwB;QACvC,cAAc,EAAE,+BAA+B;QAC/C,YAAY,EAAE,4BAA4B;QAC1C,OAAO,EAAE,MAAM,CAAC,OAAO;KACxB,CAAC,CAAC;AACL,CAAC,CAAC;AAjCW,QAAA,cAAc,kBAiCzB;AAEK,MAAM,gBAAgB,GAAG,KAAK,EAAE,MAAiB,EAAE,IAAY,EAAE,EAAE;IACxE,OAAO,CAAC,GAAG,CAAC,0BAA0B,EAAE,IAAI,CAAC,CAAC;IAE9C,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO;IACT,CAAC;IAED,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,CAAC,uBAAW,CAAC,CAAC;IAChD,KAAK,MAAM,WAAW,IAAI,YAAY,EAAE,CAAC;QACvC,IAAI,iBAAE,CAAC,UAAU,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC;YACpC,MAAM,IAAA,qBAAW,EAAC;gBAChB,SAAS,EAAE,KAAK,IAAI,EAAE,CACpB,iBAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,WAAW,CAAC,IAAI,EAAE,mBAAI,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,WAAW,CAAC,IAAI,cAAc,CAAC,EAAE;oBACnF,KAAK,EAAE,IAAI;oBACX,SAAS,EAAE,IAAI;iBAChB,CAAC;gBACJ,aAAa,EAAE,WAAW,WAAW,CAAC,IAAI,gCAAgC;gBAC1E,cAAc,EAAE,WAAW,WAAW,CAAC,IAAI,uCAAuC;gBAClF,YAAY,EAAE,WAAW,WAAW,CAAC,IAAI,oCAAoC;gBAC7E,OAAO,EAAE,MAAM,CAAC,OAAO;aACxB,CAAC,CAAC;QACL,CAAC;aAAM,IAAI,WAAW,CAAC,IAAI,KAAK,uBAAW,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;YACxD,eAAQ,CAAC,MAAM,CAAC,gCAAgC,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;QACtE,CAAC;aAAM,CAAC;YACN,OAAO,CAAC,IAAI,CACV,GAAG,WAAW,CAAC,IAAI,8BAA8B,WAAW,CAAC,IAAI,oCAAoC,CACtG,CAAC;QACJ,CAAC;IACH,CAAC;IAED,IAAI,MAAM,CAAC,YAAY,EAAE,CAAC;QACxB,kFAAkF;QAClF,sFAAsF;QACtF,uFAAuF;QACvF,MAAM,OAAO,GAAG,IAAA,yCAA2B,EAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,kBAAkB,CAAC,CAAC;QAEtF,oFAAoF;QACpF,yFAAyF;QACzF,4FAA4F;QAC5F,2DAA2D;QAC3D,MAAM,cAAc,GAAG,IAAI,GAAG,EAAU,CAAC;QACzC,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;YAC7B,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC;gBACvC,cAAc,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;gBAClC,
MAAM,IAAA,iCAAmB,EAAC,MAAM,EAAE,MAAM,CAAC,kBAAkB,EAAE,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC;YAC5F,CAAC;YACD,MAAM,IAAA,qBAAW,EAAC;gBAChB,SAAS,EAAE,KAAK,IAAI,EAAE,CACpB,iBAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,eAAe,EAAE,mBAAI,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,MAAM,CAAC,IAAI,cAAc,CAAC,EAAE;oBACpF,KAAK,EAAE,IAAI;oBACX,SAAS,EAAE,IAAI;iBAChB,CAAC;gBACJ,aAAa,EAAE,WAAW,MAAM,CAAC,IAAI,gCAAgC;gBACrE,cAAc,EAAE,WAAW,MAAM,CAAC,IAAI,uCAAuC;gBAC7E,YAAY,EAAE,WAAW,MAAM,CAAC,IAAI,oCAAoC;gBACxE,OAAO,EAAE,MAAM,CAAC,OAAO;aACxB,CAAC,CAAC;QACL,CAAC;IACH,CAAC;IAED,iFAAiF;IACjF,qFAAqF;IACrF,qEAAqE;IACrE,MAAM,cAAc,GAAG,4BAA4B,CAAC,MAAM,CAAC,CAAC;IAC5D,MAAM,eAAe,GAAG,IAAA,gCAAwB,EAAC,MAAM,EAAE,cAAc,CAAC,CAAC;IACzE,KAAK,MAAM,OAAO,IAAI,eAAe,EAAE,CAAC;QACtC,MAAM,IAAA,qBAAW,EAAC;YAChB,SAAS,EAAE,GAAG,EAAE,CAAC,0BAA0B,CAAC,MAAM,EAAE,OAAO,EAAE,IAAI,CAAC;YAClE,aAAa,EAAE,yBAAyB,OAAO,oBAAoB;YACnE,cAAc,EAAE,yBAAyB,OAAO,2BAA2B;YAC3E,YAAY,EAAE,yBAAyB,OAAO,wBAAwB;YACtE,OAAO,EAAE,MAAM,CAAC,OAAO;SACxB,CAAC,CAAC;IACL,CAAC;AACH,CAAC,CAAC;AA1EW,QAAA,gBAAgB,oBA0E3B;AAEF;;;;GAIG;AACH,MAAM,4BAA4B,GAAG,CAAC,MAAiB,EAAe,EAAE;IACtE,MAAM,OAAO,GAAG,IAAI,GAAG,CAAS,CAAC,MAAM,CAAC,MAAM,EAAE,GAAG,IAAA,uCAAyB,GAAE,CAAC,CAAC,CAAC;IACjF,IAAI,MAAM,CAAC,YAAY,EAAE,CAAC;QACxB,KAAK,MAAM,MAAM,IAAI,IAAA,yCAA2B,EAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,kBAAkB,CAAC,EAAE,CAAC;YAC3F,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAC3B,CAAC;IACH,CAAC;IACD,OAAO,OAAO,CAAC;AACjB,CAAC,CAAC;AAEK,MAAM,kBAAkB,GAAG,KAAK,EAAE,MAAiB,EAAmB,EAAE;IAC7E,IAAI,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,KAAK,YAAY,EAAE,CAAC;QACpD,OAAO,CAAC,GAAG,CACT,qCAAqC,MAAM,CAAC,MAAM,CAAC,WAAW,aAAa,MAAM,CAAC,SAAS,EAAE,CAC9F,CAAC;QACF,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,OAAO,MAAM,IAAA,qBAAW,EAAC;QACvB,SAAS,EAAE,KAAK,IAAI,EAAE;YACpB,IAAI,MAAM,CAAC,MAAM,KAAK,YAAY,EAAE,CAAC;gBACnC,OAAO,EAAE,CAAC;YACZ,CAAC;YAED,MAAM,WAAW,GAAG,mBAAI,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;YAC3E,MAAM,iBAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,WAAW,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YA
E1D,8BAA8B;YAC9B,MAAM,eAAe,GAAG,mBAAI,CAAC,IAAI,CAAC,WAAW,EAAE,cAAc,CAAC,CAAC;YAC/D,MAAM,iBAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,eAAe,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YAE9D,MAAM,IAAA,mCAA2B,EAAC,MAAM,EAAE,WAAW,CAAC,CAAC;YAEvD,OAAO,WAAW,CAAC;QACrB,CAAC;QACD,aAAa,EAAE,2BAA2B;QAC1C,cAAc,EAAE,kCAAkC;QAClD,YAAY,EAAE,+BAA+B;QAC7C,OAAO,EAAE,MAAM,CAAC,OAAO;KACxB,CAAC,CAAC;AACL,CAAC,CAAC;AA9BW,QAAA,kBAAkB,sBA8B7B;AAEK,MAAM,iBAAiB,GAAG,KAAK,EAAE,MAAiB,EAAE,EAAU,EAAE,EAAE;IACvE,MAAM,aAAa,GAAG,GAAG,MAAM,CAAC,MAAM,cAAc,CAAC;IACrD,MAAM,UAAU,GAAG,mBAAI,CAAC,IAAI,CAAC,EAAE,EAAE,aAAa,CAAC,CAAC;IAEhD,MAAM,IAAI,GAAG;QACX,qBAAqB;QACrB,GAAG,oBAAoB,CAAC,GAAG,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,YAAY,CAAC;QACtE,GAAG,oBAAoB,CAAC,GAAG,MAAM,CAAC,SAAS,IAAI,MAAM,CAAC,MAAM,YAAY,CAAC;QACzE,SAAS;QACT,UAAU;KACX,CAAC;IAEF,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;QAC5C,OAAO;IACT,CAAC;IAED,OAAO,IAAA,qBAAW,EAAC;QACjB,SAAS,EAAE,GAAG,EAAE,CAAC,IAAA,qBAAU,EAAC,YAAY,EAAE,IAAI,EAAE,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC;QAC5E,aAAa,EAAE,4CAA4C;QAC3D,cAAc,EAAE,mDAAmD;QACnE,YAAY,EAAE,gDAAgD;QAC9D,OAAO,EAAE,MAAM,CAAC,OAAO;KACxB,CAAC,CAAC;AACL,CAAC,CAAC;AAxBW,QAAA,iBAAiB,qBAwB5B;AAEK,MAAM,UAAU,GAAG,GAAuB,EAAE;IACjD,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,mBAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,KAAK,CAAC,CAAC;QAChD,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;YAC5B,eAAQ,CAAC,MAAM,CAAC,yBAAyB,CAAC,CAAC;QAC7C,CAAC;QAED,MAAM,cAAc,GAAG,iBAAE;aACtB,WAAW,CAAC,OAAO,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC;aAC7C,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QACxC,MAAM,MAAM,GAAG,cAAc,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE;YAC/C,MAAM,aAAa,GAAG,mBAAI,CAAC,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC;YACzD,MAAM,KAAK,GAAG,iBAAE,CAAC,WAAW,CAAC,aAAa,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YACjE,OAAO,KAAK,CAAC,IAAI,CACf,CAAC,IAAI,EAAE,EAAE,CAAC,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,CAAC,8BAA8B,CAAC,CACpF,CAAC;QACJ,CAAC,CAAC,CAAC;QAEH,IAAI,MAAM,EAAE,CAAC;
YACX,OAAO,MAAM,CAAC,IAAI,CAAC;QACrB,CAAC;QAED,eAAQ,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;IAC1C,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,eAAQ,CAAC,MAAM,CAAC,6BAA6B,EAAE,YAAY,CAAC,CAAC;IAC/D,CAAC;IAED,OAAO;AACT,CAAC,CAAC;AA7BW,QAAA,UAAU,cA6BrB;AAEK,MAAM,aAAa,GAAG,CAAC,MAAe,EAAsB,EAAE;IACnE,mDAAmD;IACnD,8BAA8B;IAC9B,IAAI,MAAM,EAAE,CAAC;QACX,OAAO,mBAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,mCAAmC,CAAC,CAAC;IACvE,CAAC;IAED,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,mBAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,KAAK,CAAC,CAAC;QAChD,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;YAC5B,eAAQ,CAAC,MAAM,CAAC,yBAAyB,CAAC,CAAC;QAC7C,CAAC;QAED,MAAM,KAAK,GAAG,iBAAE,CAAC,WAAW,CAAC,OAAO,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;QAC/D,MAAM,SAAS,GAAG,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC;QAC3E,IAAI,SAAS,EAAE,CAAC;YACd,OAAO,mBAAI,CAAC,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC;QAC5C,CAAC;QAED,eAAQ,CAAC,MAAM,CAAC,yBAAyB,CAAC,CAAC;IAC7C,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,eAAQ,CAAC,MAAM,CAAC,6BAA6B,EAAE,YAAY,CAAC,CAAC;IAC/D,CAAC;IAED,OAAO;AACT,CAAC,CAAC;AA1BW,QAAA,aAAa,iBA0BxB;AAEK,MAAM,2BAA2B,GAAG,KAAK,EAAE,MAAiB,EAAE,WAAmB,EAAE,EAAE;IAC1F,IAAI,MAAM,CAAC,MAAM,KAAK,YAAY,EAAE,CAAC;QACnC,OAAO;IACT,CAAC;IAED,MAAM,kBAAkB,GAAG,iBAAE,CAAC,UAAU,CAAC,uBAAW,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IACjE,MAAM,cAAc,GAAG;QACrB,EAAE,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE;QACjD,EAAE,IAAI,EAAE,UAAU,EAAE,OAAO,EAAE,CAAC,UAAU,CAAC,EAAE;QAC3C,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,uBAAW,CAAC,KAAK,EAAE,uBAAW,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;KAClF,CAAC;IAEF,uFAAuF;IACvF,8FAA8F;IAC9F,4FAA4F;IAC5F,MAAM,kBAAkB,GAAG,MAAM,CAAC,YAAY;QAC5C,CAAC,CAAC,IAAA,yCAA2B,EAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,kBAAkB,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;YACvF,I
AAI;YACJ,OAAO,EAAE,CAAC,IAAI,CAAC;SAChB,CAAC,CAAC;QACL,CAAC,CAAC,EAAE,CAAC;IAEP,0FAA0F;IAC1F,oFAAoF;IACpF,6DAA6D;IAC7D,MAAM,mBAAmB,GAAG,IAAA,gCAAwB,EAClD,MAAM,EACN,IAAI,GAAG,CAAC;QACN,MAAM,CAAC,MAAM;QACb,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,IAAI,CAAC;QACzC,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,IAAI,CAAC;KAC9C,CAAC,CACH,CAAC;IACF,MAAM,eAAe,GAAG,mBAAmB,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;IAEvF,MAAM,YAAY,GAAG,CAAC,GAAG,cAAc,EAAE,GAAG,kBAAkB,EAAE,GAAG,eAAe,CAAC,CAAC;IAEpF,2FAA2F;IAC3F,4FAA4F;IAC5F,mFAAmF;IACnF,MAAM,QAAQ,GAAG,MAAM,CAAC,YAAY;QAClC,CAAC,CAAC;YACE,IAAA,sBAAc,EACZ,MAAM,CAAC,MAAM,CAAC,WAAW,EACzB,YAAY,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,IAAI,CAAC,CACrC;SACF;QACH,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC,IAAA,sBAAc,EAAC,IAAI,EAAE,OAAO,CAAC,CAAC,CAAC;IAE3E,MAAM,QAAQ,GAAG;;;;aAIN,MAAM,CAAC,MAAM,CAAC,WAAW;kBACpB,CAAC,MAAM,IAAA,6BAAqB,EAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;;EAE/D,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC;;;EAGnB,YAAY,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,IAAA,oBAAY,EAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;;;CAG9D,CAAC;IAEA,MAAM,iBAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,mBAAI,CAAC,IAAI,CAAC,WAAW,EAAE,eAAe,CAAC,EAAE,QAAQ,CAAC,CAAC;AACjF,CAAC,CAAC;AAjEW,QAAA,2BAA2B,+BAiEtC;AAEK,MAAM,qBAAqB,GAAG,KAAK,EAAE,MAAiB,EAAqB,EAAE;IAClF,6DAA6D;IAC7D,MAAM,IAAI,GAAG,CAAC,YAAY,EAAE,MAAM,CAAC,SAAS,EAAE,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,oBAAoB,CAAC,CAAC;IAE9F,IAAI,CAAC;QACH,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,IAAA,qBAAU,EAAC,YAAY,EAAE,IAAI,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC;QAC5E,MAAM,KAAK,GAAG,yCAAyC,CAAC;QACxD,MAAM,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC;QAC9C,IAAI,KAAK,EAAE,CAAC;YACV,OAAO,CAAC,SAAS,KAAK,IAAI,CAAC,CAAC;QAC9B,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,EAAE,CAAC;QACpB,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,OAAO,CAAC,IAAI,CACV,sFAAsF,CACvF,CAAC;IACJ,CAAC;IAED
,wCAAwC;IACxC,OAAO,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC,CAAC;AArBW,QAAA,qBAAqB,yBAqBhC;AAEK,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,OAAiB,EAAE,EAAE;IAChE,OAAO;iBACQ,IAAI;qBACA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC;SAChC,CAAC;AACV,CAAC,CAAC;AALW,QAAA,cAAc,kBAKzB;AAEK,MAAM,YAAY,GAAG,CAAC,IAAY,EAAE,EAAE;IAC3C,OAAO;iBACQ,IAAI;8BACS,IAAI;SACzB,CAAC;AACV,CAAC,CAAC;AALW,QAAA,YAAY,gBAKvB;AAEK,MAAM,sBAAsB,GAAG,CAAC,MAAiB,EAAE,EAAE;IAC1D,IAAI,CAAC;QACH,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;YACrC,iBAAE,CAAC,SAAS,CAAC,MAAM,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QACtD,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,eAAQ,CAAC,MAAM,CAAC,uCAAuC,EAAE,YAAY,CAAC,CAAC;IACzE,CAAC;AACH,CAAC,CAAC;AATW,QAAA,sBAAsB,0BASjC;AAEK,MAAM,cAAc,GAAG,CAAC,MAAiB,EAAE,EAAE;IAClD,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,2BAA2B,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC,EAAE,CAAC,CAAC;IAChF,OAAO,CAAC,GAAG,CAAC,cAAc,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;IACvD,OAAO,CAAC,GAAG,CAAC,iBAAiB,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;IAC7D,OAAO,CAAC,GAAG,CAAC,eAAe,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,eAAe,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;IACzD,OAAO,CAAC,GAAG,CAAC,sBAAsB,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;IAElE,IAAI,MAAM,CAAC,MAAM,KAAK,YAAY,EAAE,CAAC;QACnC,OAAO,CAAC,GAAG,CAAC,oBAAoB,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC;IAC3E,CAAC;IACD,OAAO,CAAC,GAAG,CAAC,kCAAkC,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC;IAEjF,OAAO,CAAC,GAAG,EAAE,CAAC;AAChB,CAAC,CAAC;AAfW,QAAA,cAAc,kBAezB;AAEK,MAAM,cAAc,GAAG,KAAK,EAAE,MAAiB,EAAE,EAAE;IACxD,6BAA6B;IAC7B,MAAM,IAAA,wBAAgB,EAAC,MAAM,CAAC,CAAC;IAC/B,IAAA,8BAAsB,EAAC,MAAM,CAAC,CAAC;IAE/B,4CAA4C;IAC5C,MAAM,IAAA,yBAAiB,EAAC,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;IAClD,MAAM,IAAA,wBAAgB,EAAC,MAAM,EA
AE,MAAM,CAAC,SAAS,CAAC,CAAC;AACnD,CAAC,CAAC;AARW,QAAA,cAAc,kBAQzB;AAEK,MAAM,gBAAgB,GAAG,KAAK,EAAE,MAAiB,EAAE,EAAE;IAC1D,+CAA+C;IAC/C,MAAM,IAAA,wBAAgB,EAAC,MAAM,CAAC,CAAC;IAC/B,IAAA,8BAAsB,EAAC,MAAM,CAAC,CAAC;IAC/B,MAAM,WAAW,GAAG,MAAM,IAAA,0BAAkB,EAAC,MAAM,CAAC,CAAC;IACrD,MAAM,gBAAgB,GAAG,mBAAI,CAAC,IAAI,CAAC,WAAW,EAAE,cAAc,CAAC,CAAC;IAEhE,4CAA4C;IAC5C,MAAM,IAAA,yBAAiB,EAAC,MAAM,EAAE,gBAAgB,CAAC,CAAC;IAClD,MAAM,IAAA,wBAAgB,EAAC,MAAM,EAAE,gBAAgB,CAAC,CAAC;AACnD,CAAC,CAAC;AAVW,QAAA,gBAAgB,oBAU3B"} \ No newline at end of file diff --git a/packages/expo-brownfield/cli/build/utils/mangle.d.ts b/packages/expo-brownfield/cli/build/utils/mangle.d.ts new file mode 100644 index 00000000000000..a41a72cde64ebf --- /dev/null +++ b/packages/expo-brownfield/cli/build/utils/mangle.d.ts @@ -0,0 +1,28 @@ +export interface MangleContext { + podsProjectPath: string; + podTargetLabels: string[]; + podXcconfigPaths: string[]; + manglePrefix: string; + xcconfigPath: string; + specsChecksum: string; +} +/** + * Entry point invoked by the Ruby shim during `pod install`. Responsibilities: + * 1. Build the pod targets to iphonesimulator so we have binaries to scan. + * 2. nm those binaries, filter Swift symbols, and assemble `MANGLING_DEFINES`. + * 3. Write the mangling xcconfig + patch each pod's xcconfig to consume it. + * + * The Ruby shim already short-circuited on a checksum match before reaching + * here, so this function unconditionally regenerates. 
+ */ +export declare const runMangle: (context: MangleContext, options: { + verbose: boolean; +}) => Promise; +export declare const isManglingUpToDate: (xcconfigPath: string, expectedChecksum: string) => boolean; +export declare const __testing: { + isSwiftSymbol: (line: string) => boolean; + extractClasses: (lines: string[]) => string[]; + extractConstants: (lines: string[]) => string[]; + extractCategorySelectors: (lines: string[], classes: string[]) => string[]; + prefixSelectors: (prefix: string, selectors: string[]) => string[]; +}; diff --git a/packages/expo-brownfield/cli/build/utils/mangle.js b/packages/expo-brownfield/cli/build/utils/mangle.js new file mode 100644 index 00000000000000..dd206894584ae5 --- /dev/null +++ b/packages/expo-brownfield/cli/build/utils/mangle.js @@ -0,0 +1,297 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.__testing = exports.isManglingUpToDate = exports.runMangle = void 0; +const node_fs_1 = __importDefault(require("node:fs")); +const node_path_1 = __importDefault(require("node:path")); +const commands_1 = require("./commands"); +const MANGLING_DEFINES_KEY = 'MANGLING_DEFINES'; +const MANGLED_SPECS_CHECKSUM_KEY = 'MANGLED_SPECS_CHECKSUM'; +const BUILD_DIR_NAME = 'build'; +const BUILT_PRODUCTS_SUBDIR = node_path_1.default.join('build', 'Release-iphonesimulator'); +/** + * Symbol regexes lifted verbatim from the cocoapods-mangle gem + * (lib/cocoapods_mangle/defines.rb) — they're the result of significant + * empirical discovery of Swift symbol shapes that must NOT be mangled. + * Keep behavior byte-equivalent so an existing project's xcconfig diff is + * limited to whitespace/checksum. 
+ */ +const SWIFT_SYMBOL_PATTERNS = [ + /\$s/, + / __(_)?swift/, + /\d+Swift(\d+)?/, + /Swift\d+/, + /\d+SwiftUI(\d+)?/, + /symbolic /, + /associated conformance/, + / globalinit/, + /globalinit/, + /_OBJC_CLASS_\$__/, + /____ /, + /_PROTOCOL/, + /_\w+_swiftoverride_/, + /_Z\w+swift/, + /get_witness_table /, +]; +const isSwiftSymbol = (line) => SWIFT_SYMBOL_PATTERNS.some((re) => re.test(line)); +const LOG_FILE_NAME = 'expo-brownfield-mangle.log'; +/** + * Build the `iphonesimulator` Release configuration for each pod target so the + * resulting `.framework`/`.a` binaries can be `nm`-scanned for symbols. Mirrors + * `CocoapodsMangle::Builder#build!` from the gem. + * + * Two build-setting overrides are passed on the command line: + * - `SWIFT_VERIFY_EMITTED_MODULE_INTERFACE=NO` + * - `OTHER_SWIFT_FLAGS='$(inherited) -no-verify-emitted-module-interface'` + * + * These are normally written into the Pods project by `addPrebuiltSettings` + * inside the Podfile's `post_install` block, but CocoaPods doesn't persist + * post_install mutations to disk until *after* all hooks finish — which means + * our mangle hook reads the un-mutated on-disk project. Without these + * overrides, Swift module emission fails for modules that import prebuilt RN + * frameworks (e.g. `ExpoModulesCore`). + * + * On failure the full xcodebuild output is written to + * `/build/expo-brownfield-mangle.log` and the path surfaced in the + * thrown error so the user has somewhere to look. 
+ */ +const buildPodTargets = async (podsProjectPath, podTargetLabels, options) => { + const podsDir = node_path_1.default.dirname(podsProjectPath); + const buildDir = node_path_1.default.join(podsDir, BUILD_DIR_NAME); + node_fs_1.default.rmSync(buildDir, { recursive: true, force: true }); + node_fs_1.default.mkdirSync(buildDir, { recursive: true }); + const logPath = node_path_1.default.join(buildDir, LOG_FILE_NAME); + node_fs_1.default.writeFileSync(logPath, ''); + const sharedArgs = [ + '-project', + podsProjectPath, + '-configuration', + 'Release', + '-sdk', + 'iphonesimulator', + 'build', + 'SWIFT_VERIFY_EMITTED_MODULE_INTERFACE=NO', + 'OTHER_SWIFT_FLAGS=$(inherited) -no-verify-emitted-module-interface', + ]; + for (const target of podTargetLabels) { + const args = ['-target', target, ...sharedArgs]; + try { + const { stdout } = await (0, commands_1.runCommand)('xcodebuild', args, { + cwd: podsDir, + verbose: options.verbose, + }); + node_fs_1.default.appendFileSync(logPath, `\n=== xcodebuild ${args.join(' ')} ===\n${stdout}`); + } + catch (error) { + node_fs_1.default.appendFileSync(logPath, `\n=== xcodebuild ${args.join(' ')} (FAILED) ===\n${error instanceof Error ? error.message : String(error)}`); + throw new Error(`expo-brownfield: failed to build pod target '${target}' for symbol mangling. ` + + `This usually means a Swift module couldn't compile against the current Pod xcconfigs. ` + + `Inspect the full xcodebuild output at: ${logPath}`); + } + } + return node_path_1.default.join(podsDir, BUILT_PRODUCTS_SUBDIR); +}; +/** + * After the simulator build, find the binaries to scan with `nm`. Skip the + * umbrella `Pods_*` and `libPods-*` outputs since they're aggregator targets, + * not the per-pod libraries we want to mangle. 
+ */ +const findBinariesToMangle = (builtProductsDir) => { + const binaries = []; + if (!node_fs_1.default.existsSync(builtProductsDir)) { + return binaries; + } + const walk = (dir) => { + for (const entry of node_fs_1.default.readdirSync(dir, { withFileTypes: true })) { + const full = node_path_1.default.join(dir, entry.name); + if (entry.isDirectory()) { + if (entry.name.endsWith('.framework')) { + if (!entry.name.startsWith('Pods_')) { + const fwName = entry.name.slice(0, -'.framework'.length); + binaries.push(node_path_1.default.join(full, fwName)); + } + continue; + } + walk(full); + continue; + } + if (entry.isFile() && entry.name.endsWith('.a') && !entry.name.startsWith('libPods-')) { + binaries.push(full); + } + } + }; + walk(builtProductsDir); + return binaries; +}; +const runNm = async (binaries, flags) => { + if (binaries.length === 0) { + return []; + } + const { stdout } = await (0, commands_1.runCommand)('nm', [flags, ...binaries], { verbose: false }); + return stdout.split('\n').filter((line) => line.length > 0); +}; +const extractClasses = (lines) => { + const filtered = lines.filter((line) => !isSwiftSymbol(line)); + const classSymbols = filtered + .filter((line) => /OBJC_CLASS_\$_/.test(line)) + .map((line) => line.replace(/^.*\$_/, '')); + return Array.from(new Set(classSymbols)); +}; +const extractConstants = (lines) => { + const filtered = lines.filter((line) => !isSwiftSymbol(line)); + const sConsts = filtered + .filter((line) => / S /.test(line)) + .filter((line) => !/_OBJC_/.test(line)) + .filter((line) => !/__block_descriptor.*/.test(line)) + .map((line) => line.replace(/^.* _/, '')); + const tConsts = filtered + .filter((line) => / T /.test(line)) + .filter((line) => !/__copy_helper_block.*/.test(line)) + .filter((line) => !/__destroy_helper_block.*/.test(line)) + .map((line) => line.replace(/^.* _/, '')); + return Array.from(new Set([...sConsts, ...tConsts])); +}; +/** + * Category selectors are emitted as ` t -[Class(Category) 
selector]` lines + * by `nm`. We skip selectors on classes that are themselves being mangled + * (their selectors get carried implicitly by the class rename) and otherwise + * extract just the selector head. + */ +const extractCategorySelectors = (lines, classes) => { + const classSet = new Set(classes); + const selectorLineRe = / t [-|+]\[[^ ]*\([^ ]*\) [^ ]*\]/; + const classNameRe = /[-|+]\[(.*?)\(/; + const selectors = []; + for (const line of lines) { + if (!selectorLineRe.test(line)) { + continue; + } + const className = classNameRe.exec(line)?.[1]; + if (className && classSet.has(className)) { + continue; + } + const beforeRBracket = line.split(']')[0] + ']'; + const lastToken = beforeRBracket.match(/[^ ]*\]$/)?.[0]; + if (!lastToken) { + continue; + } + const selector = lastToken.slice(0, -1).split(':')[0]; + if (selector) { + selectors.push(selector); + } + } + return Array.from(new Set(selectors)); +}; +const prefixSymbols = (prefix, symbols) => symbols.map((sym) => `${sym}=${prefix}${sym}`); +/** + * Property setter/getter pairs need symmetric handling so that `setFoo:` → + * `setFoo:` and `foo` → `foo` both round-trip. Lifted from + * `CocoapodsMangle::Defines.prefix_selectors` in the gem. 
+ */ +const prefixSelectors = (prefix, selectors) => { + const remaining = new Set(selectors); + const defines = []; + const setters = selectors.filter((sel) => /^set[A-Z]/.test(sel)); + for (const setter of setters) { + const upperGetter = setter.slice(3); + if (!upperGetter) { + continue; + } + const lowerGetter = upperGetter[0].toLowerCase() + upperGetter.slice(1); + const getter = selectors.find((sel) => sel === upperGetter || sel === lowerGetter); + if (!getter) { + continue; + } + remaining.delete(setter); + remaining.delete(getter); + defines.push(`${setter}=set${prefix}${getter}`); + defines.push(`${getter}=${prefix}${getter}`); + } + defines.push(...prefixSymbols(prefix, Array.from(remaining))); + return defines; +}; +const buildManglingDefines = async (prefix, binaries) => { + const allSymbolsGU = await runNm(binaries, '-gU'); + const allSymbolsU = await runNm(binaries, '-U'); + const classes = extractClasses(allSymbolsGU); + const constants = extractConstants(allSymbolsGU); + const categorySelectors = extractCategorySelectors(allSymbolsU, classes); + return [ + ...prefixSymbols(prefix, classes), + ...prefixSymbols(prefix, constants), + ...prefixSelectors(prefix, categorySelectors), + ]; +}; +/** Read the existing xcconfig (if any) and return its `MANGLED_SPECS_CHECKSUM` value. */ +const readExistingChecksum = (xcconfigPath) => { + if (!node_fs_1.default.existsSync(xcconfigPath)) { + return null; + } + const contents = node_fs_1.default.readFileSync(xcconfigPath, 'utf8'); + const match = new RegExp(`^${MANGLED_SPECS_CHECKSUM_KEY}\\s*=\\s*(\\S+)`, 'm').exec(contents); + return match?.[1] ?? null; +}; +const writeManglingXcconfig = (xcconfigPath, defines, specsChecksum) => { + const contents = `// This config file is automatically generated by expo-brownfield any time the +// pod dependency graph changes. Commit it alongside Podfile.lock. 
+ +${MANGLING_DEFINES_KEY} = ${defines.join(' ')} + +// Used to skip rebuilding the mangling defines when the dependency graph hasn't changed. +${MANGLED_SPECS_CHECKSUM_KEY} = ${specsChecksum} +`; + node_fs_1.default.mkdirSync(node_path_1.default.dirname(xcconfigPath), { recursive: true }); + node_fs_1.default.writeFileSync(xcconfigPath, contents); +}; +/** + * Patch a per-pod xcconfig so it (1) `#include`s our mangling xcconfig and + * (2) appends `$(MANGLING_DEFINES)` to its `GCC_PREPROCESSOR_DEFINITIONS`. + * The transform is idempotent: re-running on an already-patched file leaves + * it unchanged. + */ +const patchPodXcconfig = (podXcconfigPath, manglingXcconfigPath) => { + if (!node_fs_1.default.existsSync(podXcconfigPath)) { + return; + } + const includeLine = `#include "${manglingXcconfigPath}"`; + let contents = node_fs_1.default.readFileSync(podXcconfigPath, 'utf8'); + if (!contents.includes(includeLine)) { + contents = `${includeLine}\n${contents}`; + } + const definesRefRe = new RegExp(`\\$\\(${MANGLING_DEFINES_KEY}\\)`); + if (!definesRefRe.test(contents)) { + contents = contents.replace(/^(GCC_PREPROCESSOR_DEFINITIONS\s*=\s*[^\n]*)$/m, `$1 $(${MANGLING_DEFINES_KEY})`); + } + node_fs_1.default.writeFileSync(podXcconfigPath, contents); +}; +/** + * Entry point invoked by the Ruby shim during `pod install`. Responsibilities: + * 1. Build the pod targets to iphonesimulator so we have binaries to scan. + * 2. nm those binaries, filter Swift symbols, and assemble `MANGLING_DEFINES`. + * 3. Write the mangling xcconfig + patch each pod's xcconfig to consume it. + * + * The Ruby shim already short-circuited on a checksum match before reaching + * here, so this function unconditionally regenerates. 
+ */ +const runMangle = async (context, options) => { + const builtProductsDir = await buildPodTargets(context.podsProjectPath, context.podTargetLabels, options); + const binaries = findBinariesToMangle(builtProductsDir); + const defines = await buildManglingDefines(context.manglePrefix, binaries); + writeManglingXcconfig(context.xcconfigPath, defines, context.specsChecksum); + for (const podXcconfig of context.podXcconfigPaths) { + patchPodXcconfig(podXcconfig, context.xcconfigPath); + } +}; +exports.runMangle = runMangle; +const isManglingUpToDate = (xcconfigPath, expectedChecksum) => readExistingChecksum(xcconfigPath) === expectedChecksum; +exports.isManglingUpToDate = isManglingUpToDate; +exports.__testing = { + isSwiftSymbol, + extractClasses, + extractConstants, + extractCategorySelectors, + prefixSelectors, +}; +//# sourceMappingURL=mangle.js.map \ No newline at end of file diff --git a/packages/expo-brownfield/cli/build/utils/mangle.js.map b/packages/expo-brownfield/cli/build/utils/mangle.js.map new file mode 100644 index 00000000000000..0f1a3d752ecc62 --- /dev/null +++ b/packages/expo-brownfield/cli/build/utils/mangle.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"mangle.js","sourceRoot":"","sources":["../../src/utils/mangle.ts"],"names":[],"mappings":";;;;;;AAAA,sDAAyB;AACzB,0DAA6B;AAE7B,yCAAwC;AAWxC,MAAM,oBAAoB,GAAG,kBAAkB,CAAC;AAChD,MAAM,0BAA0B,GAAG,wBAAwB,CAAC;AAE5D,MAAM,cAAc,GAAG,OAAO,CAAC;AAC/B,MAAM,qBAAqB,GAAG,mBAAI,CAAC,IAAI,CAAC,OAAO,EAAE,yBAAyB,CAAC,CAAC;AAE5E;;;;;;GAMG;AACH,MAAM,qBAAqB,GAAa;IACtC,KAAK;IACL,cAAc;IACd,gBAAgB;IAChB,UAAU;IACV,kBAAkB;IAClB,WAAW;IACX,wBAAwB;IACxB,aAAa;IACb,YAAY;IACZ,kBAAkB;IAClB,OAAO;IACP,WAAW;IACX,qBAAqB;IACrB,YAAY;IACZ,oBAAoB;CACrB,CAAC;AAEF,MAAM,aAAa,GAAG,CAAC,IAAY,EAAW,EAAE,CAAC,qBAAqB,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AAEnG,MAAM,aAAa,GAAG,4BAA4B,CAAC;AAEnD;;;;;;;;;;;;;;;;;;;GAmBG;AACH,MAAM,eAAe,GAAG,KAAK,EAC3B,eAAuB,EACvB,eAAyB,EACzB,OAA6B,EACZ,EAAE;IACnB,MAAM,OAAO,GAAG,mBAAI,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC;IAC9C,MAAM,QAAQ,GAAG,mBAAI,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;IACpD,iBAAE,CAAC,MAAM,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;IACtD,iBAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAC5C,MAAM,OAAO,GAAG,mBAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,aAAa,CAAC,CAAC;IACnD,iBAAE,CAAC,aAAa,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;IAE9B,MAAM,UAAU,GAAG;QACjB,UAAU;QACV,eAAe;QACf,gBAAgB;QAChB,SAAS;QACT,MAAM;QACN,iBAAiB;QACjB,OAAO;QACP,0CAA0C;QAC1C,oEAAoE;KACrE,CAAC;IAEF,KAAK,MAAM,MAAM,IAAI,eAAe,EAAE,CAAC;QACrC,MAAM,IAAI,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,UAAU,CAAC,CAAC;QAChD,IAAI,CAAC;YACH,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,IAAA,qBAAU,EAAC,YAAY,EAAE,IAAI,EAAE;gBACtD,GAAG,EAAE,OAAO;gBACZ,OAAO,EAAE,OAAO,CAAC,OAAO;aACzB,CAAC,CAAC;YACH,iBAAE,CAAC,cAAc,CAAC,OAAO,EAAE,oBAAoB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,MAAM,EAAE,CAAC,CAAC;QAClF,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,iBAAE,CAAC,cAAc,CACf,OAAO,EACP,oBAAoB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,kBAChC,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;YACF,MAAM,IAAI,KAAK,CACb,gDAAgD,MAAM,yBAAyB;gBAC7E,wFAAwF;gBACxF,0CAA0C,OAAO
,EAAE,CACtD,CAAC;QACJ,CAAC;IACH,CAAC;IAED,OAAO,mBAAI,CAAC,IAAI,CAAC,OAAO,EAAE,qBAAqB,CAAC,CAAC;AACnD,CAAC,CAAC;AAEF;;;;GAIG;AACH,MAAM,oBAAoB,GAAG,CAAC,gBAAwB,EAAY,EAAE;IAClE,MAAM,QAAQ,GAAa,EAAE,CAAC;IAC9B,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE,CAAC;QACrC,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,MAAM,IAAI,GAAG,CAAC,GAAW,EAAQ,EAAE;QACjC,KAAK,MAAM,KAAK,IAAI,iBAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,EAAE,CAAC;YACjE,MAAM,IAAI,GAAG,mBAAI,CAAC,IAAI,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;YACxC,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;gBACxB,IAAI,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC;oBACtC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;wBACpC,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;wBACzD,QAAQ,CAAC,IAAI,CAAC,mBAAI,CAAC,IAAI,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC,CAAC;oBACzC,CAAC;oBACD,SAAS;gBACX,CAAC;gBACD,IAAI,CAAC,IAAI,CAAC,CAAC;gBACX,SAAS;YACX,CAAC;YACD,IAAI,KAAK,CAAC,MAAM,EAAE,IAAI,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;gBACtF,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACtB,CAAC;QACH,CAAC;IACH,CAAC,CAAC;IAEF,IAAI,CAAC,gBAAgB,CAAC,CAAC;IACvB,OAAO,QAAQ,CAAC;AAClB,CAAC,CAAC;AAEF,MAAM,KAAK,GAAG,KAAK,EAAE,QAAkB,EAAE,KAAa,EAAqB,EAAE;IAC3E,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC1B,OAAO,EAAE,CAAC;IACZ,CAAC;IACD,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,IAAA,qBAAU,EAAC,IAAI,EAAE,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC;IACpF,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AAC9D,CAAC,CAAC;AAEF,MAAM,cAAc,GAAG,CAAC,KAAe,EAAY,EAAE;IACnD,MAAM,QAAQ,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;IAC9D,MAAM,YAAY,GAAG,QAAQ;SAC1B,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SAC7C,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC,CAAC;IAC7C,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,YAAY,CAAC
,CAAC,CAAC;AAC3C,CAAC,CAAC;AAEF,MAAM,gBAAgB,GAAG,CAAC,KAAe,EAAY,EAAE;IACrD,MAAM,QAAQ,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;IAE9D,MAAM,OAAO,GAAG,QAAQ;SACrB,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SAClC,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACtC,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,sBAAsB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACpD,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC;IAE5C,MAAM,OAAO,GAAG,QAAQ;SACrB,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SAClC,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,uBAAuB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACrD,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,0BAA0B,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACxD,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC;IAE5C,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,OAAO,EAAE,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC;AAEF;;;;;GAKG;AACH,MAAM,wBAAwB,GAAG,CAAC,KAAe,EAAE,OAAiB,EAAY,EAAE;IAChF,MAAM,QAAQ,GAAG,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;IAClC,MAAM,cAAc,GAAG,kCAAkC,CAAC;IAC1D,MAAM,WAAW,GAAG,gBAAgB,CAAC;IAErC,MAAM,SAAS,GAAa,EAAE,CAAC;IAC/B,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;QACzB,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;YAC/B,SAAS;QACX,CAAC;QACD,MAAM,SAAS,GAAG,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAC9C,IAAI,SAAS,IAAI,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC;YACzC,SAAS;QACX,CAAC;QACD,MAAM,cAAc,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC;QAChD,MAAM,SAAS,GAAG,cAAc,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACxD,IAAI,CAAC,SAAS,EAAE,CAAC;YACf,SAAS;QACX,CAAC;QACD,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACtD,IAAI,QAAQ,EAAE,CAAC;YACb,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC3B,CAAC;IACH,CAAC;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACxC,CAAC,CAAC;AAEF,MAAM,aAAa,GAAG,CAAC,MA
Ac,EAAE,OAAiB,EAAY,EAAE,CACpE,OAAO,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,GAAG,IAAI,MAAM,GAAG,GAAG,EAAE,CAAC,CAAC;AAEjD;;;;GAIG;AACH,MAAM,eAAe,GAAG,CAAC,MAAc,EAAE,SAAmB,EAAY,EAAE;IACxE,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IACrC,MAAM,OAAO,GAAa,EAAE,CAAC;IAE7B,MAAM,OAAO,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;IACjE,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;QAC7B,MAAM,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACpC,IAAI,CAAC,WAAW,EAAE,CAAC;YACjB,SAAS;QACX,CAAC;QACD,MAAM,WAAW,GAAG,WAAW,CAAC,CAAC,CAAE,CAAC,WAAW,EAAE,GAAG,WAAW,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACzE,MAAM,MAAM,GAAG,SAAS,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,KAAK,WAAW,IAAI,GAAG,KAAK,WAAW,CAAC,CAAC;QACnF,IAAI,CAAC,MAAM,EAAE,CAAC;YACZ,SAAS;QACX,CAAC;QACD,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QACzB,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QACzB,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,OAAO,MAAM,GAAG,MAAM,EAAE,CAAC,CAAC;QAChD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,IAAI,MAAM,GAAG,MAAM,EAAE,CAAC,CAAC;IAC/C,CAAC;IAED,OAAO,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;IAC9D,OAAO,OAAO,CAAC;AACjB,CAAC,CAAC;AAEF,MAAM,oBAAoB,GAAG,KAAK,EAAE,MAAc,EAAE,QAAkB,EAAqB,EAAE;IAC3F,MAAM,YAAY,GAAG,MAAM,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAClD,MAAM,WAAW,GAAG,MAAM,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;IAEhD,MAAM,OAAO,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;IAC7C,MAAM,SAAS,GAAG,gBAAgB,CAAC,YAAY,CAAC,CAAC;IACjD,MAAM,iBAAiB,GAAG,wBAAwB,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;IAEzE,OAAO;QACL,GAAG,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC;QACjC,GAAG,aAAa,CAAC,MAAM,EAAE,SAAS,CAAC;QACnC,GAAG,eAAe,CAAC,MAAM,EAAE,iBAAiB,CAAC;KAC9C,CAAC;AACJ,CAAC,CAAC;AAEF,yFAAyF;AACzF,MAAM,oBAAoB,GAAG,CAAC,YAAoB,EAAiB,EAAE;IACnE,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,CAAC;QACjC,OAAO,IAAI,CAAC;IACd,CAAC;IACD,MAAM,QAAQ,GAAG,iBAAE,CAAC,YAAY,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IACvD,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,IAAI,0BAA0B,iBAAiB,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC9F,OAAO,KAAK,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CA
AC;AAC5B,CAAC,CAAC;AAEF,MAAM,qBAAqB,GAAG,CAC5B,YAAoB,EACpB,OAAiB,EACjB,aAAqB,EACf,EAAE;IACR,MAAM,QAAQ,GAAG;;;EAGjB,oBAAoB,MAAM,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC;;;EAG3C,0BAA0B,MAAM,aAAa;CAC9C,CAAC;IACA,iBAAE,CAAC,SAAS,CAAC,mBAAI,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAC9D,iBAAE,CAAC,aAAa,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;AAC3C,CAAC,CAAC;AAEF;;;;;GAKG;AACH,MAAM,gBAAgB,GAAG,CAAC,eAAuB,EAAE,oBAA4B,EAAQ,EAAE;IACvF,IAAI,CAAC,iBAAE,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC;QACpC,OAAO;IACT,CAAC;IACD,MAAM,WAAW,GAAG,aAAa,oBAAoB,GAAG,CAAC;IACzD,IAAI,QAAQ,GAAG,iBAAE,CAAC,YAAY,CAAC,eAAe,EAAE,MAAM,CAAC,CAAC;IAExD,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;QACpC,QAAQ,GAAG,GAAG,WAAW,KAAK,QAAQ,EAAE,CAAC;IAC3C,CAAC;IAED,MAAM,YAAY,GAAG,IAAI,MAAM,CAAC,SAAS,oBAAoB,KAAK,CAAC,CAAC;IACpE,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC;QACjC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CACzB,gDAAgD,EAChD,QAAQ,oBAAoB,GAAG,CAChC,CAAC;IACJ,CAAC;IAED,iBAAE,CAAC,aAAa,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;AAC9C,CAAC,CAAC;AAEF;;;;;;;;GAQG;AACI,MAAM,SAAS,GAAG,KAAK,EAC5B,OAAsB,EACtB,OAA6B,EACd,EAAE;IACjB,MAAM,gBAAgB,GAAG,MAAM,eAAe,CAC5C,OAAO,CAAC,eAAe,EACvB,OAAO,CAAC,eAAe,EACvB,OAAO,CACR,CAAC;IAEF,MAAM,QAAQ,GAAG,oBAAoB,CAAC,gBAAgB,CAAC,CAAC;IACxD,MAAM,OAAO,GAAG,MAAM,oBAAoB,CAAC,OAAO,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;IAE3E,qBAAqB,CAAC,OAAO,CAAC,YAAY,EAAE,OAAO,EAAE,OAAO,CAAC,aAAa,CAAC,CAAC;IAE5E,KAAK,MAAM,WAAW,IAAI,OAAO,CAAC,gBAAgB,EAAE,CAAC;QACnD,gBAAgB,CAAC,WAAW,EAAE,OAAO,CAAC,YAAY,CAAC,CAAC;IACtD,CAAC;AACH,CAAC,CAAC;AAlBW,QAAA,SAAS,aAkBpB;AAEK,MAAM,kBAAkB,GAAG,CAAC,YAAoB,EAAE,gBAAwB,EAAW,EAAE,CAC5F,oBAAoB,CAAC,YAAY,CAAC,KAAK,gBAAgB,CAAC;AAD7C,QAAA,kBAAkB,sBAC2B;AAE7C,QAAA,SAAS,GAAG;IACvB,aAAa;IACb,cAAc;IACd,gBAAgB;IAChB,wBAAwB;IACxB,eAAe;CAChB,CAAC"} \ No newline at end of file diff --git a/packages/expo-brownfield/cli/src/commands/index.ts b/packages/expo-brownfield/cli/src/commands/index.ts index 3ad397d285bfe7..bb9cff05b1172c 100644 --- a/packages/expo-brownfield/cli/src/commands/index.ts +++ 
b/packages/expo-brownfield/cli/src/commands/index.ts @@ -1,3 +1,4 @@ export { default as buildAndroid } from './build-android'; export { default as buildIos } from './build-ios'; +export { default as mangle } from './mangle'; export { default as tasksAndroid } from './tasks-android'; diff --git a/packages/expo-brownfield/cli/src/commands/mangle.ts b/packages/expo-brownfield/cli/src/commands/mangle.ts new file mode 100644 index 00000000000000..5b1e8cb932806d --- /dev/null +++ b/packages/expo-brownfield/cli/src/commands/mangle.ts @@ -0,0 +1,49 @@ +import type { Command } from 'commander'; +import fs from 'node:fs'; + +import { runMangle, type MangleContext } from '../utils/mangle'; + +interface MangleOptions { + contextJson?: string; + contextFile?: string; + verbose?: boolean; +} + +const readContext = (options: MangleOptions): MangleContext => { + let raw: string; + if (options.contextFile) { + raw = fs.readFileSync(options.contextFile, 'utf8'); + } else if (options.contextJson) { + raw = options.contextJson; + } else { + throw new Error( + 'expo-brownfield mangle: missing --context-json or --context-file. ' + + 'This command is normally invoked from the Ruby shim during `pod install`.' + ); + } + return JSON.parse(raw) as MangleContext; +}; + +/** + * Internal command spawned by `scripts/ios/mangle.rb` from a Podfile's + * `post_install` block when the `multipleFrameworks` plugin option is set. + * Not intended for direct user invocation. + * + * Exits with code 1 on any failure with a single-line error message — the + * Ruby shim surfaces this back to CocoaPods. Without this catch the rejected + * promise bubbles up to Node's unhandled-rejection handler and prints a noisy + * stack trace that obscures the actual build failure. + */ +const mangle = async (command: Command) => { + try { + const options = command.opts(); + const context = readContext(options); + await runMangle(context, { verbose: options.verbose ?? 
 false }); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`expo-brownfield mangle: ${message}`); + process.exit(1); + } +}; + +export default mangle; diff --git a/packages/expo-brownfield/cli/src/index.ts b/packages/expo-brownfield/cli/src/index.ts index af05f8ce5ba24c..fdd7e6b31ff1d6 100644 --- a/packages/expo-brownfield/cli/src/index.ts +++ b/packages/expo-brownfield/cli/src/index.ts @@ -1,7 +1,7 @@ #!/usr/bin/env node import { Command } from 'commander'; -import { buildAndroid, buildIos, tasksAndroid } from './commands'; +import { buildAndroid, buildIos, mangle, tasksAndroid } from './commands'; import packageJson from '../../package.json'; const program = new Command(); @@ -47,6 +47,17 @@ program await buildIos(this); }); +// mangle (internal: invoked by scripts/ios/mangle.rb during pod install) +program + .command('mangle', { hidden: true }) + .description('Internal: regenerate brownfield mangling xcconfig') + .option('--context-json <json>', 'inline JSON describing the mangling context') + .option('--context-file <path>', 'path to a JSON file describing the mangling context') + .option('--verbose', 'forward all output to the terminal') + .action(async function (this: Command) { + await mangle(this); + }); + // tasks:android program .command('tasks:android') diff --git a/packages/expo-brownfield/cli/src/utils/ios.ts b/packages/expo-brownfield/cli/src/utils/ios.ts index 398868f827c6cc..ad6b8e16f9c8a4 100644 --- a/packages/expo-brownfield/cli/src/utils/ios.ts +++ b/packages/expo-brownfield/cli/src/utils/ios.ts @@ -1,14 +1,139 @@ import chalk from 'chalk'; +import { execSync } from 'node:child_process'; import fs from 'node:fs'; import path from 'node:path'; import { runCommand } from './commands'; import { XCFramework } from './constants'; import CLIError from './error'; -import { ensureCorrectFlavor, enumerateAllPrebuildModules } from './precompiled'; +import { + ensureCorrectFlavor, + 
 enumerateAllPrebuildModules, + resolvedFixedXCFrameworks, +} from './precompiled'; import { withSpinner } from './spinner'; import type { IosConfig } from './types'; +/** + * Inspect the built brownfield framework binary and return the names of `@rpath`-linked + * dynamic frameworks that are NOT already covered by the fixed XCFramework set, the + * brownfield target itself, or precompiled-module enumeration. + * + * Source-built pods (e.g. `ExpoModulesJSI` from a local podspec) are produced as dynamic + * `.framework`s alongside the brownfield framework, and the brownfield binary holds an + * `@rpath/<name>.framework/<name>` reference to each. Without shipping these as standalone + * xcframeworks the host app crashes at runtime with `dyld: Library not loaded: @rpath/…`. + * + * Returns names without the `.framework` suffix, deduped, in `otool -L` order. + */ +export const enumerateSourceBuiltDeps = ( + config: IosConfig, + alreadyCovered: Set<string> +): string[] => { + const frameworkBinary = path.join(config.simulator, `${config.scheme}.framework`, config.scheme); + if (!fs.existsSync(frameworkBinary)) { + return []; + } + + let stdout: string; + try { + stdout = execSync(`otool -L "${frameworkBinary}"`, { encoding: 'utf8' }); + } catch { + // otool failure is non-fatal — degrade gracefully and let the user catch the missing dep + // at runtime rather than blocking the whole build. + return []; + } + + const names = new Set<string>(); + for (const line of stdout.split('\n')) { + const match = line.trim().match(/^@rpath\/([^/]+)\.framework\//); + if (match?.[1]) { + names.add(match[1]); + } + } + return Array.from(names).filter((name) => name !== config.scheme && !alreadyCovered.has(name)); +}; + +/** + * Build the `-framework <path>` (+ optional `-debug-symbols <dsym>`) arg + * sequence for one slice of a `xcodebuild -create-xcframework` invocation. 
 + * + * `-debug-symbols` is strict — pointing it at a non-existent path fails the + * whole create step — so we only attach the flag when the dSYM has actually + * been produced for that slice. dSYMs land at `<framework>.dSYM` next to the + * `<name>.framework` in the products dir whenever + * `DEBUG_INFORMATION_FORMAT=dwarf-with-dsym` is in effect (forced for + * brownfield builds in `buildFramework`, but not guaranteed for transitive + * source-built deps that build under their own pod build settings). + */ +const xcframeworkSliceArgs = (frameworkPath: string): string[] => { + const args = ['-framework', frameworkPath]; + const dsymPath = `${frameworkPath}.dSYM`; + if (fs.existsSync(dsymPath)) { + args.push('-debug-symbols', dsymPath); + } + return args; +}; + +/** + * Locate a source-built `.framework` for `name` inside one of the brownfield build product + * slices. Pods that set `FRAMEWORK_SEARCH_PATHS` to `${PODS_CONFIGURATION_BUILD_DIR}/XCFrameworkIntermediates/<name>` + * (e.g. `ExpoModulesJSI`) land in `XCFrameworkIntermediates/<name>/<name>.framework` rather + * than at the slice root, so we check both locations. + */ +const findSourceBuiltFramework = (slicePath: string, name: string): string | null => { + const candidates = [ + path.join(slicePath, `${name}.framework`), + path.join(slicePath, 'XCFrameworkIntermediates', name, `${name}.framework`), + ]; + return candidates.find((candidate) => fs.existsSync(candidate)) ?? null; +}; + +/** + * Build an xcframework from the device + simulator slices of a source-built `.framework` + * sitting in the brownfield build products dir, and copy it into `dest`. Returns whether + * the xcframework was produced (false when one or both slices are missing — typically a + * harmless skip for a system framework or a transitive dep that isn't actually built). 
 + */ +const bundleSourceBuiltFramework = async ( + config: IosConfig, + name: string, + dest: string +): Promise<boolean> => { + const deviceFramework = findSourceBuiltFramework(config.device, name); + const simulatorFramework = findSourceBuiltFramework(config.simulator, name); + + if (!deviceFramework || !simulatorFramework) { + console.warn( + `expo-brownfield: source-built dependency '${name}' is linked by ${config.scheme}.framework ` + + `but its device/simulator slices were not found under the brownfield build products dir. ` + + `Skipping. The host app may fail at runtime with 'Library not loaded: @rpath/${name}.framework/${name}'.` + ); + return false; + } + + const outputPath = path.join(dest, `${name}.xcframework`); + if (fs.existsSync(outputPath)) { + fs.rmSync(outputPath, { recursive: true, force: true }); + } + + const args = [ + '-create-xcframework', + ...xcframeworkSliceArgs(deviceFramework), + ...xcframeworkSliceArgs(simulatorFramework), + '-output', + outputPath, + ]; + + if (config.dryRun) { + console.log(`xcodebuild ${args.join(' ')}`); + return true; + } + + await runCommand('xcodebuild', args, { verbose: config.verbose }); + return true; +}; + export const cleanUpArtifacts = async (config: IosConfig) => { if (config.dryRun) { console.log('Cleaning up previous artifacts'); @@ -46,6 +171,11 @@ export const buildFramework = async (config: IosConfig) => { 'generic/platform=iphonesimulator', '-configuration', config.buildConfiguration, + // Ensure dSYMs are produced for both Debug and Release so they can be + // bundled into the resulting xcframework via `-create-xcframework + // -debug-symbols`. Release defaults to `dwarf-with-dsym`; Debug defaults + // to plain `dwarf` and would otherwise leave us with no dSYM to ship. 
 + 'DEBUG_INFORMATION_FORMAT=dwarf-with-dsym', ]; if (config.dryRun) { @@ -121,6 +251,36 @@ export const copyXCFrameworks = async (config: IosConfig, dest: string) => { }); } } + + // Bundle any source-built dynamic frameworks the brownfield binary links against + // (e.g. `ExpoModulesJSI` from a local podspec). Without this the host app crashes at + // runtime with `dyld: Library not loaded: @rpath/<name>.framework/<name>`. + const alreadyCovered = collectCoveredFrameworkNames(config); + const sourceBuiltDeps = enumerateSourceBuiltDeps(config, alreadyCovered); + for (const depName of sourceBuiltDeps) { + await withSpinner({ + operation: () => bundleSourceBuiltFramework(config, depName, dest), + loaderMessage: `Bundling source-built ${depName} as xcframework...`, + successMessage: `Bundling source-built ${depName} as xcframework succeeded`, + errorMessage: `Bundling source-built ${depName} as xcframework failed`, + verbose: config.verbose, + }); + } +}; + +/** + * Set of xcframework names the brownfield CLI already plans to ship (fixed XCFrameworks + + * prebuilt modules when enabled). Used to dedupe against `enumerateSourceBuiltDeps` so a + * dep that's already covered by a prebuilt artifact isn't re-built from source. 
 + */ +const collectCoveredFrameworkNames = (config: IosConfig): Set<string> => { + const covered = new Set([config.scheme, ...resolvedFixedXCFrameworks()]); + if (config.usePrebuilds) { + for (const module of enumerateAllPrebuildModules(process.cwd(), config.buildConfiguration)) { + covered.add(module.name); + } + } + return covered; }; export const createSwiftPackage = async (config: IosConfig): Promise => { @@ -161,10 +321,8 @@ export const createXCframework = async (config: IosConfig, at: string) => { const args = [ '-create-xcframework', - '-framework', - `${config.device}/${config.scheme}.framework`, - '-framework', - `${config.simulator}/${config.scheme}.framework`, + ...xcframeworkSliceArgs(`${config.device}/${config.scheme}.framework`), + ...xcframeworkSliceArgs(`${config.simulator}/${config.scheme}.framework`), '-output', outputPath, ]; @@ -264,7 +422,20 @@ export const generatePackageMetadataFile = async (config: IosConfig, packagePath })) : []; - const xcframeworks = [...baseFrameworks, ...precompiledModules]; + // Source-built dynamic deps the brownfield framework links against (e.g. ExpoModulesJSI). + // `copyXCFrameworks` writes their xcframeworks to disk; we need to declare matching + // `.binaryTarget`s here so SPM consumers actually link them. + const sourceBuiltDepNames = enumerateSourceBuiltDeps( + config, + new Set([ + config.scheme, + ...baseFrameworks.map(({ name }) => name), + ...precompiledModules.map(({ name }) => name), + ]) + ); + const sourceBuiltDeps = sourceBuiltDepNames.map((name) => ({ name, targets: [name] })); + + const xcframeworks = [...baseFrameworks, ...precompiledModules, ...sourceBuiltDeps]; // With prebuilds the module graph is large; expose a single aggregate library so consumers // `import <scheme>` once and Xcode links every underlying binary target automatically. 
diff --git a/packages/expo-brownfield/cli/src/utils/mangle.ts b/packages/expo-brownfield/cli/src/utils/mangle.ts new file mode 100644 index 00000000000000..cc07ca309dcc46 --- /dev/null +++ b/packages/expo-brownfield/cli/src/utils/mangle.ts @@ -0,0 +1,363 @@ +import fs from 'node:fs'; +import path from 'node:path'; + +import { runCommand } from './commands'; + +export interface MangleContext { + podsProjectPath: string; + podTargetLabels: string[]; + podXcconfigPaths: string[]; + manglePrefix: string; + xcconfigPath: string; + specsChecksum: string; +} + +const MANGLING_DEFINES_KEY = 'MANGLING_DEFINES'; +const MANGLED_SPECS_CHECKSUM_KEY = 'MANGLED_SPECS_CHECKSUM'; + +const BUILD_DIR_NAME = 'build'; +const BUILT_PRODUCTS_SUBDIR = path.join('build', 'Release-iphonesimulator'); + +/** + * Symbol regexes lifted verbatim from the cocoapods-mangle gem + * (lib/cocoapods_mangle/defines.rb) — they're the result of significant + * empirical discovery of Swift symbol shapes that must NOT be mangled. + * Keep behavior byte-equivalent so an existing project's xcconfig diff is + * limited to whitespace/checksum. + */ +const SWIFT_SYMBOL_PATTERNS: RegExp[] = [ + /\$s/, + / __(_)?swift/, + /\d+Swift(\d+)?/, + /Swift\d+/, + /\d+SwiftUI(\d+)?/, + /symbolic /, + /associated conformance/, + / globalinit/, + /globalinit/, + /_OBJC_CLASS_\$__/, + /____ /, + /_PROTOCOL/, + /_\w+_swiftoverride_/, + /_Z\w+swift/, + /get_witness_table /, +]; + +const isSwiftSymbol = (line: string): boolean => SWIFT_SYMBOL_PATTERNS.some((re) => re.test(line)); + +const LOG_FILE_NAME = 'expo-brownfield-mangle.log'; + +/** + * Build the `iphonesimulator` Release configuration for each pod target so the + * resulting `.framework`/`.a` binaries can be `nm`-scanned for symbols. Mirrors + * `CocoapodsMangle::Builder#build!` from the gem. 
 + * + * Two build-setting overrides are passed on the command line: + * - `SWIFT_VERIFY_EMITTED_MODULE_INTERFACE=NO` + * - `OTHER_SWIFT_FLAGS='$(inherited) -no-verify-emitted-module-interface'` + * + * These are normally written into the Pods project by `addPrebuiltSettings` + * inside the Podfile's `post_install` block, but CocoaPods doesn't persist + * post_install mutations to disk until *after* all hooks finish — which means + * our mangle hook reads the un-mutated on-disk project. Without these + * overrides, Swift module emission fails for modules that import prebuilt RN + * frameworks (e.g. `ExpoModulesCore`). + * + * On failure the full xcodebuild output is written to + * `<pods dir>/build/expo-brownfield-mangle.log` and the path surfaced in the + * thrown error so the user has somewhere to look. + */ +const buildPodTargets = async ( + podsProjectPath: string, + podTargetLabels: string[], + options: { verbose: boolean } +): Promise<string> => { + const podsDir = path.dirname(podsProjectPath); + const buildDir = path.join(podsDir, BUILD_DIR_NAME); + fs.rmSync(buildDir, { recursive: true, force: true }); + fs.mkdirSync(buildDir, { recursive: true }); + const logPath = path.join(buildDir, LOG_FILE_NAME); + fs.writeFileSync(logPath, ''); + + const sharedArgs = [ + '-project', + podsProjectPath, + '-configuration', + 'Release', + '-sdk', + 'iphonesimulator', + 'build', + 'SWIFT_VERIFY_EMITTED_MODULE_INTERFACE=NO', + 'OTHER_SWIFT_FLAGS=$(inherited) -no-verify-emitted-module-interface', + ]; + + for (const target of podTargetLabels) { + const args = ['-target', target, ...sharedArgs]; + try { + const { stdout } = await runCommand('xcodebuild', args, { + cwd: podsDir, + verbose: options.verbose, + }); + fs.appendFileSync(logPath, `\n=== xcodebuild ${args.join(' ')} ===\n${stdout}`); + } catch (error) { + fs.appendFileSync( + logPath, + `\n=== xcodebuild ${args.join(' ')} (FAILED) ===\n${ + error instanceof Error ? 
error.message : String(error) + }` + ); + throw new Error( + `expo-brownfield: failed to build pod target '${target}' for symbol mangling. ` + + `This usually means a Swift module couldn't compile against the current Pod xcconfigs. ` + + `Inspect the full xcodebuild output at: ${logPath}` + ); + } + } + + return path.join(podsDir, BUILT_PRODUCTS_SUBDIR); +}; + +/** + * After the simulator build, find the binaries to scan with `nm`. Skip the + * umbrella `Pods_*` and `libPods-*` outputs since they're aggregator targets, + * not the per-pod libraries we want to mangle. + */ +const findBinariesToMangle = (builtProductsDir: string): string[] => { + const binaries: string[] = []; + if (!fs.existsSync(builtProductsDir)) { + return binaries; + } + + const walk = (dir: string): void => { + for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { + const full = path.join(dir, entry.name); + if (entry.isDirectory()) { + if (entry.name.endsWith('.framework')) { + if (!entry.name.startsWith('Pods_')) { + const fwName = entry.name.slice(0, -'.framework'.length); + binaries.push(path.join(full, fwName)); + } + continue; + } + walk(full); + continue; + } + if (entry.isFile() && entry.name.endsWith('.a') && !entry.name.startsWith('libPods-')) { + binaries.push(full); + } + } + }; + + walk(builtProductsDir); + return binaries; +}; + +const runNm = async (binaries: string[], flags: string): Promise => { + if (binaries.length === 0) { + return []; + } + const { stdout } = await runCommand('nm', [flags, ...binaries], { verbose: false }); + return stdout.split('\n').filter((line) => line.length > 0); +}; + +const extractClasses = (lines: string[]): string[] => { + const filtered = lines.filter((line) => !isSwiftSymbol(line)); + const classSymbols = filtered + .filter((line) => /OBJC_CLASS_\$_/.test(line)) + .map((line) => line.replace(/^.*\$_/, '')); + return Array.from(new Set(classSymbols)); +}; + +const extractConstants = (lines: string[]): string[] => { + const filtered 
= lines.filter((line) => !isSwiftSymbol(line)); + + const sConsts = filtered + .filter((line) => / S /.test(line)) + .filter((line) => !/_OBJC_/.test(line)) + .filter((line) => !/__block_descriptor.*/.test(line)) + .map((line) => line.replace(/^.* _/, '')); + + const tConsts = filtered + .filter((line) => / T /.test(line)) + .filter((line) => !/__copy_helper_block.*/.test(line)) + .filter((line) => !/__destroy_helper_block.*/.test(line)) + .map((line) => line.replace(/^.* _/, '')); + + return Array.from(new Set([...sConsts, ...tConsts])); +}; + +/** + * Category selectors are emitted as ` t -[Class(Category) selector]` lines + * by `nm`. We skip selectors on classes that are themselves being mangled + * (their selectors get carried implicitly by the class rename) and otherwise + * extract just the selector head. + */ +const extractCategorySelectors = (lines: string[], classes: string[]): string[] => { + const classSet = new Set(classes); + const selectorLineRe = / t [-|+]\[[^ ]*\([^ ]*\) [^ ]*\]/; + const classNameRe = /[-|+]\[(.*?)\(/; + + const selectors: string[] = []; + for (const line of lines) { + if (!selectorLineRe.test(line)) { + continue; + } + const className = classNameRe.exec(line)?.[1]; + if (className && classSet.has(className)) { + continue; + } + const beforeRBracket = line.split(']')[0] + ']'; + const lastToken = beforeRBracket.match(/[^ ]*\]$/)?.[0]; + if (!lastToken) { + continue; + } + const selector = lastToken.slice(0, -1).split(':')[0]; + if (selector) { + selectors.push(selector); + } + } + return Array.from(new Set(selectors)); +}; + +const prefixSymbols = (prefix: string, symbols: string[]): string[] => + symbols.map((sym) => `${sym}=${prefix}${sym}`); + +/** + * Property setter/getter pairs need symmetric handling so that `setFoo:` → + * `setFoo:` and `foo` → `foo` both round-trip. Lifted from + * `CocoapodsMangle::Defines.prefix_selectors` in the gem. 
+ */ +const prefixSelectors = (prefix: string, selectors: string[]): string[] => { + const remaining = new Set(selectors); + const defines: string[] = []; + + const setters = selectors.filter((sel) => /^set[A-Z]/.test(sel)); + for (const setter of setters) { + const upperGetter = setter.slice(3); + if (!upperGetter) { + continue; + } + const lowerGetter = upperGetter[0]!.toLowerCase() + upperGetter.slice(1); + const getter = selectors.find((sel) => sel === upperGetter || sel === lowerGetter); + if (!getter) { + continue; + } + remaining.delete(setter); + remaining.delete(getter); + defines.push(`${setter}=set${prefix}${getter}`); + defines.push(`${getter}=${prefix}${getter}`); + } + + defines.push(...prefixSymbols(prefix, Array.from(remaining))); + return defines; +}; + +const buildManglingDefines = async (prefix: string, binaries: string[]): Promise => { + const allSymbolsGU = await runNm(binaries, '-gU'); + const allSymbolsU = await runNm(binaries, '-U'); + + const classes = extractClasses(allSymbolsGU); + const constants = extractConstants(allSymbolsGU); + const categorySelectors = extractCategorySelectors(allSymbolsU, classes); + + return [ + ...prefixSymbols(prefix, classes), + ...prefixSymbols(prefix, constants), + ...prefixSelectors(prefix, categorySelectors), + ]; +}; + +/** Read the existing xcconfig (if any) and return its `MANGLED_SPECS_CHECKSUM` value. */ +const readExistingChecksum = (xcconfigPath: string): string | null => { + if (!fs.existsSync(xcconfigPath)) { + return null; + } + const contents = fs.readFileSync(xcconfigPath, 'utf8'); + const match = new RegExp(`^${MANGLED_SPECS_CHECKSUM_KEY}\\s*=\\s*(\\S+)`, 'm').exec(contents); + return match?.[1] ?? null; +}; + +const writeManglingXcconfig = ( + xcconfigPath: string, + defines: string[], + specsChecksum: string +): void => { + const contents = `// This config file is automatically generated by expo-brownfield any time the +// pod dependency graph changes. Commit it alongside Podfile.lock. 
+ +${MANGLING_DEFINES_KEY} = ${defines.join(' ')} + +// Used to skip rebuilding the mangling defines when the dependency graph hasn't changed. +${MANGLED_SPECS_CHECKSUM_KEY} = ${specsChecksum} +`; + fs.mkdirSync(path.dirname(xcconfigPath), { recursive: true }); + fs.writeFileSync(xcconfigPath, contents); +}; + +/** + * Patch a per-pod xcconfig so it (1) `#include`s our mangling xcconfig and + * (2) appends `$(MANGLING_DEFINES)` to its `GCC_PREPROCESSOR_DEFINITIONS`. + * The transform is idempotent: re-running on an already-patched file leaves + * it unchanged. + */ +const patchPodXcconfig = (podXcconfigPath: string, manglingXcconfigPath: string): void => { + if (!fs.existsSync(podXcconfigPath)) { + return; + } + const includeLine = `#include "${manglingXcconfigPath}"`; + let contents = fs.readFileSync(podXcconfigPath, 'utf8'); + + if (!contents.includes(includeLine)) { + contents = `${includeLine}\n${contents}`; + } + + const definesRefRe = new RegExp(`\\$\\(${MANGLING_DEFINES_KEY}\\)`); + if (!definesRefRe.test(contents)) { + contents = contents.replace( + /^(GCC_PREPROCESSOR_DEFINITIONS\s*=\s*[^\n]*)$/m, + `$1 $(${MANGLING_DEFINES_KEY})` + ); + } + + fs.writeFileSync(podXcconfigPath, contents); +}; + +/** + * Entry point invoked by the Ruby shim during `pod install`. Responsibilities: + * 1. Build the pod targets to iphonesimulator so we have binaries to scan. + * 2. nm those binaries, filter Swift symbols, and assemble `MANGLING_DEFINES`. + * 3. Write the mangling xcconfig + patch each pod's xcconfig to consume it. + * + * The Ruby shim already short-circuited on a checksum match before reaching + * here, so this function unconditionally regenerates. 
+ */ +export const runMangle = async ( + context: MangleContext, + options: { verbose: boolean } +): Promise => { + const builtProductsDir = await buildPodTargets( + context.podsProjectPath, + context.podTargetLabels, + options + ); + + const binaries = findBinariesToMangle(builtProductsDir); + const defines = await buildManglingDefines(context.manglePrefix, binaries); + + writeManglingXcconfig(context.xcconfigPath, defines, context.specsChecksum); + + for (const podXcconfig of context.podXcconfigPaths) { + patchPodXcconfig(podXcconfig, context.xcconfigPath); + } +}; + +export const isManglingUpToDate = (xcconfigPath: string, expectedChecksum: string): boolean => + readExistingChecksum(xcconfigPath) === expectedChecksum; + +export const __testing = { + isSwiftSymbol, + extractClasses, + extractConstants, + extractCategorySelectors, + prefixSelectors, +}; diff --git a/packages/expo-brownfield/plugin/build/ios/plugins/withPodfilePlugin.js b/packages/expo-brownfield/plugin/build/ios/plugins/withPodfilePlugin.js index 0cdf842faffcd9..3802354c98576b 100644 --- a/packages/expo-brownfield/plugin/build/ios/plugins/withPodfilePlugin.js +++ b/packages/expo-brownfield/plugin/build/ios/plugins/withPodfilePlugin.js @@ -4,6 +4,9 @@ const config_plugins_1 = require("expo/config-plugins"); const utils_1 = require("../utils"); const withPodfilePlugin = (config, pluginConfig) => { return (0, config_plugins_1.withPodfile)(config, (config) => { + if (pluginConfig.multipleFrameworks) { + config.modResults.contents = (0, utils_1.addManglePlugin)(config.modResults.contents, pluginConfig.targetName); + } config.modResults.contents = (0, utils_1.addNewPodsTarget)(config.modResults.contents, pluginConfig.targetName); if (!pluginConfig.buildReactNativeFromSource) { config.modResults.contents = (0, utils_1.addPrebuiltSettings)(config.modResults.contents); diff --git a/packages/expo-brownfield/plugin/build/ios/plugins/withXcodeProjectPlugin.js 
b/packages/expo-brownfield/plugin/build/ios/plugins/withXcodeProjectPlugin.js index 1e18ef89a426b9..32c0ec5321ea28 100644 --- a/packages/expo-brownfield/plugin/build/ios/plugins/withXcodeProjectPlugin.js +++ b/packages/expo-brownfield/plugin/build/ios/plugins/withXcodeProjectPlugin.js @@ -34,8 +34,15 @@ const withXcodeProjectPlugin = (config, pluginConfig) => { // ReactNativeDelegate 'ReactNativeDelegate.swift', ]; - // Create files from templates - templateFiles.forEach((templateFile) => (0, utils_1.createFileFromTemplate)(templateFile, groupPath)); + // Per-target prefix is interpolated into `@objc(...)` annotations so the + // ObjC runtime sees a unique class name per inner-app framework. + // Swift type names themselves stay unprefixed: each brownfield framework + // has a unique Swift module name, which is enough namespace isolation, and + // typealias-based unprefixing breaks linking under library-evolution mode + // (clients reference symbols by the typealias path, but the framework + // exports symbols under the underlying class name → undefined symbol). 
+ const templateVars = { prefix: pluginConfig.targetName }; + templateFiles.forEach((templateFile) => (0, utils_1.createFileFromTemplate)(templateFile, groupPath, templateVars)); // Apply patch to ExpoAppDelegate.swift to make it compatible with the brownfield framework (0, utils_1.applyPatchToFile)('ExpoAppDelegate.patch', node_path_1.default.join(groupPath, 'ExpoAppDelegate.swift')); // Create and properly add a new group for the framework diff --git a/packages/expo-brownfield/plugin/build/ios/types.d.ts b/packages/expo-brownfield/plugin/build/ios/types.d.ts index 9144e947a00656..054f89d39d0bd8 100644 --- a/packages/expo-brownfield/plugin/build/ios/types.d.ts +++ b/packages/expo-brownfield/plugin/build/ios/types.d.ts @@ -2,6 +2,7 @@ export interface PluginConfig { bundleIdentifier: string; targetName: string; buildReactNativeFromSource: boolean; + multipleFrameworks: boolean; } export type IOSPluginProps = Partial; export type PluginProps = IOSPluginProps | undefined; diff --git a/packages/expo-brownfield/plugin/build/ios/utils/podfile.d.ts b/packages/expo-brownfield/plugin/build/ios/utils/podfile.d.ts index 65e3e5bcbd9c50..aaacfc5b3ba73d 100644 --- a/packages/expo-brownfield/plugin/build/ios/utils/podfile.d.ts +++ b/packages/expo-brownfield/plugin/build/ios/utils/podfile.d.ts @@ -1,2 +1,16 @@ +/** + * Wire up expo-brownfield's bundled mangling logic in the Podfile. + * Replaces the third-party `cocoapods-mangle` gem so users don't need a + * Gemfile entry. Two insertions: + * 1. A `require` line near the top of the Podfile that loads + * `scripts/ios/mangle.rb` from the expo-brownfield npm package. + * 2. A `ExpoBrownfield::Mangle.run!(installer, ...)` call inside the + * `post_install` block (created if absent) that invokes the + * bundled Node worker to generate the mangling xcconfig. + * + * Both insertions are idempotent — re-running prebuild against an already + * patched Podfile is a no-op. 
+ */ +export declare const addManglePlugin: (podfile: string, targetName: string) => string; export declare const addNewPodsTarget: (podfile: string, targetName: string) => string; export declare const addPrebuiltSettings: (podfile: string) => string; diff --git a/packages/expo-brownfield/plugin/build/ios/utils/podfile.js b/packages/expo-brownfield/plugin/build/ios/utils/podfile.js index 74a332cf97335b..676e788b8f16a8 100644 --- a/packages/expo-brownfield/plugin/build/ios/utils/podfile.js +++ b/packages/expo-brownfield/plugin/build/ios/utils/podfile.js @@ -1,6 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.addPrebuiltSettings = exports.addNewPodsTarget = void 0; +exports.addPrebuiltSettings = exports.addNewPodsTarget = exports.addManglePlugin = void 0; const getTargetNameLines = (targetName) => { return [` target '${targetName}' do`, ' inherit! :complete', ' end']; }; @@ -19,6 +19,113 @@ const getPrebuiltSettingsLines = () => { end end`.split('\n'); }; +const MANGLE_REQUIRE_MARKER = "require File.join(File.dirname(`node --print \"require.resolve('expo-brownfield/package.json')\"`), 'scripts/ios/mangle')"; +const MANGLE_RUN_MARKER = 'ExpoBrownfield::Mangle.run!'; +const getMangleRunLines = (targetName) => { + return [ + ` ExpoBrownfield::Mangle.run!(installer, targets: ['${targetName}'], mangle_prefix: '${targetName}_')`, + ]; +}; +/** + * Wire up expo-brownfield's bundled mangling logic in the Podfile. + * Replaces the third-party `cocoapods-mangle` gem so users don't need a + * Gemfile entry. Two insertions: + * 1. A `require` line near the top of the Podfile that loads + * `scripts/ios/mangle.rb` from the expo-brownfield npm package. + * 2. A `ExpoBrownfield::Mangle.run!(installer, ...)` call inside the + * `post_install` block (created if absent) that invokes the + * bundled Node worker to generate the mangling xcconfig. 
+ * + * Both insertions are idempotent — re-running prebuild against an already + * patched Podfile is a no-op. + */ +const addManglePlugin = (podfile, targetName) => { + let result = addMangleRequire(podfile); + result = addMangleRunCall(result, targetName); + return result; +}; +exports.addManglePlugin = addManglePlugin; +const addMangleRequire = (podfile) => { + if (podfile.includes(MANGLE_REQUIRE_MARKER)) { + return podfile; + } + const lines = podfile.split('\n'); + // Insert after the last existing `require ` line (typically the + // react_native_pods/autolinking requires) so we sit alongside them. + const lastRequireIndex = lines.reduce((acc, line, index) => { + if (line.trimStart().startsWith('require ')) { + return index; + } + return acc; + }, -1); + const insertAt = lastRequireIndex >= 0 ? lastRequireIndex + 1 : 0; + lines.splice(insertAt, 0, MANGLE_REQUIRE_MARKER); + return lines.join('\n'); +}; +/** + * Count Ruby block-opening keywords on `line` (after stripping `#` comments). + * Recognizes `do` (with or without `|args|`) anywhere on the line, plus the + * keywords `if/unless/while/until/case/begin/def/class/module/for` *only* + * when they appear at the start of the line — that excludes the trailing- + * modifier forms (`puts x if y`, `… while cond`) which don't take a matching + * `end`. Strings and regexes containing the literal text `do`/`end` would + * still be miscounted; Podfiles in practice don't put block keywords inside + * string literals, so this is good enough for our scope. 
+ */ +const countBlockOpens = (line) => { + const stripped = line.replace(/#.*$/, ''); + let opens = 0; + const doMatches = stripped.match(/\bdo\b/g); + if (doMatches) { + opens += doMatches.length; + } + if (/^\s*(?:if|unless|while|until|case|begin|def|class|module|for)\b/.test(stripped)) { + opens += 1; + } + return opens; +}; +const countBlockCloses = (line) => { + const stripped = line.replace(/#.*$/, ''); + const matches = stripped.match(/\bend\b/g); + return matches ? matches.length : 0; +}; +/** + * Find the matching `end` for the block whose opener is on `openerIndex`. + * Walks forward tracking nesting depth so nested `do…end` (or `if…end`, + * `case…end`, …) inside the block don't get mistaken for the outer block's + * closer. Returns -1 if the input is unbalanced. + */ +const findMatchingEnd = (lines, openerIndex) => { + let depth = 0; + for (let i = openerIndex; i < lines.length; i++) { + const line = lines[i] ?? ''; + depth += countBlockOpens(line); + depth -= countBlockCloses(line); + if (depth === 0 && i > openerIndex) { + return i; + } + } + return -1; +}; +const addMangleRunCall = (podfile, targetName) => { + if (podfile.includes(MANGLE_RUN_MARKER)) { + return podfile; + } + const runLines = getMangleRunLines(targetName); + const lines = podfile.split('\n'); + const postInstallIndex = lines.findIndex((line) => line.includes('post_install do |installer|')); + if (postInstallIndex === -1) { + // No post_install block exists yet — append one at the bottom. 
+ lines.push('', 'post_install do |installer|', ...runLines, 'end'); + return lines.join('\n'); + } + const blockEnd = findMatchingEnd(lines, postInstallIndex); + if (blockEnd === -1) { + return podfile; + } + lines.splice(blockEnd, 0, ...runLines); + return lines.join('\n'); +}; const addNewPodsTarget = (podfile, targetName) => { const targetLines = getTargetNameLines(targetName); let podFileLines = podfile.split('\n'); diff --git a/packages/expo-brownfield/plugin/build/ios/utils/project.js b/packages/expo-brownfield/plugin/build/ios/utils/project.js index ef1964611b2326..e85a15d5aaa1ed 100644 --- a/packages/expo-brownfield/plugin/build/ios/utils/project.js +++ b/packages/expo-brownfield/plugin/build/ios/utils/project.js @@ -97,6 +97,7 @@ const getCommonBuildSettings = (targetName, currentProjectVersion, bundleIdentif USER_SCRIPT_SANDBOXING: '"NO"', SKIP_INSTALL: '"NO"', ENABLE_MODULE_VERIFIER: '"NO"', + GCC_SYMBOLS_PRIVATE_EXTERN: '"YES"', }; }; const inferProjectName = (platformProjectRoot) => { diff --git a/packages/expo-brownfield/plugin/build/ios/utils/props.js b/packages/expo-brownfield/plugin/build/ios/utils/props.js index 3fb1c0ba1adbad..ffd5d2b9b82679 100644 --- a/packages/expo-brownfield/plugin/build/ios/utils/props.js +++ b/packages/expo-brownfield/plugin/build/ios/utils/props.js @@ -7,6 +7,7 @@ const getPluginConfig = (props, config) => { bundleIdentifier: getBundleIdentifier(props, config, targetName), targetName, buildReactNativeFromSource: props?.buildReactNativeFromSource ?? false, + multipleFrameworks: props?.multipleFrameworks ?? 
false, }; }; exports.getPluginConfig = getPluginConfig; diff --git a/packages/expo-brownfield/plugin/src/ios/plugins/withPodfilePlugin.ts b/packages/expo-brownfield/plugin/src/ios/plugins/withPodfilePlugin.ts index 5df4be04bdf9f3..dd24b498f22086 100644 --- a/packages/expo-brownfield/plugin/src/ios/plugins/withPodfilePlugin.ts +++ b/packages/expo-brownfield/plugin/src/ios/plugins/withPodfilePlugin.ts @@ -1,10 +1,16 @@ import { type ConfigPlugin, withPodfile } from 'expo/config-plugins'; import type { PluginConfig } from '../types'; -import { addNewPodsTarget, addPrebuiltSettings } from '../utils'; +import { addManglePlugin, addNewPodsTarget, addPrebuiltSettings } from '../utils'; const withPodfilePlugin: ConfigPlugin = (config, pluginConfig) => { return withPodfile(config, (config) => { + if (pluginConfig.multipleFrameworks) { + config.modResults.contents = addManglePlugin( + config.modResults.contents, + pluginConfig.targetName + ); + } config.modResults.contents = addNewPodsTarget( config.modResults.contents, pluginConfig.targetName diff --git a/packages/expo-brownfield/plugin/src/ios/plugins/withXcodeProjectPlugin.ts b/packages/expo-brownfield/plugin/src/ios/plugins/withXcodeProjectPlugin.ts index 4e94e47cb30a82..fa61ebad64f14e 100644 --- a/packages/expo-brownfield/plugin/src/ios/plugins/withXcodeProjectPlugin.ts +++ b/packages/expo-brownfield/plugin/src/ios/plugins/withXcodeProjectPlugin.ts @@ -51,8 +51,17 @@ const withXcodeProjectPlugin: ConfigPlugin = (config, pluginConfig 'ReactNativeDelegate.swift', ]; - // Create files from templates - templateFiles.forEach((templateFile) => createFileFromTemplate(templateFile, groupPath)); + // Per-target prefix is interpolated into `@objc(...)` annotations so the + // ObjC runtime sees a unique class name per inner-app framework. 
+ // Swift type names themselves stay unprefixed: each brownfield framework + // has a unique Swift module name, which is enough namespace isolation, and + // typealias-based unprefixing breaks linking under library-evolution mode + // (clients reference symbols by the typealias path, but the framework + // exports symbols under the underlying class name → undefined symbol). + const templateVars = { prefix: pluginConfig.targetName }; + templateFiles.forEach((templateFile) => + createFileFromTemplate(templateFile, groupPath, templateVars) + ); // Apply patch to ExpoAppDelegate.swift to make it compatible with the brownfield framework applyPatchToFile('ExpoAppDelegate.patch', path.join(groupPath, 'ExpoAppDelegate.swift')); diff --git a/packages/expo-brownfield/plugin/src/ios/types.ts b/packages/expo-brownfield/plugin/src/ios/types.ts index c1a0422d3c122e..a1d2ce6037eb03 100644 --- a/packages/expo-brownfield/plugin/src/ios/types.ts +++ b/packages/expo-brownfield/plugin/src/ios/types.ts @@ -2,6 +2,7 @@ export interface PluginConfig { bundleIdentifier: string; targetName: string; buildReactNativeFromSource: boolean; + multipleFrameworks: boolean; } export type IOSPluginProps = Partial; diff --git a/packages/expo-brownfield/plugin/src/ios/utils/podfile.ts b/packages/expo-brownfield/plugin/src/ios/utils/podfile.ts index 728621d9c53df0..9001ff90dea28d 100644 --- a/packages/expo-brownfield/plugin/src/ios/utils/podfile.ts +++ b/packages/expo-brownfield/plugin/src/ios/utils/podfile.ts @@ -18,6 +18,128 @@ const getPrebuiltSettingsLines = (): string[] => { end`.split('\n'); }; +const MANGLE_REQUIRE_MARKER = + "require File.join(File.dirname(`node --print \"require.resolve('expo-brownfield/package.json')\"`), 'scripts/ios/mangle')"; +const MANGLE_RUN_MARKER = 'ExpoBrownfield::Mangle.run!'; + +const getMangleRunLines = (targetName: string): string[] => { + return [ + ` ExpoBrownfield::Mangle.run!(installer, targets: ['${targetName}'], mangle_prefix: '${targetName}_')`, + ]; +}; + 
+/** + * Wire up expo-brownfield's bundled mangling logic in the Podfile. + * Replaces the third-party `cocoapods-mangle` gem so users don't need a + * Gemfile entry. Two insertions: + * 1. A `require` line near the top of the Podfile that loads + * `scripts/ios/mangle.rb` from the expo-brownfield npm package. + * 2. A `ExpoBrownfield::Mangle.run!(installer, ...)` call inside the + * `post_install` block (created if absent) that invokes the + * bundled Node worker to generate the mangling xcconfig. + * + * Both insertions are idempotent — re-running prebuild against an already + * patched Podfile is a no-op. + */ +export const addManglePlugin = (podfile: string, targetName: string): string => { + let result = addMangleRequire(podfile); + result = addMangleRunCall(result, targetName); + return result; +}; + +const addMangleRequire = (podfile: string): string => { + if (podfile.includes(MANGLE_REQUIRE_MARKER)) { + return podfile; + } + + const lines = podfile.split('\n'); + // Insert after the last existing `require ` line (typically the + // react_native_pods/autolinking requires) so we sit alongside them. + const lastRequireIndex = lines.reduce((acc, line, index) => { + if (line.trimStart().startsWith('require ')) { + return index; + } + return acc; + }, -1); + + const insertAt = lastRequireIndex >= 0 ? lastRequireIndex + 1 : 0; + lines.splice(insertAt, 0, MANGLE_REQUIRE_MARKER); + return lines.join('\n'); +}; + +/** + * Count Ruby block-opening keywords on `line` (after stripping `#` comments). + * Recognizes `do` (with or without `|args|`) anywhere on the line, plus the + * keywords `if/unless/while/until/case/begin/def/class/module/for` *only* + * when they appear at the start of the line — that excludes the trailing- + * modifier forms (`puts x if y`, `… while cond`) which don't take a matching + * `end`. 
Strings and regexes containing the literal text `do`/`end` would + * still be miscounted; Podfiles in practice don't put block keywords inside + * string literals, so this is good enough for our scope. + */ +const countBlockOpens = (line: string): number => { + const stripped = line.replace(/#.*$/, ''); + let opens = 0; + const doMatches = stripped.match(/\bdo\b/g); + if (doMatches) { + opens += doMatches.length; + } + if (/^\s*(?:if|unless|while|until|case|begin|def|class|module|for)\b/.test(stripped)) { + opens += 1; + } + return opens; +}; + +const countBlockCloses = (line: string): number => { + const stripped = line.replace(/#.*$/, ''); + const matches = stripped.match(/\bend\b/g); + return matches ? matches.length : 0; +}; + +/** + * Find the matching `end` for the block whose opener is on `openerIndex`. + * Walks forward tracking nesting depth so nested `do…end` (or `if…end`, + * `case…end`, …) inside the block don't get mistaken for the outer block's + * closer. Returns -1 if the input is unbalanced. + */ +const findMatchingEnd = (lines: string[], openerIndex: number): number => { + let depth = 0; + for (let i = openerIndex; i < lines.length; i++) { + const line = lines[i] ?? ''; + depth += countBlockOpens(line); + depth -= countBlockCloses(line); + if (depth === 0 && i > openerIndex) { + return i; + } + } + return -1; +}; + +const addMangleRunCall = (podfile: string, targetName: string): string => { + if (podfile.includes(MANGLE_RUN_MARKER)) { + return podfile; + } + + const runLines = getMangleRunLines(targetName); + const lines = podfile.split('\n'); + + const postInstallIndex = lines.findIndex((line) => line.includes('post_install do |installer|')); + + if (postInstallIndex === -1) { + // No post_install block exists yet — append one at the bottom. 
+ lines.push('', 'post_install do |installer|', ...runLines, 'end'); + return lines.join('\n'); + } + + const blockEnd = findMatchingEnd(lines, postInstallIndex); + if (blockEnd === -1) { + return podfile; + } + + lines.splice(blockEnd, 0, ...runLines); + return lines.join('\n'); +}; + export const addNewPodsTarget = (podfile: string, targetName: string): string => { const targetLines = getTargetNameLines(targetName); let podFileLines = podfile.split('\n'); diff --git a/packages/expo-brownfield/plugin/src/ios/utils/project.ts b/packages/expo-brownfield/plugin/src/ios/utils/project.ts index 99d65b820f2cf7..0835f3f1e6bac0 100644 --- a/packages/expo-brownfield/plugin/src/ios/utils/project.ts +++ b/packages/expo-brownfield/plugin/src/ios/utils/project.ts @@ -169,6 +169,7 @@ const getCommonBuildSettings = ( USER_SCRIPT_SANDBOXING: '"NO"', SKIP_INSTALL: '"NO"', ENABLE_MODULE_VERIFIER: '"NO"', + GCC_SYMBOLS_PRIVATE_EXTERN: '"YES"', }; }; diff --git a/packages/expo-brownfield/plugin/src/ios/utils/props.ts b/packages/expo-brownfield/plugin/src/ios/utils/props.ts index 6c2468eaf27eb5..64417780d7247e 100644 --- a/packages/expo-brownfield/plugin/src/ios/utils/props.ts +++ b/packages/expo-brownfield/plugin/src/ios/utils/props.ts @@ -9,6 +9,7 @@ export const getPluginConfig = (props: PluginProps, config: ExpoConfig): PluginC bundleIdentifier: getBundleIdentifier(props, config, targetName), targetName, buildReactNativeFromSource: props?.buildReactNativeFromSource ?? false, + multipleFrameworks: props?.multipleFrameworks ?? false, }; }; diff --git a/packages/expo-brownfield/scripts/ios/mangle.rb b/packages/expo-brownfield/scripts/ios/mangle.rb new file mode 100644 index 00000000000000..94fcf4fb0bba0d --- /dev/null +++ b/packages/expo-brownfield/scripts/ios/mangle.rb @@ -0,0 +1,140 @@ +# expo-brownfield: ObjC symbol mangling shim. +# +# Replaces the third-party `cocoapods-mangle` gem. 
Invoked from a Podfile's +# `post_install` block when `multipleFrameworks: true` is set on the +# expo-brownfield plugin config. The Ruby side is intentionally minimal — it +# gathers the install context, short-circuits when nothing has changed, and +# otherwise spawns the bundled Node worker that does the heavy lifting +# (xcodebuild + nm + xcconfig generation/patching). + +require 'json' +require 'digest' +require 'fileutils' +require 'shellwords' + +module ExpoBrownfield + module Mangle + NAME = 'expo-brownfield-mangle'.freeze + VERSION = '1'.freeze + MANGLED_SPECS_CHECKSUM_KEY = 'MANGLED_SPECS_CHECKSUM'.freeze + + # Public entry point. + # + # @param [Pod::Installer] installer + # The installer passed into the Podfile's `post_install do |installer|` block. + # @param [Hash] options + # @option options [Array] :targets + # Names of user targets whose dependency graph should be mangled. + # When empty/nil all umbrella targets are included. + # @option options [String] :mangle_prefix + # Prefix to prepend to mangled symbols. Defaults to `_`. + # @option options [String] :xcconfig_path + # Override path for the generated mangling xcconfig. Defaults to + # `/Target Support Files/expo-brownfield-mangle.xcconfig`. + def self.run!(installer, options = {}) + Pod::UI.titled_section 'Updating expo-brownfield mangling' do + targets = filter_umbrella_targets(installer, options[:targets]) + if targets.empty? 
+ Pod::UI.message '- No matching targets to mangle, skipping' + next + end + + prefix = options[:mangle_prefix] || default_mangle_prefix(installer, targets) + xcconfig_path = options[:xcconfig_path] || default_xcconfig_path(installer) + checksum = specs_checksum(targets) + + if up_to_date?(xcconfig_path, checksum) + Pod::UI.message '- Mangling config already up to date' + next + end + + context = { + podsProjectPath: installer.pods_project.path.to_s, + podTargetLabels: targets.flat_map { |t| t.pod_targets.map(&:label) }.uniq, + podXcconfigPaths: pod_xcconfig_paths(installer), + manglePrefix: prefix, + xcconfigPath: xcconfig_path, + specsChecksum: checksum, + } + + run_worker!(context) + end + end + + # @api private + def self.filter_umbrella_targets(installer, target_names) + all = installer.aggregate_targets + return all if target_names.nil? || target_names.empty? + all.select do |agg| + names = agg.user_targets.map(&:name) + (target_names & names).any? + end + end + + # @api private + def self.default_mangle_prefix(installer, targets) + project_path = targets.first.user_project.path + project_name = File.basename(project_path, '.xcodeproj') + "#{project_name.tr(' ', '_')}_" + rescue StandardError + 'ExpoBrownfield_' + end + + # @api private + def self.default_xcconfig_path(installer) + File.join(installer.sandbox.target_support_files_root, "#{NAME}.xcconfig") + end + + # @api private + def self.pod_xcconfig_paths(installer) + paths = [] + installer.pods_project.targets.each do |target| + target.build_configurations.each do |config| + ref = config.base_configuration_reference + next if ref.nil? 
+ paths << ref.real_path.to_s + end + end + paths.uniq + end + + # @api private + def self.specs_checksum(targets) + specs = targets.flat_map { |t| t.pod_targets.flat_map(&:specs) }.uniq + specs_summary = specs.map(&:checksum).join(',') + Digest::SHA1.hexdigest("#{NAME}=#{VERSION},#{specs_summary}") + end + + # @api private + def self.up_to_date?(xcconfig_path, expected_checksum) + return false unless File.exist?(xcconfig_path) + contents = File.read(xcconfig_path) + match = contents.match(/^#{MANGLED_SPECS_CHECKSUM_KEY}\s*=\s*(\S+)/) + match && match[1] == expected_checksum + end + + # @api private + def self.run_worker!(context) + worker_args = locate_worker + json = context.to_json + + command = [*worker_args, 'mangle', '--context-json', json] + Pod::UI.message '- Running expo-brownfield mangle worker' + ok = system(*command) + raise "expo-brownfield mangle worker failed (exit=#{$?.exitstatus})" unless ok + end + + # Locate the brownfield CLI entry point. Prefer `node /bin/cli.js` + # over `npx` to avoid surprise upgrades and to work reliably in offline + # CI environments. + # + # @api private + def self.locate_worker + package_json_path = `node --print "require.resolve('expo-brownfield/package.json')" 2>/dev/null`.strip + raise 'expo-brownfield package not found in node_modules. Install it with `npx expo install expo-brownfield`.' if package_json_path.empty? + cli = File.join(File.dirname(package_json_path), 'bin', 'cli.js') + raise "expo-brownfield CLI not found at #{cli}" unless File.exist?(cli) + ['node', cli] + end + end +end From 84af74c5753371a95728a2d058605c3c97618ea5 Mon Sep 17 00:00:00 2001 From: Phil Pluckthun Date: Tue, 5 May 2026 18:10:31 +0100 Subject: [PATCH 09/26] fix(babel-preset-expo): Add `resolveModule` output cache (#45405) # Summary Updates snapshots and adds a `resolveModule` cache to make benchmarks a little more stable. 
# Test Plan - Unit tests should pass # Checklist - [x] I added a `changelog.md` entry and rebuilt the package sources according to [this short guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting) - [ ] This diff will work correctly for `npx expo prebuild` & EAS Build (eg: updated a module plugin). - [ ] Conforms with the [Documentation Writing Style Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md) --- packages/babel-preset-expo/CHANGELOG.md | 1 + .../build/configs/expo.js.map | 2 +- .../babel-preset-expo/build/configs/react.js | 9 ++------ .../build/configs/react.js.map | 2 +- .../build/utils/resolveModule.js | 22 ++++++++++++++++--- .../build/utils/resolveModule.js.map | 2 +- .../src/__tests__/jsx-import.test.ts | 2 +- .../babel-preset-expo/src/configs/expo.ts | 2 +- .../babel-preset-expo/src/configs/react.ts | 9 ++------ .../src/utils/resolveModule.ts | 22 ++++++++++++++++--- 10 files changed, 48 insertions(+), 25 deletions(-) diff --git a/packages/babel-preset-expo/CHANGELOG.md b/packages/babel-preset-expo/CHANGELOG.md index 7458fe0ca1fdef..5220bbbf4c8486 100644 --- a/packages/babel-preset-expo/CHANGELOG.md +++ b/packages/babel-preset-expo/CHANGELOG.md @@ -27,6 +27,7 @@ - Update JSX (`react-jsx`) transform to skip deprecated `react-display-name` transform, only apply pure annotations to production builds, and skip legacy source/self transforms when the automatic runtime is used ([#45351](https://github.com/expo/expo/pull/45351) by [@kitten](https://github.com/kitten)) - Apply `@babel/plugin-proposal-decorators` lazily ([#45353](https://github.com/expo/expo/pull/45353) by [@kitten](https://github.com/kitten)) - Optimize vendored `warn-on-deep-rn-imports` plugin ([#45354](https://github.com/expo/expo/pull/45354) by [@kitten](https://github.com/kitten)) +- [Internal] Cache `resolveModule` results in case of Babel config reevaluations 
([#45405](https://github.com/expo/expo/pull/45405) by [@kitten](https://github.com/kitten)) ### ⚠️ Notices diff --git a/packages/babel-preset-expo/build/configs/expo.js.map b/packages/babel-preset-expo/build/configs/expo.js.map index 12e1f0b5f5273c..669dba45f34941 100644 --- a/packages/babel-preset-expo/build/configs/expo.js.map +++ b/packages/babel-preset-expo/build/configs/expo.js.map @@ -1 +1 @@ -{"version":3,"file":"expo.js","sourceRoot":"","sources":["../../src/configs/expo.ts"],"names":[],"mappings":";;AAGA,sFAAoF;AACpF,8FAA+F;AAC/F,wFAAkF;AAClF,sEAAsE;AACtE,0FAA+F;AAC/F,gEAA+D;AAC/D,8EAAyE;AACzE,wFAA8F;AAC9F,4EAA4E;AAC5E,sFAAgF;AAChF,8EAAyE;AACzE,kFAAgF;AAChF,8DAA0D;AAC1D,0DAAkE;AAGlE,MAAM,0BAA0B,GAAG,CAAC,wBAAwB,CAAC,CAAC;AA8B9D,MAAM,CAAC,OAAO,GAAG,UAAU,GAAc,EAAE,OAA0B;IACnE,MAAM,OAAO,GAAiB,EAAE,CAAC;IAEjC,qFAAqF;IACrF,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE,CAAC;QAC5B,OAAO,CAAC,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;IACnC,CAAC;IAED,qFAAqF;IACrF,MAAM,mBAAmB,GAAG,sBAAsB,CAAC,OAAO,CAAC,CAAC;IAC5D,IAAI,mBAAmB,IAAI,IAAI,EAAE,CAAC;QAChC,OAAO,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC;IACpC,CAAC;IAED,iDAAiD;IACjD,IAAI,OAAO,CAAC,MAAM,KAAK,QAAQ,EAAE,CAAC;QAChC,+FAA+F;QAC/F,iGAAiG;QACjG,sEAAsE;QACtE,OAAO,CAAC,IAAI,CAAC;YACX,OAAO,CAAC,4CAA4C,CAAC;YACrD,qGAAqG;YACrG,EAAE,KAAK,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,EAAE;SACnC,CAAC,CAAC;IACL,CAAC;IAED,MAAM,OAAO,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC;IAC/C,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,0BAA0B,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC;IAE7D,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;QACzB,gJAAgJ;QAChJ,qIAAqI;QACrI,OAAO,CAAC,IAAI,CAAC;YACX,OAAO,CAAC,0CAA0C,CAAC;YACnD;gBACE,QAAQ,EAAE,OAAO,CAAC,QAAQ;aAC3B;SACF,CAAC,CAAC;IACL,CAAC;IAED,mHAAmH;IACnH,uDAAuD;IACvD,2CAA2C;IAC3C,qCAAqC;IACrC,uEAAuE;IACvE,IAAI,OAAO,CAAC,0BAA0B,EAAE,CAAC;QACvC,OAAO,CAAC,IAAI,CAAC,mCAAiB,CAAC,CAAC;IAClC,CAAC;IAED,IAAI,OAAO,CAAC,QAAQ,KAAK,KAAK,EAAE,CAAC;QAC/B,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,+BAA+B,CAAC,CAAC,CAAC;IACzD,CAAC;IACD,6EAA6E;IAC7E,IAAI,OAAO,CAAC,OAAO,KAAK,SAAS,EAAE,CAAC;QAClC,OAAO,CAAC,IAAI,CAAC,sDAAwB,CAAC
,CAAC;IACzC,CAAC;IAED,IAAI,IAAA,yBAAS,EAAC,GAAG,EAAE,0BAA0B,CAAC,EAAE,CAAC;QAC/C,OAAO,CAAC,IAAI,CAAC,0CAAqB,CAAC,CAAC;QACpC,OAAO,CAAC,IAAI,CAAC,6CAAoB,CAAC,CAAC;QACnC,qFAAqF;QACrF,mEAAmE;QACnE,yDAAyD;QACzD,0EAA0E;QAC1E,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE,CAAC;YAC3B,OAAO,CAAC,IAAI,CAAC,oDAAuB,CAAC,CAAC;QACxC,CAAC;IACH,CAAC;IAED,OAAO,CAAC,IAAI,CAAC,wDAA2B,CAAC,CAAC;IAE1C,gHAAgH;IAChH,uDAAuD;IACvD,IAAI,OAAO,CAAC,aAAa,EAAE,CAAC;QAC1B,OAAO,CAAC,IAAI,CAAC,gDAAwB,CAAC,CAAC;QACvC,OAAO,CAAC,IAAI,CAAC,kEAAoC,CAAC,CAAC;IACrD,CAAC;SAAM,CAAC;QACN,8EAA8E;QAC9E,OAAO,CAAC,IAAI,CAAC,oDAAyB,CAAC,CAAC;IAC1C,CAAC;IAED,0IAA0I;IAC1I,OAAO,CAAC,IAAI,CAAC,mEAAkC,CAAC,CAAC;IAEjD,sGAAsG;IACtG,yHAAyH;IACzH,IAAI,IAAA,yBAAS,EAAC,GAAG,EAAE,2BAA2B,CAAC,EAAE,CAAC;QAChD,OAAO,CAAC,IAAI,CAAC,8BAAa,CAAC,CAAC;IAC9B,CAAC;IAED,MAAM,kBAAkB,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC;IAC1D,IAAI,kBAAkB,IAAI,IAAI,EAAE,CAAC;QAC/B,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;IACnC,CAAC;IAED,IAAI,OAAO,CAAC,4BAA4B,EAAE,CAAC;QACzC,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,mCAAmC,CAAC,CAAC,oBAAoB,CAAC,CAAC,CAAC;IACpF,CAAC;IAED,MAAM,kBAAkB,GAAG,OAAO,CAAC,mBAAmB,KAAK,KAAK,CAAC;IACjE,OAAO,CAAC,IAAI,CAAC,IAAA,mEAAoC,EAAC,kBAAkB,KAAK,IAAI,CAAC,CAAC,CAAC;IAEhF,eAAe;IACf,IAAI,OAAO,CAAC,UAAU,KAAK,KAAK,EAAE,CAAC;QACjC,OAAO,CAAC,IAAI,CAAC,CAAC,6CAAoB,EAAE,OAAO,CAAC,UAAU,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IAC/E,CAAC;IAED,6EAA6E;IAC7E,IAAI,OAAO,CAAC,QAAQ,KAAK,KAAK,IAAI,OAAO,CAAC,UAAU,KAAK,KAAK,EAAE,CAAC;QAC/D,MAAM,kBAAkB,GAAG,IAAA,6BAAa,EAAC,GAAG,EAAE,8BAA8B,CAAC,CAAC;QAC9E,IAAI,kBAAkB,EAAE,CAAC;YACvB,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAC9C,CAAC;IACH,CAAC;SAAM,IAAI,OAAO,CAAC,UAAU,KAAK,KAAK,EAAE,CAAC;QACxC,MAAM,oBAAoB,GAAG,IAAA,6BAAa,EAAC,GAAG,EAAE,gCAAgC,CAAC,CAAC;QAClF,IAAI,oBAAoB,EAAE,CAAC;YACzB,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC;QAChD,CAAC;IACH,CAAC;IAED,IAAI,OAAO,CAAC,MAAM,KAAK,KAAK,EAAE,CAAC;QAC7B,MAAM,gBAAgB,GAAG,IAAA,6BAAa,EAAC,GAAG,EAAE,uBAAuB,CAAC,CAAC;QACrE,IAAI,gBAAgB,EAAE,CAAC;YACrB,
OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC;QAC5C,CAAC;IACH,CAAC;IAED,OAAO;QACL,OAAO,EAAE,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;QAClC,SAAS,EAAE,6BAA6B,CAAC,OAAO,CAAC;QACjD,OAAO;KACR,CAAC;AACJ,CAAC,CAAC;AAEF,SAAS,sBAAsB,CAAC,OAO/B;IACC,IACE,CAAC,OAAO,CAAC,sBAAsB;QAC/B,oFAAoF;QACpF,OAAO,CAAC,YAAY;QACpB,mGAAmG;QACnG,uEAAuE;QACvE,OAAO,CAAC,WAAW;QACnB,kEAAkE;QAClE,OAAO,CAAC,aAAa,KAAK,KAAK,EAC/B,CAAC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,MAAM,oBAAoB,GAAG,OAAO,CAAC,aAAa,CAAC;IACnD,MAAM,6BAA6B,GAAG,IAAI,GAAG,CAAC;QAC5C,mGAAmG;QACnG,QAAQ;QACR,wFAAwF;QACxF,OAAO;QACP,wIAAwI;QACxI,mIAAmI;QACnI,aAAa;QACb,eAAe;QACf,iGAAiG;QACjG,GAAG,CAAC,oBAAoB,EAAE,sBAAsB,IAAI,EAAE,CAAC;KACxD,CAAC,CAAC;IAEH,OAAO;QACL,OAAO,CAAC,6BAA6B,CAAC;QACtC;YACE,MAAM,EAAE,IAAI;YACZ,WAAW,EAAE;gBACX,mCAAmC,EAAE,CAAC,OAAO,CAAC,YAAY;gBAC1D,GAAG,CAAC,oBAAoB,EAAE,WAAW,IAAI,EAAE,CAAC;aAC7C;YACD,cAAc,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM;YAClD,GAAG,oBAAoB;YACvB,wIAAwI;YACxI,sBAAsB,EAAE,CAAC,GAAG,6BAA6B,CAAC;SAC3D;KACF,CAAC;AACJ,CAAC;AAED,SAAS,qBAAqB,CAAC,OAG9B;IACC,IACE,CAAC,OAAO,CAAC,sBAAsB;QAC/B,CAAC,CAAC,OAAO,CAAC,oBAAoB,IAAI,OAAO,CAAC,sBAAsB,KAAK,KAAK,CAAC,EAC3E,CAAC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;SAAM,CAAC;QACN,OAAO;YACL,OAAO,CAAC,qBAAqB,CAAC;YAC9B;gBACE,gGAAgG;gBAChG,YAAY,EAAE,OAAO,CAAC,sBAAsB,KAAK,IAAI;aACtD;SACF,CAAC;IACJ,CAAC;AACH,CAAC;AAED,SAAS,gBAAgB;IACvB,OAAO,CAAC,OAAO,CAAC,oCAAoC,CAAC,EAAE,EAAE,EAAE,sBAAsB,CAAC,CAAC;AACrF,CAAC;AAED,SAAS,6BAA6B,CAAC,OAGtC;IACC,IAAI,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,yBAAyB,EAAE,CAAC;QACxD,OAAO,EAAE,CAAC;IACZ,CAAC;IACD,OAAO;QACL;YACE,IAAI,EAAE,CAAC,QAAmC,EAAE,EAAE,CAC5C,CAAC,CAAC,QAAQ,IAAI,CAAC,0BAA0B,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;YACjF,OAAO,EAAE,CAAC,CAAC,OAAO,CAAC,oCAAoC,CAAC,CAAC,CAAiB;SAC3E;KACF,CAAC;AACJ,CAAC;AAED,SAAS,cAAc,CAAC,OAIvB;IACC,OAAO;QACL,OAAO,CAAC,SAAS,CAAC;QAClB;YACE,GAAG,EAAE,OAAO,CAAC,KAAK;YAClB,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;SACZ;KAC/B,CAAC;AACJ,CAAC;AAE
D,SAAS,qBAAqB,CAC5B,OAA0B;IAE1B,MAAM,OAAO,GAAwD;QACnE,qBAAqB,EAAE,OAAO,CAAC,QAAQ;QACvC,2DAA2D;QAC3D,yBAAyB,EAAE,CAAC,CAAC,OAAO,CAAC,WAAW;KACjD,CAAC;IAEF,qEAAqE;IACrE,0GAA0G;IAC1G,qHAAqH;IACrH,MAAM,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,IAAI,OAAO,CAAC,WAAW,CAAC;IAC7E,IAAI,kBAAkB,KAAK,KAAK,EAAE,CAAC;QACjC,0GAA0G;QAC1G,OAAO,CAAC,eAAe,CAAC,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC;IAC1E,CAAC;IAED,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;QACzB,OAAO,CAAC,sBAAsB,CAAC,GAAG,YAAY,CAAC;QAC/C,OAAO,CAAC,SAAS,CAAC,GAAG,KAAK,CAAC;QAC3B,OAAO,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC,QAAQ,CAAC;IAC5C,CAAC;IAED,IAAI,OAAO,CAAC,GAAG,CAAC,QAAQ,KAAK,MAAM,EAAE,CAAC;QACpC,OAAO,CAAC,2BAA2B,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC;IACzD,CAAC;IAED,OAAO,OAAO,CAAC;AACjB,CAAC"} \ No newline at end of file +{"version":3,"file":"expo.js","sourceRoot":"","sources":["../../src/configs/expo.ts"],"names":[],"mappings":";;AAIA,sFAAoF;AACpF,8FAA+F;AAC/F,wFAAkF;AAClF,sEAAsE;AACtE,0FAA+F;AAC/F,gEAA+D;AAC/D,8EAAyE;AACzE,wFAA8F;AAC9F,4EAA4E;AAC5E,sFAAgF;AAChF,8EAAyE;AACzE,kFAAgF;AAChF,8DAA0D;AAC1D,0DAAkE;AAElE,MAAM,0BAA0B,GAAG,CAAC,wBAAwB,CAAC,CAAC;AA8B9D,MAAM,CAAC,OAAO,GAAG,UAAU,GAAc,EAAE,OAA0B;IACnE,MAAM,OAAO,GAAiB,EAAE,CAAC;IAEjC,qFAAqF;IACrF,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE,CAAC;QAC5B,OAAO,CAAC,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;IACnC,CAAC;IAED,qFAAqF;IACrF,MAAM,mBAAmB,GAAG,sBAAsB,CAAC,OAAO,CAAC,CAAC;IAC5D,IAAI,mBAAmB,IAAI,IAAI,EAAE,CAAC;QAChC,OAAO,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC;IACpC,CAAC;IAED,iDAAiD;IACjD,IAAI,OAAO,CAAC,MAAM,KAAK,QAAQ,EAAE,CAAC;QAChC,+FAA+F;QAC/F,iGAAiG;QACjG,sEAAsE;QACtE,OAAO,CAAC,IAAI,CAAC;YACX,OAAO,CAAC,4CAA4C,CAAC;YACrD,qGAAqG;YACrG,EAAE,KAAK,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,EAAE;SACnC,CAAC,CAAC;IACL,CAAC;IAED,MAAM,OAAO,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC;IAC/C,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,0BAA0B,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC;IAE7D,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;QACzB,gJAAgJ;QAChJ,qIAAqI;QACrI,OAAO,CAAC,IAAI,CAAC;YACX,OAAO,CAAC,0CAA0C,CAAC;YACnD;gBACE,QAAQ,EAAE,OAAO,CAAC,QAAQ;aAC3B;SACF,CAAC,CAAC;IACL,CAAC;IA
ED,mHAAmH;IACnH,uDAAuD;IACvD,2CAA2C;IAC3C,qCAAqC;IACrC,uEAAuE;IACvE,IAAI,OAAO,CAAC,0BAA0B,EAAE,CAAC;QACvC,OAAO,CAAC,IAAI,CAAC,mCAAiB,CAAC,CAAC;IAClC,CAAC;IAED,IAAI,OAAO,CAAC,QAAQ,KAAK,KAAK,EAAE,CAAC;QAC/B,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,+BAA+B,CAAC,CAAC,CAAC;IACzD,CAAC;IACD,6EAA6E;IAC7E,IAAI,OAAO,CAAC,OAAO,KAAK,SAAS,EAAE,CAAC;QAClC,OAAO,CAAC,IAAI,CAAC,sDAAwB,CAAC,CAAC;IACzC,CAAC;IAED,IAAI,IAAA,yBAAS,EAAC,GAAG,EAAE,0BAA0B,CAAC,EAAE,CAAC;QAC/C,OAAO,CAAC,IAAI,CAAC,0CAAqB,CAAC,CAAC;QACpC,OAAO,CAAC,IAAI,CAAC,6CAAoB,CAAC,CAAC;QACnC,qFAAqF;QACrF,mEAAmE;QACnE,yDAAyD;QACzD,0EAA0E;QAC1E,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE,CAAC;YAC3B,OAAO,CAAC,IAAI,CAAC,oDAAuB,CAAC,CAAC;QACxC,CAAC;IACH,CAAC;IAED,OAAO,CAAC,IAAI,CAAC,wDAA2B,CAAC,CAAC;IAE1C,gHAAgH;IAChH,uDAAuD;IACvD,IAAI,OAAO,CAAC,aAAa,EAAE,CAAC;QAC1B,OAAO,CAAC,IAAI,CAAC,gDAAwB,CAAC,CAAC;QACvC,OAAO,CAAC,IAAI,CAAC,kEAAoC,CAAC,CAAC;IACrD,CAAC;SAAM,CAAC;QACN,8EAA8E;QAC9E,OAAO,CAAC,IAAI,CAAC,oDAAyB,CAAC,CAAC;IAC1C,CAAC;IAED,0IAA0I;IAC1I,OAAO,CAAC,IAAI,CAAC,mEAAkC,CAAC,CAAC;IAEjD,sGAAsG;IACtG,yHAAyH;IACzH,IAAI,IAAA,yBAAS,EAAC,GAAG,EAAE,2BAA2B,CAAC,EAAE,CAAC;QAChD,OAAO,CAAC,IAAI,CAAC,8BAAa,CAAC,CAAC;IAC9B,CAAC;IAED,MAAM,kBAAkB,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC;IAC1D,IAAI,kBAAkB,IAAI,IAAI,EAAE,CAAC;QAC/B,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;IACnC,CAAC;IAED,IAAI,OAAO,CAAC,4BAA4B,EAAE,CAAC;QACzC,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,mCAAmC,CAAC,CAAC,oBAAoB,CAAC,CAAC,CAAC;IACpF,CAAC;IAED,MAAM,kBAAkB,GAAG,OAAO,CAAC,mBAAmB,KAAK,KAAK,CAAC;IACjE,OAAO,CAAC,IAAI,CAAC,IAAA,mEAAoC,EAAC,kBAAkB,KAAK,IAAI,CAAC,CAAC,CAAC;IAEhF,eAAe;IACf,IAAI,OAAO,CAAC,UAAU,KAAK,KAAK,EAAE,CAAC;QACjC,OAAO,CAAC,IAAI,CAAC,CAAC,6CAAoB,EAAE,OAAO,CAAC,UAAU,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IAC/E,CAAC;IAED,6EAA6E;IAC7E,IAAI,OAAO,CAAC,QAAQ,KAAK,KAAK,IAAI,OAAO,CAAC,UAAU,KAAK,KAAK,EAAE,CAAC;QAC/D,MAAM,kBAAkB,GAAG,IAAA,6BAAa,EAAC,GAAG,EAAE,8BAA8B,CAAC,CAAC;QAC9E,IAAI,kBAAkB,EAAE,CAAC;YACvB,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAC9C,CAAC;IACH,CAAC;SAAM,IAAI,
OAAO,CAAC,UAAU,KAAK,KAAK,EAAE,CAAC;QACxC,MAAM,oBAAoB,GAAG,IAAA,6BAAa,EAAC,GAAG,EAAE,gCAAgC,CAAC,CAAC;QAClF,IAAI,oBAAoB,EAAE,CAAC;YACzB,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC;QAChD,CAAC;IACH,CAAC;IAED,IAAI,OAAO,CAAC,MAAM,KAAK,KAAK,EAAE,CAAC;QAC7B,MAAM,gBAAgB,GAAG,IAAA,6BAAa,EAAC,GAAG,EAAE,uBAAuB,CAAC,CAAC;QACrE,IAAI,gBAAgB,EAAE,CAAC;YACrB,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC;QAC5C,CAAC;IACH,CAAC;IAED,OAAO;QACL,OAAO,EAAE,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;QAClC,SAAS,EAAE,6BAA6B,CAAC,OAAO,CAAC;QACjD,OAAO;KACR,CAAC;AACJ,CAAC,CAAC;AAEF,SAAS,sBAAsB,CAAC,OAO/B;IACC,IACE,CAAC,OAAO,CAAC,sBAAsB;QAC/B,oFAAoF;QACpF,OAAO,CAAC,YAAY;QACpB,mGAAmG;QACnG,uEAAuE;QACvE,OAAO,CAAC,WAAW;QACnB,kEAAkE;QAClE,OAAO,CAAC,aAAa,KAAK,KAAK,EAC/B,CAAC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,MAAM,oBAAoB,GAAG,OAAO,CAAC,aAAa,CAAC;IACnD,MAAM,6BAA6B,GAAG,IAAI,GAAG,CAAC;QAC5C,mGAAmG;QACnG,QAAQ;QACR,wFAAwF;QACxF,OAAO;QACP,wIAAwI;QACxI,mIAAmI;QACnI,aAAa;QACb,eAAe;QACf,iGAAiG;QACjG,GAAG,CAAC,oBAAoB,EAAE,sBAAsB,IAAI,EAAE,CAAC;KACxD,CAAC,CAAC;IAEH,OAAO;QACL,OAAO,CAAC,6BAA6B,CAAC;QACtC;YACE,MAAM,EAAE,IAAI;YACZ,WAAW,EAAE;gBACX,mCAAmC,EAAE,CAAC,OAAO,CAAC,YAAY;gBAC1D,GAAG,CAAC,oBAAoB,EAAE,WAAW,IAAI,EAAE,CAAC;aAC7C;YACD,cAAc,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM;YAClD,GAAG,oBAAoB;YACvB,wIAAwI;YACxI,sBAAsB,EAAE,CAAC,GAAG,6BAA6B,CAAC;SAC3D;KACF,CAAC;AACJ,CAAC;AAED,SAAS,qBAAqB,CAAC,OAG9B;IACC,IACE,CAAC,OAAO,CAAC,sBAAsB;QAC/B,CAAC,CAAC,OAAO,CAAC,oBAAoB,IAAI,OAAO,CAAC,sBAAsB,KAAK,KAAK,CAAC,EAC3E,CAAC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;SAAM,CAAC;QACN,OAAO;YACL,OAAO,CAAC,qBAAqB,CAAC;YAC9B;gBACE,gGAAgG;gBAChG,YAAY,EAAE,OAAO,CAAC,sBAAsB,KAAK,IAAI;aACtD;SACF,CAAC;IACJ,CAAC;AACH,CAAC;AAED,SAAS,gBAAgB;IACvB,OAAO,CAAC,OAAO,CAAC,oCAAoC,CAAC,EAAE,EAAE,EAAE,sBAAsB,CAAC,CAAC;AACrF,CAAC;AAED,SAAS,6BAA6B,CAAC,OAGtC;IACC,IAAI,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,yBAAyB,EAAE,CAAC;QACxD,OAAO,EAAE,CAAC;IACZ,CAAC;IACD,OAAO;QACL;YACE,IAAI,EAAE,CAAC,QAAmC,EAAE,EAAE,CAC5C,CAAC,CAAC,QAAQ,IA
AI,CAAC,0BAA0B,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;YACjF,OAAO,EAAE,CAAC,CAAC,OAAO,CAAC,oCAAoC,CAAC,CAAC,CAAiB;SAC3E;KACF,CAAC;AACJ,CAAC;AAED,SAAS,cAAc,CAAC,OAIvB;IACC,OAAO;QACL,OAAO,CAAC,SAAS,CAAC;QAClB;YACE,GAAG,EAAE,OAAO,CAAC,KAAK;YAClB,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;SACZ;KAC/B,CAAC;AACJ,CAAC;AAED,SAAS,qBAAqB,CAC5B,OAA0B;IAE1B,MAAM,OAAO,GAAwD;QACnE,qBAAqB,EAAE,OAAO,CAAC,QAAQ;QACvC,2DAA2D;QAC3D,yBAAyB,EAAE,CAAC,CAAC,OAAO,CAAC,WAAW;KACjD,CAAC;IAEF,qEAAqE;IACrE,0GAA0G;IAC1G,qHAAqH;IACrH,MAAM,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,IAAI,OAAO,CAAC,WAAW,CAAC;IAC7E,IAAI,kBAAkB,KAAK,KAAK,EAAE,CAAC;QACjC,0GAA0G;QAC1G,OAAO,CAAC,eAAe,CAAC,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC;IAC1E,CAAC;IAED,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;QACzB,OAAO,CAAC,sBAAsB,CAAC,GAAG,YAAY,CAAC;QAC/C,OAAO,CAAC,SAAS,CAAC,GAAG,KAAK,CAAC;QAC3B,OAAO,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC,QAAQ,CAAC;IAC5C,CAAC;IAED,IAAI,OAAO,CAAC,GAAG,CAAC,QAAQ,KAAK,MAAM,EAAE,CAAC;QACpC,OAAO,CAAC,2BAA2B,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC;IACzD,CAAC;IAED,OAAO,OAAO,CAAC;AACjB,CAAC"} \ No newline at end of file diff --git a/packages/babel-preset-expo/build/configs/react.js b/packages/babel-preset-expo/build/configs/react.js index b41ff24bda7f6a..060caf97111fec 100644 --- a/packages/babel-preset-expo/build/configs/react.js +++ b/packages/babel-preset-expo/build/configs/react.js @@ -5,10 +5,7 @@ module.exports = function (_api, options) { const plugins = []; if (runtime === 'classic' && options.dev) { // NOTE(@kitten): runtime 'classic' is typically not needed but preserved for legacy cases (deprecated) - plugins.push([ - require('@babel/plugin-transform-react-jsx-development'), - { runtime }, - ]); + plugins.push([require('@babel/plugin-transform-react-jsx-development'), { runtime }]); } else { plugins.push([ @@ -23,9 +20,7 @@ module.exports = function (_api, options) { ]); } if (!options.dev) { - plugins.push([ - 
require('@babel/plugin-transform-react-pure-annotations') - ]); + plugins.push([require('@babel/plugin-transform-react-pure-annotations')]); } return { comments: false, diff --git a/packages/babel-preset-expo/build/configs/react.js.map b/packages/babel-preset-expo/build/configs/react.js.map index a7633693b85989..39994e4cada944 100644 --- a/packages/babel-preset-expo/build/configs/react.js.map +++ b/packages/babel-preset-expo/build/configs/react.js.map @@ -1 +1 @@ -{"version":3,"file":"react.js","sourceRoot":"","sources":["../../src/configs/react.ts"],"names":[],"mappings":";;AAQA,MAAM,CAAC,OAAO,GAAG,UAAU,IAAe,EAAE,OAA2B;IACrE,MAAM,OAAO,GAAG,OAAO,CAAC,UAAU,IAAI,WAAW,CAAC;IAClD,MAAM,OAAO,GAAiB,EAAE,CAAC;IAEjC,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;QACzC,uGAAuG;QACvG,OAAO,CAAC,IAAI,CAAC;YACX,OAAO,CAAC,+CAA+C,CAAC;YACxD,EAAE,OAAO,EAAE;SACZ,CAAC,CAAC;IACL,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,IAAI,CAAC;YACX,OAAO,CAAC,mCAAmC,CAAC;YAC5C;gBACE,IAAI,EAAE,CAAC,OAAO,CAAC,GAAG;gBAClB,OAAO;gBACP,GAAG,CAAC,OAAO,KAAK,SAAS,IAAI;oBAC3B,YAAY,EAAE,OAAO,CAAC,eAAe,IAAI,OAAO;iBACjD,CAAC;aACH;SACF,CAAC,CAAC;IACL,CAAC;IAED,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC;QACjB,OAAO,CAAC,IAAI,CAAC;YACX,OAAO,CAAC,gDAAgD,CAAC;SAC1D,CAAC,CAAC;IACL,CAAC;IAED,OAAO;QACL,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,IAAI;QACb,OAAO;KACR,CAAC;AACJ,CAAC,CAAC"} \ No newline at end of file 
+{"version":3,"file":"react.js","sourceRoot":"","sources":["../../src/configs/react.ts"],"names":[],"mappings":";;AAQA,MAAM,CAAC,OAAO,GAAG,UAAU,IAAe,EAAE,OAA2B;IACrE,MAAM,OAAO,GAAG,OAAO,CAAC,UAAU,IAAI,WAAW,CAAC;IAClD,MAAM,OAAO,GAAiB,EAAE,CAAC;IAEjC,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;QACzC,uGAAuG;QACvG,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,+CAA+C,CAAC,EAAE,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;IACxF,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,IAAI,CAAC;YACX,OAAO,CAAC,mCAAmC,CAAC;YAC5C;gBACE,IAAI,EAAE,CAAC,OAAO,CAAC,GAAG;gBAClB,OAAO;gBACP,GAAG,CAAC,OAAO,KAAK,SAAS,IAAI;oBAC3B,YAAY,EAAE,OAAO,CAAC,eAAe,IAAI,OAAO;iBACjD,CAAC;aACH;SACF,CAAC,CAAC;IACL,CAAC;IAED,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC;QACjB,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,gDAAgD,CAAC,CAAC,CAAC,CAAC;IAC5E,CAAC;IAED,OAAO;QACL,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,IAAI;QACb,OAAO;KACR,CAAC;AACJ,CAAC,CAAC"} \ No newline at end of file diff --git a/packages/babel-preset-expo/build/utils/resolveModule.js b/packages/babel-preset-expo/build/utils/resolveModule.js index 4299537d963df5..5e50fcb68405f4 100644 --- a/packages/babel-preset-expo/build/utils/resolveModule.js +++ b/packages/babel-preset-expo/build/utils/resolveModule.js @@ -3,19 +3,35 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.resolveModule = resolveModule; exports.hasModule = hasModule; const common_1 = require("../common"); +let _prevPossibleProjectRoot = null; +let _cache = Object.create(null); function resolveModule(api, id) { const possibleProjectRoot = api.caller(common_1.getPossibleProjectRoot) ?? 
process.cwd(); + if (possibleProjectRoot !== _prevPossibleProjectRoot) { + _prevPossibleProjectRoot = possibleProjectRoot; + _cache = Object.create(null); + } + let resolved = _cache[id]; + if (resolved !== undefined) { + return resolved; + } try { - return require.resolve(id, { + resolved = require.resolve(id, { paths: [possibleProjectRoot, __dirname], }); } catch (error) { if (error.code === 'MODULE_NOT_FOUND' && error.message.includes(id)) { - return null; + resolved = null; } - throw error; + else { + throw error; + } + } + finally { + _cache[id] = resolved; } + return resolved; } function hasModule(api, id) { try { diff --git a/packages/babel-preset-expo/build/utils/resolveModule.js.map b/packages/babel-preset-expo/build/utils/resolveModule.js.map index 0d394f2e3b152b..a6cc0a024a6875 100644 --- a/packages/babel-preset-expo/build/utils/resolveModule.js.map +++ b/packages/babel-preset-expo/build/utils/resolveModule.js.map @@ -1 +1 @@ -{"version":3,"file":"resolveModule.js","sourceRoot":"","sources":["../../src/utils/resolveModule.ts"],"names":[],"mappings":";;AAIA,sCAYC;AAED,8BAOC;AAvBD,sCAAmD;AAEnD,SAAgB,aAAa,CAAC,GAAc,EAAE,EAAU;IACtD,MAAM,mBAAmB,GAAG,GAAG,CAAC,MAAM,CAAC,+BAAsB,CAAC,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;IAChF,IAAI,CAAC;QACH,OAAO,OAAO,CAAC,OAAO,CAAC,EAAE,EAAE;YACzB,KAAK,EAAE,CAAC,mBAAmB,EAAE,SAAS,CAAC;SACxC,CAAC,CAAC;IACL,CAAC;IAAC,OAAO,KAAU,EAAE,CAAC;QACpB,IAAI,KAAK,CAAC,IAAI,KAAK,kBAAkB,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC;YACpE,OAAO,IAAI,CAAC;QACd,CAAC;QACD,MAAM,KAAK,CAAC;IACd,CAAC;AACH,CAAC;AAED,SAAgB,SAAS,CAAC,GAAc,EAAE,EAAU;IAClD,IAAI,CAAC;QACH,OAAO,aAAa,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,IAAI,CAAC;IACxC,CAAC;IAAC,MAAM,CAAC;QACP,yDAAyD;QACzD,OAAO,KAAK,CAAC;IACf,CAAC;AACH,CAAC"} \ No newline at end of file 
+{"version":3,"file":"resolveModule.js","sourceRoot":"","sources":["../../src/utils/resolveModule.ts"],"names":[],"mappings":";;AAOA,sCAyBC;AAED,8BAOC;AAvCD,sCAAmD;AAEnD,IAAI,wBAAwB,GAAkB,IAAI,CAAC;AACnD,IAAI,MAAM,GAA8C,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;AAE5E,SAAgB,aAAa,CAAC,GAAc,EAAE,EAAU;IACtD,MAAM,mBAAmB,GAAG,GAAG,CAAC,MAAM,CAAC,+BAAsB,CAAC,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;IAChF,IAAI,mBAAmB,KAAK,wBAAwB,EAAE,CAAC;QACrD,wBAAwB,GAAG,mBAAmB,CAAC;QAC/C,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;IAC/B,CAAC;IAED,IAAI,QAAQ,GAAG,MAAM,CAAC,EAAE,CAAC,CAAC;IAC1B,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;QAC3B,OAAO,QAAQ,CAAC;IAClB,CAAC;IACD,IAAI,CAAC;QACH,QAAQ,GAAG,OAAO,CAAC,OAAO,CAAC,EAAE,EAAE;YAC7B,KAAK,EAAE,CAAC,mBAAmB,EAAE,SAAS,CAAC;SACxC,CAAC,CAAC;IACL,CAAC;IAAC,OAAO,KAAU,EAAE,CAAC;QACpB,IAAI,KAAK,CAAC,IAAI,KAAK,kBAAkB,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC;YACpE,QAAQ,GAAG,IAAI,CAAC;QAClB,CAAC;aAAM,CAAC;YACN,MAAM,KAAK,CAAC;QACd,CAAC;IACH,CAAC;YAAS,CAAC;QACT,MAAM,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC;IACxB,CAAC;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAgB,SAAS,CAAC,GAAc,EAAE,EAAU;IAClD,IAAI,CAAC;QACH,OAAO,aAAa,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,IAAI,CAAC;IACxC,CAAC;IAAC,MAAM,CAAC;QACP,yDAAyD;QACzD,OAAO,KAAK,CAAC;IACf,CAAC;AACH,CAAC"} \ No newline at end of file diff --git a/packages/babel-preset-expo/src/__tests__/jsx-import.test.ts b/packages/babel-preset-expo/src/__tests__/jsx-import.test.ts index 606daa7ba08536..7d74c6981f13ac 100644 --- a/packages/babel-preset-expo/src/__tests__/jsx-import.test.ts +++ b/packages/babel-preset-expo/src/__tests__/jsx-import.test.ts @@ -78,7 +78,7 @@ it(`transforms React display name`, () => { `; expect(babel.transform(sourceCode, options)!.code).toMatchInlineSnapshot(` " - var bar = createReactClass({ displayName: "bar" });" + var bar = createReactClass({});" `); }); diff --git a/packages/babel-preset-expo/src/configs/expo.ts b/packages/babel-preset-expo/src/configs/expo.ts index 9714f5c07952ca..088efe988a748f 100644 --- 
a/packages/babel-preset-expo/src/configs/expo.ts +++ b/packages/babel-preset-expo/src/configs/expo.ts @@ -1,6 +1,7 @@ import type { ConfigAPI, PluginItem } from '@babel/core'; import type { PluginOptions as ReactCompilerOptions } from 'babel-plugin-react-compiler'; +import { ReactConfigOptions } from './react'; import { reactClientReferencesPlugin } from '../plugins/client-module-proxy-plugin'; import { environmentRestrictedImportsPlugin } from '../plugins/environment-restricted-imports'; import { expoInlineManifestPlugin } from '../plugins/expo-inline-manifest-plugin'; @@ -15,7 +16,6 @@ import { serverMetadataPlugin } from '../plugins/server-metadata-plugin'; import { expoUseDomDirectivePlugin } from '../plugins/use-dom-directive-plugin'; import { widgetsPlugin } from '../plugins/widgets-plugin'; import { hasModule, resolveModule } from '../utils/resolveModule'; -import { ReactConfigOptions } from './react'; const EXCLUDED_FIRST_PARTY_PATHS = [/[/\\]node_modules[/\\]/]; diff --git a/packages/babel-preset-expo/src/configs/react.ts b/packages/babel-preset-expo/src/configs/react.ts index 0a02ae7e5812a7..d8b81810c895a9 100644 --- a/packages/babel-preset-expo/src/configs/react.ts +++ b/packages/babel-preset-expo/src/configs/react.ts @@ -12,10 +12,7 @@ module.exports = function (_api: ConfigAPI, options: ReactConfigOptions) { if (runtime === 'classic' && options.dev) { // NOTE(@kitten): runtime 'classic' is typically not needed but preserved for legacy cases (deprecated) - plugins.push([ - require('@babel/plugin-transform-react-jsx-development'), - { runtime }, - ]); + plugins.push([require('@babel/plugin-transform-react-jsx-development'), { runtime }]); } else { plugins.push([ require('@babel/plugin-transform-react-jsx'), @@ -30,9 +27,7 @@ module.exports = function (_api: ConfigAPI, options: ReactConfigOptions) { } if (!options.dev) { - plugins.push([ - require('@babel/plugin-transform-react-pure-annotations') - ]); + 
plugins.push([require('@babel/plugin-transform-react-pure-annotations')]); } return { diff --git a/packages/babel-preset-expo/src/utils/resolveModule.ts b/packages/babel-preset-expo/src/utils/resolveModule.ts index 145f3cd0116a9f..e56b8a1bc50923 100644 --- a/packages/babel-preset-expo/src/utils/resolveModule.ts +++ b/packages/babel-preset-expo/src/utils/resolveModule.ts @@ -2,18 +2,34 @@ import type { ConfigAPI } from '@babel/core'; import { getPossibleProjectRoot } from '../common'; +let _prevPossibleProjectRoot: null | string = null; +let _cache: Record = Object.create(null); + export function resolveModule(api: ConfigAPI, id: string): string | null { const possibleProjectRoot = api.caller(getPossibleProjectRoot) ?? process.cwd(); + if (possibleProjectRoot !== _prevPossibleProjectRoot) { + _prevPossibleProjectRoot = possibleProjectRoot; + _cache = Object.create(null); + } + + let resolved = _cache[id]; + if (resolved !== undefined) { + return resolved; + } try { - return require.resolve(id, { + resolved = require.resolve(id, { paths: [possibleProjectRoot, __dirname], }); } catch (error: any) { if (error.code === 'MODULE_NOT_FOUND' && error.message.includes(id)) { - return null; + resolved = null; + } else { + throw error; } - throw error; + } finally { + _cache[id] = resolved; } + return resolved; } export function hasModule(api: ConfigAPI, id: string): boolean { From 61b830cd364e472e31de13308034f029cd94af6a Mon Sep 17 00:00:00 2001 From: Jakub Tkacz <32908614+Ubax@users.noreply.github.com> Date: Tue, 5 May 2026 19:48:03 +0200 Subject: [PATCH 10/26] [expo-cli] Replace placeholder with link to expo router migration guide (#45397) # Why The metro plugin was using migration guide url placeholder. # How Replace url placeholder with actual url # Test Plan 1. CI 2. 
Manual tests # Checklist - [ ] I added a `changelog.md` entry and rebuilt the package sources according to [this short guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting) - [ ] This diff will work correctly for `npx expo prebuild` & EAS Build (eg: updated a module plugin). - [ ] Conforms with the [Documentation Writing Style Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md) --- packages/@expo/cli/CHANGELOG.md | 1 + .../@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/@expo/cli/CHANGELOG.md b/packages/@expo/cli/CHANGELOG.md index fb1f9da8bf5b0e..28a069f7200c35 100644 --- a/packages/@expo/cli/CHANGELOG.md +++ b/packages/@expo/cli/CHANGELOG.md @@ -70,6 +70,7 @@ - Disable watchman by default ([#45378](https://github.com/expo/expo/pull/45378) by [@kitten](https://github.com/kitten)) - Defer version check output to command table, and prefetch on start, to prevent it blocking/slowing down startup ([#45400](https://github.com/expo/expo/pull/45400) by [@kitten](https://github.com/kitten)) - Bump to `@expo/metro@56.0.0` and `metro@0.84.4` ([#45404](https://github.com/expo/expo/pull/45404) by [@kitten](https://github.com/kitten)) +- Add link to expo router migration guide ([#45397](https://github.com/expo/expo/pull/45397) by [@Ubax](https://github.com/Ubax)) ## 55.0.12 — 2026-02-25 diff --git a/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts b/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts index d47ee9af4e4ac6..52df861b5fa2f1 100644 --- a/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts +++ b/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts @@ -634,9 +634,8 @@ export function withExtendedResolver( if (isExpoRouterInstalled && moduleName.startsWith('@react-navigation/')) { const filePath = context.originModulePath; if 
(!filePath.includes('node_modules')) { - // TODO(@ubax): Add link to migration guide, once it is published throw new Error( - 'As of SDK 56, expo-router is no longer compatible with react-navigation. For more information, see [MIGRATION_GUIDE_URL]. You can disable this check by setting the environment variable EXPO_ROUTER_DISABLE_RN_NAVIGATION_CHECK=1.' + 'As of SDK 56, expo-router is no longer compatible with react-navigation. For more information, see https://docs.expo.dev/router/migrate/sdk-55-to-56/. You can disable this check by setting the environment variable EXPO_ROUTER_DISABLE_RN_NAVIGATION_CHECK=1.' ); } if (moduleName === '@react-navigation/core') { From f87266f043c2593937909dc8559f2df51da6ec59 Mon Sep 17 00:00:00 2001 From: Jakub Tkacz <32908614+Ubax@users.noreply.github.com> Date: Tue, 5 May 2026 19:48:59 +0200 Subject: [PATCH 11/26] [expo-router] rename Stack.Screen.Title into Stack.Title (#45334) # Why ` - [ ] I added a `changelog.md` entry and rebuilt the package sources according to [this short guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting) - [ ] This diff will work correctly for `npx expo prebuild` & EAS Build (eg: updated a module plugin). 
- [ ] Conforms with the [Documentation Writing Style Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md) --------- Co-authored-by: Expo Bot <34669131+expo-bot@users.noreply.github.com> Co-authored-by: Aman Mittal --- docs/pages/router/advanced/native-tabs.mdx | 2 +- docs/pages/router/advanced/stack-toolbar.mdx | 6 +- docs/pages/router/advanced/stack.mdx | 12 +- .../data/unversioned/expo-router/stack.json | 2 +- packages/expo-router/CHANGELOG.md | 1 + packages/expo-router/CLAUDE.md | 3 +- .../expo-router/build/layouts/Stack.d.ts.map | 2 +- packages/expo-router/build/layouts/Stack.js | 1 + .../expo-router/build/layouts/Stack.js.map | 2 +- .../build/layouts/Stack.web.d.ts.map | 2 +- .../expo-router/build/layouts/Stack.web.js | 1 + .../build/layouts/Stack.web.js.map | 2 +- .../build/layouts/StackClient.d.ts | 5 +- .../build/layouts/StackClient.d.ts.map | 2 +- .../expo-router/build/layouts/StackClient.js | 1 + .../build/layouts/StackClient.js.map | 2 +- .../layouts/stack-utils/StackScreen.d.ts | 27 +-- .../layouts/stack-utils/StackScreen.d.ts.map | 2 +- .../build/layouts/stack-utils/StackScreen.js | 29 +-- .../layouts/stack-utils/StackScreen.js.map | 2 +- .../build/layouts/stack-utils/StackTitle.d.ts | 105 +++++++++ .../layouts/stack-utils/StackTitle.d.ts.map | 1 + .../build/layouts/stack-utils/StackTitle.js | 116 ++++++++++ .../layouts/stack-utils/StackTitle.js.map | 1 + .../build/layouts/stack-utils/index.d.ts | 1 + .../build/layouts/stack-utils/index.d.ts.map | 2 +- .../build/layouts/stack-utils/index.js | 5 +- .../build/layouts/stack-utils/index.js.map | 2 +- .../stack-utils/screen/StackScreenTitle.d.ts | 113 +--------- .../screen/StackScreenTitle.d.ts.map | 2 +- .../stack-utils/screen/StackScreenTitle.js | 119 +--------- .../screen/StackScreenTitle.js.map | 2 +- packages/expo-router/build/stack/index.d.ts | 6 +- .../expo-router/build/stack/index.d.ts.map | 2 +- packages/expo-router/build/stack/index.js.map | 2 +- 
packages/expo-router/src/layouts/Stack.tsx | 3 +- .../expo-router/src/layouts/Stack.web.tsx | 3 +- .../expo-router/src/layouts/StackClient.tsx | 2 + .../src/layouts/stack-utils/AGENTS.md | 1 + .../src/layouts/stack-utils/StackScreen.tsx | 36 +-- .../src/layouts/stack-utils/StackTitle.tsx | 176 +++++++++++++++ .../__tests__/StackScreen.test.ios.tsx | 31 +-- .../__tests__/StackScreenTitle.test.ios.tsx | 205 +----------------- .../__tests__/StackTitle.test.ios.tsx | 197 +++++++++++++++++ .../src/layouts/stack-utils/index.tsx | 2 + .../stack-utils/screen/StackScreenTitle.tsx | 188 +--------------- packages/expo-router/src/stack/index.ts | 4 + 47 files changed, 733 insertions(+), 700 deletions(-) create mode 100644 packages/expo-router/build/layouts/stack-utils/StackTitle.d.ts create mode 100644 packages/expo-router/build/layouts/stack-utils/StackTitle.d.ts.map create mode 100644 packages/expo-router/build/layouts/stack-utils/StackTitle.js create mode 100644 packages/expo-router/build/layouts/stack-utils/StackTitle.js.map create mode 100644 packages/expo-router/src/layouts/stack-utils/StackTitle.tsx create mode 100644 packages/expo-router/src/layouts/stack-utils/__tests__/StackTitle.test.ios.tsx diff --git a/docs/pages/router/advanced/native-tabs.mdx b/docs/pages/router/advanced/native-tabs.mdx index 2ba2c186903e97..9bbef9df9af3e6 100644 --- a/docs/pages/router/advanced/native-tabs.mdx +++ b/docs/pages/router/advanced/native-tabs.mdx @@ -802,7 +802,7 @@ import { Stack } from 'expo-router'; export default function SearchIndex() { return ( <> - Search + Search {}} /> {/* Screen content */} diff --git a/docs/pages/router/advanced/stack-toolbar.mdx b/docs/pages/router/advanced/stack-toolbar.mdx index 85510f2680fad6..1e18d9972bee2b 100644 --- a/docs/pages/router/advanced/stack-toolbar.mdx +++ b/docs/pages/router/advanced/stack-toolbar.mdx @@ -363,7 +363,7 @@ export default function RootLayout() { -When using `headerLargeTitle: true` (or ``) alongside `Stack.Toolbar`, the 
large title may not collapse on scroll. This happens when the scrollable view is not the direct first child of the screen component. +When using `headerLargeTitle: true` (or ``) alongside `Stack.Toolbar`, the large title may not collapse on scroll. This happens when the scrollable view is not the direct first child of the screen component. To fix this, ensure `ScrollView` or `FlatList` is the first child rendered by your screen component. If you need a wrapper, set `collapsable={false}` on it: @@ -375,7 +375,7 @@ import { ScrollView, View, Text } from 'react-native'; export default function Home() { return ( - Home + Home Content here ); @@ -395,7 +395,7 @@ export default function Home() { /* @end */ - Home + Home Content here diff --git a/docs/pages/router/advanced/stack.mdx b/docs/pages/router/advanced/stack.mdx index 4c83a7cb1e8672..941dc8356db9fe 100644 --- a/docs/pages/router/advanced/stack.mdx +++ b/docs/pages/router/advanced/stack.mdx @@ -158,7 +158,7 @@ export default function Details() { return ( - {params.name} + {params.name} { @@ -261,9 +261,9 @@ export default function Home() { return ( <> - + - + + + ); +} +``` + +### Disabled + +```tsx DisabledButtonExample.tsx +import { Host, Button } from '@expo/ui'; + +export default function DisabledButtonExample() { + return ( + +