From 96729cbaac1d4514fa8597453512f0499c72cfaa Mon Sep 17 00:00:00 2001
From: Christian Falch <875252+chrfalch@users.noreply.github.com>
Date: Tue, 24 Feb 2026 16:50:16 +0100
Subject: [PATCH 01/10] [expo-tools] added support for Unblocked in Expo Tools
Github Inspect (#43389)
# Why
When running the `GitHub-inspect` tool, we would like to take advantage
of Unblocked's knowledge about our internal systems.
# How
Added support for running Unblocked analysis.
# Test Plan
Run `et gitHub-inspect`
---
tools/src/Unblocked.ts | 162 +++++++++++++++++++++
tools/src/commands/GitHubInspectCommand.ts | 161 +++++++++++++++++++-
2 files changed, 318 insertions(+), 5 deletions(-)
create mode 100644 tools/src/Unblocked.ts
diff --git a/tools/src/Unblocked.ts b/tools/src/Unblocked.ts
new file mode 100644
index 00000000000000..c0206b234b42df
--- /dev/null
+++ b/tools/src/Unblocked.ts
@@ -0,0 +1,162 @@
+/**
+ * Lightweight client for the Unblocked API (https://getunblocked.com).
+ *
+ * Provides helpers to authenticate and ask questions against the
+ * Unblocked knowledge base. Reads `UNBLOCKED_API_KEY` from the environment.
+ */
+
+import open from 'open';
+import { v4 as uuidv4 } from 'uuid';
+
+import { sleepAsync } from './Utils';
+
+const BASE_URL = 'https://getunblocked.com/api/v1';
+
+export type Reference = {
+ htmlUrl: string;
+};
+
+/** Normalized answer returned by our helpers. */
+export type Answer = {
+ state: string;
+ questionId: string;
+ question: string;
+ answer?: string;
+ references?: Reference[];
+};
+
+/** Raw shape from the Unblocked API. */
+type RawAnswerResponse = {
+ state: string;
+ questionId?: string;
+ question?: string;
+ result?: {
+ answer?: string;
+ references?: Reference[];
+ };
+ // Flat fallback fields (in case the API changes)
+ answer?: string;
+ references?: Reference[];
+};
+
+function getApiKey(): string {
+ const key = process.env.UNBLOCKED_API_KEY;
+ if (!key) {
+ throw new Error(
+ 'UNBLOCKED_API_KEY environment variable is not set. Run authenticateAsync() to get a token.'
+ );
+ }
+ return key;
+}
+
+async function requestAsync(method: string, path: string, body?: object): Promise<Response> {
+ const response = await fetch(`${BASE_URL}${path}`, {
+ method,
+ headers: {
+ Authorization: `Bearer ${getApiKey()}`,
+ 'Content-Type': 'application/json',
+ },
+ body: body ? JSON.stringify(body) : undefined,
+ });
+
+ if (!response.ok) {
+ const text = await response.text().catch(() => '');
+ if (response.status === 401) {
+ throw new Error(`Unblocked API: invalid or expired token (401). ${text}`);
+ }
+ if (response.status === 403) {
+ throw new Error(
+ `Unblocked API: forbidden — your plan may not support this feature (403). ${text}`
+ );
+ }
+ throw new Error(`Unblocked API error ${response.status}: ${text}`);
+ }
+
+ return response;
+}
+
+/**
+ * Checks whether the current UNBLOCKED_API_KEY is valid.
+ * Returns true on 200, false on 401/403.
+ */
+export async function isAuthenticatedAsync(): Promise<boolean> {
+ try {
+ await requestAsync('GET', '/collections?limit=1');
+ return true;
+ } catch {
+ return false;
+ }
+}
+
+/**
+ * Opens the Unblocked API tokens settings page in the browser
+ * and logs instructions to the console.
+ */
+export async function authenticateAsync(): Promise<void> {
+ const url = 'https://getunblocked.com/dashboard/settings/api-tokens';
+ console.log(`Opening ${url} …`);
+ console.log('Create a new API token and paste it when prompted.');
+ await open(url);
+}
+
+/**
+ * Sets the API key for the current process.
+ */
+export function setApiKey(key: string): void {
+ process.env.UNBLOCKED_API_KEY = key;
+}
+
+/**
+ * Submits a question to Unblocked and returns the question ID
+ * that can be used to poll for the answer.
+ */
+export async function submitQuestionAsync(question: string): Promise<string> {
+ const questionId = uuidv4();
+ await requestAsync('PUT', `/answers/${questionId}`, { question });
+ return questionId;
+}
+
+/**
+ * Fetches the current state of an answer by question ID.
+ * Normalizes the API response (answer/references may be nested under `result`).
+ */
+export async function getAnswerAsync(questionId: string): Promise<Answer> {
+ const response = await requestAsync('GET', `/answers/${questionId}`);
+ const raw = (await response.json()) as RawAnswerResponse;
+ return {
+ state: raw.state,
+ questionId: raw.questionId ?? questionId,
+ question: raw.question ?? '',
+ answer: raw.result?.answer ?? raw.answer,
+ references: raw.result?.references ?? raw.references,
+ };
+}
+
+/**
+ * High-level helper: submits a question and polls until the answer
+ * is complete, then returns the full Answer object.
+ *
+ * Polls with exponential backoff starting at 1 s, capped at 10 s.
+ * Times out after `timeoutMs` (default 120 s).
+ */
+export async function askQuestionAsync(
+ question: string,
+ opts?: { timeoutMs?: number }
+): Promise<Answer> {
+ const timeoutMs = opts?.timeoutMs ?? 120_000;
+ const questionId = await submitQuestionAsync(question);
+
+ const start = Date.now();
+ let delay = 1000;
+
+ while (Date.now() - start < timeoutMs) {
+ await sleepAsync(delay);
+ const answer = await getAnswerAsync(questionId);
+ if (answer.state === 'complete' || answer.state === 'completed' || answer.state === 'failed') {
+ return answer;
+ }
+ delay = Math.min(delay * 1.5, 10_000);
+ }
+
+ throw new Error(`Timed out waiting for answer to question "${question}" after ${timeoutMs}ms`);
+}
diff --git a/tools/src/commands/GitHubInspectCommand.ts b/tools/src/commands/GitHubInspectCommand.ts
index a2a5fa3e8bca78..8f786a7de55663 100644
--- a/tools/src/commands/GitHubInspectCommand.ts
+++ b/tools/src/commands/GitHubInspectCommand.ts
@@ -1,6 +1,7 @@
import { Command } from '@expo/commander';
import chalk from 'chalk';
import ora from 'ora';
+import readline from 'readline';
import {
getAuthenticatedUserAsync,
@@ -15,6 +16,7 @@ import {
getPullRequestAsync,
} from '../GitHub';
import logger from '../Logger';
+import { askQuestionAsync, authenticateAsync, isAuthenticatedAsync, setApiKey } from '../Unblocked';
type ActionOptions = {
week?: string;
@@ -263,6 +265,26 @@ function waitForKey(validKeys: string[]): Promise {
});
}
+function promptYesNo(question: string): Promise<boolean> {
+ const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
+ return new Promise((resolve) => {
+ rl.question(`${question} (y/N) `, (answer) => {
+ rl.close();
+ resolve(answer.trim().toLowerCase() === 'y');
+ });
+ });
+}
+
+function promptInput(question: string): Promise<string> {
+ const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
+ return new Promise((resolve) => {
+ rl.question(`${question} `, (answer) => {
+ rl.close();
+ resolve(answer);
+ });
+ });
+}
+
type QueueItem = {
number: number;
title: string;
@@ -700,7 +722,79 @@ async function fetchDetailData(item: QueueItem): Promise {
return base;
}
-function renderDetailView(detail: DetailData, showBody: boolean, showComments: boolean): string[] {
+type Comment = Awaited<ReturnType<typeof listAllCommentsAsync>>[number];
+
+async function analyzeIssueAsync(detail: DetailData, comments: Comment[]): Promise<string> {
+ const authenticated = await isAuthenticatedAsync();
+ if (!authenticated) {
+ return 'Unblocked API not configured. Set UNBLOCKED_API_KEY env var.';
+ }
+
+ const type = detail.isPR ? 'PR' : 'issue';
+ const bodySnippet = detail.body ? truncate(detail.body, 2000) : '(no body)';
+
+ let prompt = `Analyze this GitHub ${type}.\n\n`;
+ prompt += `Number: #${detail.number}\n`;
+ prompt += `Title: ${detail.title}\n`;
+ prompt += `State: ${detail.state}\n`;
+ prompt += `Author: ${detail.author}\n`;
+ prompt += `Labels: ${detail.labels}\n`;
+ prompt += `Created: ${detail.created}\n`;
+ prompt += `Updated: ${detail.updated}\n`;
+
+ if (detail.isPR) {
+ if (detail.branch) prompt += `Branch: ${detail.branch}\n`;
+ if (detail.diffStats) prompt += `Diff: ${detail.diffStats}\n`;
+ if (detail.reviews && detail.reviews.length > 0) {
+ prompt += `Reviews:\n`;
+ for (const r of detail.reviews) {
+ prompt += ` - ${r.login}: ${r.state} (${r.date})\n`;
+ }
+ }
+ }
+
+ prompt += `\nBody:\n${bodySnippet}\n`;
+
+ const recentComments = comments.slice(-10);
+ if (recentComments.length > 0) {
+ prompt += `\nRecent comments (${recentComments.length} of ${comments.length}):\n`;
+ for (const c of recentComments) {
+ const badge = isTeamResponse(c) ? '[TEAM]' : '[EXTERNAL]';
+ const date = new Date(c.created_at).toISOString().slice(0, 10);
+ prompt += `\n${badge} ${c.user?.login ?? '-'} (${date}):\n${truncate(c.body ?? '', 500)}\n`;
+ }
+ }
+
+ prompt += `\nAs an on-call engineer assistant, please:`;
+ prompt += `\n1. Summarize the problem and current status.`;
+ prompt += `\n2. Search for duplicate or related issues that report the same problem — link them if found.`;
+ prompt += `\n3. Check if this is a known issue with an existing fix, workaround, or relevant PR.`;
+ prompt += `\n4. Recommend what action the on-call engineer should take next.`;
+ prompt += `\n5. Flag whether this needs urgent attention.`;
+ prompt += `\nBe concise. Always include links to duplicates, related issues, or solutions if you find any.`;
+
+ const answer = await askQuestionAsync(prompt);
+
+ if (answer.state === 'failed') {
+ return 'Analysis failed — Unblocked returned an error.';
+ }
+
+ let result = answer.answer ?? 'No analysis returned.';
+ if (answer.references && answer.references.length > 0) {
+ result += '\n\nReferences:';
+ for (const ref of answer.references) {
+ result += `\n ${ref.htmlUrl}`;
+ }
+ }
+ return result;
+}
+
+function renderDetailView(
+ detail: DetailData,
+ showBody: boolean,
+ showComments: boolean,
+ analysisText: string | null = null
+): string[] {
const lines: string[] = [];
const type = detail.isPR ? 'PR' : 'Issue';
@@ -763,13 +857,22 @@ function renderDetailView(detail: DetailData, showBody: boolean, showComments: b
lines.push(chalk.gray(' Loading...'));
}
+ if (analysisText) {
+ lines.push('');
+ lines.push(chalk.bold(' --- AI Analysis ---'));
+ for (const line of analysisText.split('\n')) {
+ lines.push(` ${line}`);
+ }
+ }
+
lines.push('');
// Hint line
const bodyToggle = showBody ? chalk.yellow('(b)ody') : chalk.green('(b)ody');
const commentsLabel = `(c)omments (${detail.comments})`;
const commentsToggle = showComments ? chalk.yellow(commentsLabel) : chalk.green(commentsLabel);
- const parts = [bodyToggle, commentsToggle, chalk.gray('Esc back')];
+ const analyzeToggle = analysisText ? chalk.yellow('(a)nalyze') : chalk.green('(a)nalyze');
+ const parts = [bodyToggle, commentsToggle, analyzeToggle, chalk.gray('Esc back')];
lines.push(chalk.gray(' ') + parts.join(chalk.gray(' / ')));
return lines;
@@ -782,13 +885,21 @@ async function showDetailInteractive(item: QueueItem): Promise {
let showBody = false;
let showComments = false;
+ let showAnalysis = false;
let commentLines: string[] | null = null;
+ let rawComments: Comment[] | null = null;
+ let analysisText: string | null = null;
let lastRenderedCount = 0;
const render = () => {
if (lastRenderedCount > 0) clearLines(lastRenderedCount);
- const lines = renderDetailView(detail, showBody, showComments);
+ const lines = renderDetailView(
+ detail,
+ showBody,
+ showComments,
+ showAnalysis ? analysisText : null
+ );
// Replace "Loading..." placeholders with cached content
if (showComments && commentLines) {
@@ -810,15 +921,16 @@ async function showDetailInteractive(item: QueueItem): Promise {
render();
- const validKeys = ['escape', 'b', 'c'];
+ const validKeys = ['escape', 'b', 'c', 'a'];
while (true) {
const key = await waitForKey(validKeys);
if (key === 'escape') {
- if (showBody || showComments) {
+ if (showBody || showComments || showAnalysis) {
showBody = false;
showComments = false;
+ showAnalysis = false;
render();
} else {
clearLines(lastRenderedCount);
@@ -832,6 +944,7 @@ async function showDetailInteractive(item: QueueItem): Promise {
if (showComments && !commentLines) {
render(); // show "Loading..."
const comments = detail.comments > 0 ? await listAllCommentsAsync(detail.number) : [];
+ rawComments = comments;
commentLines = [];
if (comments.length === 0) {
commentLines.push(chalk.gray(' No comments.'));
@@ -846,6 +959,25 @@ async function showDetailInteractive(item: QueueItem): Promise {
}
}
render();
+ } else if (key === 'a') {
+ showAnalysis = !showAnalysis;
+ if (showAnalysis && !analysisText) {
+ // Ensure we have raw comments loaded
+ if (!rawComments) {
+ rawComments = detail.comments > 0 ? await listAllCommentsAsync(detail.number) : [];
+ }
+ if (lastRenderedCount > 0) clearLines(lastRenderedCount);
+ lastRenderedCount = 0;
+ const analyzeSpinner = ora('Analyzing with Unblocked…').start();
+ try {
+ analysisText = await analyzeIssueAsync(detail, rawComments);
+ analyzeSpinner.stop();
+ } catch (err: any) {
+ analyzeSpinner.stop();
+ analysisText = `Analysis error: ${err.message ?? err}`;
+ }
+ }
+ render();
}
}
}
@@ -861,6 +993,25 @@ async function action(options: ActionOptions) {
process.exit(1);
}
+ if (!(await isAuthenticatedAsync())) {
+ spinner.stop();
+ logger.warn(chalk.yellow('Unblocked API not configured — (a)nalyze will be unavailable.'));
+ const shouldSetup = await promptYesNo('Set up Unblocked API key now?');
+ if (shouldSetup) {
+ await authenticateAsync();
+ const key = await promptInput('Paste your API token:');
+ if (key) {
+ setApiKey(key.trim());
+ if (await isAuthenticatedAsync()) {
+ logger.info(chalk.green('Unblocked authenticated successfully.'));
+ } else {
+ logger.warn(chalk.yellow('Token appears invalid — (a)nalyze will be unavailable.'));
+ }
+ }
+ }
+ spinner.start();
+ }
+
await interactiveDashboard(options, spinner);
}
From f3ce6c7f4da1d9cd4e812c95ce623611fd1cab84 Mon Sep 17 00:00:00 2001
From: Tomasz Sapeta
Date: Tue, 24 Feb 2026 17:24:33 +0100
Subject: [PATCH 02/10] fix(expo-go): add RCTBridge forward declaration for
EXScopedReactNativeAdapter (#43390)
Co-authored-by: Brent Vatne
---
.../Core/UniversalModules/EXScopedReactNativeAdapter.h | 2 ++
1 file changed, 2 insertions(+)
diff --git a/apps/expo-go/ios/Exponent/Versioned/Core/UniversalModules/EXScopedReactNativeAdapter.h b/apps/expo-go/ios/Exponent/Versioned/Core/UniversalModules/EXScopedReactNativeAdapter.h
index d17d9b682f5746..1ee2a9764ff234 100644
--- a/apps/expo-go/ios/Exponent/Versioned/Core/UniversalModules/EXScopedReactNativeAdapter.h
+++ b/apps/expo-go/ios/Exponent/Versioned/Core/UniversalModules/EXScopedReactNativeAdapter.h
@@ -1,5 +1,7 @@
// Copyright © 2018 650 Industries. All rights reserved.
+@class RCTBridge;
+
#import
@interface EXScopedReactNativeAdapter : EXReactNativeAdapter
From 9660a5d885378bf62f9f2b477232edf90bdd5fe6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=A1vio=20Carlos=20Martins=20Costa?=
Date: Tue, 24 Feb 2026 14:37:43 -0300
Subject: [PATCH 03/10] [docs] add warnings about using the metro reserved path
"/assets" (#43255)
Co-authored-by: Aman Mittal
---
docs/pages/guides/publishing-websites.mdx | 2 ++
docs/pages/router/basics/notation.mdx | 2 ++
docs/pages/router/web/static-rendering.mdx | 4 ++++
3 files changed, 8 insertions(+)
diff --git a/docs/pages/guides/publishing-websites.mdx b/docs/pages/guides/publishing-websites.mdx
index 979c87b6048d59..06b9aebff511a6 100644
--- a/docs/pages/guides/publishing-websites.mdx
+++ b/docs/pages/guides/publishing-websites.mdx
@@ -57,6 +57,8 @@ Run the universal export command to compile the project for web:
The resulting project files are located in the **dist** directory. Any files inside the **public** directory are also copied to the **dist** directory.
+> **warning** Avoid creating the directory `/public/assets/`. The path `/assets` is reserved by Metro and will cause file access errors during development.
+
## Serve locally
Use `npx expo serve` to quickly test locally how your website will be hosted in production. Run the following command to serve the static bundle:
diff --git a/docs/pages/router/basics/notation.mdx b/docs/pages/router/basics/notation.mdx
index d1e1d2dc9e8795..5459853754d947 100644
--- a/docs/pages/router/basics/notation.mdx
+++ b/docs/pages/router/basics/notation.mdx
@@ -63,6 +63,8 @@ Routes that include a `+` have special significance to Expo Router, and are used
- [`+native-intent`](/router/advanced/native-intent/) is used to handle deep links into your app that don't match a specific route, such as links generated by third-party services.
- [`+middleware`](/router/web/middleware/) is used to run code before a route is rendered, allowing you to perform tasks like authentication or redirection for every request.
+> **warning** Avoid creating a top-level route named `assets` (like `/app/assets.tsx` or `/app/assets/[other-pages].tsx`). The path `/assets` is reserved by Metro and will cause errors when trying to access it from the URL.
+
## Route notation applied
Consider the following project file structure to identify the different types of routes represented:
diff --git a/docs/pages/router/web/static-rendering.mdx b/docs/pages/router/web/static-rendering.mdx
index 6e57af4a6dfe31..b366b5c5ac9b67 100644
--- a/docs/pages/router/web/static-rendering.mdx
+++ b/docs/pages/router/web/static-rendering.mdx
@@ -123,6 +123,8 @@ export async function generateStaticParams(params: {
+> **warning** Avoid creating a top-level route named `assets` (like `/app/assets.tsx` or `/app/assets/[other-pages].tsx`). The path `/assets` is reserved by Metro and will cause errors when trying to access it from the URL.
+
### Read files using `process.cwd()`
Since Expo Router compiles your code into a separate directory you cannot use `__dirname` to form a path as its value will be different than expected.
@@ -227,6 +229,8 @@ Expo CLI supports a root **public** directory that gets copied to the **dist** d
files={['public/favicon.ico', 'public/logo.png', 'public/.well-known/apple-app-site-association']}
/>
+> **warning** Avoid creating the directory `/public/assets/`. The path `/assets` is reserved by Metro and will cause file access errors during development.
+
These files will be copied to the **dist** directory during static rendering:
Date: Tue, 24 Feb 2026 17:56:12 +0000
Subject: [PATCH 04/10] fix(metro-config): Fix picomatch basename match in
`sideEffects` matcher (#43395)
# Why
As spotted and reported in DMs, `picomatch` doesn't perform `matchBase`
the same way `minimatch` does and the basename pattern must be
implemented differently to match `minimatch`'s (quirkier) behaviour.
# How
- Update tests and add manual basename matching
# Test Plan
- Unit test added to cover this scenario
# Checklist
- [x] I added a `changelog.md` entry and rebuilt the package sources
according to [this short
guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting)
- [ ] This diff will work correctly for `npx expo prebuild` & EAS Build
(eg: updated a module plugin).
- [ ] Conforms with the [Documentation Writing Style
Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md)
---
packages/@expo/metro-config/CHANGELOG.md | 2 ++
.../metro-config/build/serializer/sideEffects.js | 12 ++++++++----
.../metro-config/build/serializer/sideEffects.js.map | 2 +-
.../src/serializer/__tests__/sideEffects.test.ts | 11 +++++++++++
.../@expo/metro-config/src/serializer/sideEffects.ts | 12 ++++++++----
5 files changed, 30 insertions(+), 9 deletions(-)
diff --git a/packages/@expo/metro-config/CHANGELOG.md b/packages/@expo/metro-config/CHANGELOG.md
index a08200d196f324..bdcb80b58a81e7 100644
--- a/packages/@expo/metro-config/CHANGELOG.md
+++ b/packages/@expo/metro-config/CHANGELOG.md
@@ -8,6 +8,8 @@
### 🐛 Bug fixes
+- Fix basename matching for `picomatch` in side-effect matcher ([#43395](https://github.com/expo/expo/pull/43395) by [@kitten](https://github.com/kitten))
+
### 💡 Others
- Revert: Set default `resolver.useWatchman: undefined` value (enables it by default, as before) ([#43251](https://github.com/expo/expo/pull/43251) by [@kitten](https://github.com/kitten))
diff --git a/packages/@expo/metro-config/build/serializer/sideEffects.js b/packages/@expo/metro-config/build/serializer/sideEffects.js
index 6b6dd8d3fe1723..c1eae004f7e15c 100644
--- a/packages/@expo/metro-config/build/serializer/sideEffects.js
+++ b/packages/@expo/metro-config/build/serializer/sideEffects.js
@@ -11,6 +11,7 @@ const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const picomatch_1 = __importDefault(require("picomatch"));
const findUpPackageJsonPath_1 = require("./findUpPackageJsonPath");
+const filePath_1 = require("../utils/filePath");
const debug = require('debug')('expo:side-effects');
function hasSideEffectWithDebugTrace(options, graph, value, parentTrace = [value.path], checked = new Set()) {
const currentModuleHasSideEffect = getShallowSideEffect(options, value);
@@ -65,8 +66,11 @@ function _createSideEffectMatcher(dirRoot, packageJson, packageJsonPath = '') {
if (Array.isArray(packageJson.sideEffects)) {
const sideEffects = packageJson.sideEffects
.filter((sideEffect) => typeof sideEffect === 'string')
- .map((sideEffect) => sideEffect.replace(/^\.\//, ''));
- sideEffectMatcher = (0, picomatch_1.default)(sideEffects, { matchBase: true });
+ .map((sideEffect) => {
+ const pattern = sideEffect.replace(/^\.\//, '');
+ return pattern.includes('/') ? pattern : `**/${pattern}`;
+ });
+ sideEffectMatcher = (0, picomatch_1.default)(sideEffects);
}
else if (typeof packageJson.sideEffects === 'boolean' || !packageJson.sideEffects) {
sideEffectMatcher = packageJson.sideEffects;
@@ -83,8 +87,8 @@ function _createSideEffectMatcher(dirRoot, packageJson, packageJsonPath = '') {
return sideEffectMatcher;
}
else {
- const relativeName = path_1.default.relative(dirRoot, fp);
- return sideEffectMatcher(relativeName);
+ const relativeName = path_1.default.isAbsolute(fp) ? path_1.default.relative(dirRoot, fp) : path_1.default.normalize(fp);
+ return sideEffectMatcher((0, filePath_1.toPosixPath)(relativeName));
}
};
}
diff --git a/packages/@expo/metro-config/build/serializer/sideEffects.js.map b/packages/@expo/metro-config/build/serializer/sideEffects.js.map
index 3fe4af6fa743d0..73558c267fd2a6 100644
--- a/packages/@expo/metro-config/build/serializer/sideEffects.js.map
+++ b/packages/@expo/metro-config/build/serializer/sideEffects.js.map
@@ -1 +1 @@
-{"version":3,"file":"sideEffects.js","sourceRoot":"","sources":["../../src/serializer/sideEffects.ts"],"names":[],"mappings":";;;;;AA2BA,kEAsCC;AAmCD,4DA2BC;AAmCD,0CAEC;AAxJD,qFAAkF;AAClF,4CAAoB;AACpB,gDAAwB;AACxB,0DAAkC;AAElC,mEAAgE;AAEhE,MAAM,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,mBAAmB,CAAuB,CAAC;AAQ1E,SAAgB,2BAA2B,CACzC,OAA0B,EAC1B,KAAoB,EACpB,KAAqB,EACrB,cAAwB,CAAC,KAAK,CAAC,IAAI,CAAC,EACpC,UAAuB,IAAI,GAAG,EAAE;IAEhC,MAAM,0BAA0B,GAAG,oBAAoB,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;IACxE,IAAI,0BAA0B,EAAE,CAAC;QAC/B,OAAO,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;IAC7B,CAAC;IACD,kEAAkE;IAClE,KAAK,MAAM,YAAY,IAAI,KAAK,CAAC,YAAY,CAAC,MAAM,EAAE,EAAE,CAAC;QACvD,IAAI,CAAC,IAAA,2CAAoB,EAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,YAAY,CAAC,EAAE,CAAC;YAClF,SAAS;QACX,CAAC;QACD,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,YAAY,CAAC,CAAC;QACvC,MAAM,GAAG,GAAG,KAAK,CAAC,YAAY,CAAC,GAAG,CAAC,YAAY,CAAC,YAAY,CAAE,CAAC;QAC/D,IAAI,CAAC,GAAG,EAAE,CAAC;YACT,SAAS;QACX,CAAC;QAED,MAAM,CAAC,aAAa,EAAE,KAAK,CAAC,GAAG,2BAA2B,CACxD,OAAO,EACP,KAAK,EACL,GAAG,EACH,CAAC,GAAG,WAAW,EAAE,YAAY,CAAC,YAAY,CAAC,EAC3C,OAAO,CACR,CAAC;QAEF,IAAI,aAAa,EAAE,CAAC;YAClB,2CAA2C;YAC3C,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;YAEzB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QACvB,CAAC;IACH,CAAC;IACD,OAAO,CAAC,0BAA0B,EAAE,EAAE,CAAC,CAAC;AAC1C,CAAC;AAED,MAAM,YAAY,GAAG,IAAI,GAAG,EAAe,CAAC;AAE5C,MAAM,qBAAqB,GAAG,CAC5B,OAEC,EACD,GAAW,EAC8B,EAAE;IAC3C,IAAI,WAAgB,CAAC;IACrB,IAAI,eAAe,GAAkB,IAAI,CAAC;IAC1C,IAAI,OAAO,OAAO,CAAC,oBAAoB,KAAK,UAAU,EAAE,CAAC;QACvD,CAAC,WAAW,EAAE,eAAe,CAAC,GAAG,OAAO,CAAC,oBAAoB,CAAC,GAAG,CAAC,CAAC;IACrE,CAAC;SAAM,CAAC;QACN,MAAM,MAAM,GAAG,YAAY,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,MAAM,EAAE,CAAC;YACX,OAAO,MAAM,CAAC;QAChB,CAAC;QAED,eAAe,GAAG,IAAA,6CAAqB,EAAC,GAAG,CAAC,CAAC;QAC7C,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,YAAE,CAAC,YAAY,CAAC,eAAe,EAAE,OAAO,CAAC,CAAC,CAAC;IACtE,CAAC;IAED,IAAI,CAAC,eAAe,EAAE,CAAC;QACrB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,iCAAiC;IACjC,MAAM,OAAO,GAAG,cAAI,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC;IAC9C,MAAM,YAAY,GAAG,wBAAwB,CAAC,OAAO,EAAE,WAAW,EAAE,e
AAe,CAAC,CAAC;IACrF,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC;IACpC,OAAO,YAAY,CAAC;AACtB,CAAC,CAAC;AAEF,SAAgB,wBAAwB,CACtC,OAAe,EACf,WAAiD,EACjD,kBAA0B,EAAE;IAE5B,IAAI,iBAA0D,CAAC;IAC/D,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,WAAW,CAAC,EAAE,CAAC;QAC3C,MAAM,WAAW,GAAG,WAAW,CAAC,WAAW;aACxC,MAAM,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC,OAAO,UAAU,KAAK,QAAQ,CAAC;aACtD,GAAG,CAAC,CAAC,UAAe,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC;QAC7D,iBAAiB,GAAG,IAAA,mBAAS,EAAC,WAAW,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAClE,CAAC;SAAM,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,SAAS,IAAI,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC;QACpF,iBAAiB,GAAG,WAAW,CAAC,WAAW,CAAC;IAC9C,CAAC;SAAM,CAAC;QACN,KAAK,CAAC,4CAA4C,EAAE,eAAe,EAAE,WAAW,CAAC,WAAW,CAAC,CAAC;IAChG,CAAC;IACD,OAAO,CAAC,EAAU,EAAE,EAAE;QACpB,+EAA+E;QAC/E,IAAI,iBAAiB,IAAI,IAAI,EAAE,CAAC;YAC9B,OAAO,IAAI,CAAC;QACd,CAAC;aAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE,CAAC;YAClD,OAAO,iBAAiB,CAAC;QAC3B,CAAC;aAAM,CAAC;YACN,MAAM,YAAY,GAAG,cAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;YAChD,OAAO,iBAAiB,CAAC,YAAY,CAAC,CAAC;QACzC,CAAC;IACH,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,oBAAoB,CAAC,OAA0B,EAAE,KAAqB;IAC7E,IAAI,KAAK,EAAE,WAAW,KAAK,SAAS,EAAE,CAAC;QACrC,OAAO,KAAK,CAAC,WAAW,CAAC;IAC3B,CAAC;IACD,MAAM,YAAY,GAAG,gCAAgC,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;IACtE,KAAK,CAAC,WAAW,GAAG,YAAY,CAAC;IACjC,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,gCAAgC,CACvC,OAA0B,EAC1B,KAAqB;IAErB,IAAI,KAAK,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;QACpC,OAAO,KAAK,CAAC,WAAW,CAAC;IAC3B,CAAC;IACD,2CAA2C;IAC3C,IAAI,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;QAChC,OAAO,KAAK,CAAC;IACf,CAAC;IAED,IAAI,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,CAAC,IAAI,KAAK,WAAW,CAAC,EAAE,CAAC;QAC/D,MAAM,YAAY,GAAG,qBAAqB,CAAC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;QAEhE,IAAI,YAAY,IAAI,IAAI,EAAE,CAAC;YACzB,OAAO,IAAI,CAAC;QACd,CAAC;QACD,OAAO,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IAClC,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAgB,eAAe,CAAC,IAAY;IAC1C,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;AAC/B,CAAC"}
\ No newline at end of file
+{"version":3,"file":"sideEffects.js","sourceRoot":"","sources":["../../src/serializer/sideEffects.ts"],"names":[],"mappings":";;;;;AA4BA,kEAsCC;AAmCD,4DA8BC;AAmCD,0CAEC;AA5JD,qFAAkF;AAClF,4CAAoB;AACpB,gDAAwB;AACxB,0DAAkC;AAElC,mEAAgE;AAChE,gDAAgD;AAEhD,MAAM,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,mBAAmB,CAAuB,CAAC;AAQ1E,SAAgB,2BAA2B,CACzC,OAA0B,EAC1B,KAAoB,EACpB,KAAqB,EACrB,cAAwB,CAAC,KAAK,CAAC,IAAI,CAAC,EACpC,UAAuB,IAAI,GAAG,EAAE;IAEhC,MAAM,0BAA0B,GAAG,oBAAoB,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;IACxE,IAAI,0BAA0B,EAAE,CAAC;QAC/B,OAAO,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;IAC7B,CAAC;IACD,kEAAkE;IAClE,KAAK,MAAM,YAAY,IAAI,KAAK,CAAC,YAAY,CAAC,MAAM,EAAE,EAAE,CAAC;QACvD,IAAI,CAAC,IAAA,2CAAoB,EAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,YAAY,CAAC,EAAE,CAAC;YAClF,SAAS;QACX,CAAC;QACD,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,YAAY,CAAC,CAAC;QACvC,MAAM,GAAG,GAAG,KAAK,CAAC,YAAY,CAAC,GAAG,CAAC,YAAY,CAAC,YAAY,CAAE,CAAC;QAC/D,IAAI,CAAC,GAAG,EAAE,CAAC;YACT,SAAS;QACX,CAAC;QAED,MAAM,CAAC,aAAa,EAAE,KAAK,CAAC,GAAG,2BAA2B,CACxD,OAAO,EACP,KAAK,EACL,GAAG,EACH,CAAC,GAAG,WAAW,EAAE,YAAY,CAAC,YAAY,CAAC,EAC3C,OAAO,CACR,CAAC;QAEF,IAAI,aAAa,EAAE,CAAC;YAClB,2CAA2C;YAC3C,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;YAEzB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QACvB,CAAC;IACH,CAAC;IACD,OAAO,CAAC,0BAA0B,EAAE,EAAE,CAAC,CAAC;AAC1C,CAAC;AAED,MAAM,YAAY,GAAG,IAAI,GAAG,EAAe,CAAC;AAE5C,MAAM,qBAAqB,GAAG,CAC5B,OAEC,EACD,GAAW,EAC8B,EAAE;IAC3C,IAAI,WAAgB,CAAC;IACrB,IAAI,eAAe,GAAkB,IAAI,CAAC;IAC1C,IAAI,OAAO,OAAO,CAAC,oBAAoB,KAAK,UAAU,EAAE,CAAC;QACvD,CAAC,WAAW,EAAE,eAAe,CAAC,GAAG,OAAO,CAAC,oBAAoB,CAAC,GAAG,CAAC,CAAC;IACrE,CAAC;SAAM,CAAC;QACN,MAAM,MAAM,GAAG,YAAY,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,MAAM,EAAE,CAAC;YACX,OAAO,MAAM,CAAC;QAChB,CAAC;QAED,eAAe,GAAG,IAAA,6CAAqB,EAAC,GAAG,CAAC,CAAC;QAC7C,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,YAAE,CAAC,YAAY,CAAC,eAAe,EAAE,OAAO,CAAC,CAAC,CAAC;IACtE,CAAC;IAED,IAAI,CAAC,eAAe,EAAE,CAAC;QACrB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,iCAAiC;IACjC,MAAM,OAAO,GAAG,cAAI,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC;IAC9C,MAAM,YAAY,GAAG,wBAAwB,CAAC,OAAO,EAA
E,WAAW,EAAE,eAAe,CAAC,CAAC;IACrF,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC;IACpC,OAAO,YAAY,CAAC;AACtB,CAAC,CAAC;AAEF,SAAgB,wBAAwB,CACtC,OAAe,EACf,WAAiD,EACjD,kBAA0B,EAAE;IAE5B,IAAI,iBAA0D,CAAC;IAC/D,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,WAAW,CAAC,EAAE,CAAC;QAC3C,MAAM,WAAW,GAAG,WAAW,CAAC,WAAW;aACxC,MAAM,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC,OAAO,UAAU,KAAK,QAAQ,CAAC;aACtD,GAAG,CAAC,CAAC,UAAe,EAAE,EAAE;YACvB,MAAM,OAAO,GAAG,UAAU,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;YAChD,OAAO,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,OAAO,EAAE,CAAC;QAC3D,CAAC,CAAC,CAAC;QACL,iBAAiB,GAAG,IAAA,mBAAS,EAAC,WAAW,CAAC,CAAC;IAC7C,CAAC;SAAM,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,SAAS,IAAI,CAAC,WAAW,CAAC,WAAW,EAAE,CAAC;QACpF,iBAAiB,GAAG,WAAW,CAAC,WAAW,CAAC;IAC9C,CAAC;SAAM,CAAC;QACN,KAAK,CAAC,4CAA4C,EAAE,eAAe,EAAE,WAAW,CAAC,WAAW,CAAC,CAAC;IAChG,CAAC;IACD,OAAO,CAAC,EAAU,EAAE,EAAE;QACpB,+EAA+E;QAC/E,IAAI,iBAAiB,IAAI,IAAI,EAAE,CAAC;YAC9B,OAAO,IAAI,CAAC;QACd,CAAC;aAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE,CAAC;YAClD,OAAO,iBAAiB,CAAC;QAC3B,CAAC;aAAM,CAAC;YACN,MAAM,YAAY,GAAG,cAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,cAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,cAAI,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;YAC3F,OAAO,iBAAiB,CAAC,IAAA,sBAAW,EAAC,YAAY,CAAC,CAAC,CAAC;QACtD,CAAC;IACH,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,oBAAoB,CAAC,OAA0B,EAAE,KAAqB;IAC7E,IAAI,KAAK,EAAE,WAAW,KAAK,SAAS,EAAE,CAAC;QACrC,OAAO,KAAK,CAAC,WAAW,CAAC;IAC3B,CAAC;IACD,MAAM,YAAY,GAAG,gCAAgC,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;IACtE,KAAK,CAAC,WAAW,GAAG,YAAY,CAAC;IACjC,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,gCAAgC,CACvC,OAA0B,EAC1B,KAAqB;IAErB,IAAI,KAAK,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;QACpC,OAAO,KAAK,CAAC,WAAW,CAAC;IAC3B,CAAC;IACD,2CAA2C;IAC3C,IAAI,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;QAChC,OAAO,KAAK,CAAC;IACf,CAAC;IAED,IAAI,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,CAAC,IAAI,KAAK,WAAW,CAAC,EAAE,CAAC;QAC/D,MAAM,YAAY,GAAG,qBAAqB,CAAC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;QAEhE,IAAI,YAAY,IAAI,IAAI,EAAE,CAAC;YACzB,OAAO,IAAI,CAAC;QAC
d,CAAC;QACD,OAAO,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IAClC,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAgB,eAAe,CAAC,IAAY;IAC1C,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;AAC/B,CAAC"}
\ No newline at end of file
diff --git a/packages/@expo/metro-config/src/serializer/__tests__/sideEffects.test.ts b/packages/@expo/metro-config/src/serializer/__tests__/sideEffects.test.ts
index 0e2a1dcf1fee34..e2fb5e5bbff965 100644
--- a/packages/@expo/metro-config/src/serializer/__tests__/sideEffects.test.ts
+++ b/packages/@expo/metro-config/src/serializer/__tests__/sideEffects.test.ts
@@ -42,3 +42,14 @@ it('matches side effects for matching file with glob', () => {
//
expect(matcher('/other/file/foo.fx.js')).toBe(true);
});
+it('matches side effects with matching path patterns', () => {
+ const matcher = _createSideEffectMatcher('/', {
+ sideEffects: ['src/asyncRequire/*.ts'],
+ });
+ expect(matcher('file.js')).toBe(false);
+ expect(matcher('file.ts')).toBe(false);
+ expect(matcher('other/file.js')).toBe(false);
+ expect(matcher('other/file.ts')).toBe(false);
+ expect(matcher('src/asyncRequire/file.js')).toBe(false);
+ expect(matcher('src/asyncRequire/file.ts')).toBe(true);
+});
diff --git a/packages/@expo/metro-config/src/serializer/sideEffects.ts b/packages/@expo/metro-config/src/serializer/sideEffects.ts
index 3ee25e3990f480..d984ba57d54395 100644
--- a/packages/@expo/metro-config/src/serializer/sideEffects.ts
+++ b/packages/@expo/metro-config/src/serializer/sideEffects.ts
@@ -16,6 +16,7 @@ import path from 'path';
import picomatch from 'picomatch';
import { findUpPackageJsonPath } from './findUpPackageJsonPath';
+import { toPosixPath } from '../utils/filePath';
const debug = require('debug')('expo:side-effects') as typeof console.log;
@@ -107,8 +108,11 @@ export function _createSideEffectMatcher(
if (Array.isArray(packageJson.sideEffects)) {
const sideEffects = packageJson.sideEffects
.filter((sideEffect) => typeof sideEffect === 'string')
- .map((sideEffect: any) => sideEffect.replace(/^\.\//, ''));
- sideEffectMatcher = picomatch(sideEffects, { matchBase: true });
+ .map((sideEffect: any) => {
+ const pattern = sideEffect.replace(/^\.\//, '');
+ return pattern.includes('/') ? pattern : `**/${pattern}`;
+ });
+ sideEffectMatcher = picomatch(sideEffects);
} else if (typeof packageJson.sideEffects === 'boolean' || !packageJson.sideEffects) {
sideEffectMatcher = packageJson.sideEffects;
} else {
@@ -121,8 +125,8 @@ export function _createSideEffectMatcher(
} else if (typeof sideEffectMatcher === 'boolean') {
return sideEffectMatcher;
} else {
- const relativeName = path.relative(dirRoot, fp);
- return sideEffectMatcher(relativeName);
+ const relativeName = path.isAbsolute(fp) ? path.relative(dirRoot, fp) : path.normalize(fp);
+ return sideEffectMatcher(toPosixPath(relativeName));
}
};
}
From b3ca96361bec833ddb2397d1b1303510e7c0e54a Mon Sep 17 00:00:00 2001
From: Phil Pluckthun
Date: Tue, 24 Feb 2026 18:22:21 +0000
Subject: [PATCH 05/10] fix(require-utils): Fix error being swallowed and add
 advisory error messages (#43329)
# Why
The first `require` call's errors were being swallowed, and the
resulting error that happens after isn't helpful. When `require`
succeeds, we should just issue its error and annotate it.
This PR restores the old transform behaviour for `.js` and `.ts` files,
but now uses Babel (instead of Sucrase) to transform ESM to CJS, allowing
dual-ESM-CJS in a single file. The same is done for TypeScript but with TypeScript
itself instead, relying on the `typescript` package. This is done until
we can figure out a better migration path, or tell people to rely on
Node's native behaviour instead.
This behaviour is activated either when `evalModule` is invoked without
a new format argument (which is `null` for the native behaviour) or when
`loadModuleSync` is invoked. This covers both the callsite from
`@expo/config` and `expo-modules-autolinking` sufficiently. The result
of this evaluation is cached in `loadModuleSync` but not in
`evalModule`, since the latter is an explicit call to evaluate.
The "legacy" behaviour basically allows:
- Importing CommonJS modules with ESM, without interop behaviour that'd
turn the CommonJS namespace into the default export
- Using mixed CommonJS and ESM
This only applies to `.js` and `.ts` since `.c[tj]s` and `.m[tj]s`
support is new, and won't need the old behaviour. This explicitly
circumvents the `package.json:type` field as we did before. It also does
not apply to `loadModule`, which is asynchronous, unless loading of
`.js` fails, or native TypeScript loading fails.
(Internal note; See Slack thread for context:
https://exponent-internal.slack.com/archives/C5ERY0TAR/p1771677581725529?thread_ts=1771623609.577789&cid=C5ERY0TAR)
# How
- Fix swallowed error after `require`
- Add ESM-to-CJS transformer with Babel for `commonjs` formats (instead
of Sucrase, but matching options)
- Switch unknown formats to `commonjs` (js is commonjs, ts is
commonjs-typescript now)
- Reuse cached, evaluated module on `loadModuleSync` call
# Test Plan
- Create a test `app.config.js` that contains `import { boolish } from
'getenv'`. Rename to `.mjs` to resolve.
- Create a test `app.config.js` that contains `require`. Rename to
`.cjs` to resolve.
- Repeat the same for `.ts`
# Checklist
- [x] I added a `changelog.md` entry and rebuilt the package sources
according to [this short
guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting)
- [ ] This diff will work correctly for `npx expo prebuild` & EAS Build
(eg: updated a module plugin).
- [ ] Conforms with the [Documentation Writing Style
Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md)
---
packages/@expo/require-utils/CHANGELOG.md | 3 +
.../@expo/require-utils/build/codeframe.d.ts | 2 +-
.../@expo/require-utils/build/codeframe.js | 32 +++----
.../require-utils/build/codeframe.js.map | 2 +-
packages/@expo/require-utils/build/load.d.ts | 3 +-
packages/@expo/require-utils/build/load.js | 77 ++++++++++------
.../@expo/require-utils/build/load.js.map | 2 +-
.../@expo/require-utils/build/transform.d.ts | 1 +
.../@expo/require-utils/build/transform.js | 31 +++++++
.../require-utils/build/transform.js.map | 1 +
packages/@expo/require-utils/package.json | 5 +-
.../src/__tests__/fixtures/example.js | 1 +
.../require-utils/src/__tests__/load-test.ts | 79 ++++++++++++++++
packages/@expo/require-utils/src/codeframe.ts | 22 ++---
packages/@expo/require-utils/src/load.ts | 90 +++++++++++++------
packages/@expo/require-utils/src/transform.ts | 24 +++++
16 files changed, 291 insertions(+), 84 deletions(-)
create mode 100644 packages/@expo/require-utils/build/transform.d.ts
create mode 100644 packages/@expo/require-utils/build/transform.js
create mode 100644 packages/@expo/require-utils/build/transform.js.map
create mode 100644 packages/@expo/require-utils/src/__tests__/fixtures/example.js
create mode 100644 packages/@expo/require-utils/src/__tests__/load-test.ts
create mode 100644 packages/@expo/require-utils/src/transform.ts
diff --git a/packages/@expo/require-utils/CHANGELOG.md b/packages/@expo/require-utils/CHANGELOG.md
index ecc6a7052db901..d2214f986f9a86 100644
--- a/packages/@expo/require-utils/CHANGELOG.md
+++ b/packages/@expo/require-utils/CHANGELOG.md
@@ -8,6 +8,9 @@
### 🐛 Bug fixes
+- Fix ESM/CJS compatibility errors being swallowed ([#43329](https://github.com/expo/expo/pull/43329) by [@kitten](https://github.com/kitten))
+- Restore old ESM-to-CJS transform behaviour for `.js` and `.ts` loading for now ([#43329](https://github.com/expo/expo/pull/43329) by [@kitten](https://github.com/kitten))
+
### 💡 Others
## 55.0.1 — 2026-02-20
diff --git a/packages/@expo/require-utils/build/codeframe.d.ts b/packages/@expo/require-utils/build/codeframe.d.ts
index 064ff37e4b0f4d..6a2bb18e8d64ae 100644
--- a/packages/@expo/require-utils/build/codeframe.d.ts
+++ b/packages/@expo/require-utils/build/codeframe.d.ts
@@ -2,6 +2,6 @@ import type { Diagnostic } from 'typescript';
export declare function formatDiagnostic(diagnostic: Diagnostic | undefined): (SyntaxError & {
codeFrame: string;
}) | null;
-export declare function annotateError(code: string, filename: string, error: Error): (Error & {
+export declare function annotateError(code: string | null, filename: string, error: Error): (Error & {
codeFrame: string;
}) | null;
diff --git a/packages/@expo/require-utils/build/codeframe.js b/packages/@expo/require-utils/build/codeframe.js
index 0fe129c33232a8..58c19f6a07bbab 100644
--- a/packages/@expo/require-utils/build/codeframe.js
+++ b/packages/@expo/require-utils/build/codeframe.js
@@ -66,21 +66,23 @@ function annotateError(code, filename, error) {
if (typeof error !== 'object' || error == null) {
return null;
}
- const loc = errorToLoc(filename, error);
- if (loc) {
- const {
- codeFrameColumns
- } = require('@babel/code-frame');
- const codeFrame = codeFrameColumns(code, {
- start: loc
- }, {
- highlightCode: true
- });
- const annotatedError = error;
- annotatedError.codeFrame = codeFrame;
- annotatedError.message += `\n${codeFrame}`;
- delete annotatedError.stack;
- return annotatedError;
+ if (code) {
+ const loc = errorToLoc(filename, error);
+ if (loc) {
+ const {
+ codeFrameColumns
+ } = require('@babel/code-frame');
+ const codeFrame = codeFrameColumns(code, {
+ start: loc
+ }, {
+ highlightCode: true
+ });
+ const annotatedError = error;
+ annotatedError.codeFrame = codeFrame;
+ annotatedError.message += `\n${codeFrame}`;
+ delete annotatedError.stack;
+ return annotatedError;
+ }
}
return null;
}
diff --git a/packages/@expo/require-utils/build/codeframe.js.map b/packages/@expo/require-utils/build/codeframe.js.map
index ff1eb0eae30eb2..8bfba7c98867f3 100644
--- a/packages/@expo/require-utils/build/codeframe.js.map
+++ b/packages/@expo/require-utils/build/codeframe.js.map
@@ -1 +1 @@
-{"version":3,"file":"codeframe.js","names":["_nodeUrl","data","_interopRequireDefault","require","e","__esModule","default","errorToLoc","filename","error","name","stack","slice","length","message","trace","match","url","pathToFileURL","href","line","Number","isSafeInteger","column","undefined","formatDiagnostic","diagnostic","start","file","messageText","codeFrameColumns","character","getLineAndCharacterOfPosition","loc","codeFrame","getText","highlightCode","annotatedError","SyntaxError","annotateError","code"],"sources":["../src/codeframe.ts"],"sourcesContent":["import url from 'node:url';\nimport type { Diagnostic } from 'typescript';\n\nfunction errorToLoc(filename: string, error: Error) {\n if (error.name === 'ReferenceError' || error.name === 'SyntaxError') {\n let stack = `${error.stack || ''}`;\n stack = stack.slice(error.name.length + 2 /* '${name}: ' prefix */);\n stack = stack.slice(error.message.length);\n const trace = stack.match(/at ([^\\n]+):(\\d+):(\\d+)/m);\n if (url.pathToFileURL(filename).href === trace?.[1]) {\n const line = Number(trace[2]);\n return Number.isSafeInteger(line) ? 
{ line, column: Number(trace[3]) || undefined } : null;\n }\n }\n return null;\n}\n\nexport function formatDiagnostic(diagnostic: Diagnostic | undefined) {\n if (!diagnostic) {\n return null;\n }\n const { start, file, messageText } = diagnostic;\n if (file && messageText && start != null) {\n const { codeFrameColumns }: typeof import('@babel/code-frame') = require('@babel/code-frame');\n const { line, character } = file.getLineAndCharacterOfPosition(start);\n const loc = { line: line + 1, column: character + 1 };\n const codeFrame = codeFrameColumns(file.getText(), { start: loc }, { highlightCode: true });\n const annotatedError = new SyntaxError(`${messageText}\\n${codeFrame}`) as SyntaxError & {\n codeFrame: string;\n };\n annotatedError.codeFrame = codeFrame;\n delete annotatedError.stack;\n return annotatedError;\n }\n return null;\n}\n\nexport function annotateError(code: string, filename: string, error: Error) {\n if (typeof error !== 'object' || error == null) {\n return null;\n }\n const loc = errorToLoc(filename, error);\n if (loc) {\n const { codeFrameColumns }: typeof import('@babel/code-frame') = require('@babel/code-frame');\n const codeFrame = codeFrameColumns(code, { start: loc }, { highlightCode: true });\n const annotatedError = error as Error & { codeFrame: string };\n annotatedError.codeFrame = codeFrame;\n annotatedError.message += `\\n${codeFrame}`;\n delete annotatedError.stack;\n return annotatedError;\n }\n return 
null;\n}\n"],"mappings":";;;;;;;AAAA,SAAAA,SAAA;EAAA,MAAAC,IAAA,GAAAC,sBAAA,CAAAC,OAAA;EAAAH,QAAA,YAAAA,CAAA;IAAA,OAAAC,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AAA2B,SAAAC,uBAAAE,CAAA,WAAAA,CAAA,IAAAA,CAAA,CAAAC,UAAA,GAAAD,CAAA,KAAAE,OAAA,EAAAF,CAAA;AAG3B,SAASG,UAAUA,CAACC,QAAgB,EAAEC,KAAY,EAAE;EAClD,IAAIA,KAAK,CAACC,IAAI,KAAK,gBAAgB,IAAID,KAAK,CAACC,IAAI,KAAK,aAAa,EAAE;IACnE,IAAIC,KAAK,GAAG,GAAGF,KAAK,CAACE,KAAK,IAAI,EAAE,EAAE;IAClCA,KAAK,GAAGA,KAAK,CAACC,KAAK,CAACH,KAAK,CAACC,IAAI,CAACG,MAAM,GAAG,CAAC,CAAC,wBAAwB,CAAC;IACnEF,KAAK,GAAGA,KAAK,CAACC,KAAK,CAACH,KAAK,CAACK,OAAO,CAACD,MAAM,CAAC;IACzC,MAAME,KAAK,GAAGJ,KAAK,CAACK,KAAK,CAAC,0BAA0B,CAAC;IACrD,IAAIC,kBAAG,CAACC,aAAa,CAACV,QAAQ,CAAC,CAACW,IAAI,KAAKJ,KAAK,GAAG,CAAC,CAAC,EAAE;MACnD,MAAMK,IAAI,GAAGC,MAAM,CAACN,KAAK,CAAC,CAAC,CAAC,CAAC;MAC7B,OAAOM,MAAM,CAACC,aAAa,CAACF,IAAI,CAAC,GAAG;QAAEA,IAAI;QAAEG,MAAM,EAAEF,MAAM,CAACN,KAAK,CAAC,CAAC,CAAC,CAAC,IAAIS;MAAU,CAAC,GAAG,IAAI;IAC5F;EACF;EACA,OAAO,IAAI;AACb;AAEO,SAASC,gBAAgBA,CAACC,UAAkC,EAAE;EACnE,IAAI,CAACA,UAAU,EAAE;IACf,OAAO,IAAI;EACb;EACA,MAAM;IAAEC,KAAK;IAAEC,IAAI;IAAEC;EAAY,CAAC,GAAGH,UAAU;EAC/C,IAAIE,IAAI,IAAIC,WAAW,IAAIF,KAAK,IAAI,IAAI,EAAE;IACxC,MAAM;MAAEG;IAAqD,CAAC,GAAG3B,OAAO,CAAC,mBAAmB,CAAC;IAC7F,MAAM;MAAEiB,IAAI;MAAEW;IAAU,CAAC,GAAGH,IAAI,CAACI,6BAA6B,CAACL,KAAK,CAAC;IACrE,MAAMM,GAAG,GAAG;MAAEb,IAAI,EAAEA,IAAI,GAAG,CAAC;MAAEG,MAAM,EAAEQ,SAAS,GAAG;IAAE,CAAC;IACrD,MAAMG,SAAS,GAAGJ,gBAAgB,CAACF,IAAI,CAACO,OAAO,CAAC,CAAC,EAAE;MAAER,KAAK,EAAEM;IAAI,CAAC,EAAE;MAAEG,aAAa,EAAE;IAAK,CAAC,CAAC;IAC3F,MAAMC,cAAc,GAAG,IAAIC,WAAW,CAAC,GAAGT,WAAW,KAAKK,SAAS,EAAE,CAEpE;IACDG,cAAc,CAACH,SAAS,GAAGA,SAAS;IACpC,OAAOG,cAAc,CAAC1B,KAAK;IAC3B,OAAO0B,cAAc;EACvB;EACA,OAAO,IAAI;AACb;AAEO,SAASE,aAAaA,CAACC,IAAY,EAAEhC,QAAgB,EAAEC,KAAY,EAAE;EAC1E,IAAI,OAAOA,KAAK,KAAK,QAAQ,IAAIA,KAAK,IAAI,IAAI,EAAE;IAC9C,OAAO,IAAI;EACb;EACA,MAAMwB,GAAG,GAAG1B,UAAU,CAACC,QAAQ,EAAEC,KAAK,CAAC;EACvC,IAAIwB,GAAG,EAAE;IACP,MAAM;MAAEH;IAAqD,CAAC,GAAG3B,OAAO,CAAC,mBAAmB,CAAC;IAC7F,MAAM+B,SAAS,GAAGJ,gBAAgB,CAACU,I
AAI,EAAE;MAAEb,KAAK,EAAEM;IAAI,CAAC,EAAE;MAAEG,aAAa,EAAE;IAAK,CAAC,CAAC;IACjF,MAAMC,cAAc,GAAG5B,KAAsC;IAC7D4B,cAAc,CAACH,SAAS,GAAGA,SAAS;IACpCG,cAAc,CAACvB,OAAO,IAAI,KAAKoB,SAAS,EAAE;IAC1C,OAAOG,cAAc,CAAC1B,KAAK;IAC3B,OAAO0B,cAAc;EACvB;EACA,OAAO,IAAI;AACb","ignoreList":[]}
\ No newline at end of file
+{"version":3,"file":"codeframe.js","names":["_nodeUrl","data","_interopRequireDefault","require","e","__esModule","default","errorToLoc","filename","error","name","stack","slice","length","message","trace","match","url","pathToFileURL","href","line","Number","isSafeInteger","column","undefined","formatDiagnostic","diagnostic","start","file","messageText","codeFrameColumns","character","getLineAndCharacterOfPosition","loc","codeFrame","getText","highlightCode","annotatedError","SyntaxError","annotateError","code"],"sources":["../src/codeframe.ts"],"sourcesContent":["import url from 'node:url';\nimport type { Diagnostic } from 'typescript';\n\nfunction errorToLoc(filename: string, error: Error) {\n if (error.name === 'ReferenceError' || error.name === 'SyntaxError') {\n let stack = `${error.stack || ''}`;\n stack = stack.slice(error.name.length + 2 /* '${name}: ' prefix */);\n stack = stack.slice(error.message.length);\n const trace = stack.match(/at ([^\\n]+):(\\d+):(\\d+)/m);\n if (url.pathToFileURL(filename).href === trace?.[1]) {\n const line = Number(trace[2]);\n return Number.isSafeInteger(line) ? 
{ line, column: Number(trace[3]) || undefined } : null;\n }\n }\n return null;\n}\n\nexport function formatDiagnostic(diagnostic: Diagnostic | undefined) {\n if (!diagnostic) {\n return null;\n }\n const { start, file, messageText } = diagnostic;\n if (file && messageText && start != null) {\n const { codeFrameColumns }: typeof import('@babel/code-frame') = require('@babel/code-frame');\n const { line, character } = file.getLineAndCharacterOfPosition(start);\n const loc = { line: line + 1, column: character + 1 };\n const codeFrame = codeFrameColumns(file.getText(), { start: loc }, { highlightCode: true });\n const annotatedError = new SyntaxError(`${messageText}\\n${codeFrame}`) as SyntaxError & {\n codeFrame: string;\n };\n annotatedError.codeFrame = codeFrame;\n delete annotatedError.stack;\n return annotatedError;\n }\n return null;\n}\n\nexport function annotateError(code: string | null, filename: string, error: Error) {\n if (typeof error !== 'object' || error == null) {\n return null;\n }\n if (code) {\n const loc = errorToLoc(filename, error);\n if (loc) {\n const { codeFrameColumns }: typeof import('@babel/code-frame') = require('@babel/code-frame');\n const codeFrame = codeFrameColumns(code, { start: loc }, { highlightCode: true });\n const annotatedError = error as Error & { codeFrame: string };\n annotatedError.codeFrame = codeFrame;\n annotatedError.message += `\\n${codeFrame}`;\n delete annotatedError.stack;\n return annotatedError;\n }\n }\n return 
null;\n}\n"],"mappings":";;;;;;;AAAA,SAAAA,SAAA;EAAA,MAAAC,IAAA,GAAAC,sBAAA,CAAAC,OAAA;EAAAH,QAAA,YAAAA,CAAA;IAAA,OAAAC,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AAA2B,SAAAC,uBAAAE,CAAA,WAAAA,CAAA,IAAAA,CAAA,CAAAC,UAAA,GAAAD,CAAA,KAAAE,OAAA,EAAAF,CAAA;AAG3B,SAASG,UAAUA,CAACC,QAAgB,EAAEC,KAAY,EAAE;EAClD,IAAIA,KAAK,CAACC,IAAI,KAAK,gBAAgB,IAAID,KAAK,CAACC,IAAI,KAAK,aAAa,EAAE;IACnE,IAAIC,KAAK,GAAG,GAAGF,KAAK,CAACE,KAAK,IAAI,EAAE,EAAE;IAClCA,KAAK,GAAGA,KAAK,CAACC,KAAK,CAACH,KAAK,CAACC,IAAI,CAACG,MAAM,GAAG,CAAC,CAAC,wBAAwB,CAAC;IACnEF,KAAK,GAAGA,KAAK,CAACC,KAAK,CAACH,KAAK,CAACK,OAAO,CAACD,MAAM,CAAC;IACzC,MAAME,KAAK,GAAGJ,KAAK,CAACK,KAAK,CAAC,0BAA0B,CAAC;IACrD,IAAIC,kBAAG,CAACC,aAAa,CAACV,QAAQ,CAAC,CAACW,IAAI,KAAKJ,KAAK,GAAG,CAAC,CAAC,EAAE;MACnD,MAAMK,IAAI,GAAGC,MAAM,CAACN,KAAK,CAAC,CAAC,CAAC,CAAC;MAC7B,OAAOM,MAAM,CAACC,aAAa,CAACF,IAAI,CAAC,GAAG;QAAEA,IAAI;QAAEG,MAAM,EAAEF,MAAM,CAACN,KAAK,CAAC,CAAC,CAAC,CAAC,IAAIS;MAAU,CAAC,GAAG,IAAI;IAC5F;EACF;EACA,OAAO,IAAI;AACb;AAEO,SAASC,gBAAgBA,CAACC,UAAkC,EAAE;EACnE,IAAI,CAACA,UAAU,EAAE;IACf,OAAO,IAAI;EACb;EACA,MAAM;IAAEC,KAAK;IAAEC,IAAI;IAAEC;EAAY,CAAC,GAAGH,UAAU;EAC/C,IAAIE,IAAI,IAAIC,WAAW,IAAIF,KAAK,IAAI,IAAI,EAAE;IACxC,MAAM;MAAEG;IAAqD,CAAC,GAAG3B,OAAO,CAAC,mBAAmB,CAAC;IAC7F,MAAM;MAAEiB,IAAI;MAAEW;IAAU,CAAC,GAAGH,IAAI,CAACI,6BAA6B,CAACL,KAAK,CAAC;IACrE,MAAMM,GAAG,GAAG;MAAEb,IAAI,EAAEA,IAAI,GAAG,CAAC;MAAEG,MAAM,EAAEQ,SAAS,GAAG;IAAE,CAAC;IACrD,MAAMG,SAAS,GAAGJ,gBAAgB,CAACF,IAAI,CAACO,OAAO,CAAC,CAAC,EAAE;MAAER,KAAK,EAAEM;IAAI,CAAC,EAAE;MAAEG,aAAa,EAAE;IAAK,CAAC,CAAC;IAC3F,MAAMC,cAAc,GAAG,IAAIC,WAAW,CAAC,GAAGT,WAAW,KAAKK,SAAS,EAAE,CAEpE;IACDG,cAAc,CAACH,SAAS,GAAGA,SAAS;IACpC,OAAOG,cAAc,CAAC1B,KAAK;IAC3B,OAAO0B,cAAc;EACvB;EACA,OAAO,IAAI;AACb;AAEO,SAASE,aAAaA,CAACC,IAAmB,EAAEhC,QAAgB,EAAEC,KAAY,EAAE;EACjF,IAAI,OAAOA,KAAK,KAAK,QAAQ,IAAIA,KAAK,IAAI,IAAI,EAAE;IAC9C,OAAO,IAAI;EACb;EACA,IAAI+B,IAAI,EAAE;IACR,MAAMP,GAAG,GAAG1B,UAAU,CAACC,QAAQ,EAAEC,KAAK,CAAC;IACvC,IAAIwB,GAAG,EAAE;MACP,MAAM;QAAEH;MAAqD,CAAC,GAAG3B,OAAO,CAAC,mBAAmB,CAAC;MAC7F,MAAM+B,SAA
S,GAAGJ,gBAAgB,CAACU,IAAI,EAAE;QAAEb,KAAK,EAAEM;MAAI,CAAC,EAAE;QAAEG,aAAa,EAAE;MAAK,CAAC,CAAC;MACjF,MAAMC,cAAc,GAAG5B,KAAsC;MAC7D4B,cAAc,CAACH,SAAS,GAAGA,SAAS;MACpCG,cAAc,CAACvB,OAAO,IAAI,KAAKoB,SAAS,EAAE;MAC1C,OAAOG,cAAc,CAAC1B,KAAK;MAC3B,OAAO0B,cAAc;IACvB;EACF;EACA,OAAO,IAAI;AACb","ignoreList":[]}
\ No newline at end of file
diff --git a/packages/@expo/require-utils/build/load.d.ts b/packages/@expo/require-utils/build/load.d.ts
index be029c97ebce8d..335d7407828b10 100644
--- a/packages/@expo/require-utils/build/load.d.ts
+++ b/packages/@expo/require-utils/build/load.d.ts
@@ -8,10 +8,11 @@ declare global {
}
}
}
+type Format = 'commonjs' | 'module' | 'module-typescript' | 'commonjs-typescript' | 'typescript';
export interface ModuleOptions {
paths?: string[];
}
-declare function evalModule(code: string, filename: string, opts?: ModuleOptions): any;
+declare function evalModule(code: string, filename: string, opts?: ModuleOptions, format?: Format): any;
declare function loadModule(filename: string): Promise;
/** Require module or evaluate with TypeScript
* NOTE: Requiring ESM has been added in all LTS versions (Node 20.19+, 22.12+, 24).
diff --git a/packages/@expo/require-utils/build/load.js b/packages/@expo/require-utils/build/load.js
index d75eb1f196b738..c3a0f9b1487218 100644
--- a/packages/@expo/require-utils/build/load.js
+++ b/packages/@expo/require-utils/build/load.js
@@ -41,6 +41,13 @@ function _codeframe() {
};
return data;
}
+function _transform() {
+ const data = require("./transform");
+ _transform = function () {
+ return data;
+ };
+ return data;
+}
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
@@ -65,35 +72,35 @@ const tsExtensionMapping = {
'.cts': '.cjs',
'.mts': '.mjs'
};
-function toFormat(filename) {
+function maybeReadFileSync(filename) {
+ try {
+ return _nodeFs().default.readFileSync(filename, 'utf8');
+ } catch (error) {
+ if (error.code === 'ENOENT') {
+ return null;
+ }
+ throw error;
+ }
+}
+function toFormat(filename, isLegacy) {
if (filename.endsWith('.cjs')) {
return 'commonjs';
} else if (filename.endsWith('.mjs')) {
return 'module';
} else if (filename.endsWith('.js')) {
- return undefined;
+ return isLegacy ? 'commonjs' : null;
} else if (filename.endsWith('.mts')) {
return 'module-typescript';
} else if (filename.endsWith('.cts')) {
return 'commonjs-typescript';
} else if (filename.endsWith('.ts')) {
- return 'typescript';
+ return isLegacy ? 'commonjs-typescript' : 'typescript';
} else {
- return undefined;
- }
-}
-function isTypescriptFilename(filename) {
- switch (toFormat(filename)) {
- case 'module-typescript':
- case 'commonjs-typescript':
- case 'typescript':
- return true;
- default:
- return false;
+ return null;
}
}
function compileModule(code, filename, opts) {
- const format = toFormat(filename);
+ const format = toFormat(filename, false);
const prependPaths = opts.paths ?? [];
const nodeModulePaths = nodeModule()._nodeModulePaths(_nodePath().default.dirname(filename));
const paths = [...prependPaths, ...nodeModulePaths];
@@ -102,28 +109,28 @@ function compileModule(code, filename, opts) {
filename,
paths
});
- mod._compile(code, filename, format);
+ mod._compile(code, filename, format != null ? format : undefined);
+ mod.loaded = true;
require.cache[filename] = mod;
parent?.children?.splice(parent.children.indexOf(mod), 1);
- return mod.exports;
+ return mod;
} catch (error) {
delete require.cache[filename];
throw error;
}
}
const hasStripTypeScriptTypes = typeof nodeModule().stripTypeScriptTypes === 'function';
-function evalModule(code, filename, opts = {}) {
+function evalModule(code, filename, opts = {}, format = toFormat(filename, true)) {
let inputCode = code;
let inputFilename = filename;
let diagnostic;
- if (filename.endsWith('.ts') || filename.endsWith('.cts') || filename.endsWith('.mts')) {
- const ext = _nodePath().default.extname(filename);
+ if (format === 'typescript' || format === 'module-typescript' || format === 'commonjs-typescript') {
const ts = loadTypescript();
if (ts) {
let module;
- if (ext === '.cts') {
+ if (format === 'commonjs-typescript') {
module = ts.ModuleKind.CommonJS;
- } else if (ext === '.mts') {
+ } else if (format === 'module-typescript') {
module = ts.ModuleKind.ESNext;
} else {
// NOTE(@kitten): We can "preserve" the output, meaning, it can either be ESM or CJS
@@ -159,22 +166,29 @@ function evalModule(code, filename, opts = {}) {
});
}
if (inputCode !== code) {
+ const ext = _nodePath().default.extname(filename);
const inputExt = tsExtensionMapping[ext] ?? ext;
if (inputExt !== ext) {
inputFilename = _nodePath().default.join(_nodePath().default.dirname(filename), _nodePath().default.basename(filename, ext) + inputExt);
}
}
+ } else if (format === 'commonjs') {
+ inputCode = (0, _transform().toCommonJS)(filename, code);
}
try {
const mod = compileModule(inputCode, inputFilename, opts);
if (inputFilename !== filename) {
require.cache[filename] = mod;
}
- return mod;
+ return mod.exports;
} catch (error) {
// If we have a diagnostic from TypeScript, we issue its error with a codeframe first,
// since it's likely more useful than the eval error
- throw (0, _codeframe().formatDiagnostic)(diagnostic) ?? (0, _codeframe().annotateError)(code, filename, error) ?? error;
+ const diagnosticError = (0, _codeframe().formatDiagnostic)(diagnostic);
+ if (diagnosticError) {
+ throw diagnosticError;
+ }
+ throw (0, _codeframe().annotateError)(code, filename, error) ?? error;
}
}
async function requireOrImport(filename) {
@@ -200,19 +214,30 @@ async function loadModule(filename) {
* NOTE: Requiring ESM has been added in all LTS versions (Node 20.19+, 22.12+, 24).
* This already forms the minimum required Node version as of Expo SDK 54 */
function loadModuleSync(filename) {
+ const format = toFormat(filename, true);
+ const isTypeScript = format === 'module-typescript' || format === 'commonjs-typescript' || format === 'typescript';
try {
- if (!isTypescriptFilename(filename)) {
+ if (format !== 'module' && !isTypeScript) {
return require(filename);
}
} catch (error) {
if (error.code === 'MODULE_NOT_FOUND') {
throw error;
+ } else if (format == null) {
+ const code = maybeReadFileSync(filename);
+ throw (0, _codeframe().annotateError)(code, filename, error) || error;
}
// We fallback to always evaluating the entrypoint module
// This is out of safety, since we're not trusting the requiring ESM feature
// and evaluating the module manually bypasses the error when it's flagged off
}
+
+ // Load from cache manually, if `loaded` is set and exports are defined, to avoid
+ // double transform or double evaluation
+ if (require.cache[filename]?.exports && require.cache[filename].loaded) {
+ return require.cache[filename].exports;
+ }
const code = _nodeFs().default.readFileSync(filename, 'utf8');
- return evalModule(code, filename);
+ return evalModule(code, filename, {}, format);
}
//# sourceMappingURL=load.js.map
\ No newline at end of file
diff --git a/packages/@expo/require-utils/build/load.js.map b/packages/@expo/require-utils/build/load.js.map
index 7aae616cda3c25..ca168aafa9aa8f 100644
--- a/packages/@expo/require-utils/build/load.js.map
+++ b/packages/@expo/require-utils/build/load.js.map
@@ -1 +1 @@
-{"version":3,"file":"load.js","names":["_nodeFs","data","_interopRequireDefault","require","nodeModule","_interopRequireWildcard","_nodePath","_nodeUrl","_codeframe","e","__esModule","default","_getRequireWildcardCache","WeakMap","r","t","has","get","n","__proto__","a","Object","defineProperty","getOwnPropertyDescriptor","u","hasOwnProperty","call","i","set","_ts","loadTypescript","undefined","error","code","parent","module","tsExtensionMapping","toFormat","filename","endsWith","isTypescriptFilename","compileModule","opts","format","prependPaths","paths","nodeModulePaths","_nodeModulePaths","path","dirname","mod","assign","Module","_compile","cache","children","splice","indexOf","exports","hasStripTypeScriptTypes","stripTypeScriptTypes","evalModule","inputCode","inputFilename","diagnostic","ext","extname","ts","ModuleKind","CommonJS","ESNext","Preserve","output","transpileModule","fileName","reportDiagnostics","compilerOptions","moduleResolution","ModuleResolutionKind","Bundler","verbatimModuleSyntax","target","ScriptTarget","newLine","NewLineKind","LineFeed","inlineSourceMap","esModuleInterop","outputText","diagnostics","length","mode","sourceMap","inputExt","join","basename","formatDiagnostic","annotateError","requireOrImport","Promise","resolve","isAbsolute","url","pathToFileURL","toString","then","s","loadModule","loadModuleSync","fs","readFileSync"],"sources":["../src/load.ts"],"sourcesContent":["import fs from 'node:fs';\nimport * as nodeModule from 'node:module';\nimport path from 'node:path';\nimport url from 'node:url';\nimport type * as ts from 'typescript';\n\nimport { annotateError, formatDiagnostic } from './codeframe';\n\ndeclare module 'node:module' {\n export function _nodeModulePaths(base: string): readonly string[];\n}\n\ndeclare global {\n namespace NodeJS {\n export interface Module {\n _compile(\n code: string,\n filename: string,\n format?: 'module' | 'commonjs' | 'commonjs-typescript' | 'module-typescript' | 'typescript'\n ): unknown;\n }\n 
}\n}\n\nlet _ts: typeof import('typescript') | null | undefined;\nfunction loadTypescript() {\n if (_ts === undefined) {\n try {\n _ts = require('typescript');\n } catch (error: any) {\n if (error.code !== 'MODULE_NOT_FOUND') {\n throw error;\n } else {\n _ts = null;\n }\n }\n }\n return _ts;\n}\n\nconst parent = module;\n\nconst tsExtensionMapping: Record = {\n '.ts': '.js',\n '.cts': '.cjs',\n '.mts': '.mjs',\n};\n\nfunction toFormat(filename: string) {\n if (filename.endsWith('.cjs')) {\n return 'commonjs';\n } else if (filename.endsWith('.mjs')) {\n return 'module';\n } else if (filename.endsWith('.js')) {\n return undefined;\n } else if (filename.endsWith('.mts')) {\n return 'module-typescript';\n } else if (filename.endsWith('.cts')) {\n return 'commonjs-typescript';\n } else if (filename.endsWith('.ts')) {\n return 'typescript';\n } else {\n return undefined;\n }\n}\n\nfunction isTypescriptFilename(filename: string) {\n switch (toFormat(filename)) {\n case 'module-typescript':\n case 'commonjs-typescript':\n case 'typescript':\n return true;\n default:\n return false;\n }\n}\n\nexport interface ModuleOptions {\n paths?: string[];\n}\n\nfunction compileModule(code: string, filename: string, opts: ModuleOptions): T {\n const format = toFormat(filename);\n const prependPaths = opts.paths ?? 
[];\n const nodeModulePaths = nodeModule._nodeModulePaths(path.dirname(filename));\n const paths = [...prependPaths, ...nodeModulePaths];\n try {\n const mod = Object.assign(new nodeModule.Module(filename, parent), { filename, paths });\n mod._compile(code, filename, format);\n require.cache[filename] = mod;\n parent?.children?.splice(parent.children.indexOf(mod), 1);\n return mod.exports;\n } catch (error: any) {\n delete require.cache[filename];\n throw error;\n }\n}\n\nconst hasStripTypeScriptTypes = typeof nodeModule.stripTypeScriptTypes === 'function';\n\nfunction evalModule(code: string, filename: string, opts: ModuleOptions = {}) {\n let inputCode = code;\n let inputFilename = filename;\n let diagnostic: ts.Diagnostic | undefined;\n if (filename.endsWith('.ts') || filename.endsWith('.cts') || filename.endsWith('.mts')) {\n const ext = path.extname(filename);\n const ts = loadTypescript();\n\n if (ts) {\n let module: ts.ModuleKind;\n if (ext === '.cts') {\n module = ts.ModuleKind.CommonJS;\n } else if (ext === '.mts') {\n module = ts.ModuleKind.ESNext;\n } else {\n // NOTE(@kitten): We can \"preserve\" the output, meaning, it can either be ESM or CJS\n // and stop TypeScript from either transpiling it to CommonJS or adding an `export {}`\n // if no exports are used. 
This allows the user to choose if this file is CJS or ESM\n // (but not to mix both)\n module = ts.ModuleKind.Preserve;\n }\n const output = ts.transpileModule(code, {\n fileName: filename,\n reportDiagnostics: true,\n compilerOptions: {\n module,\n moduleResolution: ts.ModuleResolutionKind.Bundler,\n // `verbatimModuleSyntax` needs to be off, to erase as many imports as possible\n verbatimModuleSyntax: false,\n target: ts.ScriptTarget.ESNext,\n newLine: ts.NewLineKind.LineFeed,\n inlineSourceMap: true,\n esModuleInterop: true,\n },\n });\n inputCode = output?.outputText || inputCode;\n if (output?.diagnostics?.length) {\n diagnostic = output.diagnostics[0];\n }\n }\n\n if (hasStripTypeScriptTypes && inputCode === code) {\n // This may throw its own error, but this contains a code-frame already\n inputCode = nodeModule.stripTypeScriptTypes(code, {\n mode: 'transform',\n sourceMap: true,\n });\n }\n\n if (inputCode !== code) {\n const inputExt = tsExtensionMapping[ext] ?? ext;\n if (inputExt !== ext) {\n inputFilename = path.join(path.dirname(filename), path.basename(filename, ext) + inputExt);\n }\n }\n }\n\n try {\n const mod = compileModule(inputCode, inputFilename, opts);\n if (inputFilename !== filename) {\n require.cache[filename] = mod;\n }\n return mod;\n } catch (error: any) {\n // If we have a diagnostic from TypeScript, we issue its error with a codeframe first,\n // since it's likely more useful than the eval error\n throw formatDiagnostic(diagnostic) ?? annotateError(code, filename, error) ?? error;\n }\n}\n\nasync function requireOrImport(filename: string) {\n try {\n return require(filename);\n } catch {\n return await import(\n path.isAbsolute(filename) ? 
url.pathToFileURL(filename).toString() : filename\n );\n }\n}\n\nasync function loadModule(filename: string) {\n try {\n return await requireOrImport(filename);\n } catch (error: any) {\n if (error.code === 'ERR_UNKNOWN_FILE_EXTENSION' || error.code === 'MODULE_NOT_FOUND') {\n return loadModuleSync(filename);\n } else {\n throw error;\n }\n }\n}\n\n/** Require module or evaluate with TypeScript\n * NOTE: Requiring ESM has been added in all LTS versions (Node 20.19+, 22.12+, 24).\n * This already forms the minimum required Node version as of Expo SDK 54 */\nfunction loadModuleSync(filename: string) {\n try {\n if (!isTypescriptFilename(filename)) {\n return require(filename);\n }\n } catch (error: any) {\n if (error.code === 'MODULE_NOT_FOUND') {\n throw error;\n }\n // We fallback to always evaluating the entrypoint module\n // This is out of safety, since we're not trusting the requiring ESM feature\n // and evaluating the module manually bypasses the error when it's flagged off\n }\n const code = fs.readFileSync(filename, 'utf8');\n return evalModule(code, filename);\n}\n\nexport { evalModule, loadModule, loadModuleSync 
};\n"],"mappings":";;;;;;;;AAAA,SAAAA,QAAA;EAAA,MAAAC,IAAA,GAAAC,sBAAA,CAAAC,OAAA;EAAAH,OAAA,YAAAA,CAAA;IAAA,OAAAC,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AACA,SAAAG,WAAA;EAAA,MAAAH,IAAA,GAAAI,uBAAA,CAAAF,OAAA;EAAAC,UAAA,YAAAA,CAAA;IAAA,OAAAH,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AACA,SAAAK,UAAA;EAAA,MAAAL,IAAA,GAAAC,sBAAA,CAAAC,OAAA;EAAAG,SAAA,YAAAA,CAAA;IAAA,OAAAL,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AACA,SAAAM,SAAA;EAAA,MAAAN,IAAA,GAAAC,sBAAA,CAAAC,OAAA;EAAAI,QAAA,YAAAA,CAAA;IAAA,OAAAN,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AAGA,SAAAO,WAAA;EAAA,MAAAP,IAAA,GAAAE,OAAA;EAAAK,UAAA,YAAAA,CAAA;IAAA,OAAAP,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AAA8D,SAAAC,uBAAAO,CAAA,WAAAA,CAAA,IAAAA,CAAA,CAAAC,UAAA,GAAAD,CAAA,KAAAE,OAAA,EAAAF,CAAA;AAAA,SAAAG,yBAAAH,CAAA,6BAAAI,OAAA,mBAAAC,CAAA,OAAAD,OAAA,IAAAE,CAAA,OAAAF,OAAA,YAAAD,wBAAA,YAAAA,CAAAH,CAAA,WAAAA,CAAA,GAAAM,CAAA,GAAAD,CAAA,KAAAL,CAAA;AAAA,SAAAJ,wBAAAI,CAAA,EAAAK,CAAA,SAAAA,CAAA,IAAAL,CAAA,IAAAA,CAAA,CAAAC,UAAA,SAAAD,CAAA,eAAAA,CAAA,uBAAAA,CAAA,yBAAAA,CAAA,WAAAE,OAAA,EAAAF,CAAA,QAAAM,CAAA,GAAAH,wBAAA,CAAAE,CAAA,OAAAC,CAAA,IAAAA,CAAA,CAAAC,GAAA,CAAAP,CAAA,UAAAM,CAAA,CAAAE,GAAA,CAAAR,CAAA,OAAAS,CAAA,KAAAC,SAAA,UAAAC,CAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,CAAA,IAAAf,CAAA,oBAAAe,CAAA,OAAAC,cAAA,CAAAC,IAAA,CAAAjB,CAAA,EAAAe,CAAA,SAAAG,CAAA,GAAAP,CAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAd,CAAA,EAAAe,CAAA,UAAAG,CAAA,KAAAA,CAAA,CAAAV,GAAA,IAAAU,CAAA,CAAAC,GAAA,IAAAP,MAAA,CAAAC,cAAA,CAAAJ,CAAA,EAAAM,CAAA,EAAAG,CAAA,IAAAT,CAAA,CAAAM,CAAA,IAAAf,CAAA,CAAAe,CAAA,YAAAN,CAAA,CAAAP,OAAA,GAAAF,CAAA,EAAAM,CAAA,IAAAA,CAAA,CAAAa,GAAA,CAAAnB,CAAA,EAAAS,CAAA,GAAAA,CAAA;AAkB9D,IAAIW,GAAmD;AACvD,SAASC,cAAcA,CAAA,EAAG;EACxB,IAAID,GAAG,KAAKE,SAAS,EAAE;IACrB,IAAI;MACFF,GAAG,GAAG1B,OAAO,CAAC,YAAY,CAAC;IAC7B,CAAC,CAAC,OAAO6B,KAAU,EAAE;MACnB,IAAIA,KAAK,CAACC,IAAI,KAAK,kBAAkB,EAAE;QACrC,MAAMD,KAAK;MACb,CAAC,MAAM;QACLH,GAAG,GAAG,IAAI;MACZ;IACF;EACF;EACA,OAAOA,GAAG;AACZ;AAEA,MAAMK,MAAM,GAAGC,MAAM;AAErB,MAAMC,kBAAsD,GAAG;EAC7D,KAAK,EAAE,KAAK;EACZ,MAAM,EAAE,MAAM;EACd,MAAM,EAAE;AACV,CAAC
;AAED,SAASC,QAAQA,CAACC,QAAgB,EAAE;EAClC,IAAIA,QAAQ,CAACC,QAAQ,CAAC,MAAM,CAAC,EAAE;IAC7B,OAAO,UAAU;EACnB,CAAC,MAAM,IAAID,QAAQ,CAACC,QAAQ,CAAC,MAAM,CAAC,EAAE;IACpC,OAAO,QAAQ;EACjB,CAAC,MAAM,IAAID,QAAQ,CAACC,QAAQ,CAAC,KAAK,CAAC,EAAE;IACnC,OAAOR,SAAS;EAClB,CAAC,MAAM,IAAIO,QAAQ,CAACC,QAAQ,CAAC,MAAM,CAAC,EAAE;IACpC,OAAO,mBAAmB;EAC5B,CAAC,MAAM,IAAID,QAAQ,CAACC,QAAQ,CAAC,MAAM,CAAC,EAAE;IACpC,OAAO,qBAAqB;EAC9B,CAAC,MAAM,IAAID,QAAQ,CAACC,QAAQ,CAAC,KAAK,CAAC,EAAE;IACnC,OAAO,YAAY;EACrB,CAAC,MAAM;IACL,OAAOR,SAAS;EAClB;AACF;AAEA,SAASS,oBAAoBA,CAACF,QAAgB,EAAE;EAC9C,QAAQD,QAAQ,CAACC,QAAQ,CAAC;IACxB,KAAK,mBAAmB;IACxB,KAAK,qBAAqB;IAC1B,KAAK,YAAY;MACf,OAAO,IAAI;IACb;MACE,OAAO,KAAK;EAChB;AACF;AAMA,SAASG,aAAaA,CAAUR,IAAY,EAAEK,QAAgB,EAAEI,IAAmB,EAAK;EACtF,MAAMC,MAAM,GAAGN,QAAQ,CAACC,QAAQ,CAAC;EACjC,MAAMM,YAAY,GAAGF,IAAI,CAACG,KAAK,IAAI,EAAE;EACrC,MAAMC,eAAe,GAAG1C,UAAU,CAAD,CAAC,CAAC2C,gBAAgB,CAACC,mBAAI,CAACC,OAAO,CAACX,QAAQ,CAAC,CAAC;EAC3E,MAAMO,KAAK,GAAG,CAAC,GAAGD,YAAY,EAAE,GAAGE,eAAe,CAAC;EACnD,IAAI;IACF,MAAMI,GAAG,GAAG7B,MAAM,CAAC8B,MAAM,CAAC,KAAI/C,UAAU,CAAD,CAAC,CAACgD,MAAM,EAACd,QAAQ,EAAEJ,MAAM,CAAC,EAAE;MAAEI,QAAQ;MAAEO;IAAM,CAAC,CAAC;IACvFK,GAAG,CAACG,QAAQ,CAACpB,IAAI,EAAEK,QAAQ,EAAEK,MAAM,CAAC;IACpCxC,OAAO,CAACmD,KAAK,CAAChB,QAAQ,CAAC,GAAGY,GAAG;IAC7BhB,MAAM,EAAEqB,QAAQ,EAAEC,MAAM,CAACtB,MAAM,CAACqB,QAAQ,CAACE,OAAO,CAACP,GAAG,CAAC,EAAE,CAAC,CAAC;IACzD,OAAOA,GAAG,CAACQ,OAAO;EACpB,CAAC,CAAC,OAAO1B,KAAU,EAAE;IACnB,OAAO7B,OAAO,CAACmD,KAAK,CAAChB,QAAQ,CAAC;IAC9B,MAAMN,KAAK;EACb;AACF;AAEA,MAAM2B,uBAAuB,GAAG,OAAOvD,UAAU,CAAD,CAAC,CAACwD,oBAAoB,KAAK,UAAU;AAErF,SAASC,UAAUA,CAAC5B,IAAY,EAAEK,QAAgB,EAAEI,IAAmB,GAAG,CAAC,CAAC,EAAE;EAC5E,IAAIoB,SAAS,GAAG7B,IAAI;EACpB,IAAI8B,aAAa,GAAGzB,QAAQ;EAC5B,IAAI0B,UAAqC;EACzC,IAAI1B,QAAQ,CAACC,QAAQ,CAAC,KAAK,CAAC,IAAID,QAAQ,CAACC,QAAQ,CAAC,MAAM,CAAC,IAAID,QAAQ,CAACC,QAAQ,CAAC,MAAM,CAAC,EAAE;IACtF,MAAM0B,GAAG,GAAGjB,mBAAI,CAACkB,OAAO,CAAC5B,QAAQ,CAAC;IAClC,MAAM6B,EAAE,GAAGrC,cAAc,CAAC,CAAC;IAE3B,IAAIqC,EAAE,EAAE;MACN,IAAIhC,MAAqB;MACzB,IAAI8B,GAAG
,KAAK,MAAM,EAAE;QAClB9B,MAAM,GAAGgC,EAAE,CAACC,UAAU,CAACC,QAAQ;MACjC,CAAC,MAAM,IAAIJ,GAAG,KAAK,MAAM,EAAE;QACzB9B,MAAM,GAAGgC,EAAE,CAACC,UAAU,CAACE,MAAM;MAC/B,CAAC,MAAM;QACL;QACA;QACA;QACA;QACAnC,MAAM,GAAGgC,EAAE,CAACC,UAAU,CAACG,QAAQ;MACjC;MACA,MAAMC,MAAM,GAAGL,EAAE,CAACM,eAAe,CAACxC,IAAI,EAAE;QACtCyC,QAAQ,EAAEpC,QAAQ;QAClBqC,iBAAiB,EAAE,IAAI;QACvBC,eAAe,EAAE;UACfzC,MAAM;UACN0C,gBAAgB,EAAEV,EAAE,CAACW,oBAAoB,CAACC,OAAO;UACjD;UACAC,oBAAoB,EAAE,KAAK;UAC3BC,MAAM,EAAEd,EAAE,CAACe,YAAY,CAACZ,MAAM;UAC9Ba,OAAO,EAAEhB,EAAE,CAACiB,WAAW,CAACC,QAAQ;UAChCC,eAAe,EAAE,IAAI;UACrBC,eAAe,EAAE;QACnB;MACF,CAAC,CAAC;MACFzB,SAAS,GAAGU,MAAM,EAAEgB,UAAU,IAAI1B,SAAS;MAC3C,IAAIU,MAAM,EAAEiB,WAAW,EAAEC,MAAM,EAAE;QAC/B1B,UAAU,GAAGQ,MAAM,CAACiB,WAAW,CAAC,CAAC,CAAC;MACpC;IACF;IAEA,IAAI9B,uBAAuB,IAAIG,SAAS,KAAK7B,IAAI,EAAE;MACjD;MACA6B,SAAS,GAAG1D,UAAU,CAAD,CAAC,CAACwD,oBAAoB,CAAC3B,IAAI,EAAE;QAChD0D,IAAI,EAAE,WAAW;QACjBC,SAAS,EAAE;MACb,CAAC,CAAC;IACJ;IAEA,IAAI9B,SAAS,KAAK7B,IAAI,EAAE;MACtB,MAAM4D,QAAQ,GAAGzD,kBAAkB,CAAC6B,GAAG,CAAC,IAAIA,GAAG;MAC/C,IAAI4B,QAAQ,KAAK5B,GAAG,EAAE;QACpBF,aAAa,GAAGf,mBAAI,CAAC8C,IAAI,CAAC9C,mBAAI,CAACC,OAAO,CAACX,QAAQ,CAAC,EAAEU,mBAAI,CAAC+C,QAAQ,CAACzD,QAAQ,EAAE2B,GAAG,CAAC,GAAG4B,QAAQ,CAAC;MAC5F;IACF;EACF;EAEA,IAAI;IACF,MAAM3C,GAAG,GAAGT,aAAa,CAACqB,SAAS,EAAEC,aAAa,EAAErB,IAAI,CAAC;IACzD,IAAIqB,aAAa,KAAKzB,QAAQ,EAAE;MAC9BnC,OAAO,CAACmD,KAAK,CAAChB,QAAQ,CAAC,GAAGY,GAAG;IAC/B;IACA,OAAOA,GAAG;EACZ,CAAC,CAAC,OAAOlB,KAAU,EAAE;IACnB;IACA;IACA,MAAM,IAAAgE,6BAAgB,EAAChC,UAAU,CAAC,IAAI,IAAAiC,0BAAa,EAAChE,IAAI,EAAEK,QAAQ,EAAEN,KAAK,CAAC,IAAIA,KAAK;EACrF;AACF;AAEA,eAAekE,eAAeA,CAAC5D,QAAgB,EAAE;EAC/C,IAAI;IACF,OAAOnC,OAAO,CAACmC,QAAQ,CAAC;EAC1B,CAAC,CAAC,MAAM;IACN,OAAO,MAAA6D,OAAA,CAAAC,OAAA,IACLpD,mBAAI,CAACqD,UAAU,CAAC/D,QAAQ,CAAC,GAAGgE,kBAAG,CAACC,aAAa,CAACjE,QAAQ,CAAC,CAACkE,QAAQ,CAAC,CAAC,GAAGlE,QAAQ,IAAAmE,IAAA,CAAAC,CAAA,IAAArG,uBAAA,CAAAF,OAAA,CAAAuG,CAAA,GAC9E;EACH;AACF;AAEA,eAAeC,UAAUA,CAACrE,QAAgB,EAAE;EAC1C,IAAI;IACF,OAAO,MAAM4D,eAAe,CAAC5D,QAAQ,CAAC;EACxC,CAAC
,CAAC,OAAON,KAAU,EAAE;IACnB,IAAIA,KAAK,CAACC,IAAI,KAAK,4BAA4B,IAAID,KAAK,CAACC,IAAI,KAAK,kBAAkB,EAAE;MACpF,OAAO2E,cAAc,CAACtE,QAAQ,CAAC;IACjC,CAAC,MAAM;MACL,MAAMN,KAAK;IACb;EACF;AACF;;AAEA;AACA;AACA;AACA,SAAS4E,cAAcA,CAACtE,QAAgB,EAAE;EACxC,IAAI;IACF,IAAI,CAACE,oBAAoB,CAACF,QAAQ,CAAC,EAAE;MACnC,OAAOnC,OAAO,CAACmC,QAAQ,CAAC;IAC1B;EACF,CAAC,CAAC,OAAON,KAAU,EAAE;IACnB,IAAIA,KAAK,CAACC,IAAI,KAAK,kBAAkB,EAAE;MACrC,MAAMD,KAAK;IACb;IACA;IACA;IACA;EACF;EACA,MAAMC,IAAI,GAAG4E,iBAAE,CAACC,YAAY,CAACxE,QAAQ,EAAE,MAAM,CAAC;EAC9C,OAAOuB,UAAU,CAAC5B,IAAI,EAAEK,QAAQ,CAAC;AACnC","ignoreList":[]}
\ No newline at end of file
+{"version":3,"file":"load.js","names":["_nodeFs","data","_interopRequireDefault","require","nodeModule","_interopRequireWildcard","_nodePath","_nodeUrl","_codeframe","_transform","e","__esModule","default","_getRequireWildcardCache","WeakMap","r","t","has","get","n","__proto__","a","Object","defineProperty","getOwnPropertyDescriptor","u","hasOwnProperty","call","i","set","_ts","loadTypescript","undefined","error","code","parent","module","tsExtensionMapping","maybeReadFileSync","filename","fs","readFileSync","toFormat","isLegacy","endsWith","compileModule","opts","format","prependPaths","paths","nodeModulePaths","_nodeModulePaths","path","dirname","mod","assign","Module","_compile","loaded","cache","children","splice","indexOf","hasStripTypeScriptTypes","stripTypeScriptTypes","evalModule","inputCode","inputFilename","diagnostic","ts","ModuleKind","CommonJS","ESNext","Preserve","output","transpileModule","fileName","reportDiagnostics","compilerOptions","moduleResolution","ModuleResolutionKind","Bundler","verbatimModuleSyntax","target","ScriptTarget","newLine","NewLineKind","LineFeed","inlineSourceMap","esModuleInterop","outputText","diagnostics","length","mode","sourceMap","ext","extname","inputExt","join","basename","toCommonJS","exports","diagnosticError","formatDiagnostic","annotateError","requireOrImport","Promise","resolve","isAbsolute","url","pathToFileURL","toString","then","s","loadModule","loadModuleSync","isTypeScript"],"sources":["../src/load.ts"],"sourcesContent":["import fs from 'node:fs';\nimport * as nodeModule from 'node:module';\nimport path from 'node:path';\nimport url from 'node:url';\nimport type * as ts from 'typescript';\n\nimport { annotateError, formatDiagnostic } from './codeframe';\nimport { toCommonJS } from './transform';\n\ndeclare module 'node:module' {\n export function _nodeModulePaths(base: string): readonly string[];\n}\n\ndeclare global {\n namespace NodeJS {\n export interface Module {\n _compile(\n code: string,\n filename: 
string,\n format?: 'module' | 'commonjs' | 'commonjs-typescript' | 'module-typescript' | 'typescript'\n ): unknown;\n }\n }\n}\n\nlet _ts: typeof import('typescript') | null | undefined;\nfunction loadTypescript() {\n if (_ts === undefined) {\n try {\n _ts = require('typescript');\n } catch (error: any) {\n if (error.code !== 'MODULE_NOT_FOUND') {\n throw error;\n } else {\n _ts = null;\n }\n }\n }\n return _ts;\n}\n\nconst parent = module;\n\nconst tsExtensionMapping: Record = {\n '.ts': '.js',\n '.cts': '.cjs',\n '.mts': '.mjs',\n};\n\nfunction maybeReadFileSync(filename: string) {\n try {\n return fs.readFileSync(filename, 'utf8');\n } catch (error: any) {\n if (error.code === 'ENOENT') {\n return null;\n }\n throw error;\n }\n}\n\ntype Format = 'commonjs' | 'module' | 'module-typescript' | 'commonjs-typescript' | 'typescript';\n\nfunction toFormat(filename: string, isLegacy: true): Format;\nfunction toFormat(filename: string, isLegacy: false): Format | null;\nfunction toFormat(filename: string, isLegacy: boolean): Format | null {\n if (filename.endsWith('.cjs')) {\n return 'commonjs';\n } else if (filename.endsWith('.mjs')) {\n return 'module';\n } else if (filename.endsWith('.js')) {\n return isLegacy ? 'commonjs' : null;\n } else if (filename.endsWith('.mts')) {\n return 'module-typescript';\n } else if (filename.endsWith('.cts')) {\n return 'commonjs-typescript';\n } else if (filename.endsWith('.ts')) {\n return isLegacy ? 'commonjs-typescript' : 'typescript';\n } else {\n return null;\n }\n}\n\nexport interface ModuleOptions {\n paths?: string[];\n}\n\nfunction compileModule(code: string, filename: string, opts: ModuleOptions) {\n const format = toFormat(filename, false);\n const prependPaths = opts.paths ?? 
[];\n const nodeModulePaths = nodeModule._nodeModulePaths(path.dirname(filename));\n const paths = [...prependPaths, ...nodeModulePaths];\n try {\n const mod = Object.assign(new nodeModule.Module(filename, parent), { filename, paths });\n mod._compile(code, filename, format != null ? format : undefined);\n mod.loaded = true;\n require.cache[filename] = mod;\n parent?.children?.splice(parent.children.indexOf(mod), 1);\n return mod;\n } catch (error: any) {\n delete require.cache[filename];\n throw error;\n }\n}\n\nconst hasStripTypeScriptTypes = typeof nodeModule.stripTypeScriptTypes === 'function';\n\nfunction evalModule(\n code: string,\n filename: string,\n opts: ModuleOptions = {},\n format: Format = toFormat(filename, true)\n) {\n let inputCode = code;\n let inputFilename = filename;\n let diagnostic: ts.Diagnostic | undefined;\n if (\n format === 'typescript' ||\n format === 'module-typescript' ||\n format === 'commonjs-typescript'\n ) {\n const ts = loadTypescript();\n\n if (ts) {\n let module: ts.ModuleKind;\n if (format === 'commonjs-typescript') {\n module = ts.ModuleKind.CommonJS;\n } else if (format === 'module-typescript') {\n module = ts.ModuleKind.ESNext;\n } else {\n // NOTE(@kitten): We can \"preserve\" the output, meaning, it can either be ESM or CJS\n // and stop TypeScript from either transpiling it to CommonJS or adding an `export {}`\n // if no exports are used. 
This allows the user to choose if this file is CJS or ESM\n // (but not to mix both)\n module = ts.ModuleKind.Preserve;\n }\n const output = ts.transpileModule(code, {\n fileName: filename,\n reportDiagnostics: true,\n compilerOptions: {\n module,\n moduleResolution: ts.ModuleResolutionKind.Bundler,\n // `verbatimModuleSyntax` needs to be off, to erase as many imports as possible\n verbatimModuleSyntax: false,\n target: ts.ScriptTarget.ESNext,\n newLine: ts.NewLineKind.LineFeed,\n inlineSourceMap: true,\n esModuleInterop: true,\n },\n });\n inputCode = output?.outputText || inputCode;\n if (output?.diagnostics?.length) {\n diagnostic = output.diagnostics[0];\n }\n }\n\n if (hasStripTypeScriptTypes && inputCode === code) {\n // This may throw its own error, but this contains a code-frame already\n inputCode = nodeModule.stripTypeScriptTypes(code, {\n mode: 'transform',\n sourceMap: true,\n });\n }\n\n if (inputCode !== code) {\n const ext = path.extname(filename);\n const inputExt = tsExtensionMapping[ext] ?? ext;\n if (inputExt !== ext) {\n inputFilename = path.join(path.dirname(filename), path.basename(filename, ext) + inputExt);\n }\n }\n } else if (format === 'commonjs') {\n inputCode = toCommonJS(filename, code);\n }\n\n try {\n const mod = compileModule(inputCode, inputFilename, opts);\n if (inputFilename !== filename) {\n require.cache[filename] = mod;\n }\n return mod.exports;\n } catch (error: any) {\n // If we have a diagnostic from TypeScript, we issue its error with a codeframe first,\n // since it's likely more useful than the eval error\n const diagnosticError = formatDiagnostic(diagnostic);\n if (diagnosticError) {\n throw diagnosticError;\n }\n throw annotateError(code, filename, error) ?? error;\n }\n}\n\nasync function requireOrImport(filename: string) {\n try {\n return require(filename);\n } catch {\n return await import(\n path.isAbsolute(filename) ? 
url.pathToFileURL(filename).toString() : filename\n );\n }\n}\n\nasync function loadModule(filename: string) {\n try {\n return await requireOrImport(filename);\n } catch (error: any) {\n if (error.code === 'ERR_UNKNOWN_FILE_EXTENSION' || error.code === 'MODULE_NOT_FOUND') {\n return loadModuleSync(filename);\n } else {\n throw error;\n }\n }\n}\n\n/** Require module or evaluate with TypeScript\n * NOTE: Requiring ESM has been added in all LTS versions (Node 20.19+, 22.12+, 24).\n * This already forms the minimum required Node version as of Expo SDK 54 */\nfunction loadModuleSync(filename: string) {\n const format = toFormat(filename, true);\n const isTypeScript =\n format === 'module-typescript' || format === 'commonjs-typescript' || format === 'typescript';\n try {\n if (format !== 'module' && !isTypeScript) {\n return require(filename);\n }\n } catch (error: any) {\n if (error.code === 'MODULE_NOT_FOUND') {\n throw error;\n } else if (format == null) {\n const code = maybeReadFileSync(filename);\n throw annotateError(code, filename, error) || error;\n }\n // We fallback to always evaluating the entrypoint module\n // This is out of safety, since we're not trusting the requiring ESM feature\n // and evaluating the module manually bypasses the error when it's flagged off\n }\n\n // Load from cache manually, if `loaded` is set and exports are defined, to avoid\n // double transform or double evaluation\n if (require.cache[filename]?.exports && require.cache[filename].loaded) {\n return require.cache[filename].exports;\n }\n\n const code = fs.readFileSync(filename, 'utf8');\n return evalModule(code, filename, {}, format);\n}\n\nexport { evalModule, loadModule, loadModuleSync 
};\n"],"mappings":";;;;;;;;AAAA,SAAAA,QAAA;EAAA,MAAAC,IAAA,GAAAC,sBAAA,CAAAC,OAAA;EAAAH,OAAA,YAAAA,CAAA;IAAA,OAAAC,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AACA,SAAAG,WAAA;EAAA,MAAAH,IAAA,GAAAI,uBAAA,CAAAF,OAAA;EAAAC,UAAA,YAAAA,CAAA;IAAA,OAAAH,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AACA,SAAAK,UAAA;EAAA,MAAAL,IAAA,GAAAC,sBAAA,CAAAC,OAAA;EAAAG,SAAA,YAAAA,CAAA;IAAA,OAAAL,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AACA,SAAAM,SAAA;EAAA,MAAAN,IAAA,GAAAC,sBAAA,CAAAC,OAAA;EAAAI,QAAA,YAAAA,CAAA;IAAA,OAAAN,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AAGA,SAAAO,WAAA;EAAA,MAAAP,IAAA,GAAAE,OAAA;EAAAK,UAAA,YAAAA,CAAA;IAAA,OAAAP,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AACA,SAAAQ,WAAA;EAAA,MAAAR,IAAA,GAAAE,OAAA;EAAAM,UAAA,YAAAA,CAAA;IAAA,OAAAR,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AAAyC,SAAAC,uBAAAQ,CAAA,WAAAA,CAAA,IAAAA,CAAA,CAAAC,UAAA,GAAAD,CAAA,KAAAE,OAAA,EAAAF,CAAA;AAAA,SAAAG,yBAAAH,CAAA,6BAAAI,OAAA,mBAAAC,CAAA,OAAAD,OAAA,IAAAE,CAAA,OAAAF,OAAA,YAAAD,wBAAA,YAAAA,CAAAH,CAAA,WAAAA,CAAA,GAAAM,CAAA,GAAAD,CAAA,KAAAL,CAAA;AAAA,SAAAL,wBAAAK,CAAA,EAAAK,CAAA,SAAAA,CAAA,IAAAL,CAAA,IAAAA,CAAA,CAAAC,UAAA,SAAAD,CAAA,eAAAA,CAAA,uBAAAA,CAAA,yBAAAA,CAAA,WAAAE,OAAA,EAAAF,CAAA,QAAAM,CAAA,GAAAH,wBAAA,CAAAE,CAAA,OAAAC,CAAA,IAAAA,CAAA,CAAAC,GAAA,CAAAP,CAAA,UAAAM,CAAA,CAAAE,GAAA,CAAAR,CAAA,OAAAS,CAAA,KAAAC,SAAA,UAAAC,CAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,CAAA,IAAAf,CAAA,oBAAAe,CAAA,OAAAC,cAAA,CAAAC,IAAA,CAAAjB,CAAA,EAAAe,CAAA,SAAAG,CAAA,GAAAP,CAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAd,CAAA,EAAAe,CAAA,UAAAG,CAAA,KAAAA,CAAA,CAAAV,GAAA,IAAAU,CAAA,CAAAC,GAAA,IAAAP,MAAA,CAAAC,cAAA,CAAAJ,CAAA,EAAAM,CAAA,EAAAG,CAAA,IAAAT,CAAA,CAAAM,CAAA,IAAAf,CAAA,CAAAe,CAAA,YAAAN,CAAA,CAAAP,OAAA,GAAAF,CAAA,EAAAM,CAAA,IAAAA,CAAA,CAAAa,GAAA,CAAAnB,CAAA,EAAAS,CAAA,GAAAA,CAAA;AAkBzC,IAAIW,GAAmD;AACvD,SAASC,cAAcA,CAAA,EAAG;EACxB,IAAID,GAAG,KAAKE,SAAS,EAAE;IACrB,IAAI;MACFF,GAAG,GAAG3B,OAAO,CAAC,YAAY,CAAC;IAC7B,CAAC,CAAC,OAAO8B,KAAU,EAAE;MACnB,IAAIA,KAAK,CAACC,IAAI,KAAK,kBAAkB,EAAE;QACrC,MAAMD,KAAK;MACb,CAAC,MAAM;QACLH,GAAG,GAAG,IAAI;MACZ;IACF;EACF;EACA,OAAOA,GAAG;AACZ;AAEA,MAAM
K,MAAM,GAAGC,MAAM;AAErB,MAAMC,kBAAsD,GAAG;EAC7D,KAAK,EAAE,KAAK;EACZ,MAAM,EAAE,MAAM;EACd,MAAM,EAAE;AACV,CAAC;AAED,SAASC,iBAAiBA,CAACC,QAAgB,EAAE;EAC3C,IAAI;IACF,OAAOC,iBAAE,CAACC,YAAY,CAACF,QAAQ,EAAE,MAAM,CAAC;EAC1C,CAAC,CAAC,OAAON,KAAU,EAAE;IACnB,IAAIA,KAAK,CAACC,IAAI,KAAK,QAAQ,EAAE;MAC3B,OAAO,IAAI;IACb;IACA,MAAMD,KAAK;EACb;AACF;AAMA,SAASS,QAAQA,CAACH,QAAgB,EAAEI,QAAiB,EAAiB;EACpE,IAAIJ,QAAQ,CAACK,QAAQ,CAAC,MAAM,CAAC,EAAE;IAC7B,OAAO,UAAU;EACnB,CAAC,MAAM,IAAIL,QAAQ,CAACK,QAAQ,CAAC,MAAM,CAAC,EAAE;IACpC,OAAO,QAAQ;EACjB,CAAC,MAAM,IAAIL,QAAQ,CAACK,QAAQ,CAAC,KAAK,CAAC,EAAE;IACnC,OAAOD,QAAQ,GAAG,UAAU,GAAG,IAAI;EACrC,CAAC,MAAM,IAAIJ,QAAQ,CAACK,QAAQ,CAAC,MAAM,CAAC,EAAE;IACpC,OAAO,mBAAmB;EAC5B,CAAC,MAAM,IAAIL,QAAQ,CAACK,QAAQ,CAAC,MAAM,CAAC,EAAE;IACpC,OAAO,qBAAqB;EAC9B,CAAC,MAAM,IAAIL,QAAQ,CAACK,QAAQ,CAAC,KAAK,CAAC,EAAE;IACnC,OAAOD,QAAQ,GAAG,qBAAqB,GAAG,YAAY;EACxD,CAAC,MAAM;IACL,OAAO,IAAI;EACb;AACF;AAMA,SAASE,aAAaA,CAACX,IAAY,EAAEK,QAAgB,EAAEO,IAAmB,EAAE;EAC1E,MAAMC,MAAM,GAAGL,QAAQ,CAACH,QAAQ,EAAE,KAAK,CAAC;EACxC,MAAMS,YAAY,GAAGF,IAAI,CAACG,KAAK,IAAI,EAAE;EACrC,MAAMC,eAAe,GAAG9C,UAAU,CAAD,CAAC,CAAC+C,gBAAgB,CAACC,mBAAI,CAACC,OAAO,CAACd,QAAQ,CAAC,CAAC;EAC3E,MAAMU,KAAK,GAAG,CAAC,GAAGD,YAAY,EAAE,GAAGE,eAAe,CAAC;EACnD,IAAI;IACF,MAAMI,GAAG,GAAGhC,MAAM,CAACiC,MAAM,CAAC,KAAInD,UAAU,CAAD,CAAC,CAACoD,MAAM,EAACjB,QAAQ,EAAEJ,MAAM,CAAC,EAAE;MAAEI,QAAQ;MAAEU;IAAM,CAAC,CAAC;IACvFK,GAAG,CAACG,QAAQ,CAACvB,IAAI,EAAEK,QAAQ,EAAEQ,MAAM,IAAI,IAAI,GAAGA,MAAM,GAAGf,SAAS,CAAC;IACjEsB,GAAG,CAACI,MAAM,GAAG,IAAI;IACjBvD,OAAO,CAACwD,KAAK,CAACpB,QAAQ,CAAC,GAAGe,GAAG;IAC7BnB,MAAM,EAAEyB,QAAQ,EAAEC,MAAM,CAAC1B,MAAM,CAACyB,QAAQ,CAACE,OAAO,CAACR,GAAG,CAAC,EAAE,CAAC,CAAC;IACzD,OAAOA,GAAG;EACZ,CAAC,CAAC,OAAOrB,KAAU,EAAE;IACnB,OAAO9B,OAAO,CAACwD,KAAK,CAACpB,QAAQ,CAAC;IAC9B,MAAMN,KAAK;EACb;AACF;AAEA,MAAM8B,uBAAuB,GAAG,OAAO3D,UAAU,CAAD,CAAC,CAAC4D,oBAAoB,KAAK,UAAU;AAErF,SAASC,UAAUA,CACjB/B,IAAY,EACZK,QAAgB,EAChBO,IAAmB,GAAG,CAAC,CAAC,EACxBC,MAAc,GAAGL,QAAQ,CAACH,QAAQ,EAAE,IAAI,CAAC,EACzC;EACA,IAAI2B,SAAS,GAAGhC,IA
AI;EACpB,IAAIiC,aAAa,GAAG5B,QAAQ;EAC5B,IAAI6B,UAAqC;EACzC,IACErB,MAAM,KAAK,YAAY,IACvBA,MAAM,KAAK,mBAAmB,IAC9BA,MAAM,KAAK,qBAAqB,EAChC;IACA,MAAMsB,EAAE,GAAGtC,cAAc,CAAC,CAAC;IAE3B,IAAIsC,EAAE,EAAE;MACN,IAAIjC,MAAqB;MACzB,IAAIW,MAAM,KAAK,qBAAqB,EAAE;QACpCX,MAAM,GAAGiC,EAAE,CAACC,UAAU,CAACC,QAAQ;MACjC,CAAC,MAAM,IAAIxB,MAAM,KAAK,mBAAmB,EAAE;QACzCX,MAAM,GAAGiC,EAAE,CAACC,UAAU,CAACE,MAAM;MAC/B,CAAC,MAAM;QACL;QACA;QACA;QACA;QACApC,MAAM,GAAGiC,EAAE,CAACC,UAAU,CAACG,QAAQ;MACjC;MACA,MAAMC,MAAM,GAAGL,EAAE,CAACM,eAAe,CAACzC,IAAI,EAAE;QACtC0C,QAAQ,EAAErC,QAAQ;QAClBsC,iBAAiB,EAAE,IAAI;QACvBC,eAAe,EAAE;UACf1C,MAAM;UACN2C,gBAAgB,EAAEV,EAAE,CAACW,oBAAoB,CAACC,OAAO;UACjD;UACAC,oBAAoB,EAAE,KAAK;UAC3BC,MAAM,EAAEd,EAAE,CAACe,YAAY,CAACZ,MAAM;UAC9Ba,OAAO,EAAEhB,EAAE,CAACiB,WAAW,CAACC,QAAQ;UAChCC,eAAe,EAAE,IAAI;UACrBC,eAAe,EAAE;QACnB;MACF,CAAC,CAAC;MACFvB,SAAS,GAAGQ,MAAM,EAAEgB,UAAU,IAAIxB,SAAS;MAC3C,IAAIQ,MAAM,EAAEiB,WAAW,EAAEC,MAAM,EAAE;QAC/BxB,UAAU,GAAGM,MAAM,CAACiB,WAAW,CAAC,CAAC,CAAC;MACpC;IACF;IAEA,IAAI5B,uBAAuB,IAAIG,SAAS,KAAKhC,IAAI,EAAE;MACjD;MACAgC,SAAS,GAAG9D,UAAU,CAAD,CAAC,CAAC4D,oBAAoB,CAAC9B,IAAI,EAAE;QAChD2D,IAAI,EAAE,WAAW;QACjBC,SAAS,EAAE;MACb,CAAC,CAAC;IACJ;IAEA,IAAI5B,SAAS,KAAKhC,IAAI,EAAE;MACtB,MAAM6D,GAAG,GAAG3C,mBAAI,CAAC4C,OAAO,CAACzD,QAAQ,CAAC;MAClC,MAAM0D,QAAQ,GAAG5D,kBAAkB,CAAC0D,GAAG,CAAC,IAAIA,GAAG;MAC/C,IAAIE,QAAQ,KAAKF,GAAG,EAAE;QACpB5B,aAAa,GAAGf,mBAAI,CAAC8C,IAAI,CAAC9C,mBAAI,CAACC,OAAO,CAACd,QAAQ,CAAC,EAAEa,mBAAI,CAAC+C,QAAQ,CAAC5D,QAAQ,EAAEwD,GAAG,CAAC,GAAGE,QAAQ,CAAC;MAC5F;IACF;EACF,CAAC,MAAM,IAAIlD,MAAM,KAAK,UAAU,EAAE;IAChCmB,SAAS,GAAG,IAAAkC,uBAAU,EAAC7D,QAAQ,EAAEL,IAAI,CAAC;EACxC;EAEA,IAAI;IACF,MAAMoB,GAAG,GAAGT,aAAa,CAACqB,SAAS,EAAEC,aAAa,EAAErB,IAAI,CAAC;IACzD,IAAIqB,aAAa,KAAK5B,QAAQ,EAAE;MAC9BpC,OAAO,CAACwD,KAAK,CAACpB,QAAQ,CAAC,GAAGe,GAAG;IAC/B;IACA,OAAOA,GAAG,CAAC+C,OAAO;EACpB,CAAC,CAAC,OAAOpE,KAAU,EAAE;IACnB;IACA;IACA,MAAMqE,eAAe,GAAG,IAAAC,6BAAgB,EAACnC,UAAU,CAAC;IACpD,IAAIkC,eAAe,EAAE;MACnB,MAAMA,eAAe;IACvB;IACA,MAAM,IAAAE,0BAAa,EAACtE,IAAI,EAAE
K,QAAQ,EAAEN,KAAK,CAAC,IAAIA,KAAK;EACrD;AACF;AAEA,eAAewE,eAAeA,CAAClE,QAAgB,EAAE;EAC/C,IAAI;IACF,OAAOpC,OAAO,CAACoC,QAAQ,CAAC;EAC1B,CAAC,CAAC,MAAM;IACN,OAAO,MAAAmE,OAAA,CAAAC,OAAA,IACLvD,mBAAI,CAACwD,UAAU,CAACrE,QAAQ,CAAC,GAAGsE,kBAAG,CAACC,aAAa,CAACvE,QAAQ,CAAC,CAACwE,QAAQ,CAAC,CAAC,GAAGxE,QAAQ,IAAAyE,IAAA,CAAAC,CAAA,IAAA5G,uBAAA,CAAAF,OAAA,CAAA8G,CAAA,GAC9E;EACH;AACF;AAEA,eAAeC,UAAUA,CAAC3E,QAAgB,EAAE;EAC1C,IAAI;IACF,OAAO,MAAMkE,eAAe,CAAClE,QAAQ,CAAC;EACxC,CAAC,CAAC,OAAON,KAAU,EAAE;IACnB,IAAIA,KAAK,CAACC,IAAI,KAAK,4BAA4B,IAAID,KAAK,CAACC,IAAI,KAAK,kBAAkB,EAAE;MACpF,OAAOiF,cAAc,CAAC5E,QAAQ,CAAC;IACjC,CAAC,MAAM;MACL,MAAMN,KAAK;IACb;EACF;AACF;;AAEA;AACA;AACA;AACA,SAASkF,cAAcA,CAAC5E,QAAgB,EAAE;EACxC,MAAMQ,MAAM,GAAGL,QAAQ,CAACH,QAAQ,EAAE,IAAI,CAAC;EACvC,MAAM6E,YAAY,GAChBrE,MAAM,KAAK,mBAAmB,IAAIA,MAAM,KAAK,qBAAqB,IAAIA,MAAM,KAAK,YAAY;EAC/F,IAAI;IACF,IAAIA,MAAM,KAAK,QAAQ,IAAI,CAACqE,YAAY,EAAE;MACxC,OAAOjH,OAAO,CAACoC,QAAQ,CAAC;IAC1B;EACF,CAAC,CAAC,OAAON,KAAU,EAAE;IACnB,IAAIA,KAAK,CAACC,IAAI,KAAK,kBAAkB,EAAE;MACrC,MAAMD,KAAK;IACb,CAAC,MAAM,IAAIc,MAAM,IAAI,IAAI,EAAE;MACzB,MAAMb,IAAI,GAAGI,iBAAiB,CAACC,QAAQ,CAAC;MACxC,MAAM,IAAAiE,0BAAa,EAACtE,IAAI,EAAEK,QAAQ,EAAEN,KAAK,CAAC,IAAIA,KAAK;IACrD;IACA;IACA;IACA;EACF;;EAEA;EACA;EACA,IAAI9B,OAAO,CAACwD,KAAK,CAACpB,QAAQ,CAAC,EAAE8D,OAAO,IAAIlG,OAAO,CAACwD,KAAK,CAACpB,QAAQ,CAAC,CAACmB,MAAM,EAAE;IACtE,OAAOvD,OAAO,CAACwD,KAAK,CAACpB,QAAQ,CAAC,CAAC8D,OAAO;EACxC;EAEA,MAAMnE,IAAI,GAAGM,iBAAE,CAACC,YAAY,CAACF,QAAQ,EAAE,MAAM,CAAC;EAC9C,OAAO0B,UAAU,CAAC/B,IAAI,EAAEK,QAAQ,EAAE,CAAC,CAAC,EAAEQ,MAAM,CAAC;AAC/C","ignoreList":[]}
\ No newline at end of file
diff --git a/packages/@expo/require-utils/build/transform.d.ts b/packages/@expo/require-utils/build/transform.d.ts
new file mode 100644
index 00000000000000..9f8d58dfe0b83f
--- /dev/null
+++ b/packages/@expo/require-utils/build/transform.d.ts
@@ -0,0 +1 @@
+export declare function toCommonJS(filename: string, code: string): string;
diff --git a/packages/@expo/require-utils/build/transform.js b/packages/@expo/require-utils/build/transform.js
new file mode 100644
index 00000000000000..6a07590e813541
--- /dev/null
+++ b/packages/@expo/require-utils/build/transform.js
@@ -0,0 +1,31 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.toCommonJS = toCommonJS;
+function _core() {
+ const data = require("@babel/core");
+ _core = function () {
+ return data;
+ };
+ return data;
+}
+function toCommonJS(filename, code) {
+ const result = (0, _core().transformSync)(code, {
+ filename,
+ babelrc: false,
+ plugins: [[require('@babel/plugin-transform-modules-commonjs'), {
+ // NOTE(@kitten): We used to use sucrase to transform, which is why
+ // we're doing this CJS-to-ESM transform in the first place. Our
+ // previous transformation isn't 100% compatible with the standard
+ // Node ESM loading. In Babel, this is the "node" flag (although
+ // node behaviour is explicitly different from this). This skips
+ // the `__esModule -> default` wrapper
+ importInterop: 'node',
+ loose: true
+ }]]
+ });
+ return result?.code ?? code;
+}
+//# sourceMappingURL=transform.js.map
\ No newline at end of file
diff --git a/packages/@expo/require-utils/build/transform.js.map b/packages/@expo/require-utils/build/transform.js.map
new file mode 100644
index 00000000000000..bc08c9100a7df9
--- /dev/null
+++ b/packages/@expo/require-utils/build/transform.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"transform.js","names":["_core","data","require","toCommonJS","filename","code","result","transformSync","babelrc","plugins","importInterop","loose"],"sources":["../src/transform.ts"],"sourcesContent":["import { transformSync } from '@babel/core';\n\nexport function toCommonJS(filename: string, code: string) {\n const result = transformSync(code, {\n filename,\n babelrc: false,\n plugins: [\n [\n require('@babel/plugin-transform-modules-commonjs'),\n {\n // NOTE(@kitten): We used to use sucrase to transform, which is why\n // we're doing this CJS-to-ESM transform in the first place. Our\n // previous transformation isn't 100% compatible with the standard\n // Node ESM loading. In Babel, this is the \"node\" flag (although\n // node behaviour is explicitly different from this). This skips\n // the `__esModule -> default` wrapper\n importInterop: 'node',\n loose: true,\n },\n ],\n ],\n });\n return result?.code ?? code;\n}\n"],"mappings":";;;;;;AAAA,SAAAA,MAAA;EAAA,MAAAC,IAAA,GAAAC,OAAA;EAAAF,KAAA,YAAAA,CAAA;IAAA,OAAAC,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AAEO,SAASE,UAAUA,CAACC,QAAgB,EAAEC,IAAY,EAAE;EACzD,MAAMC,MAAM,GAAG,IAAAC,qBAAa,EAACF,IAAI,EAAE;IACjCD,QAAQ;IACRI,OAAO,EAAE,KAAK;IACdC,OAAO,EAAE,CACP,CACEP,OAAO,CAAC,0CAA0C,CAAC,EACnD;MACE;MACA;MACA;MACA;MACA;MACA;MACAQ,aAAa,EAAE,MAAM;MACrBC,KAAK,EAAE;IACT,CAAC,CACF;EAEL,CAAC,CAAC;EACF,OAAOL,MAAM,EAAED,IAAI,IAAIA,IAAI;AAC7B","ignoreList":[]}
\ No newline at end of file
diff --git a/packages/@expo/require-utils/package.json b/packages/@expo/require-utils/package.json
index 08db77c1de46a4..e4b686d743dfe8 100644
--- a/packages/@expo/require-utils/package.json
+++ b/packages/@expo/require-utils/package.json
@@ -36,11 +36,14 @@
}
},
"dependencies": {
- "@babel/code-frame": "^7.20.0"
+ "@babel/code-frame": "^7.20.0",
+ "@babel/core": "^7.25.2",
+ "@babel/plugin-transform-modules-commonjs": "^7.24.8"
},
"devDependencies": {
"@types/node": "^22.14.0",
"expo-module-scripts": "^55.0.2",
+ "memfs": "^3.2.0",
"typescript": "^5.9.2"
},
"publishConfig": {
diff --git a/packages/@expo/require-utils/src/__tests__/fixtures/example.js b/packages/@expo/require-utils/src/__tests__/fixtures/example.js
new file mode 100644
index 00000000000000..7e724a07c031a1
--- /dev/null
+++ b/packages/@expo/require-utils/src/__tests__/fixtures/example.js
@@ -0,0 +1 @@
+module.exports.test = 'test';
diff --git a/packages/@expo/require-utils/src/__tests__/load-test.ts b/packages/@expo/require-utils/src/__tests__/load-test.ts
new file mode 100644
index 00000000000000..89bf90d5c8f16b
--- /dev/null
+++ b/packages/@expo/require-utils/src/__tests__/load-test.ts
@@ -0,0 +1,79 @@
+import * as path from 'path';
+
+import { evalModule } from '../load';
+
+const basepath = path.join(__dirname, 'fixtures');
+
+describe('evalModule', () => {
+ it('accepts .js code and turns it to CommonJS with default imports', () => {
+ const mod = evalModule(
+ `
+ import mjs from './example.js';
+ const cjs = require('./example.js');
+ export default {
+ mjs,
+ cjs,
+ }
+ `,
+ path.join(basepath, 'eval.js')
+ );
+
+ expect(mod).toEqual({
+ __esModule: true,
+ default: {
+ mjs: { test: 'test' },
+ cjs: { test: 'test' },
+ },
+ });
+ });
+
+ it('accepts .js code and turns it to CommonJS with named imports', () => {
+ const mod = evalModule(
+ `
+ import { test } from './example.js';
+ export default test
+ `,
+ path.join(basepath, 'eval.js')
+ );
+
+ expect(mod).toEqual({
+ __esModule: true,
+ default: 'test',
+ });
+ });
+
+ it('accepts .ts code and turns it to CommonJS with default imports', () => {
+ const mod = evalModule(
+ `
+ import mjs from './example.js';
+ const cjs = require('./example.js');
+ export default {
+ mjs,
+ cjs,
+ } as any
+ `,
+ path.join(basepath, 'eval.ts')
+ );
+
+ expect(mod).toEqual({
+ default: {
+ mjs: { test: 'test' },
+ cjs: { test: 'test' },
+ },
+ });
+ });
+
+ it('accepts .ts code and turns it to CommonJS with named imports', () => {
+ const mod = evalModule(
+ `
+ import { test } from './example.js';
+ export default (test as any)
+ `,
+ path.join(basepath, 'eval.ts')
+ );
+
+ expect(mod).toEqual({
+ default: 'test',
+ });
+ });
+});
diff --git a/packages/@expo/require-utils/src/codeframe.ts b/packages/@expo/require-utils/src/codeframe.ts
index 780c8c5e82b588..3ff70adbb61214 100644
--- a/packages/@expo/require-utils/src/codeframe.ts
+++ b/packages/@expo/require-utils/src/codeframe.ts
@@ -35,19 +35,21 @@ export function formatDiagnostic(diagnostic: Diagnostic | undefined) {
return null;
}
-export function annotateError(code: string, filename: string, error: Error) {
+export function annotateError(code: string | null, filename: string, error: Error) {
if (typeof error !== 'object' || error == null) {
return null;
}
- const loc = errorToLoc(filename, error);
- if (loc) {
- const { codeFrameColumns }: typeof import('@babel/code-frame') = require('@babel/code-frame');
- const codeFrame = codeFrameColumns(code, { start: loc }, { highlightCode: true });
- const annotatedError = error as Error & { codeFrame: string };
- annotatedError.codeFrame = codeFrame;
- annotatedError.message += `\n${codeFrame}`;
- delete annotatedError.stack;
- return annotatedError;
+ if (code) {
+ const loc = errorToLoc(filename, error);
+ if (loc) {
+ const { codeFrameColumns }: typeof import('@babel/code-frame') = require('@babel/code-frame');
+ const codeFrame = codeFrameColumns(code, { start: loc }, { highlightCode: true });
+ const annotatedError = error as Error & { codeFrame: string };
+ annotatedError.codeFrame = codeFrame;
+ annotatedError.message += `\n${codeFrame}`;
+ delete annotatedError.stack;
+ return annotatedError;
+ }
}
return null;
}
diff --git a/packages/@expo/require-utils/src/load.ts b/packages/@expo/require-utils/src/load.ts
index 30382356075432..c73104f229cacc 100644
--- a/packages/@expo/require-utils/src/load.ts
+++ b/packages/@expo/require-utils/src/load.ts
@@ -5,6 +5,7 @@ import url from 'node:url';
import type * as ts from 'typescript';
import { annotateError, formatDiagnostic } from './codeframe';
+import { toCommonJS } from './transform';
declare module 'node:module' {
export function _nodeModulePaths(base: string): readonly string[];
@@ -46,32 +47,36 @@ const tsExtensionMapping: Record = {
'.mts': '.mjs',
};
-function toFormat(filename: string) {
+function maybeReadFileSync(filename: string) {
+ try {
+ return fs.readFileSync(filename, 'utf8');
+ } catch (error: any) {
+ if (error.code === 'ENOENT') {
+ return null;
+ }
+ throw error;
+ }
+}
+
+type Format = 'commonjs' | 'module' | 'module-typescript' | 'commonjs-typescript' | 'typescript';
+
+function toFormat(filename: string, isLegacy: true): Format;
+function toFormat(filename: string, isLegacy: false): Format | null;
+function toFormat(filename: string, isLegacy: boolean): Format | null {
if (filename.endsWith('.cjs')) {
return 'commonjs';
} else if (filename.endsWith('.mjs')) {
return 'module';
} else if (filename.endsWith('.js')) {
- return undefined;
+ return isLegacy ? 'commonjs' : null;
} else if (filename.endsWith('.mts')) {
return 'module-typescript';
} else if (filename.endsWith('.cts')) {
return 'commonjs-typescript';
} else if (filename.endsWith('.ts')) {
- return 'typescript';
+ return isLegacy ? 'commonjs-typescript' : 'typescript';
} else {
- return undefined;
- }
-}
-
-function isTypescriptFilename(filename: string) {
- switch (toFormat(filename)) {
- case 'module-typescript':
- case 'commonjs-typescript':
- case 'typescript':
- return true;
- default:
- return false;
+ return null;
}
}
@@ -79,17 +84,18 @@ export interface ModuleOptions {
paths?: string[];
}
-function compileModule(code: string, filename: string, opts: ModuleOptions): T {
- const format = toFormat(filename);
+function compileModule(code: string, filename: string, opts: ModuleOptions) {
+ const format = toFormat(filename, false);
const prependPaths = opts.paths ?? [];
const nodeModulePaths = nodeModule._nodeModulePaths(path.dirname(filename));
const paths = [...prependPaths, ...nodeModulePaths];
try {
const mod = Object.assign(new nodeModule.Module(filename, parent), { filename, paths });
- mod._compile(code, filename, format);
+ mod._compile(code, filename, format != null ? format : undefined);
+ mod.loaded = true;
require.cache[filename] = mod;
parent?.children?.splice(parent.children.indexOf(mod), 1);
- return mod.exports;
+ return mod;
} catch (error: any) {
delete require.cache[filename];
throw error;
@@ -98,19 +104,27 @@ function compileModule(code: string, filename: string, opts: ModuleOpti
const hasStripTypeScriptTypes = typeof nodeModule.stripTypeScriptTypes === 'function';
-function evalModule(code: string, filename: string, opts: ModuleOptions = {}) {
+function evalModule(
+ code: string,
+ filename: string,
+ opts: ModuleOptions = {},
+ format: Format = toFormat(filename, true)
+) {
let inputCode = code;
let inputFilename = filename;
let diagnostic: ts.Diagnostic | undefined;
- if (filename.endsWith('.ts') || filename.endsWith('.cts') || filename.endsWith('.mts')) {
- const ext = path.extname(filename);
+ if (
+ format === 'typescript' ||
+ format === 'module-typescript' ||
+ format === 'commonjs-typescript'
+ ) {
const ts = loadTypescript();
if (ts) {
let module: ts.ModuleKind;
- if (ext === '.cts') {
+ if (format === 'commonjs-typescript') {
module = ts.ModuleKind.CommonJS;
- } else if (ext === '.mts') {
+ } else if (format === 'module-typescript') {
module = ts.ModuleKind.ESNext;
} else {
// NOTE(@kitten): We can "preserve" the output, meaning, it can either be ESM or CJS
@@ -148,11 +162,14 @@ function evalModule(code: string, filename: string, opts: ModuleOptions = {}) {
}
if (inputCode !== code) {
+ const ext = path.extname(filename);
const inputExt = tsExtensionMapping[ext] ?? ext;
if (inputExt !== ext) {
inputFilename = path.join(path.dirname(filename), path.basename(filename, ext) + inputExt);
}
}
+ } else if (format === 'commonjs') {
+ inputCode = toCommonJS(filename, code);
}
try {
@@ -160,11 +177,15 @@ function evalModule(code: string, filename: string, opts: ModuleOptions = {}) {
if (inputFilename !== filename) {
require.cache[filename] = mod;
}
- return mod;
+ return mod.exports;
} catch (error: any) {
// If we have a diagnostic from TypeScript, we issue its error with a codeframe first,
// since it's likely more useful than the eval error
- throw formatDiagnostic(diagnostic) ?? annotateError(code, filename, error) ?? error;
+ const diagnosticError = formatDiagnostic(diagnostic);
+ if (diagnosticError) {
+ throw diagnosticError;
+ }
+ throw annotateError(code, filename, error) ?? error;
}
}
@@ -194,20 +215,33 @@ async function loadModule(filename: string) {
* NOTE: Requiring ESM has been added in all LTS versions (Node 20.19+, 22.12+, 24).
* This already forms the minimum required Node version as of Expo SDK 54 */
function loadModuleSync(filename: string) {
+ const format = toFormat(filename, true);
+ const isTypeScript =
+ format === 'module-typescript' || format === 'commonjs-typescript' || format === 'typescript';
try {
- if (!isTypescriptFilename(filename)) {
+ if (format !== 'module' && !isTypeScript) {
return require(filename);
}
} catch (error: any) {
if (error.code === 'MODULE_NOT_FOUND') {
throw error;
+ } else if (format == null) {
+ const code = maybeReadFileSync(filename);
+ throw annotateError(code, filename, error) || error;
}
// We fallback to always evaluating the entrypoint module
// This is out of safety, since we're not trusting the requiring ESM feature
// and evaluating the module manually bypasses the error when it's flagged off
}
+
+ // Load from cache manually, if `loaded` is set and exports are defined, to avoid
+ // double transform or double evaluation
+ if (require.cache[filename]?.exports && require.cache[filename].loaded) {
+ return require.cache[filename].exports;
+ }
+
const code = fs.readFileSync(filename, 'utf8');
- return evalModule(code, filename);
+ return evalModule(code, filename, {}, format);
}
export { evalModule, loadModule, loadModuleSync };
diff --git a/packages/@expo/require-utils/src/transform.ts b/packages/@expo/require-utils/src/transform.ts
new file mode 100644
index 00000000000000..d51e218e88d447
--- /dev/null
+++ b/packages/@expo/require-utils/src/transform.ts
@@ -0,0 +1,24 @@
+import { transformSync } from '@babel/core';
+
+export function toCommonJS(filename: string, code: string) {
+ const result = transformSync(code, {
+ filename,
+ babelrc: false,
+ plugins: [
+ [
+ require('@babel/plugin-transform-modules-commonjs'),
+ {
+ // NOTE(@kitten): We used to use sucrase to transform, which is why
+ // we're doing this CJS-to-ESM transform in the first place. Our
+ // previous transformation isn't 100% compatible with the standard
+ // Node ESM loading. In Babel, this is the "node" flag (although
+ // node behaviour is explicitly different from this). This skips
+ // the `__esModule -> default` wrapper
+ importInterop: 'node',
+ loose: true,
+ },
+ ],
+ ],
+ });
+ return result?.code ?? code;
+}
From 39e9821f93de138d137877d1c7cd137ecbf502dd Mon Sep 17 00:00:00 2001
From: Phil Pluckthun
Date: Tue, 24 Feb 2026 18:46:45 +0000
Subject: [PATCH 06/10] feat(cli): Expand debug events (#43247)
# Why
Follow-up on: #43013
# How
- Add event for env and nodeEnv
- Add event for start implementation (Metro)
- Add event for config/instantiation (Metro)
- Reduce output further when logs+headless are enabled
# Test Plan
- quick tests with `LOG_EVENTS=1 EXPO_UNSTABLE_HEADLESS=1 expo start`
# Checklist
- [x] I added a `changelog.md` entry and rebuilt the package sources
according to [this short
guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting)
- [ ] This diff will work correctly for `npx expo prebuild` & EAS Build
(eg: updated a module plugin).
- [ ] Conforms with the [Documentation Writing Style
Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md)
---
packages/@expo/cli/CHANGELOG.md | 1 +
packages/@expo/cli/e2e/__tests__/run-test.ts | 2 +
packages/@expo/cli/src/events/builder.ts | 3 +
packages/@expo/cli/src/events/index.ts | 13 ++++
packages/@expo/cli/src/events/types.ts | 13 +++-
.../server/metro/MetroBundlerDevServer.ts | 30 ++++++++
.../server/metro/MetroTerminalReporter.ts | 4 +-
.../start/server/metro/instantiateMetro.ts | 69 ++++++++++++++++---
.../src/start/server/metro/runServer-fork.ts | 21 +++---
.../server/metro/withMetroMultiPlatform.ts | 2 +-
packages/@expo/cli/src/start/startAsync.ts | 7 +-
packages/@expo/cli/src/utils/interactive.ts | 3 +-
packages/@expo/cli/src/utils/nodeEnv.ts | 42 ++++++++++-
13 files changed, 183 insertions(+), 27 deletions(-)
diff --git a/packages/@expo/cli/CHANGELOG.md b/packages/@expo/cli/CHANGELOG.md
index 5d54e9b1aef7ea..1bfd52476ab709 100644
--- a/packages/@expo/cli/CHANGELOG.md
+++ b/packages/@expo/cli/CHANGELOG.md
@@ -13,6 +13,7 @@
### 💡 Others
- Replace `minimatch` with `picomatch` and update ([#43323](https://github.com/expo/expo/pull/43323) by [@kitten](https://github.com/kitten))
+- Expand logging events ([#43247](https://github.com/expo/expo/pull/43247) by [@kitten](https://github.com/kitten))
## 55.0.10 — 2026-02-20
diff --git a/packages/@expo/cli/e2e/__tests__/run-test.ts b/packages/@expo/cli/e2e/__tests__/run-test.ts
index 5a030f6d098341..d9d6f772657a5b 100644
--- a/packages/@expo/cli/e2e/__tests__/run-test.ts
+++ b/packages/@expo/cli/e2e/__tests__/run-test.ts
@@ -21,6 +21,8 @@ afterAll(() => {
it('loads expected modules by default', async () => {
const modules = await getLoadedModulesAsync(`require('../../build/src/run').expoRun`);
expect(modules).toStrictEqual([
+ '@expo/cli/build/src/events/index.js',
+ '@expo/cli/build/src/events/stream.js',
'@expo/cli/build/src/log.js',
'@expo/cli/build/src/run/hints.js',
'@expo/cli/build/src/run/index.js',
diff --git a/packages/@expo/cli/src/events/builder.ts b/packages/@expo/cli/src/events/builder.ts
index 3afb672f692509..900324b6d47dc6 100644
--- a/packages/@expo/cli/src/events/builder.ts
+++ b/packages/@expo/cli/src/events/builder.ts
@@ -58,6 +58,9 @@ interface EventLoggerType {
export interface EventLogger extends EventLoggerType {
(event: EventName, data: Events[EventName]): void;
+
+ path(target: string): string;
+ path(target: string | null | undefined): string | null;
}
export interface EventBuilder {
diff --git a/packages/@expo/cli/src/events/index.ts b/packages/@expo/cli/src/events/index.ts
index bfbde7dab0b6b5..1bb115bf4b01d3 100644
--- a/packages/@expo/cli/src/events/index.ts
+++ b/packages/@expo/cli/src/events/index.ts
@@ -11,6 +11,7 @@ interface InitMetadata {
version: string;
}
+let logPath = process.cwd();
let logStream: LogStream | undefined;
function parseLogTarget(env: string | undefined) {
@@ -23,6 +24,7 @@ function parseLogTarget(env: string | undefined) {
try {
const parsedPath = path.parse(env);
logDestination = path.format(parsedPath);
+ logPath = parsedPath.dir;
} catch {
logDestination = undefined;
}
@@ -110,6 +112,17 @@ export const events: EventLoggerBuilder = ((
}
}
log.category = category;
+
+ log.path = function relativePath(target: string | undefined | null): string | null {
+ try {
+ return target != null && path.isAbsolute(target)
+ ? path.relative(logPath, target).replace(/\\/, '/') || '.'
+ : (target ?? null);
+ } catch {
+ return target || null;
+ }
+ };
+
return log;
}) as EventLoggerBuilder;
diff --git a/packages/@expo/cli/src/events/types.ts b/packages/@expo/cli/src/events/types.ts
index 224a912bc023a7..3fd3378c011e2e 100644
--- a/packages/@expo/cli/src/events/types.ts
+++ b/packages/@expo/cli/src/events/types.ts
@@ -1,10 +1,21 @@
import type { rootEvent } from './index';
import type { collectEventLoggers } from '../events/builder';
+import type { event as metroBundlerDevServerEvent } from '../start/server/metro/MetroBundlerDevServer';
import type { event as metroTerminalReporterEvent } from '../start/server/metro/MetroTerminalReporter';
+import type { event as instantiateMetroEvent } from '../start/server/metro/instantiateMetro';
+import type { event as nodeEnvEvent } from '../utils/nodeEnv';
/** Collection of all event logger events
* @privateRemarks
* When creating a new logger with `events()`, import it here and
* add it to add its types to this union type.
*/
-export type Events = collectEventLoggers<[typeof rootEvent, typeof metroTerminalReporterEvent]>;
+export type Events = collectEventLoggers<
+ [
+ typeof rootEvent,
+ typeof metroBundlerDevServerEvent,
+ typeof metroTerminalReporterEvent,
+ typeof instantiateMetroEvent,
+ typeof nodeEnvEvent,
+ ]
+>;
diff --git a/packages/@expo/cli/src/start/server/metro/MetroBundlerDevServer.ts b/packages/@expo/cli/src/start/server/metro/MetroBundlerDevServer.ts
index 0e2e6e4028e041..1ecc08377fe287 100644
--- a/packages/@expo/cli/src/start/server/metro/MetroBundlerDevServer.ts
+++ b/packages/@expo/cli/src/start/server/metro/MetroBundlerDevServer.ts
@@ -61,6 +61,7 @@ import {
} from './router';
import { serializeHtmlWithAssets } from './serializeHtml';
import { observeAnyFileChanges, observeFileChanges } from './waitForMetroToObserveTypeScriptFile';
+import { events } from '../../../events';
import type {
BundleAssetWithFileHashes,
ExportAssetDescriptor,
@@ -153,6 +154,22 @@ const EXPO_GO_METRO_PORT = 8081;
/** Default port to use for apps that run in standard React Native projects or Expo Dev Clients. */
const DEV_CLIENT_METRO_PORT = 8081;
+// prettier-ignore
+export const event = events('devserver', (t) => [
+ t.event<'start', {
+ mode: 'production' | 'development';
+ web: boolean;
+ baseUrl: string;
+ asyncRoutes: boolean;
+ routerRoot: string;
+ serverComponents: boolean;
+ serverActions: boolean;
+ serverRendering: boolean;
+ apiRoutes: boolean;
+ exporting: boolean;
+ }>(),
+]);
+
export class MetroBundlerDevServer extends BundlerDevServer {
private metro: MetroServer | null = null;
private hmrServer: MetroHmrServer | null = null;
@@ -1218,6 +1235,19 @@ export class MetroBundlerDevServer extends BundlerDevServer {
// Required for symbolication:
process.env.EXPO_DEV_SERVER_ORIGIN = `http://localhost:${options.port}`;
+ event('start', {
+ mode,
+ web: this.isTargetingWeb(),
+ baseUrl,
+ asyncRoutes,
+ routerRoot: event.path(appDir),
+ serverComponents: this.isReactServerComponentsEnabled,
+ serverActions: isReactServerActionsOnlyEnabled,
+ serverRendering: useServerRendering,
+ apiRoutes: hasApiRoutes,
+ exporting: !!options.isExporting,
+ });
+
const { metro, hmrServer, server, middleware, messageSocket } = await instantiateMetroAsync(
this,
parsedOptions,
diff --git a/packages/@expo/cli/src/start/server/metro/MetroTerminalReporter.ts b/packages/@expo/cli/src/start/server/metro/MetroTerminalReporter.ts
index aa689ab72a9198..354830bb56adb7 100644
--- a/packages/@expo/cli/src/start/server/metro/MetroTerminalReporter.ts
+++ b/packages/@expo/cli/src/start/server/metro/MetroTerminalReporter.ts
@@ -234,7 +234,9 @@ export class MetroTerminalReporter extends TerminalReporter {
_logInitializing(port: number, hasReducedPerformance: boolean): void {
// Don't print a giant logo...
- this.terminal.log(chalk.dim('Starting Metro Bundler') + '\n');
+ if (!shouldReduceLogs()) {
+ this.terminal.log(chalk.dim('Starting Metro Bundler') + '\n');
+ }
}
shouldFilterClientLog(event: { type: 'client_log'; data: unknown[] }): boolean {
diff --git a/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts b/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts
index 304cc37edc6f07..37f2222f2f378d 100644
--- a/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts
+++ b/packages/@expo/cli/src/start/server/metro/instantiateMetro.ts
@@ -23,6 +23,7 @@ import { createDebugMiddleware } from './debugging/createDebugMiddleware';
import { createMetroMiddleware } from './dev-server/createMetroMiddleware';
import { runServer } from './runServer-fork';
import { withMetroMultiPlatformAsync } from './withMetroMultiPlatform';
+import { events, shouldReduceLogs } from '../../../events';
import { Log } from '../../../log';
import { env } from '../../../utils/env';
import { CommandError } from '../../../utils/errors';
@@ -31,6 +32,30 @@ import { createJsInspectorMiddleware } from '../middleware/inspector/createJsIns
import { prependMiddleware } from '../middleware/mutations';
import { getPlatformBundlers } from '../platformBundlers';
+// prettier-ignore
+export const event = events('metro', (t) => [
+ t.event<'config', {
+ serverRoot: string;
+ projectRoot: string;
+ exporting: boolean;
+ flags: {
+ autolinkingModuleResolution: boolean;
+ serverActions: boolean;
+ serverComponents: boolean;
+ reactCompiler: boolean;
+ optimizeGraph?: boolean;
+ treeshaking?: boolean;
+ logbox?: boolean;
+ };
+ }>(),
+ t.event<'instantiate', {
+ atlas: boolean;
+ workers: number | null;
+ host: string | null;
+ port: number | null;
+ }>(),
+]);
+
// NOTE(@kitten): We pass a custom createStableModuleIdFactory function into the Metro module ID factory sometimes
interface MetroServerWithModuleIdMod extends MetroServer {
_createModuleId: ReturnType & ((path: string) => number);
@@ -108,13 +133,13 @@ export async function loadMetroConfigAsync(
const serverActionsEnabled =
exp.experiments?.reactServerFunctions ?? env.EXPO_UNSTABLE_SERVER_FUNCTIONS;
-
+ const serverComponentsEnabled = !!exp.experiments?.reactServerComponentRoutes;
if (serverActionsEnabled) {
process.env.EXPO_UNSTABLE_SERVER_FUNCTIONS = '1';
}
// NOTE: Enable all the experimental Metro flags when RSC is enabled.
- if (exp.experiments?.reactServerComponentRoutes || serverActionsEnabled) {
+ if (serverComponentsEnabled || serverActionsEnabled) {
process.env.EXPO_USE_METRO_REQUIRE = '1';
}
@@ -181,12 +206,14 @@ export async function loadMetroConfigAsync(
}
const platformBundlers = getPlatformBundlers(projectRoot, exp);
+ const reduceLogs = shouldReduceLogs();
- if (exp.experiments?.reactCompiler) {
+ const reactCompilerEnabled = !!exp.experiments?.reactCompiler;
+ if (!reduceLogs && reactCompilerEnabled) {
Log.log(chalk.gray`React Compiler enabled`);
}
- if (autolinkingModuleResolutionEnabled) {
+ if (!reduceLogs && autolinkingModuleResolutionEnabled) {
Log.log(chalk.gray`Expo Autolinking module resolution enabled`);
}
@@ -196,17 +223,17 @@ export async function loadMetroConfigAsync(
);
}
- if (env.EXPO_UNSTABLE_METRO_OPTIMIZE_GRAPH) {
+ if (!reduceLogs && env.EXPO_UNSTABLE_METRO_OPTIMIZE_GRAPH) {
Log.warn(`Experimental bundle optimization is enabled.`);
}
- if (env.EXPO_UNSTABLE_TREE_SHAKING) {
+ if (!reduceLogs && env.EXPO_UNSTABLE_TREE_SHAKING) {
Log.warn(`Experimental tree shaking is enabled.`);
}
- if (env.EXPO_UNSTABLE_LOG_BOX) {
+ if (!reduceLogs && env.EXPO_UNSTABLE_LOG_BOX) {
Log.warn(`Experimental Expo LogBox is enabled.`);
}
- if (serverActionsEnabled) {
+ if (!reduceLogs && serverActionsEnabled) {
Log.warn(
`React Server Functions (beta) are enabled. Route rendering mode: ${exp.experiments?.reactServerComponentRoutes ? 'server' : 'client'}`
);
@@ -220,10 +247,25 @@ export async function loadMetroConfigAsync(
isAutolinkingResolverEnabled: autolinkingModuleResolutionEnabled,
isExporting,
isNamedRequiresEnabled: env.EXPO_USE_METRO_REQUIRE,
- isReactServerComponentsEnabled: !!exp.experiments?.reactServerComponentRoutes,
+ isReactServerComponentsEnabled: serverComponentsEnabled,
getMetroBundler,
});
+ event('config', {
+ serverRoot: event.path(serverRoot),
+ projectRoot: event.path(projectRoot),
+ exporting: isExporting,
+ flags: {
+ autolinkingModuleResolution: autolinkingModuleResolutionEnabled,
+ serverActions: serverActionsEnabled,
+ serverComponents: serverComponentsEnabled,
+ reactCompiler: reactCompilerEnabled,
+ optimizeGraph: env.EXPO_UNSTABLE_METRO_OPTIMIZE_GRAPH,
+ treeshaking: env.EXPO_UNSTABLE_TREE_SHAKING,
+ logbox: env.EXPO_UNSTABLE_LOG_BOX,
+ },
+ });
+
return {
config,
setEventReporter: (logger: (event: any) => void) => (reportEvent = logger),
@@ -317,7 +359,7 @@ export async function instantiateMetroAsync(
resetAtlasFile: isExporting,
});
- const { server, hmrServer, metro } = await runServer(
+ const { address, server, hmrServer, metro } = await runServer(
metroBundler,
metroConfig,
{
@@ -330,6 +372,13 @@ export async function instantiateMetroAsync(
}
);
+ event('instantiate', {
+ atlas: env.EXPO_ATLAS,
+ workers: metroConfig.maxWorkers ?? null,
+ host: address?.address ?? null,
+ port: address?.port ?? null,
+ });
+
// Patch transform file to remove inconvenient customTransformOptions which are only used in single well-known files.
const originalTransformFile = metro
.getBundler()
diff --git a/packages/@expo/cli/src/start/server/metro/runServer-fork.ts b/packages/@expo/cli/src/start/server/metro/runServer-fork.ts
index 4d2d225e4f3ee6..a780d4ae5de88b 100644
--- a/packages/@expo/cli/src/start/server/metro/runServer-fork.ts
+++ b/packages/@expo/cli/src/start/server/metro/runServer-fork.ts
@@ -12,6 +12,7 @@ import type { ConfigT } from '@expo/metro/metro-config';
import assert from 'assert';
import http from 'http';
import https from 'https';
+import type { AddressInfo } from 'net';
import { parse } from 'url';
import type { WebSocketServer } from 'ws';
@@ -20,7 +21,7 @@ import { Log } from '../../../log';
import type { ConnectAppType } from '../middleware/server.types';
export const runServer = async (
- metroBundler: MetroBundlerDevServer,
+ _metroBundler: MetroBundlerDevServer,
config: ConfigT,
{
hasReducedPerformance = false,
@@ -39,6 +40,7 @@ export const runServer = async (
mockServer: boolean;
}
): Promise<{
+ address: AddressInfo | null;
server: http.Server | https.Server;
hmrServer: MetroHmrServer | null;
metro: Server;
@@ -124,14 +126,10 @@ export const runServer = async (
};
if (mockServer) {
- return { server: httpServer, hmrServer: null, metro: metroServer };
+ return { address: null, server: httpServer, hmrServer: null, metro: metroServer };
}
- return new Promise<{
- server: http.Server | https.Server;
- hmrServer: MetroHmrServer;
- metro: Server;
- }>((resolve, reject) => {
+ return new Promise((resolve, reject) => {
httpServer.on('error', (error) => {
reject(error);
});
@@ -164,7 +162,14 @@ export const runServer = async (
}
});
- resolve({ server: httpServer, hmrServer, metro: metroServer });
+ const address = httpServer.address();
+
+ resolve({
+ address: address && typeof address === 'object' ? address : null,
+ server: httpServer,
+ hmrServer,
+ metro: metroServer,
+ });
});
});
};
diff --git a/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts b/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts
index b51bdd8fb08ea2..494a572371dee8 100644
--- a/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts
+++ b/packages/@expo/cli/src/start/server/metro/withMetroMultiPlatform.ts
@@ -241,7 +241,7 @@ export function withExtendedResolver(
: null;
// TODO: Move this to be a transform key for invalidation.
- if (!isExporting && isInteractive()) {
+ if (!isExporting && !env.CI) {
if (isTsconfigPathsEnabled) {
// TODO: We should track all the files that used imports and invalidate them
// currently the user will need to save all the files that use imports to
diff --git a/packages/@expo/cli/src/start/startAsync.ts b/packages/@expo/cli/src/start/startAsync.ts
index e92f061c7cf605..46660c2359b59c 100644
--- a/packages/@expo/cli/src/start/startAsync.ts
+++ b/packages/@expo/cli/src/start/startAsync.ts
@@ -1,6 +1,7 @@
import { getConfig } from '@expo/config';
import chalk from 'chalk';
+import { shouldReduceLogs } from '../events';
import { SimulatorAppPrerequisite } from './doctor/apple/SimulatorAppPrerequisite';
import { getXcodeVersionAsync } from './doctor/apple/XcodePrerequisite';
import { validateDependenciesVersionsAsync } from './doctor/dependencies/validateDependenciesVersions';
@@ -18,8 +19,6 @@ import { profile } from '../utils/profile';
import { maybeCreateMCPServerAsync } from './server/MCP';
import { addMcpCapabilities } from './server/MCPDevToolsPluginCLIExtensions';
-const debug = require('debug')('expo:start');
-
async function getMultiBundlerStartOptions(
projectRoot: string,
options: Options,
@@ -69,7 +68,9 @@ export async function startAsync(
options: Options,
settings: { webOnly?: boolean }
) {
- Log.log(chalk.gray(`Starting project at ${projectRoot}`));
+ if (!shouldReduceLogs()) {
+ Log.log(chalk.gray(`Starting project at ${projectRoot}`));
+ }
const { exp, pkg } = profile(getConfig)(projectRoot);
diff --git a/packages/@expo/cli/src/utils/interactive.ts b/packages/@expo/cli/src/utils/interactive.ts
index 83b5980a3a364e..4c0b9fcd5029b3 100644
--- a/packages/@expo/cli/src/utils/interactive.ts
+++ b/packages/@expo/cli/src/utils/interactive.ts
@@ -1,6 +1,7 @@
+import { shouldReduceLogs } from '../events';
import { env } from './env';
/** @returns `true` if the process is interactive. */
export function isInteractive(): boolean {
- return !env.CI && process.stdout.isTTY;
+ return !shouldReduceLogs() && !env.CI && process.stdout.isTTY;
}
diff --git a/packages/@expo/cli/src/utils/nodeEnv.ts b/packages/@expo/cli/src/utils/nodeEnv.ts
index 356c8725e22ce0..3555f689da14e3 100644
--- a/packages/@expo/cli/src/utils/nodeEnv.ts
+++ b/packages/@expo/cli/src/utils/nodeEnv.ts
@@ -1,6 +1,8 @@
import * as env from '@expo/env';
import path from 'node:path';
+import { events, shouldReduceLogs } from '../events';
+
type EnvOutput = Record;
// TODO(@kitten): We assign this here to run server-side code bundled by metro
@@ -9,6 +11,20 @@ declare namespace globalThis {
let __DEV__: boolean | undefined;
}
+// prettier-ignore
+export const event = events('env', (t) => [
+ t.event<'mode', {
+ nodeEnv: string;
+ babelEnv: string;
+ mode: 'development' | 'production';
+ }>(),
+ t.event<'load', {
+ mode: string | undefined;
+ files: string[];
+ env: Record;
+ }>(),
+]);
+
/**
* Set the environment to production or development
* lots of tools use this to determine if they should run in a dev mode.
@@ -17,6 +33,12 @@ export function setNodeEnv(mode: 'development' | 'production') {
process.env.NODE_ENV = process.env.NODE_ENV || mode;
process.env.BABEL_ENV = process.env.BABEL_ENV || process.env.NODE_ENV;
globalThis.__DEV__ = process.env.NODE_ENV !== 'production';
+
+ event('mode', {
+ nodeEnv: process.env.NODE_ENV,
+ babelEnv: process.env.BABEL_ENV,
+ mode,
+ });
}
interface LoadEnvFilesOptions {
@@ -34,8 +56,8 @@ let prevEnvKeys: Set | undefined;
export function loadEnvFiles(projectRoot: string, options?: LoadEnvFilesOptions) {
const params = {
...options,
+ silent: !!options?.silent || shouldReduceLogs(),
force: !!options?.force,
- silent: !!options?.silent,
mode: process.env.NODE_ENV,
systemEnv: process.env,
};
@@ -50,7 +72,17 @@ export function loadEnvFiles(projectRoot: string, options?: LoadEnvFilesOptions)
}
}
- env.logLoadedEnv(envInfo, params);
+ if (envInfo.result === 'loaded') {
+ event('load', {
+ mode: params.mode,
+ files: envInfo.files.map((file) => event.path(file)),
+ env: envOutput,
+ });
+ }
+
+ if (!params.silent) {
+ env.logLoadedEnv(envInfo, params);
+ }
return process.env;
}
@@ -86,5 +118,11 @@ export function reloadEnvFiles(projectRoot: string) {
}
}
}
+
+ event('load', {
+ mode: params.mode,
+ files: envInfo.files.map((file) => event.path(file)),
+ env: envOutput,
+ });
}
}
From 436bed646065bb0ca95cbdbd267459d11f3654a9 Mon Sep 17 00:00:00 2001
From: Jakub Tkacz <32908614+Ubax@users.noreply.github.com>
Date: Tue, 24 Feb 2026 20:02:29 +0100
Subject: [PATCH 07/10] [expo-router] Move react-native-screens from
dependencies to peerDependencies (#43394)
# Why
Reverts part of https://github.com/expo/expo/pull/43379
# How
# Test Plan
# Checklist
- [ ] I added a `changelog.md` entry and rebuilt the package sources
according to [this short
guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting)
- [ ] This diff will work correctly for `npx expo prebuild` & EAS Build
(eg: updated a module plugin).
- [ ] Conforms with the [Documentation Writing Style
Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md)
---
packages/expo-router/package.json | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/packages/expo-router/package.json b/packages/expo-router/package.json
index 7443a21f6cc570..5f4bb13bfd3720 100644
--- a/packages/expo-router/package.json
+++ b/packages/expo-router/package.json
@@ -95,6 +95,7 @@
"react-native-gesture-handler": "*",
"react-native-reanimated": "*",
"react-native-safe-area-context": ">= 5.4.0",
+ "react-native-screens": "~4.24.0",
"react-native-web": "*",
"react-server-dom-webpack": "~19.0.4 || ~19.1.5 || ~19.2.4"
},
@@ -152,7 +153,7 @@
"query-string": "^7.1.3",
"react-fast-compare": "^3.2.2",
"react-native-is-edge-to-edge": "^1.2.1",
- "react-native-screens": "4.24.0",
+ "react-native-screens": "~4.24.0",
"semver": "~7.6.3",
"server-only": "^0.0.1",
"sf-symbols-typescript": "^2.1.0",
From 796c7098c2e32c898c1cf91dc647be926c543a3a Mon Sep 17 00:00:00 2001
From: Vojtech Novak
Date: Tue, 24 Feb 2026 20:04:08 +0100
Subject: [PATCH 08/10] [notifications] fix not forwarding new options to the
OS when permissions were already granted (#43378)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
# Why
Fixes an issue on iOS where `requestPermissionsAsync` would not forward
new authorization options to the operating system when notification
permissions were previously granted. This prevented users from
requesting additional notification permissions (like alerts, badges, or
sounds) after the initial permission grant.
closes #20086, closes #20072
# How
Changed the implementation to call `requestAuthorizationOptions`
directly instead of going through the general permissions system.
A previous attempt (#20086) was rejected because of Expo Go's permission
scoping. However, I noticed that notification permissions in Expo Go are
not scoped, because some 7 years ago they were "temporarily" excluded
from scoping.
So the question is: do we want to handle this in a smarter way, or make
the exclusion permanent?
# Test Plan
- notification tester app
Details
```
/**
* Manual test for https://github.com/expo/expo/issues/20072
* Verifies that requestPermissionsAsync forwards new options to the OS
* even when notifications are already granted.
*
* Steps to verify the fix:
* 1. Press "Request: alert only" → grants notifications with alert
* 2. Press "Get permissions" → note allowsSound is false
* 3. Press "Request: alert + sound" → should update options
* 4. Press "Get permissions" → allowsSound should now be true
*
* Before the fix, step 3 would short-circuit and step 4 would still show allowsSound: false.
*/
function PermissionOptionsTest() {
const [result, setResult] = useState('');
if (Platform.OS !== 'ios') {
return null;
}
return (
<>
Permission options test (#20072)
# Checklist
- [x] I added a `changelog.md` entry and rebuilt the package sources
according to [this short
guide](https://github.com/expo/expo/blob/main/CONTRIBUTING.md#-before-submitting)
- [ ] This diff will work correctly for `npx expo prebuild` & EAS Build
(eg: updated a module plugin).
- [ ] Conforms with the [Documentation Writing Style
Guide](https://github.com/expo/expo/blob/main/guides/Expo%20Documentation%20Writing%20Style%20Guide.md)
---------
Co-authored-by: Expo Bot <34669131+expo-bot@users.noreply.github.com>
Co-authored-by: Tomasz Sapeta
---
.../Permissions/EXScopedPermissions.m | 2 +-
packages/expo-notifications/CHANGELOG.md | 4 ++--
.../Permissions/PermissionsModule.swift | 13 +++++--------
3 files changed, 8 insertions(+), 11 deletions(-)
diff --git a/apps/expo-go/ios/Exponent/Versioned/Core/UniversalModules/Permissions/EXScopedPermissions.m b/apps/expo-go/ios/Exponent/Versioned/Core/UniversalModules/Permissions/EXScopedPermissions.m
index 98c2beea741e5c..c9efcae1143a03 100644
--- a/apps/expo-go/ios/Exponent/Versioned/Core/UniversalModules/Permissions/EXScopedPermissions.m
+++ b/apps/expo-go/ios/Exponent/Versioned/Core/UniversalModules/Permissions/EXScopedPermissions.m
@@ -164,7 +164,7 @@ + (NSString *)textForPermissionType:(NSString *)type
- (BOOL)shouldVerifyScopedPermission:(NSString *)permissionType
{
- // temporarily exclude notifactions from permissions per experience; system brightness is always granted
+ // exclude notifications from permissions per experience; system brightness is always granted
return ![@[@"notifications", @"userFacingNotifications", @"systemBrightness"] containsObject:permissionType];
}
diff --git a/packages/expo-notifications/CHANGELOG.md b/packages/expo-notifications/CHANGELOG.md
index eebf6a5097946d..132181290f156d 100644
--- a/packages/expo-notifications/CHANGELOG.md
+++ b/packages/expo-notifications/CHANGELOG.md
@@ -8,9 +8,9 @@
### 🐛 Bug fixes
-### 💡 Others
+- [ios] Fixed `requestPermissionsAsync` not forwarding new options to the OS when notifications were already granted ([#43378](https://github.com/expo/expo/pull/43378) by [@vonovak](https://github.com/vonovak))
-- [ios] avoid higher quality-of-service thread waiting on lower quality-of-service thread when requesting permissions ([#43377](https://github.com/expo/expo/pull/43377) by [@vonovak](https://github.com/vonovak))
+### 💡 Others
## 55.0.9 — 2026-02-20
diff --git a/packages/expo-notifications/ios/ExpoNotifications/Permissions/PermissionsModule.swift b/packages/expo-notifications/ios/ExpoNotifications/Permissions/PermissionsModule.swift
index 03a38d7c34004a..11ff5ec6943a3d 100644
--- a/packages/expo-notifications/ios/ExpoNotifications/Permissions/PermissionsModule.swift
+++ b/packages/expo-notifications/ios/ExpoNotifications/Permissions/PermissionsModule.swift
@@ -2,7 +2,6 @@
import ExpoModulesCore
import UIKit
-import MachO
public class PermissionsModule: Module {
var permissionsManager: (any EXPermissionsInterface)?
@@ -34,13 +33,11 @@ public class PermissionsModule: Module {
: defaultAuthorizationOptions
requester.setAuthorizationOptions(options)
- appContext?
- .permissions?
- .askForPermission(
- usingRequesterClass: ExpoNotificationsPermissionsRequester.self,
- resolve: promise.resolver,
- reject: promise.legacyRejecter
- )
+ // Call `requestAuthorization` directly to ensure new options are always
+ // forwarded to the OS, even if notifications were previously granted.
+ // iOS safely handles repeated calls to `requestAuthorization(options:)`.
+ // Expo Go notifications permissions are not scoped
+ requester.requestAuthorizationOptions(options, resolver: promise.resolver, rejecter: promise.legacyRejecter)
}
}
}
From 9e0073a36aad6087f4cc4b7dcacb7652fe0c2798 Mon Sep 17 00:00:00 2001
From: Christian Falch <875252+chrfalch@users.noreply.github.com>
Date: Tue, 24 Feb 2026 21:39:14 +0100
Subject: [PATCH 09/10] [cli] Fixed missing guard when loading
expo-modules.config.json files (#43386)
# Why
When loading expo-modules.config.json files, we don't have a guard for
wrong configurations.
# How
Added unit tests to reproduce the problem, and wrapped the loading in a
try..catch with clear warnings.
# Test Plan
Test in BareExpo
# Checklist
- [x] This diff will work correctly for `npx expo prebuild` & EAS Build
(eg: updated a module plugin).
---
.../src/start/server/DevToolsPluginManager.ts | 12 ++-
.../__tests__/DevToolsPluginManager-test.ts | 94 +++++++++++++++++++
2 files changed, 105 insertions(+), 1 deletion(-)
create mode 100644 packages/@expo/cli/src/start/server/__tests__/DevToolsPluginManager-test.ts
diff --git a/packages/@expo/cli/src/start/server/DevToolsPluginManager.ts b/packages/@expo/cli/src/start/server/DevToolsPluginManager.ts
index e95e4a19428187..ce7d661d6184c8 100644
--- a/packages/@expo/cli/src/start/server/DevToolsPluginManager.ts
+++ b/packages/@expo/cli/src/start/server/DevToolsPluginManager.ts
@@ -37,7 +37,17 @@ export default class DevToolsPluginManager {
).filter((maybePlugin) => maybePlugin != null);
debug('Found autolinked plugins', plugins);
return plugins
- .map((pluginInfo) => new DevToolsPlugin(pluginInfo, this.projectRoot))
+ .map((pluginInfo) => {
+ try {
+ return new DevToolsPlugin(pluginInfo, this.projectRoot);
+ } catch (error: any) {
+ Log.warn(
+ `Skipping plugin "${pluginInfo.packageName}": ${error.message ?? 'invalid configuration'}`
+ );
+ debug('Plugin validation error for %s: %O', pluginInfo.packageName, error);
+ return null;
+ }
+ })
.filter((p) => p != null) as DevToolsPlugin[];
}
}
diff --git a/packages/@expo/cli/src/start/server/__tests__/DevToolsPluginManager-test.ts b/packages/@expo/cli/src/start/server/__tests__/DevToolsPluginManager-test.ts
new file mode 100644
index 00000000000000..ccb08c182ef84c
--- /dev/null
+++ b/packages/@expo/cli/src/start/server/__tests__/DevToolsPluginManager-test.ts
@@ -0,0 +1,94 @@
+import { Log } from '../../../log';
+import DevToolsPluginManager from '../DevToolsPluginManager';
+
+jest.mock('../../../log');
+
+// Mock the autolinking module
+jest.mock('expo/internal/unstable-autolinking-exports', () => ({
+ makeCachedDependenciesLinker: jest.fn(),
+ scanExpoModuleResolutionsForPlatform: jest.fn(),
+ getLinkingImplementationForPlatform: jest.fn(),
+}));
+
+const autolinking = require('expo/internal/unstable-autolinking-exports') as jest.Mocked<
+ typeof import('expo-modules-autolinking/exports')
+>;
+
+function mockAutolinkingPlugins(
+ plugins: { packageName: string; packageRoot: string; cliExtensions?: any; webpageRoot?: string }[]
+) {
+ const revisions: Record = {};
+ const descriptors: Record = {};
+
+ for (const plugin of plugins) {
+ revisions[plugin.packageName] = { name: plugin.packageName };
+ descriptors[plugin.packageName] = plugin;
+ }
+
+ autolinking.makeCachedDependenciesLinker.mockReturnValue({} as any);
+ autolinking.scanExpoModuleResolutionsForPlatform.mockResolvedValue(revisions as any);
+ autolinking.getLinkingImplementationForPlatform.mockReturnValue({
+ resolveModuleAsync: jest.fn(async (name: string) => descriptors[name] ?? null),
+ } as any);
+}
+
+describe('DevToolsPluginManager', () => {
+ it('should return valid plugins', async () => {
+ mockAutolinkingPlugins([
+ {
+ packageName: 'valid-plugin',
+ packageRoot: '/path/to/valid-plugin',
+ webpageRoot: '/web',
+ },
+ ]);
+
+ const manager = new DevToolsPluginManager('/project');
+ const plugins = await manager.queryPluginsAsync();
+
+ expect(plugins.length).toBe(1);
+ expect(plugins[0].packageName).toBe('valid-plugin');
+ });
+
+ it('should skip a plugin with an invalid config without affecting other valid plugins', async () => {
+ mockAutolinkingPlugins([
+ {
+ packageName: 'valid-plugin',
+ packageRoot: '/path/to/valid-plugin',
+ webpageRoot: '/web',
+ },
+ {
+ packageName: 'invalid-plugin',
+ packageRoot: '/path/to/invalid-plugin',
+ cliExtensions: {
+ // Missing required `commands` and `entryPoint` fields
+ description: 'An invalid extension',
+ },
+ },
+ {
+ packageName: 'another-valid-plugin',
+ packageRoot: '/path/to/another-valid-plugin',
+ cliExtensions: {
+ description: 'A valid CLI extension',
+ entryPoint: 'index.js',
+ commands: [
+ {
+ name: 'test-cmd',
+ title: 'Test Command',
+ environments: ['cli'],
+ },
+ ],
+ },
+ },
+ ]);
+
+ const manager = new DevToolsPluginManager('/project');
+ const plugins = await manager.queryPluginsAsync();
+
+ expect(plugins.length).toBe(2);
+ expect(plugins[0].packageName).toBe('valid-plugin');
+ expect(plugins[1].packageName).toBe('another-valid-plugin');
+ expect(Log.warn).toHaveBeenCalledWith(
+ expect.stringContaining('Skipping plugin "invalid-plugin"')
+ );
+ });
+});
From 49c424864d2ea2a2c989fec7ae6c083fd3c08576 Mon Sep 17 00:00:00 2001
From: Christian Falch <875252+chrfalch@users.noreply.github.com>
Date: Tue, 24 Feb 2026 21:39:37 +0100
Subject: [PATCH 10/10] [tools] Rewrite et ci-status as interactive et
ci-inspect TUI (#43355)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
# Why
We should align the ci-status command with the gitHub-inspect command
and make it smaller and faster.
# How
Rewrite the static et ci-status command as an interactive TUI dashboard,
renamed to et ci-inspect (aliases: ci), matching the UX patterns
established in et github-inspect.
- Interactive category-based navigation (Broken Workflows, Needs
Attention, High Volume Issues, All Workflows) with arrow keys, Enter to
drill down, Esc to go back
- Detail view per workflow with daily trend bar charts, failed run
history, and inline log inspection
- RECOMMENDED badge on the highest-priority actionable category
- Parallel data fetching for GitHub Actions and EAS workflows with ora
spinner
- Real EAS log fetching in the detail view via eas workflow:view + eas
workflow:logs (replaces the old stub message)
- Fixed success rate calculation to exclude in-progress/queued runs from
the denominator
- "Needs Attention" category now requires at least one actual failure
- Removed the --inspect CLI flag — the TUI detail view now covers the
same functionality inline
- Auth warnings only shown when a service is not authenticated
# Test Plan
- Run et ci-inspect and verify interactive navigation through
categories, workflow lists, and detail views
- Run et ci-inspect -w last to verify week selection still works
- Run et ci-inspect -b to verify branch option
- Verify (f) toggles failed runs in detail view
- Verify (l) downloads and displays error logs for both GitHub Actions
and EAS workflows
- Verify r reloads data from the workflow list
- Verify Esc navigates back through each level
---------
Co-authored-by: Claude Opus 4.6
---
tools/src/commands/CIInspectCommand.ts | 1187 ++++++++++++++++++++
tools/src/commands/CIStatusCommand.ts | 1386 ------------------------
2 files changed, 1187 insertions(+), 1386 deletions(-)
create mode 100644 tools/src/commands/CIInspectCommand.ts
delete mode 100644 tools/src/commands/CIStatusCommand.ts
diff --git a/tools/src/commands/CIInspectCommand.ts b/tools/src/commands/CIInspectCommand.ts
new file mode 100644
index 00000000000000..b7fe9d4050f75f
--- /dev/null
+++ b/tools/src/commands/CIInspectCommand.ts
@@ -0,0 +1,1187 @@
+import { Command } from '@expo/commander';
+import spawnAsync from '@expo/spawn-async';
+import chalk from 'chalk';
+import { glob } from 'glob';
+import ora from 'ora';
+import path from 'path';
+
+import { EXPO_DIR } from '../Constants';
+import { getAuthenticatedUserAsync } from '../GitHub';
+import {
+ downloadJobLogsAsync,
+ getJobsForWorkflowRunAsync,
+ getWorkflowRunsForRepoAsync,
+} from '../GitHubActions';
+import logger from '../Logger';
+
+// --- TUI helpers ---
+
+const MAX_VISIBLE_ITEMS = 15;
+
+function waitForKey(validKeys: string[]): Promise {
+ return new Promise((resolve) => {
+ const { stdin } = process;
+ const wasRaw = stdin.isRaw;
+
+ stdin.setRawMode(true);
+ stdin.resume();
+ stdin.setEncoding('utf8');
+
+ const onData = (data: string) => {
+ let key: string;
+
+ if (data === '\u001b[A') {
+ key = 'up';
+ } else if (data === '\u001b[B') {
+ key = 'down';
+ } else if (data === '\r' || data === '\n') {
+ key = 'enter';
+ } else if (data === '\u001b' || data === '\u001b\u001b') {
+ key = 'escape';
+ } else if (data === '\u0003') {
+ stdin.setRawMode(wasRaw ?? false);
+ stdin.pause();
+ stdin.removeListener('data', onData);
+ process.exit(0);
+ } else {
+ key = data.toLowerCase();
+ }
+
+ if (validKeys.includes(key)) {
+ stdin.setRawMode(wasRaw ?? false);
+ stdin.pause();
+ stdin.removeListener('data', onData);
+ resolve(key);
+ }
+ };
+
+ stdin.on('data', onData);
+ });
+}
+
+function clearLines(count: number): void {
+ for (let i = 0; i < count; i++) {
+ process.stdout.write('\x1b[1A\x1b[2K');
+ }
+}
+
+function getScrollWindow(total: number, selectedIndex: number): { start: number; end: number } {
+ if (total <= MAX_VISIBLE_ITEMS) {
+ return { start: 0, end: total };
+ }
+
+ const half = Math.floor(MAX_VISIBLE_ITEMS / 2);
+ let start = selectedIndex - half;
+ if (start < 0) start = 0;
+ let end = start + MAX_VISIBLE_ITEMS;
+ if (end > total) {
+ end = total;
+ start = end - MAX_VISIBLE_ITEMS;
+ }
+
+ return { start, end };
+}
+
+// --- Types ---
+
+type ActionOptions = {
+ branch: string;
+ week?: string;
+};
+
+type AuthStatus = {
+ github: boolean;
+ githubUser: string | null;
+ eas: boolean;
+ easUser: string | null;
+};
+
+interface DailyRate {
+ label: string; // Mon, Tue, etc.
+ date: string; // YYYY-MM-DD
+ total: number;
+ successful: number;
+}
+
+type FailedRun = {
+ id: number | string;
+ date: string;
+ url?: string;
+ commitMessage?: string;
+ source: 'github' | 'eas';
+ project?: string;
+};
+
+type WorkflowItem = {
+ name: string;
+ source: 'github' | 'eas';
+ project?: string;
+ total: number;
+ success: number;
+ failed: number;
+ cancelled: number;
+ other: number;
+ successRate: number;
+ dailyRates: DailyRate[];
+ failedRuns: FailedRun[];
+};
+
+type CategoryInfo = {
+ key: string;
+ label: string;
+ guidance: string;
+ items: WorkflowItem[];
+};
+
+// --- Date utilities ---
+
+function getISOWeekNumber(date: Date): number {
+ const d = new Date(Date.UTC(date.getFullYear(), date.getMonth(), date.getDate()));
+ const dayNum = d.getUTCDay() || 7;
+ d.setUTCDate(d.getUTCDate() + 4 - dayNum);
+ const yearStart = new Date(Date.UTC(d.getUTCFullYear(), 0, 1));
+ return Math.ceil(((d.getTime() - yearStart.getTime()) / 86400000 + 1) / 7);
+}
+
+function getMondayOfWeek(week: number, year: number): Date {
+ const jan4 = new Date(year, 0, 4);
+ const dayOfWeek = jan4.getDay() || 7;
+ const week1Monday = new Date(jan4);
+ week1Monday.setDate(jan4.getDate() - (dayOfWeek - 1));
+ const monday = new Date(week1Monday);
+ monday.setDate(week1Monday.getDate() + (week - 1) * 7);
+ monday.setHours(0, 0, 0, 0);
+ return monday;
+}
+
+function parseDateRange(weekOption?: string): [Date, Date, number, Date] {
+ const now = new Date();
+ const currentYear = now.getFullYear();
+ const currentWeek = getISOWeekNumber(now);
+
+ let targetWeek: number;
+ if (!weekOption) {
+ targetWeek = currentWeek;
+ } else if (weekOption === 'last' || weekOption === 'prev') {
+ targetWeek = currentWeek - 1;
+ } else {
+ targetWeek = parseInt(weekOption, 10);
+ if (isNaN(targetWeek) || targetWeek < 1 || targetWeek > 53) {
+ logger.error(`Invalid week number: ${weekOption}. Use 1-53, "last", or "prev".`);
+ process.exit(1);
+ }
+ }
+
+ const monday = getMondayOfWeek(targetWeek, currentYear);
+ const friday = new Date(monday);
+ friday.setDate(monday.getDate() + 4);
+ friday.setHours(23, 59, 59, 999);
+
+ const dataEndDate = friday < now ? friday : now;
+ return [monday, dataEndDate, targetWeek, friday];
+}
+
+// --- Classification + stats helpers ---
+
+function classifyGitHubRun(run: any): 'success' | 'failure' | 'cancelled' | 'other' {
+ if (run.conclusion === 'success') return 'success';
+ if (run.conclusion === 'failure') return 'failure';
+ if (run.conclusion === 'cancelled') return 'cancelled';
+ return 'other';
+}
+
+function classifyEASRun(run: any): 'success' | 'failure' | 'cancelled' | 'other' {
+ const status = (run.status ?? '').toUpperCase();
+ if (status === 'SUCCESS' || status === 'FINISHED') return 'success';
+ if (status === 'FAILURE' || status === 'ERRORED') return 'failure';
+ if (status === 'CANCELED') return 'cancelled';
+ return 'other';
+}
+
+function countRunStats(
+ runs: any[],
+ classify: (run: any) => 'success' | 'failure' | 'cancelled' | 'other'
+): {
+ total: number;
+ success: number;
+ failed: number;
+ cancelled: number;
+ other: number;
+ successRate: number;
+} {
+ let success = 0,
+ failed = 0,
+ cancelled = 0,
+ other = 0;
+ for (const run of runs) {
+ const c = classify(run);
+ if (c === 'success') success++;
+ else if (c === 'failure') failed++;
+ else if (c === 'cancelled') cancelled++;
+ else other++;
+ }
+ const total = runs.length;
+ const concluded = success + failed + cancelled;
+ const successRate = concluded > 0 ? ((success + cancelled) / concluded) * 100 : 0;
+ return { total, success, failed, cancelled, other, successRate };
+}
+
+function computeDailyRates(
+ runs: any[],
+ startDate: Date,
+ getTimestamp: (run: any) => string | undefined,
+ isSuccess: (run: any) => boolean,
+ isConcluded: (run: any) => boolean
+): DailyRate[] {
+ const dayNames = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri'];
+ const monday = new Date(startDate);
+ monday.setHours(0, 0, 0, 0);
+
+ const dailyRates: DailyRate[] = [];
+
+ for (let i = 0; i < 5; i++) {
+ const dayStart = new Date(monday);
+ dayStart.setDate(monday.getDate() + i);
+ dayStart.setHours(0, 0, 0, 0);
+ const dayEnd = new Date(dayStart);
+ dayEnd.setHours(23, 59, 59, 999);
+
+ const dayRuns = runs.filter((r) => {
+ const ts = getTimestamp(r);
+ if (!ts) return false;
+ const d = new Date(ts);
+ return d >= dayStart && d <= dayEnd;
+ });
+
+ const concluded = dayRuns.filter(isConcluded);
+ const successful = concluded.filter(isSuccess);
+
+ dailyRates.push({
+ label: dayNames[i],
+ date: dayStart.toISOString().split('T')[0],
+ total: concluded.length,
+ successful: successful.length,
+ });
+ }
+
+ return dailyRates;
+}
+
+function successRateColor(rate: number): string {
+ const pct = `${rate.toFixed(1)}%`;
+ if (rate >= 90) return chalk.green(pct);
+ if (rate >= 75) return chalk.yellow(pct);
+ return chalk.red(pct);
+}
+
+// --- Auth helpers ---
+
+async function checkAuth(): Promise {
+ const status: AuthStatus = { github: false, githubUser: null, eas: false, easUser: null };
+
+ if (process.env.GITHUB_TOKEN) {
+ try {
+ const user = await getAuthenticatedUserAsync();
+ status.github = true;
+ status.githubUser = user.login;
+ } catch {
+ // Token exists but is invalid
+ }
+ }
+
+ try {
+ const result = await spawnAsync('eas', ['whoami'], {
+ env: { ...process.env, EXPO_NO_DOCTOR: 'true' },
+ });
+ const firstLine = result.stdout.trim().split('\n')[0].trim();
+ if (firstLine) {
+ status.eas = true;
+ status.easUser = firstLine;
+ }
+ } catch {
+ // Not logged in or eas not installed
+ }
+
+ return status;
+}
+
+function printAuthStatus(auth: AuthStatus): void {
+ const warnings: string[] = [];
+ if (!auth.github) {
+ warnings.push(
+ `GitHub: ${chalk.red('\u2717')} not authenticated \u2014 run ${chalk.cyan('export GITHUB_TOKEN="$(gh auth token)"')}`
+ );
+ }
+ if (!auth.eas) {
+ warnings.push(
+ `EAS: ${chalk.red('\u2717')} not authenticated \u2014 run ${chalk.cyan('eas login')}`
+ );
+ }
+ if (warnings.length > 0) {
+ for (const w of warnings) {
+ logger.log(` ${w}`);
+ }
+ logger.log('');
+ }
+}
+
+// --- EAS helpers ---
+
+async function findEASProjectDirs(): Promise {
+ const pattern = 'apps/*/.eas/workflows';
+ const matches = await glob(pattern, { cwd: EXPO_DIR });
+ return matches.map((match) => path.resolve(EXPO_DIR, path.dirname(path.dirname(match))));
+}
+
+/**
+ * Fetches workflow runs from EAS CLI for a given project.
+ *
+ * **Limitation:** The EAS CLI returns at most 100 runs across ALL workflows in the project,
+ * with no server-side date filtering. Date filtering is done client-side after fetching.
+ */
+async function fetchEASRuns(
+ projectDir: string,
+ projectName: string,
+ env: Record
+): Promise {
+ let output: string;
+ try {
+ const result = await spawnAsync('eas', ['workflow:runs', '--json', '--limit', '100'], {
+ cwd: projectDir,
+ env,
+ });
+ output = result.stdout;
+ } catch (error: any) {
+ const stderr = error.stderr?.trim();
+ logger.warn(
+ `Failed to fetch Expo Workflow runs for ${projectName}: ${stderr || error.message}`
+ );
+ return [];
+ }
+
+ try {
+ const runs = JSON.parse(output);
+ return Array.isArray(runs) ? runs : [];
+ } catch {
+ logger.warn(`Failed to parse EAS CLI output for ${projectName}.`);
+ return [];
+ }
+}
+
+// --- Log analysis helpers ---
+
+function extractErrorSnippets(log: string, maxLines: number = 80): string[] {
+ const lines = log.split('\n');
+ const snippets: string[] = [];
+
+ const errorPatterns = [
+ /##\[error\]/i,
+ /Error:/i,
+ /FAIL /,
+ /FAILED/,
+ /error\[/i,
+ /panic:/i,
+ /Exception:/i,
+ /AssertionError/i,
+ /TypeError:/i,
+ /ReferenceError:/i,
+ /SyntaxError:/i,
+ /Build failed/i,
+ /Process completed with exit code [^0]/,
+ /Command failed/i,
+ /fatal:/i,
+ ];
+
+ const errorLineIndices = new Set();
+ for (let i = 0; i < lines.length; i++) {
+ for (const pattern of errorPatterns) {
+ if (pattern.test(lines[i])) {
+ for (let j = Math.max(0, i - 5); j <= Math.min(lines.length - 1, i + 10); j++) {
+ errorLineIndices.add(j);
+ }
+ break;
+ }
+ }
+ }
+
+ if (errorLineIndices.size > 0) {
+ const sorted = [...errorLineIndices].sort((a, b) => a - b);
+ let currentSnippet: string[] = [];
+ let lastIdx = -2;
+
+ for (const idx of sorted) {
+ if (idx !== lastIdx + 1 && currentSnippet.length > 0) {
+ snippets.push(currentSnippet.join('\n'));
+ currentSnippet = [];
+ }
+ currentSnippet.push(lines[idx]);
+ lastIdx = idx;
+ }
+ if (currentSnippet.length > 0) {
+ snippets.push(currentSnippet.join('\n'));
+ }
+ }
+
+ if (snippets.length === 0) {
+ const tail = lines.slice(-maxLines).join('\n');
+ if (tail.trim()) {
+ snippets.push(tail);
+ }
+ }
+
+ const joined = snippets.join('\n...\n');
+ const joinedLines = joined.split('\n');
+ if (joinedLines.length > maxLines) {
+ return [joinedLines.slice(0, maxLines).join('\n') + '\n... (truncated)'];
+ }
+ return snippets;
+}
+
+function stripLogTimestamps(log: string): string {
+ return log.replace(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z /gm, '');
+}
+
+function printLogSnippets(snippets: string[]): void {
+ logger.log(`\n ${chalk.bold('Error output:')}`);
+ logger.log(' \u250c' + '\u2500'.repeat(70));
+ for (let si = 0; si < snippets.length; si++) {
+ const indented = snippets[si]
+ .split('\n')
+ .map((line) => ` \u2502 ${line}`)
+ .join('\n');
+ logger.log(indented);
+ if (si < snippets.length - 1) {
+ logger.log(' \u2502 ...');
+ }
+ }
+ logger.log(' \u2514' + '\u2500'.repeat(70));
+}
+
+// --- Data fetching (returns WorkflowItem[]) ---
+
+async function fetchGitHubWorkflowItems(
+ branch: string,
+ startDate: Date,
+ endDate: Date
+): Promise {
+ const runs = await getWorkflowRunsForRepoAsync(branch, { startDate, endDate });
+ if (!runs.length) return [];
+
+ const byWorkflow = new Map();
+ for (const run of runs) {
+ const name = run.name ?? 'unknown';
+ if (!byWorkflow.has(name)) byWorkflow.set(name, []);
+ byWorkflow.get(name)!.push(run);
+ }
+
+ const items: WorkflowItem[] = [];
+ for (const [name, wfRuns] of byWorkflow) {
+ const stats = countRunStats(wfRuns, classifyGitHubRun);
+ const dailyRates = computeDailyRates(
+ wfRuns,
+ startDate,
+ (r) => r.created_at ?? r.run_started_at,
+ (r) => {
+ const c = classifyGitHubRun(r);
+ return c === 'success' || c === 'cancelled';
+ },
+ (r) => classifyGitHubRun(r) !== 'other'
+ );
+
+ const failed = wfRuns
+ .filter((r: any) => r.conclusion === 'failure')
+ .slice(0, 5)
+ .map((r: any) => ({
+ id: r.id,
+ date: new Date(r.created_at).toISOString().slice(0, 10),
+ url: r.html_url,
+ commitMessage: r.head_commit?.message?.split('\n')[0],
+ source: 'github' as const,
+ }));
+
+ items.push({
+ name,
+ source: 'github',
+ ...stats,
+ dailyRates,
+ failedRuns: failed,
+ });
+ }
+
+ return items;
+}
+
+async function fetchEASWorkflowItems(startDate: Date, endDate: Date): Promise {
+ let projectDirs: string[];
+ try {
+ projectDirs = await findEASProjectDirs();
+ } catch {
+ return [];
+ }
+ if (!projectDirs.length) return [];
+
+ const easEnv = {
+ ...process.env,
+ EXPO_NO_DOCTOR: 'true',
+ EAS_BUILD_PROFILE: process.env.EAS_BUILD_PROFILE ?? 'release-client',
+ };
+
+ const items: WorkflowItem[] = [];
+
+ for (const projectDir of projectDirs) {
+ const projectName = path.basename(projectDir);
+ const allRuns = await fetchEASRuns(projectDir, projectName, easEnv);
+
+ const runsInRange = allRuns.filter((r) => {
+ const ts = r.startedAt ?? r.createdAt ?? r.created_at;
+ if (!ts) return false;
+ const d = new Date(ts);
+ return d >= startDate && d <= endDate;
+ });
+
+ if (!runsInRange.length) continue;
+
+ const byWorkflow = new Map();
+ for (const run of runsInRange) {
+ const name = run.workflowName ?? run.workflow_name ?? 'unknown';
+ if (!byWorkflow.has(name)) byWorkflow.set(name, []);
+ byWorkflow.get(name)!.push(run);
+ }
+
+ for (const [name, wfRuns] of byWorkflow) {
+ const stats = countRunStats(wfRuns, classifyEASRun);
+ const dailyRates = computeDailyRates(
+ wfRuns,
+ startDate,
+ (r) => r.startedAt ?? r.createdAt ?? r.created_at,
+ (r) => {
+ const c = classifyEASRun(r);
+ return c === 'success' || c === 'cancelled';
+ },
+ (r) => classifyEASRun(r) !== 'other'
+ );
+
+ const failed = wfRuns
+ .filter((r: any) => {
+ const s = (r.status ?? '').toUpperCase();
+ return s === 'FAILURE' || s === 'ERRORED';
+ })
+ .slice(0, 5)
+ .map((r: any) => ({
+ id: r.id,
+ date: new Date(r.startedAt ?? r.createdAt).toISOString().slice(0, 10),
+ commitMessage: r.gitCommitMessage?.split('\n')[0],
+ source: 'eas' as const,
+ project: projectName,
+ }));
+
+ items.push({
+ name,
+ source: 'eas',
+ project: projectName,
+ ...stats,
+ dailyRates,
+ failedRuns: failed,
+ });
+ }
+ }
+
+ return items;
+}
+
+// --- Category building ---
+
+function buildCategories(items: WorkflowItem[]): CategoryInfo[] {
+ const broken = items.filter((w) => w.successRate === 0 && w.total >= 2 && w.failed >= 2);
+ const needsAttention = items.filter(
+ (w) => w.successRate > 0 && w.successRate < 75 && w.total >= 2 && w.failed > 0
+ );
+ const highVolume = items.filter((w) => w.total >= 10 && w.successRate < 90 && w.failed > 0);
+ const allSorted = [...items].sort((a, b) => b.total - a.total);
+
+ return [
+ {
+ key: 'broken',
+ label: 'Broken Workflows',
+ guidance: 'These workflows are consistently failing and need immediate investigation.',
+ items: broken.sort((a, b) => b.failed - a.failed),
+ },
+ {
+ key: 'attention',
+ label: 'Needs Attention',
+ guidance:
+ 'These workflows have elevated failure rates. Review recent failures to identify patterns.',
+ items: needsAttention.sort((a, b) => a.successRate - b.successRate),
+ },
+ {
+ key: 'high-volume',
+ label: 'High Volume Issues',
+ guidance:
+ 'Frequently-running workflows with notable failure rates \u2014 even small percentages add up.',
+ items: highVolume.sort((a, b) => b.failed - a.failed),
+ },
+ {
+ key: 'all',
+ label: 'All Workflows',
+ guidance: 'Complete overview of all workflows sorted by run count.',
+ items: allSorted,
+ },
+ ];
+}
+
+// --- Interactive display helpers ---
+
+function showCompactStatus(
+ weekNum: number,
+ startStr: string,
+ endStr: string,
+ branch: string,
+ ghItems: WorkflowItem[],
+ easItems: WorkflowItem[],
+ categories: CategoryInfo[]
+): void {
+ const ghTotal = ghItems.reduce((s, w) => s + w.total, 0);
+ const ghSuccess = ghItems.reduce((s, w) => s + w.success, 0);
+ const ghCancelled = ghItems.reduce((s, w) => s + w.cancelled, 0);
+ const ghRate = ghTotal > 0 ? ((ghSuccess + ghCancelled) / ghTotal) * 100 : 0;
+
+ const easTotal = easItems.reduce((s, w) => s + w.total, 0);
+ const easSuccess = easItems.reduce((s, w) => s + w.success, 0);
+ const easCancelled = easItems.reduce((s, w) => s + w.cancelled, 0);
+ const easRate = easTotal > 0 ? ((easSuccess + easCancelled) / easTotal) * 100 : 0;
+
+ const brokenCount = categories.find((c) => c.key === 'broken')?.items.length ?? 0;
+ const attentionCount = categories.find((c) => c.key === 'attention')?.items.length ?? 0;
+
+ logger.log('');
+ logger.log(
+ chalk.bold(`CI Metrics \u2014 Week ${weekNum} (${startStr} \u2192 ${endStr}) \u2014 ${branch}`)
+ );
+ logger.log('');
+
+ if (ghTotal > 0) {
+ logger.log(` GitHub Actions: ${ghTotal} runs, ${successRateColor(ghRate)} success rate`);
+ }
+ if (easTotal > 0) {
+ logger.log(` Expo Workflows: ${easTotal} runs, ${successRateColor(easRate)} success rate`);
+ }
+ if (ghTotal === 0 && easTotal === 0) {
+ logger.log(chalk.gray(' No workflow runs found.'));
+ }
+
+ const alerts: string[] = [];
+ if (brokenCount > 0) alerts.push(chalk.red(`${brokenCount} broken`));
+ if (attentionCount > 0) alerts.push(chalk.yellow(`${attentionCount} needs attention`));
+ if (alerts.length > 0) {
+ logger.log(` ${alerts.join(', ')}`);
+ }
+
+ logger.log('');
+}
+
+function showCategoryList(categories: CategoryInfo[], selectedIndex: number): void {
+ const recommendedIdx = categories.findIndex((c) => c.key !== 'all' && c.items.length > 0);
+
+ for (let i = 0; i < categories.length; i++) {
+ const cat = categories[i];
+ const prefix = i === selectedIndex ? chalk.green('\u25b6') : ' ';
+ const badge = i === recommendedIdx ? chalk.bgGreen.black(' RECOMMENDED ') + ' ' : '';
+ const count = chalk.cyan(`(${cat.items.length})`);
+ logger.log(` ${prefix} ${chalk.green(`${i + 1}.`)} ${badge}${chalk.bold(cat.label)} ${count}`);
+ logger.log(chalk.dim(` ${cat.guidance}`));
+ logger.log('');
+ }
+ logger.log(chalk.gray(' \u2191\u2193 navigate / Enter select / Esc quit'));
+}
+
+function categoryLineCount(categories: CategoryInfo[]): number {
+ // Each category: prefix line + guidance line + blank line, plus the hint line
+ return categories.length * 3 + 1;
+}
+
+function showWorkflowList(category: CategoryInfo, selectedIndex: number): void {
+ const total = category.items.length;
+ const { start, end } = getScrollWindow(total, selectedIndex);
+
+ logger.log(chalk.bold(category.label));
+ logger.log(chalk.dim(` ${category.guidance}`));
+ logger.log('');
+
+ if (total === 0) {
+ logger.log(chalk.gray(' No workflows in this category.'));
+ logger.log('');
+ logger.log(chalk.gray(' Esc back'));
+ return;
+ }
+
+ if (start > 0) {
+ logger.log(chalk.gray(` \u25b2 ${start} more above`));
+ }
+
+ for (let i = start; i < end; i++) {
+ const wf = category.items[i];
+ const prefix = i === selectedIndex ? chalk.green('\u25b6') : ' ';
+ const pos = chalk.gray(`${i + 1}/${total}`);
+ const sourceTag = wf.source === 'eas' ? chalk.gray(`[${wf.project}] `) : '';
+ const statsStr = `${wf.total} runs, ${successRateColor(wf.successRate)}, ${chalk.red(`${wf.failed}`)} failed`;
+
+ logger.log(` ${prefix} ${pos} ${sourceTag}${wf.name} \u2014 ${statsStr}`);
+ }
+
+ if (end < total) {
+ logger.log(chalk.gray(` \u25bc ${total - end} more below`));
+ }
+
+ logger.log('');
+ logger.log(chalk.gray(' \u2191\u2193 navigate / Enter expand / Esc back'));
+}
+
+function workflowListLineCount(category: CategoryInfo, selectedIndex: number): number {
+ const total = category.items.length;
+ if (total === 0) {
+ // Title + guidance + blank + "no workflows" + blank + hint
+ return 6;
+ }
+ const { start, end } = getScrollWindow(total, selectedIndex);
+ const visibleItems = end - start;
+ const hasAbove = start > 0 ? 1 : 0;
+ const hasBelow = end < total ? 1 : 0;
+ // Title + guidance + blank + above? + items + below? + blank + hint
+ return 3 + hasAbove + visibleItems + hasBelow + 2;
+}
+
+function renderDailyTrend(dailyRates: DailyRate[]): number {
+ let lines = 0;
+ const barWidth = 15;
+
+ logger.log(chalk.bold(' Daily Trend'));
+ lines++;
+
+ let prevRate: number | null = null;
+ for (const day of dailyRates) {
+ const rate = day.total > 0 ? (day.successful / day.total) * 100 : -1;
+
+ if (rate < 0) {
+ logger.log(` ${chalk.gray(day.label)} ${chalk.gray('\u2014 no data')}`);
+ lines++;
+ continue;
+ }
+
+ const filled = Math.round((rate / 100) * barWidth);
+ const barColor = rate >= 90 ? chalk.green : rate >= 75 ? chalk.yellow : chalk.red;
+ const bar = barColor('\u2588'.repeat(filled)) + chalk.gray('\u2591'.repeat(barWidth - filled));
+
+ let trend = ' ';
+ if (prevRate !== null) {
+ const diff = rate - prevRate;
+ if (diff > 2) trend = chalk.green('\u2191');
+ else if (diff < -2) trend = chalk.red('\u2193');
+ else trend = chalk.gray('\u2192');
+ }
+
+ logger.log(
+ ` ${day.label} ${bar} ${successRateColor(rate)} ${trend} ${chalk.gray(`(${day.total} runs)`)}`
+ );
+ lines++;
+ prevRate = rate;
+ }
+
+ logger.log('');
+ lines++;
+ return lines;
+}
+
+function renderDetailView(wf: WorkflowItem, showFailed: boolean): number {
+ let lines = 0;
+
+ const sourceTag = wf.source === 'eas' ? chalk.gray(` [${wf.project}]`) : '';
+ logger.log(chalk.bold(`${wf.name}${sourceTag}`));
+ lines++;
+ logger.log('');
+ lines++;
+
+ logger.log(
+ ` Total: ${wf.total} ${chalk.green(`${wf.success} success`)} ${chalk.red(`${wf.failed} failed`)} ${chalk.gray(`${wf.cancelled} cancelled`)} ${chalk.gray(`${wf.other} other`)}`
+ );
+ lines++;
+ logger.log(` Success rate: ${successRateColor(wf.successRate)}`);
+ lines++;
+ logger.log('');
+ lines++;
+
+ lines += renderDailyTrend(wf.dailyRates);
+
+ if (showFailed) {
+ logger.log(chalk.bold(' Recent Failed Runs'));
+ lines++;
+ if (wf.failedRuns.length === 0) {
+ logger.log(chalk.gray(' No failed runs.'));
+ lines++;
+ } else {
+ for (const run of wf.failedRuns) {
+ const commit = run.commitMessage ? chalk.gray(` \u2014 ${run.commitMessage}`) : '';
+ const url = run.url ? chalk.gray(` ${run.url}`) : '';
+ logger.log(` ${chalk.red('\u2717')} ${run.date}${commit}${url}`);
+ lines++;
+ }
+ }
+ logger.log('');
+ lines++;
+ }
+
+ const failedToggle = showFailed ? chalk.yellow('(f)ailed runs') : chalk.green('(f)ailed runs');
+ const parts = [failedToggle, chalk.green('(l)ogs'), chalk.gray('Esc back')];
+ logger.log(chalk.gray(' ') + parts.join(chalk.gray(' / ')));
+ lines++;
+
+ return lines;
+}
+
+// --- Log inspection ---
+
+async function inspectLatestFailureLogs(wf: WorkflowItem): Promise<void> {
+ if (wf.failedRuns.length === 0) {
+ logger.log(chalk.gray(' No failed runs to inspect.'));
+ return;
+ }
+
+ const latestFailed = wf.failedRuns[0];
+
+ if (wf.source === 'github') {
+ logger.log(chalk.gray(` Downloading logs for run ${latestFailed.id}...`));
+
+ let jobs;
+ try {
+ jobs = await getJobsForWorkflowRunAsync(latestFailed.id as number);
+ } catch (error: any) {
+ logger.warn(` Failed to fetch jobs: ${error.message}`);
+ return;
+ }
+
+ const failedJobs = jobs.filter((j) => j.conclusion === 'failure');
+ if (!failedJobs.length) {
+ logger.log(chalk.gray(' No failed jobs found.'));
+ return;
+ }
+
+ for (const job of failedJobs) {
+ logger.log(`\n ${chalk.red('\u2717')} ${chalk.bold(job.name)}`);
+
+ const failedSteps = (job.steps ?? []).filter((s) => s.conclusion === 'failure');
+ for (const step of failedSteps) {
+ logger.log(` Step: ${chalk.red(step.name)}`);
+ }
+
+ const rawLog = await downloadJobLogsAsync(job.id);
+ if (!rawLog) {
+ logger.warn(` Could not download log.`);
+ continue;
+ }
+
+ const log = stripLogTimestamps(rawLog);
+ const snippets = extractErrorSnippets(log);
+ if (snippets.length) {
+ printLogSnippets(snippets);
+ }
+ }
+ } else {
+ // EAS workflow logs
+ if (!wf.project) {
+ logger.log(chalk.gray(' No project info available for this EAS workflow.'));
+ return;
+ }
+
+ const projectDirs = await findEASProjectDirs();
+ const projectDir = projectDirs.find((d) => path.basename(d) === wf.project);
+ if (!projectDir) {
+ logger.log(chalk.gray(` Could not find project directory for ${wf.project}.`));
+ return;
+ }
+
+ const easEnv = {
+ ...process.env,
+ EXPO_NO_DOCTOR: 'true',
+ EAS_BUILD_PROFILE: process.env.EAS_BUILD_PROFILE ?? 'release-client',
+ };
+
+ logger.log(chalk.gray(` Fetching run details for ${latestFailed.id}...`));
+
+ const runDetails = await runEASCommand(
+ ['workflow:view', String(latestFailed.id), '--json', '--non-interactive'],
+ projectDir,
+ easEnv
+ );
+
+ if (!runDetails?.jobs) {
+ logger.warn(` Could not fetch run details.`);
+ return;
+ }
+
+ const failedJobs = runDetails.jobs.filter(
+ (j: any) => (j.status ?? '').toUpperCase() === 'FAILURE'
+ );
+
+ if (!failedJobs.length) {
+ logger.log(chalk.gray(' No failed jobs found.'));
+ return;
+ }
+
+ if (runDetails.logURL) {
+ logger.log(` ${chalk.gray(runDetails.logURL)}`);
+ }
+
+ for (const job of failedJobs) {
+ const jobName = job.name ?? job.key ?? 'unknown';
+ logger.log(`\n ${chalk.red('\u2717')} ${chalk.bold(jobName)}`);
+
+ logger.log(chalk.gray(` Downloading log for "${jobName}"...`));
+
+ let rawLog: string | null = null;
+ try {
+ const result = await spawnAsync(
+ 'eas',
+ ['workflow:logs', job.id, '--all-steps', '--non-interactive'],
+ { cwd: projectDir, env: easEnv }
+ );
+ rawLog = result.stdout;
+ } catch {
+ logger.warn(` Could not download log for this job.`);
+ continue;
+ }
+
+ if (!rawLog?.trim()) {
+ logger.warn(` Log is empty.`);
+ continue;
+ }
+
+ const log = stripLogTimestamps(rawLog);
+ const snippets = extractErrorSnippets(log);
+ if (snippets.length) {
+ printLogSnippets(snippets);
+ }
+ }
+ }
+}
+
+// --- Interactive detail view ---
+
+async function showDetailInteractive(wf: WorkflowItem): Promise<void> {
+ let showFailed = false;
+ let lastRenderedCount = 0;
+
+ const render = () => {
+ if (lastRenderedCount > 0) clearLines(lastRenderedCount);
+ lastRenderedCount = renderDetailView(wf, showFailed);
+ };
+
+ render();
+
+ while (true) {
+ const key = await waitForKey(['escape', 'f', 'l']);
+
+ if (key === 'escape') {
+ if (showFailed) {
+ showFailed = false;
+ render();
+ } else {
+ clearLines(lastRenderedCount);
+ return;
+ }
+ } else if (key === 'f') {
+ showFailed = !showFailed;
+ render();
+ } else if (key === 'l') {
+ await inspectLatestFailureLogs(wf);
+ logger.log('');
+ logger.log(chalk.gray(' Press any key to continue...'));
+ await waitForKey(['escape', 'f', 'l', 'enter', 'up', 'down']);
+ // After viewing logs, reset since output has scrolled
+ lastRenderedCount = 0;
+ render();
+ }
+ }
+}
+
+// --- Interactive dashboard ---
+
+async function interactiveDashboard(options: ActionOptions, auth: AuthStatus): Promise<void> {
+ const branch = options.branch;
+ const [startDate, endDate, weekNum, weekFriday] = parseDateRange(options.week);
+ const startStr = startDate.toISOString().slice(0, 10);
+ const endStr = weekFriday.toISOString().slice(0, 10);
+
+ const spinner = ora('Loading CI/CD metrics...').start();
+
+ let [ghItems, easItems] = await Promise.all([
+ auth.github
+ ? fetchGitHubWorkflowItems(branch, startDate, endDate).catch((err) => {
+ spinner.warn(`GitHub Actions fetch failed: ${err.message}`);
+ return [] as WorkflowItem[];
+ })
+ : Promise.resolve([] as WorkflowItem[]),
+ auth.eas
+ ? fetchEASWorkflowItems(startDate, endDate).catch((err) => {
+ spinner.warn(`Expo Workflows fetch failed: ${err.message}`);
+ return [] as WorkflowItem[];
+ })
+ : Promise.resolve([] as WorkflowItem[]),
+ ]);
+
+ spinner.stop();
+
+ let allItems = [...ghItems, ...easItems];
+ let categories = buildCategories(allItems);
+
+ showCompactStatus(weekNum, startStr, endStr, branch, ghItems, easItems, categories);
+
+ if (allItems.length === 0) {
+ return;
+ }
+
+ // Category selection
+ let catIndex = 0;
+ showCategoryList(categories, catIndex);
+
+ while (true) {
+ const key = await waitForKey(['up', 'down', 'enter', 'escape']);
+
+ if (key === 'escape') {
+ clearLines(categoryLineCount(categories));
+ return;
+ } else if (key === 'up') {
+ if (catIndex > 0) {
+ clearLines(categoryLineCount(categories));
+ catIndex--;
+ showCategoryList(categories, catIndex);
+ }
+ } else if (key === 'down') {
+ if (catIndex < categories.length - 1) {
+ clearLines(categoryLineCount(categories));
+ catIndex++;
+ showCategoryList(categories, catIndex);
+ }
+ } else if (key === 'enter') {
+ clearLines(categoryLineCount(categories));
+ const selectedCategory = categories[catIndex];
+
+ // Workflow browsing within category
+ let wfIndex = 0;
+ showWorkflowList(selectedCategory, wfIndex);
+
+ let lastLineCount = workflowListLineCount(selectedCategory, wfIndex);
+ let backToCategories = false;
+
+ while (true) {
+ const wfKey = await waitForKey(['up', 'down', 'enter', 'escape', 'r']);
+
+ if (wfKey === 'escape') {
+ clearLines(lastLineCount);
+ backToCategories = true;
+ break;
+ } else if (wfKey === 'up') {
+ if (wfIndex > 0) {
+ clearLines(lastLineCount);
+ wfIndex--;
+ showWorkflowList(selectedCategory, wfIndex);
+ lastLineCount = workflowListLineCount(selectedCategory, wfIndex);
+ }
+ } else if (wfKey === 'down') {
+ if (wfIndex < selectedCategory.items.length - 1) {
+ clearLines(lastLineCount);
+ wfIndex++;
+ showWorkflowList(selectedCategory, wfIndex);
+ lastLineCount = workflowListLineCount(selectedCategory, wfIndex);
+ }
+ } else if (wfKey === 'enter' && selectedCategory.items.length > 0) {
+ clearLines(lastLineCount);
+ const wf = selectedCategory.items[wfIndex];
+
+ await showDetailInteractive(wf);
+
+ // Re-show workflow list
+ showWorkflowList(selectedCategory, wfIndex);
+ lastLineCount = workflowListLineCount(selectedCategory, wfIndex);
+ } else if (wfKey === 'r') {
+ clearLines(lastLineCount);
+ const reloadSpinner = ora('Reloading data...').start();
+
+ const [newGhItems, newEasItems] = await Promise.all([
+ auth.github
+ ? fetchGitHubWorkflowItems(branch, startDate, endDate).catch(
+ () => [] as WorkflowItem[]
+ )
+ : Promise.resolve([] as WorkflowItem[]),
+ auth.eas
+ ? fetchEASWorkflowItems(startDate, endDate).catch(() => [] as WorkflowItem[])
+ : Promise.resolve([] as WorkflowItem[]),
+ ]);
+
+ reloadSpinner.stop();
+
+ ghItems = newGhItems;
+ easItems = newEasItems;
+ allItems = [...ghItems, ...easItems];
+ categories = buildCategories(allItems);
+ showCompactStatus(weekNum, startStr, endStr, branch, ghItems, easItems, categories);
+ if (allItems.length === 0) {
+ return;
+ }
+ catIndex = 0;
+ backToCategories = true;
+ break;
+ }
+ }
+
+ if (backToCategories) {
+ showCategoryList(categories, catIndex);
+ }
+ }
+ }
+}
+
+async function runEASCommand(
+ args: string[],
+ projectDir: string,
+  env: Record<string, string | undefined>
+): Promise<any> {
+ try {
+ const result = await spawnAsync('eas', args, { cwd: projectDir, env });
+ return JSON.parse(result.stdout);
+ } catch {
+ return null;
+ }
+}
+
+// --- Entry point ---
+
+async function action(options: ActionOptions) {
+ const auth = await checkAuth();
+
+ if (!auth.github && !auth.eas) {
+ printAuthStatus(auth);
+ logger.error('No services authenticated. Please log in to at least one service.');
+ return;
+ }
+
+ printAuthStatus(auth);
+
+ await interactiveDashboard(options, auth);
+ logger.log('');
+}
+
+export default (program: Command) => {
+ program
+ .command('ci-inspect')
+ .alias('ci', 'cii')
+ .description(
+ 'Interactive CI/CD dashboard for GitHub Actions and Expo Workflows. ' +
+ 'Shows workflow health categories, success rates, daily trends, and failure inspection. ' +
+ 'Navigate with arrow keys, Enter to drill down, Esc to go back.'
+ )
+    .option('-b, --branch <branch>', 'Branch to check', 'main')
+ .option(
+      '-w, --week <week>',
+ 'ISO week number (1-53), or "last"/"prev" for previous week. Defaults to current week.'
+ )
+ .asyncAction(action);
+};
diff --git a/tools/src/commands/CIStatusCommand.ts b/tools/src/commands/CIStatusCommand.ts
deleted file mode 100644
index 22352c81c4d94c..00000000000000
--- a/tools/src/commands/CIStatusCommand.ts
+++ /dev/null
@@ -1,1386 +0,0 @@
-import { Command } from '@expo/commander';
-import spawnAsync from '@expo/spawn-async';
-import chalk from 'chalk';
-import Table from 'cli-table3';
-import { glob } from 'glob';
-import path from 'path';
-
-import { EXPO_DIR } from '../Constants';
-import { getAuthenticatedUserAsync } from '../GitHub';
-import {
- downloadJobLogsAsync,
- getJobsForWorkflowRunAsync,
- getWorkflowRunsForRepoAsync,
-} from '../GitHubActions';
-import logger from '../Logger';
-
-type ActionOptions = {
- branch: string;
- week?: string;
- inspect?: string;
-};
-
-type AuthStatus = {
- github: boolean;
- githubUser: string | null;
- eas: boolean;
- easUser: string | null;
-};
-
-/**
- * Get the ISO week number for a date.
- */
-function getISOWeekNumber(date: Date): number {
- const d = new Date(Date.UTC(date.getFullYear(), date.getMonth(), date.getDate()));
- const dayNum = d.getUTCDay() || 7;
- d.setUTCDate(d.getUTCDate() + 4 - dayNum);
- const yearStart = new Date(Date.UTC(d.getUTCFullYear(), 0, 1));
- return Math.ceil(((d.getTime() - yearStart.getTime()) / 86400000 + 1) / 7);
-}
-
-/**
- * Get the Monday at 00:00:00 for a given ISO week number and year.
- */
-function getMondayOfWeek(week: number, year: number): Date {
- // Jan 4 is always in ISO week 1
- const jan4 = new Date(year, 0, 4);
- const dayOfWeek = jan4.getDay() || 7; // 1=Mon..7=Sun
- const week1Monday = new Date(jan4);
- week1Monday.setDate(jan4.getDate() - (dayOfWeek - 1));
- const monday = new Date(week1Monday);
- monday.setDate(week1Monday.getDate() + (week - 1) * 7);
- monday.setHours(0, 0, 0, 0);
- return monday;
-}
-
-/**
- * Parse --week flag. Accepts a week number (e.g. "5") or "last"/"prev" for previous week.
- * Returns [startDate, endDate, weekNumber].
- */
-function parseDateRange(weekOption?: string): [Date, Date, number] {
- const now = new Date();
- const currentYear = now.getFullYear();
- const currentWeek = getISOWeekNumber(now);
-
- let targetWeek: number;
- if (!weekOption) {
- targetWeek = currentWeek;
- } else if (weekOption === 'last' || weekOption === 'prev') {
- targetWeek = currentWeek - 1;
- } else {
- targetWeek = parseInt(weekOption, 10);
- if (isNaN(targetWeek) || targetWeek < 1 || targetWeek > 53) {
- logger.error(`Invalid week number: ${weekOption}. Use 1-53, "last", or "prev".`);
- process.exit(1);
- }
- }
-
- const monday = getMondayOfWeek(targetWeek, currentYear);
- const friday = new Date(monday);
- friday.setDate(monday.getDate() + 4);
- friday.setHours(23, 59, 59, 999);
-
- // For past weeks, always use Friday EOD. For current/future, cap at now.
- const endDate = friday < now ? friday : now;
-
- return [monday, endDate, targetWeek];
-}
-
-function conclusionColor(conclusion: string | null, status: string): string {
- if (status === 'in_progress' || status === 'queued') {
- return chalk.yellow(status);
- }
- if (conclusion === 'success') {
- return chalk.green('success');
- }
- if (conclusion === 'failure') {
- return chalk.red('failure');
- }
- if (conclusion === 'cancelled') {
- return chalk.gray('cancelled');
- }
- return chalk.gray(conclusion ?? status);
-}
-
-function easStatusColor(status: string): string {
- switch (status) {
- case 'FAILURE':
- case 'ERRORED':
- return chalk.red(status);
- case 'IN_PROGRESS':
- case 'PENDING':
- return chalk.yellow(status);
- case 'SUCCESS':
- case 'FINISHED':
- return chalk.green(status);
- case 'CANCELED':
- return chalk.gray(status);
- default:
- return chalk.gray(status);
- }
-}
-
-interface WorkflowStats {
- name: string;
- total: number;
- success: number;
- failed: number;
- cancelled: number;
- other: number;
- successRate: number;
-}
-
-interface DailyRate {
- label: string; // Mon, Tue, etc.
- date: string; // YYYY-MM-DD
- total: number;
- successful: number;
-}
-
-interface SectionResult {
- source: string;
- totalRuns: number;
- successRate: number;
- failedRuns: number;
- workflows: WorkflowStats[];
- dailyRates: DailyRate[];
-}
-
-/**
- * Compute per-day success rates from a list of runs.
- * Each run must have a date field and a conclusion/status field.
- * Returns an entry for each day Mon-Fri within the date range.
- */
-function computeDailyRates(
- runs: any[],
- startDate: Date,
- getTimestamp: (run: any) => string | undefined,
- isSuccess: (run: any) => boolean,
- isConcluded: (run: any) => boolean
-): DailyRate[] {
- const dayNames = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri'];
- const monday = new Date(startDate);
- monday.setHours(0, 0, 0, 0);
-
- const dailyRates: DailyRate[] = [];
-
- for (let i = 0; i < 5; i++) {
- const dayStart = new Date(monday);
- dayStart.setDate(monday.getDate() + i);
- dayStart.setHours(0, 0, 0, 0);
- const dayEnd = new Date(dayStart);
- dayEnd.setHours(23, 59, 59, 999);
-
- const dayRuns = runs.filter((r) => {
- const ts = getTimestamp(r);
- if (!ts) return false;
- const d = new Date(ts);
- return d >= dayStart && d <= dayEnd;
- });
-
- // Only count concluded runs for rate calculation
- const concluded = dayRuns.filter(isConcluded);
- const successful = concluded.filter(isSuccess);
-
- dailyRates.push({
- label: dayNames[i],
- date: dayStart.toISOString().split('T')[0],
- total: concluded.length,
- successful: successful.length,
- });
- }
-
- return dailyRates;
-}
-
-/**
- * Count run stats from a list of runs using a classifier function.
- * The classifier maps a run to one of: 'success', 'failure', 'cancelled', or 'other'.
- */
-function countRunStats(
- runs: any[],
- classify: (run: any) => 'success' | 'failure' | 'cancelled' | 'other'
-): {
- total: number;
- success: number;
- failed: number;
- cancelled: number;
- other: number;
- successRate: number;
-} {
- let success = 0,
- failed = 0,
- cancelled = 0,
- other = 0;
- for (const run of runs) {
- const c = classify(run);
- if (c === 'success') success++;
- else if (c === 'failure') failed++;
- else if (c === 'cancelled') cancelled++;
- else other++;
- }
- const total = runs.length;
- const effective = success + cancelled;
- const successRate = total > 0 ? (effective / total) * 100 : 0;
- return { total, success, failed, cancelled, other, successRate };
-}
-
-/** Classify a GitHub Actions run. */
-function classifyGitHubRun(run: any): 'success' | 'failure' | 'cancelled' | 'other' {
- if (run.conclusion === 'success') return 'success';
- if (run.conclusion === 'failure') return 'failure';
- if (run.conclusion === 'cancelled') return 'cancelled';
- return 'other';
-}
-
-/** Classify an EAS workflow run. */
-function classifyEASRun(run: any): 'success' | 'failure' | 'cancelled' | 'other' {
- const status = (run.status ?? '').toUpperCase();
- if (status === 'SUCCESS' || status === 'FINISHED') return 'success';
- if (status === 'FAILURE' || status === 'ERRORED') return 'failure';
- if (status === 'CANCELED') return 'cancelled';
- return 'other';
-}
-
-/**
- * Build a workflow breakdown table and return per-workflow stats.
- * Groups runs by name using the provided getName function,
- * classifies each run, and prints the table.
- */
-function buildWorkflowBreakdown(
- runs: any[],
- getName: (run: any) => string,
- classify: (run: any) => 'success' | 'failure' | 'cancelled' | 'other'
-): WorkflowStats[] {
- const workflowMap = new Map();
- for (const run of runs) {
- const name = getName(run);
- if (!workflowMap.has(name)) {
- workflowMap.set(name, []);
- }
- workflowMap.get(name)!.push(run);
- }
-
- const table = new Table({
- head: ['Workflow', 'Total', 'Success', 'Failed', 'Cancelled', 'Other', 'Success Rate'],
- style: { head: ['cyan'] },
- });
-
- const sorted = [...workflowMap.entries()].sort((a, b) => b[1].length - a[1].length);
- const stats: WorkflowStats[] = [];
-
- for (const [name, wfRuns] of sorted) {
- const s = countRunStats(wfRuns, classify);
- stats.push({ name, ...s });
- table.push([
- name,
- String(s.total),
- String(s.success),
- s.failed > 0 ? chalk.red(String(s.failed)) : '0',
- String(s.cancelled),
- String(s.other),
- successRateColor(s.successRate),
- ]);
- }
-
- logger.log(table.toString());
- logger.log('');
-
- return stats;
-}
-
-function successRateColor(rate: number): string {
- const pct = `${rate.toFixed(1)}%`;
- if (rate >= 90) return chalk.green(pct);
- if (rate >= 75) return chalk.yellow(pct);
- return chalk.red(pct);
-}
-
-async function printGitHubActionsStatus(
- branch: string,
- startDate: Date,
- endDate: Date
-): Promise<SectionResult | null> {
- logger.info(`\n${chalk.bold('GitHub Actions')} (branch: ${branch})\n`);
-
- logger.info(chalk.gray('Fetching workflow runs from GitHub Actions...'));
-
- let runs;
- try {
- runs = await getWorkflowRunsForRepoAsync(branch, { startDate, endDate });
- } catch (error: any) {
- logger.error(`Failed to fetch GitHub Actions runs: ${error.message}`);
- logger.warn('Make sure GITHUB_TOKEN is set in your environment.');
- return null;
- }
-
- logger.info(chalk.gray(`Fetched ${runs.length} runs, processing...`));
-
- if (!runs.length) {
- logger.log('No workflow runs found.');
- return null;
- }
-
- // --- Latest run per workflow (current status) ---
- const latestByWorkflow = new Map();
- for (const run of runs) {
- if (!latestByWorkflow.has(run.name!)) {
- latestByWorkflow.set(run.name!, run);
- }
- }
-
- const sortedLatest = [...latestByWorkflow.values()].sort((a, b) =>
- (a.name ?? '').localeCompare(b.name ?? '')
- );
-
- const statusTable = new Table({
- head: ['Workflow', 'Status', 'URL'],
- style: { head: ['cyan'] },
- });
-
- let failing = 0;
- let inProgress = 0;
- let passing = 0;
-
- for (const run of sortedLatest) {
- const status = conclusionColor(run.conclusion, run.status!);
- statusTable.push([run.name ?? 'unknown', status, run.html_url]);
-
- if (run.conclusion === 'failure') {
- failing++;
- } else if (run.status === 'in_progress' || run.status === 'queued') {
- inProgress++;
- } else if (run.conclusion === 'success') {
- passing++;
- }
- }
-
- logger.log(statusTable.toString());
- logger.log(
- `\n ${chalk.red(`${failing} failing`)}, ${chalk.yellow(`${inProgress} in progress`)}, ${chalk.green(`${passing} passing`)}\n`
- );
-
- // --- Workflow breakdown (matching github-metrics CI output) ---
- logger.info(chalk.gray('Computing workflow breakdown...\n'));
-
- const overall = countRunStats(runs, classifyGitHubRun);
-
- logger.info(` ${chalk.bold('CI/CD Success Rate')}\n`);
- logger.log(` Total workflow runs: ${overall.total}`);
- logger.log(` Successful: ${chalk.green(String(overall.success))}`);
- logger.log(` Failed: ${chalk.red(String(overall.failed))}`);
- logger.log(` Cancelled: ${chalk.gray(String(overall.cancelled))}`);
- logger.log(` Other: ${chalk.gray(String(overall.other))}`);
- logger.log(
- ` Success rate: ${successRateColor(overall.successRate)} (cancelled counted as success)\n`
- );
-
- const workflowStats = buildWorkflowBreakdown(runs, (r) => r.name ?? 'unknown', classifyGitHubRun);
-
- // Compute daily rates for trend
- const dailyRates = computeDailyRates(
- runs,
- startDate,
- (r) => r.created_at ?? r.run_started_at,
- (r) => {
- const c = classifyGitHubRun(r);
- return c === 'success' || c === 'cancelled';
- },
- (r) => classifyGitHubRun(r) !== 'other'
- );
-
- return {
- source: 'GitHub Actions',
- totalRuns: overall.total,
- successRate: overall.successRate,
- failedRuns: overall.failed,
- workflows: workflowStats,
- dailyRates,
- };
-}
-
-async function findEASProjectDirs(): Promise<string[]> {
- const pattern = 'apps/*/.eas/workflows';
- const matches = await glob(pattern, { cwd: EXPO_DIR });
- return matches.map((match) => path.resolve(EXPO_DIR, path.dirname(path.dirname(match))));
-}
-
-/**
- * Fetches workflow runs from EAS CLI for a given project.
- *
- * **Limitation:** The EAS CLI (`eas workflow:runs`) returns at most 100 runs across ALL
- * workflows in the project, with no server-side date filtering. This means:
- * - For projects with many workflows or frequent runs, older runs fall off the window.
- * - When using `--week` to inspect past weeks, failures may no longer be visible if they've
- * been pushed out by newer runs.
- * - Unlike GitHub Actions (which supports server-side `created` date filtering with pagination),
- * EAS date filtering is done client-side after fetching the capped result set.
- */
-async function fetchEASRuns(
- projectDir: string,
- projectName: string,
-  env: Record<string, string | undefined>
-): Promise<any[]> {
- let output: string;
- try {
- const result = await spawnAsync('eas', ['workflow:runs', '--json', '--limit', '100'], {
- cwd: projectDir,
- env,
- });
- output = result.stdout;
- } catch (error: any) {
- const stderr = error.stderr?.trim();
- logger.warn(
- `Failed to fetch Expo Workflow runs for ${projectName}: ${stderr || error.message}`
- );
- return [];
- }
-
- try {
- const runs = JSON.parse(output);
- return Array.isArray(runs) ? runs : [];
- } catch {
- logger.warn(`Failed to parse EAS CLI output for ${projectName}.`);
- return [];
- }
-}
-
-async function printExpoWorkflowsStatus(startDate: Date, endDate: Date): Promise<SectionResult[]> {
- logger.info(`\n${chalk.bold('Expo Workflows')}\n`);
-
- logger.info(chalk.gray('Discovering EAS projects...'));
-
- let projectDirs: string[];
- try {
- projectDirs = await findEASProjectDirs();
- } catch (error: any) {
- logger.warn(`Failed to discover EAS projects: ${error.message}`);
- return [];
- }
-
- if (!projectDirs.length) {
- logger.log('No EAS workflow projects found under apps/.');
- return [];
- }
-
- logger.info(chalk.gray(`Found ${projectDirs.length} project(s) with workflows`));
-
- const easEnv = {
- ...process.env,
- EXPO_NO_DOCTOR: 'true',
- EAS_BUILD_PROFILE: process.env.EAS_BUILD_PROFILE ?? 'release-client',
- };
-
- // --- Failures table ---
- const failuresTable = new Table({
- head: ['Project', 'Workflow', 'Status', 'Run ID'],
- style: { head: ['cyan'] },
- });
-
- let hasFailures = false;
- const allProjectRuns: { project: string; runs: any[] }[] = [];
-
- for (const projectDir of projectDirs) {
- const projectName = path.basename(projectDir);
-
- // Fetch all runs for the breakdown
- logger.info(chalk.gray(`Fetching workflow runs for ${projectName}...`));
- const allRuns = await fetchEASRuns(projectDir, projectName, easEnv);
-
- // Filter to date range
- const runsInRange = allRuns.filter((r) => {
- const ts = r.startedAt ?? r.createdAt ?? r.created_at;
- if (!ts) return false;
- const d = new Date(ts);
- return d >= startDate && d <= endDate;
- });
-
- if (runsInRange.length) {
- allProjectRuns.push({ project: projectName, runs: runsInRange });
- }
-
- // Latest failure per workflow for the failures table
- const failedRuns = runsInRange.filter((r) => (r.status ?? '').toUpperCase() === 'FAILURE');
-
- if (failedRuns.length) {
- hasFailures = true;
- const latestByWorkflow = new Map();
- for (const run of failedRuns) {
- const name = run.workflowName ?? run.workflow_name ?? 'unknown';
- if (!latestByWorkflow.has(name)) {
- latestByWorkflow.set(name, run);
- }
- }
- for (const [workflowName, run] of [...latestByWorkflow.entries()].sort((a, b) =>
- a[0].localeCompare(b[0])
- )) {
- failuresTable.push([
- projectName,
- workflowName,
- easStatusColor(run.status ?? 'UNKNOWN'),
- run.id ?? '',
- ]);
- }
- }
- }
-
- if (hasFailures) {
- logger.log(failuresTable.toString());
- } else {
- logger.log('No failing Expo Workflow runs found.');
- }
-
- // --- Workflow breakdown (matching github-metrics style) ---
- const results: SectionResult[] = [];
-
- if (allProjectRuns.length > 0) {
- logger.info(chalk.gray('\nComputing workflow breakdown...\n'));
-
- for (const { project, runs } of allProjectRuns) {
- const overall = countRunStats(runs, classifyEASRun);
-
- logger.info(` ${chalk.bold(`${project} — CI/CD Success Rate`)}\n`);
- logger.log(` Total workflow runs: ${overall.total}`);
- logger.log(` Successful: ${chalk.green(String(overall.success))}`);
- logger.log(` Failed: ${chalk.red(String(overall.failed))}`);
- logger.log(` Cancelled: ${chalk.gray(String(overall.cancelled))}`);
- logger.log(` Other: ${chalk.gray(String(overall.other))}`);
- logger.log(
- ` Success rate: ${successRateColor(overall.successRate)} (cancelled counted as success)\n`
- );
-
- const workflowStats = buildWorkflowBreakdown(
- runs,
- (r) => r.workflowName ?? r.workflow_name ?? 'unknown',
- classifyEASRun
- );
-
- // Compute daily rates for trend
- const dailyRates = computeDailyRates(
- runs,
- startDate,
- (r) => r.startedAt ?? r.createdAt ?? r.created_at,
- (r) => {
- const c = classifyEASRun(r);
- return c === 'success' || c === 'cancelled';
- },
- (r) => classifyEASRun(r) !== 'other'
- );
-
- results.push({
- source: `EAS: ${project}`,
- totalRuns: overall.total,
- successRate: overall.successRate,
- failedRuns: overall.failed,
- workflows: workflowStats,
- dailyRates,
- });
- }
- }
-
- return results;
-}
-
-function printWeekTrend(results: SectionResult[]): void {
- logger.log('─'.repeat(40));
- logger.info(`\n${chalk.bold('Week Trend')}\n`);
-
- // Merge daily rates across all sections
-  const mergedDays = new Map<string, { label: string; total: number; successful: number }>();
-
- for (const result of results) {
- for (const day of result.dailyRates) {
- const existing = mergedDays.get(day.date) ?? { label: day.label, total: 0, successful: 0 };
- existing.total += day.total;
- existing.successful += day.successful;
- mergedDays.set(day.date, existing);
- }
- }
-
- const sortedDays = [...mergedDays.entries()].sort((a, b) => a[0].localeCompare(b[0]));
-
- const barWidth = 15;
- let prevRate: number | null = null;
- const rates: number[] = [];
-
- for (const [, day] of sortedDays) {
- const rate = day.total > 0 ? (day.successful / day.total) * 100 : -1;
-
- if (rate < 0) {
- // No data for this day
- logger.log(` ${chalk.gray(day.label)} ${chalk.gray('— no data')}`);
- continue;
- }
-
- rates.push(rate);
-
- const filled = Math.round((rate / 100) * barWidth);
- const barColor = rate >= 90 ? chalk.green : rate >= 75 ? chalk.yellow : chalk.red;
- const bar = barColor('█'.repeat(filled)) + chalk.gray('░'.repeat(barWidth - filled));
-
- let trend = ' ';
- if (prevRate !== null) {
- const diff = rate - prevRate;
- if (diff > 2) {
- trend = chalk.green('↑');
- } else if (diff < -2) {
- trend = chalk.red('↓');
- } else {
- trend = chalk.gray('→');
- }
- }
-
- const rateStr = successRateColor(rate);
-
- logger.log(
- ` ${day.label} ${bar} ${rateStr} ${trend} ${chalk.gray(`(${day.total} runs)`)}`
- );
- prevRate = rate;
- }
-
- // Overall trend line
- if (rates.length >= 2) {
- const first = rates[0];
- const last = rates[rates.length - 1];
- const diff = last - first;
- const sign = diff >= 0 ? '+' : '';
- const trendColor = diff > 2 ? chalk.green : diff < -2 ? chalk.red : chalk.gray;
- const trendLabel = diff > 2 ? 'Improving' : diff < -2 ? 'Declining' : 'Stable';
- logger.log(
- `\n ${trendColor(`${trendLabel} (${sign}${diff.toFixed(1)}% from ${sortedDays[0][1].label} to ${sortedDays[sortedDays.length - 1][1].label})`)}`
- );
- }
-
- logger.log('');
-}
-
-async function checkAuth(): Promise<AuthStatus> {
- const status: AuthStatus = { github: false, githubUser: null, eas: false, easUser: null };
-
- logger.info(chalk.gray('Checking GitHub authentication...'));
- if (process.env.GITHUB_TOKEN) {
- try {
- const user = await getAuthenticatedUserAsync();
- status.github = true;
- status.githubUser = user.login;
- } catch {
- // Token exists but is invalid
- }
- }
-
- logger.info(chalk.gray('Checking EAS authentication...'));
- try {
- const result = await spawnAsync('eas', ['whoami'], {
- env: { ...process.env, EXPO_NO_DOCTOR: 'true' },
- });
- const firstLine = result.stdout.trim().split('\n')[0].trim();
- if (firstLine) {
- status.eas = true;
- status.easUser = firstLine;
- }
- } catch {
- // Not logged in or eas not installed
- }
-
- return status;
-}
-
-function printAuthStatus(auth: AuthStatus): void {
- const gh = auth.github
- ? `${chalk.green('✓')} ${auth.githubUser}`
- : `${chalk.red('✗')} not authenticated — run ${chalk.cyan('export GITHUB_TOKEN="$(gh auth token)"')}`;
- const eas = auth.eas
- ? `${chalk.green('✓')} ${auth.easUser}`
- : `${chalk.red('✗')} not authenticated — run ${chalk.cyan('eas login')}`;
- logger.log(` GitHub: ${gh} | EAS: ${eas}\n`);
-}
-
-/**
- * Extract the most relevant error lines from a job log.
- * Looks for common error patterns and returns surrounding context.
- */
-function extractErrorSnippets(log: string, maxLines: number = 80): string[] {
- const lines = log.split('\n');
- const snippets: string[] = [];
-
- // Patterns that indicate error regions in GitHub Actions logs
- const errorPatterns = [
- /##\[error\]/i,
- /Error:/i,
- /FAIL /,
- /FAILED/,
- /error\[/i,
- /panic:/i,
- /Exception:/i,
- /AssertionError/i,
- /TypeError:/i,
- /ReferenceError:/i,
- /SyntaxError:/i,
- /Build failed/i,
- /Process completed with exit code [^0]/,
- /Command failed/i,
- /fatal:/i,
- ];
-
- // Find lines matching error patterns and grab context around them
-  const errorLineIndices = new Set<number>();
- for (let i = 0; i < lines.length; i++) {
- for (const pattern of errorPatterns) {
- if (pattern.test(lines[i])) {
- // Add surrounding context (5 lines before, 10 after)
- for (let j = Math.max(0, i - 5); j <= Math.min(lines.length - 1, i + 10); j++) {
- errorLineIndices.add(j);
- }
- break;
- }
- }
- }
-
- if (errorLineIndices.size > 0) {
- // Group consecutive lines into snippets
- const sorted = [...errorLineIndices].sort((a, b) => a - b);
- let currentSnippet: string[] = [];
- let lastIdx = -2;
-
- for (const idx of sorted) {
- if (idx !== lastIdx + 1 && currentSnippet.length > 0) {
- snippets.push(currentSnippet.join('\n'));
- currentSnippet = [];
- }
- currentSnippet.push(lines[idx]);
- lastIdx = idx;
- }
- if (currentSnippet.length > 0) {
- snippets.push(currentSnippet.join('\n'));
- }
- }
-
- // If no error patterns found, return the last N lines (tail of log)
- if (snippets.length === 0) {
- const tail = lines.slice(-maxLines).join('\n');
- if (tail.trim()) {
- snippets.push(tail);
- }
- }
-
- // Truncate total output to maxLines
- const joined = snippets.join('\n...\n');
- const joinedLines = joined.split('\n');
- if (joinedLines.length > maxLines) {
- return [joinedLines.slice(0, maxLines).join('\n') + '\n... (truncated)'];
- }
- return snippets;
-}
-
-/**
- * Strip ANSI timestamp prefixes from GitHub Actions log lines.
- * Lines typically look like: "2024-01-15T10:30:45.1234567Z actual content"
- */
-function stripLogTimestamps(log: string): string {
- return log.replace(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z /gm, '');
-}
-
-/**
- * Print log snippets in a bordered box.
- */
-function printLogSnippets(snippets: string[]): void {
- logger.log(`\n ${chalk.bold('Error output:')}`);
- logger.log(' ┌' + '─'.repeat(70));
- for (let si = 0; si < snippets.length; si++) {
- const indented = snippets[si]
- .split('\n')
- .map((line) => ` │ ${line}`)
- .join('\n');
- logger.log(indented);
- if (si < snippets.length - 1) {
- logger.log(' │ ...');
- }
- }
- logger.log(' └' + '─'.repeat(70));
-}
-
-/**
- * Print a failure pattern summary based on timestamps.
- */
-function printFailurePatternSummary(
- failedRuns: { timestamp: Date }[],
- startDate: Date,
- endDate: Date
-): void {
- if (failedRuns.length <= 1) return;
-
- logger.log('─'.repeat(40));
- logger.info(`\n${chalk.bold('Failure Pattern Summary')}\n`);
- logger.log(` ${failedRuns.length} failures in the date range.`);
-
- const midpoint = new Date((startDate.getTime() + endDate.getTime()) / 2);
- const recentCount = failedRuns.filter((r) => r.timestamp > midpoint).length;
- const earlyCount = failedRuns.length - recentCount;
-
- if (recentCount > earlyCount * 2) {
- logger.log(
- ` ${chalk.red('→')} Failures are ${chalk.red('increasing')} — most failures occurred in the second half of the period.`
- );
- } else if (earlyCount > recentCount * 2) {
- logger.log(
- ` ${chalk.green('→')} Failures are ${chalk.green('decreasing')} — most failures occurred in the first half of the period.`
- );
- } else {
- logger.log(
- ` ${chalk.yellow('→')} Failures are ${chalk.yellow('spread evenly')} across the period.`
- );
- }
- logger.log('');
-}
-
-/**
- * Inspect a GitHub Actions workflow by name.
- * Returns true if a matching workflow was found (even if no failures).
- */
-async function inspectGitHubWorkflow(
- workflowName: string,
- branch: string,
- startDate: Date,
- endDate: Date
-): Promise<boolean> {
- logger.info(chalk.gray('Fetching GitHub Actions workflow runs...'));
-
- let runs;
- try {
- runs = await getWorkflowRunsForRepoAsync(branch, { startDate, endDate });
- } catch (error: any) {
- logger.error(`Failed to fetch GitHub Actions runs: ${error.message}`);
- return false;
- }
-
- // Filter to matching workflow name (case-insensitive partial match)
- const needle = workflowName.toLowerCase();
- const matchingRuns = runs.filter((r: any) => (r.name ?? '').toLowerCase().includes(needle));
-
- if (!matchingRuns.length) {
- return false;
- }
-
- // Get the actual workflow name from the first match
- const actualName = matchingRuns[0].name;
- const workflowRuns = matchingRuns.filter((r: any) => r.name === actualName);
-
- logger.info(`${chalk.gray('Source:')} GitHub Actions\n`);
-
- // Summarize all runs for this workflow
- const failed = workflowRuns.filter((r: any) => r.conclusion === 'failure');
- const succeeded = workflowRuns.filter((r: any) => r.conclusion === 'success');
- const cancelled = workflowRuns.filter((r: any) => r.conclusion === 'cancelled');
-
- logger.log(` Workflow: ${chalk.bold(actualName)}`);
- logger.log(` Total runs: ${workflowRuns.length}`);
- logger.log(
- ` ${chalk.green(`${succeeded.length} passed`)}, ${chalk.red(`${failed.length} failed`)}, ${chalk.gray(`${cancelled.length} cancelled`)}\n`
- );
-
- if (!failed.length) {
- logger.info(chalk.green('No failures found for this workflow.'));
- return true;
- }
-
- // Inspect up to 3 most recent failures
- const recentFailures = failed.slice(0, 3);
- logger.info(chalk.gray(`Inspecting ${recentFailures.length} most recent failure(s)...\n`));
-
- for (const run of recentFailures) {
- const runDate = new Date(run.created_at).toLocaleDateString('en-US', {
- weekday: 'short',
- month: 'short',
- day: 'numeric',
- hour: '2-digit',
- minute: '2-digit',
- });
-
- logger.log('─'.repeat(40));
- logger.log(`\n ${chalk.bold(`Run #${run.run_number}`)} — ${runDate}`);
- logger.log(` ${chalk.gray(run.html_url)}`);
-
- if (run.head_commit?.message) {
- const commitMsg = run.head_commit.message.split('\n')[0];
- logger.log(` Commit: ${chalk.gray(commitMsg)}`);
- }
-
- // Fetch jobs for this run
- logger.info(chalk.gray(` Fetching jobs for run #${run.run_number}...`));
-
- let jobs;
- try {
- jobs = await getJobsForWorkflowRunAsync(run.id);
- } catch (error: any) {
- logger.warn(` Failed to fetch jobs: ${error.message}`);
- continue;
- }
-
- const failedJobs = jobs.filter((j) => j.conclusion === 'failure');
- if (!failedJobs.length) {
- logger.log(` ${chalk.gray('No failed jobs found (run may have been cancelled).')}`);
- continue;
- }
-
- logger.log(` ${chalk.red(`${failedJobs.length} failed job(s)`)}:\n`);
-
- for (const job of failedJobs) {
- logger.log(` ${chalk.red('✗')} ${chalk.bold(job.name)}`);
-
- // Show failed steps
- const failedSteps = (job.steps ?? []).filter((s) => s.conclusion === 'failure');
- if (failedSteps.length) {
- for (const step of failedSteps) {
- logger.log(` Step: ${chalk.red(step.name)}`);
- }
- }
-
- // Download and extract log
- logger.info(chalk.gray(` Downloading log for "${job.name}"...`));
-
- const rawLog = await downloadJobLogsAsync(job.id);
- if (!rawLog) {
- logger.warn(` Could not download log for this job.`);
- continue;
- }
-
- const log = stripLogTimestamps(rawLog);
- const snippets = extractErrorSnippets(log);
- if (snippets.length) {
- printLogSnippets(snippets);
- }
-
- logger.log('');
- }
- }
-
- printFailurePatternSummary(
- failed.map((r: any) => ({ timestamp: new Date(r.created_at) })),
- startDate,
- endDate
- );
-
- return true;
-}
-
-/**
- * Run an EAS CLI command and return parsed JSON, or null on failure.
- */
-async function runEASCommand(
- args: string[],
- projectDir: string,
-  env: Record<string, string | undefined>
-): Promise<any | null> {
- try {
- const result = await spawnAsync('eas', args, { cwd: projectDir, env });
- return JSON.parse(result.stdout);
- } catch {
- return null;
- }
-}
-
-/**
- * Inspect an Expo Workflow by name across discovered EAS projects.
- * Returns true if a matching workflow was found.
- *
- * Note: This relies on `fetchEASRuns` which is capped at 100 most recent runs per project.
- * If the failure you're looking for has been pushed out of that window by newer runs,
- * it won't appear here. For older failures, check the EAS dashboard directly.
- */
-async function inspectEASWorkflow(
- workflowName: string,
- startDate: Date,
- endDate: Date
-): Promise<boolean> {
- logger.info(chalk.gray('Searching Expo Workflows...'));
-
- let projectDirs: string[];
- try {
- projectDirs = await findEASProjectDirs();
- } catch {
- return false;
- }
-
- if (!projectDirs.length) return false;
-
- const easEnv = {
- ...process.env,
- EXPO_NO_DOCTOR: 'true',
- EAS_BUILD_PROFILE: process.env.EAS_BUILD_PROFILE ?? 'release-client',
- };
-
- const needle = workflowName.toLowerCase();
-
- for (const projectDir of projectDirs) {
- const projectName = path.basename(projectDir);
-
- // Fetch all runs and find matching workflow name
- logger.info(chalk.gray(`Fetching runs for ${projectName}...`));
- const allRuns = await fetchEASRuns(projectDir, projectName, easEnv);
-
- // Filter to date range
- const runsInRange = allRuns.filter((r) => {
- const ts = r.startedAt ?? r.createdAt ?? r.created_at;
- if (!ts) return false;
- const d = new Date(ts);
- return d >= startDate && d <= endDate;
- });
-
- // Find matching workflow name
- const matchingRuns = runsInRange.filter((r) =>
- (r.workflowName ?? r.workflow_name ?? '').toLowerCase().includes(needle)
- );
-
- if (!matchingRuns.length) continue;
-
- // Get the actual workflow name from the first match
- const actualName = matchingRuns[0].workflowName ?? matchingRuns[0].workflow_name;
- const workflowRuns = matchingRuns.filter(
- (r) => (r.workflowName ?? r.workflow_name) === actualName
- );
-
- logger.info(`${chalk.gray('Source:')} Expo Workflows (${projectName})\n`);
-
- // Summarize
- const failed = workflowRuns.filter((r) => (r.status ?? '').toUpperCase() === 'FAILURE');
- const succeeded = workflowRuns.filter((r) => {
- const s = (r.status ?? '').toUpperCase();
- return s === 'SUCCESS' || s === 'FINISHED';
- });
- const cancelled = workflowRuns.filter((r) => (r.status ?? '').toUpperCase() === 'CANCELED');
-
- logger.log(` Workflow: ${chalk.bold(actualName)}`);
- logger.log(` Project: ${projectName}`);
- logger.log(` Total runs: ${workflowRuns.length}`);
- logger.log(
- ` ${chalk.green(`${succeeded.length} passed`)}, ${chalk.red(`${failed.length} failed`)}, ${chalk.gray(`${cancelled.length} cancelled`)}\n`
- );
-
- if (!failed.length) {
- logger.info(chalk.green('No failures found for this workflow.'));
- return true;
- }
-
- // Inspect up to 3 most recent failures
- const recentFailures = failed.slice(0, 3);
- logger.info(chalk.gray(`Inspecting ${recentFailures.length} most recent failure(s)...\n`));
-
- for (const run of recentFailures) {
- const ts = run.startedAt ?? run.createdAt;
- const runDate = ts
- ? new Date(ts).toLocaleDateString('en-US', {
- weekday: 'short',
- month: 'short',
- day: 'numeric',
- hour: '2-digit',
- minute: '2-digit',
- })
- : 'unknown date';
-
- logger.log('─'.repeat(40));
- logger.log(`\n ${chalk.bold(`Run ${run.id}`)} — ${runDate}`);
-
- if (run.gitCommitMessage) {
- const commitMsg = run.gitCommitMessage.split('\n')[0];
- logger.log(` Commit: ${chalk.gray(commitMsg)}`);
- }
-
- // Fetch run details to get jobs
- logger.info(chalk.gray(` Fetching run details...`));
- const runDetails = await runEASCommand(
- ['workflow:view', run.id, '--json', '--non-interactive'],
- projectDir,
- easEnv
- );
-
- if (!runDetails?.jobs) {
- logger.warn(` Could not fetch run details.`);
- continue;
- }
-
- const failedJobs = runDetails.jobs.filter(
- (j: any) => (j.status ?? '').toUpperCase() === 'FAILURE'
- );
-
- if (!failedJobs.length) {
- logger.log(` ${chalk.gray('No failed jobs found.')}`);
- continue;
- }
-
- if (runDetails.logURL) {
- logger.log(` ${chalk.gray(runDetails.logURL)}`);
- }
-
- logger.log(` ${chalk.red(`${failedJobs.length} failed job(s)`)}:\n`);
-
- for (const job of failedJobs) {
- const jobName = job.name ?? job.key ?? 'unknown';
- logger.log(` ${chalk.red('✗')} ${chalk.bold(jobName)}`);
-
- // Download logs for this job
- logger.info(chalk.gray(` Downloading log for "${jobName}"...`));
-
- let rawLog: string | null = null;
- try {
- const result = await spawnAsync(
- 'eas',
- ['workflow:logs', job.id, '--all-steps', '--non-interactive'],
- { cwd: projectDir, env: easEnv }
- );
- rawLog = result.stdout;
- } catch {
- logger.warn(` Could not download log for this job.`);
- continue;
- }
-
- if (!rawLog?.trim()) {
- logger.warn(` Log is empty.`);
- continue;
- }
-
- const log = stripLogTimestamps(rawLog);
- const snippets = extractErrorSnippets(log);
- if (snippets.length) {
- printLogSnippets(snippets);
- }
-
- logger.log('');
- }
- }
-
- printFailurePatternSummary(
- failed.map((r: any) => ({ timestamp: new Date(r.startedAt ?? r.createdAt) })),
- startDate,
- endDate
- );
-
- return true;
- }
-
- return false;
-}
-
-/**
- * Inspect a workflow by name — tries GitHub Actions first, then Expo Workflows.
- */
-async function inspectWorkflow(
- workflowName: string,
- branch: string,
- startDate: Date,
- endDate: Date,
- auth: AuthStatus
-): Promise<void> {
- logger.log(chalk.bold(`\nInspecting workflow: ${chalk.cyan(workflowName)}\n`) + '─'.repeat(40));
-
- // Try GitHub Actions first
- if (auth.github) {
- const found = await inspectGitHubWorkflow(workflowName, branch, startDate, endDate);
- if (found) return;
- }
-
- // Fall back to Expo Workflows
- if (auth.eas) {
- const found = await inspectEASWorkflow(workflowName, startDate, endDate);
- if (found) return;
- }
-
- // No match found anywhere — list available workflows
- logger.warn(`No workflows found matching "${workflowName}".`);
-
- if (auth.github) {
- logger.log('\nAvailable GitHub Actions workflows:');
- try {
- const runs = await getWorkflowRunsForRepoAsync(branch, { startDate, endDate });
- const names = [...new Set(runs.map((r: any) => r.name).filter(Boolean))].sort();
- for (const name of names) {
- logger.log(` - ${name}`);
- }
- } catch {
- // Already fetched above, skip
- }
- }
-
- if (auth.eas) {
- logger.log('\nAvailable Expo Workflows:');
- try {
- const projectDirs = await findEASProjectDirs();
- const easEnv = {
- ...process.env,
- EXPO_NO_DOCTOR: 'true',
- EAS_BUILD_PROFILE: process.env.EAS_BUILD_PROFILE ?? 'release-client',
- };
- for (const projectDir of projectDirs) {
- const projectName = path.basename(projectDir);
- const allRuns = await fetchEASRuns(projectDir, projectName, easEnv);
- const names = [
- ...new Set(allRuns.map((r: any) => r.workflowName ?? r.workflow_name).filter(Boolean)),
- ].sort();
- if (names.length) {
- logger.log(` ${chalk.gray(`[${projectName}]`)}`);
- for (const name of names) {
- logger.log(` - ${name}`);
- }
- }
- }
- } catch {
- // Skip
- }
- }
-}
-
-async function action(options: ActionOptions) {
- const branch = options.branch;
- const [startDate, endDate, weekNum] = parseDateRange(options.week);
-
- const startStr = startDate.toISOString().split('T')[0];
- const endStr = endDate.toISOString().split('T')[0];
-
- logger.log(chalk.bold(`\nCI Status Overview — ${branch} — Week ${weekNum}\n`) + '─'.repeat(40));
- logger.info(`Date range: ${chalk.cyan(startStr)} to ${chalk.cyan(endStr)}\n`);
-
- const auth = await checkAuth();
- printAuthStatus(auth);
-
- if (!auth.github && !auth.eas) {
- logger.error('No services authenticated. Please log in to at least one service above.');
- return;
- }
-
- // --inspect mode: deep-dive into a specific workflow's failures
- if (options.inspect) {
- await inspectWorkflow(options.inspect, branch, startDate, endDate, auth);
- logger.info(chalk.green('Done\n'));
- return;
- }
-
- const allResults: SectionResult[] = [];
-
- if (auth.github) {
- const result = await printGitHubActionsStatus(branch, startDate, endDate);
- if (result) allResults.push(result);
- }
-
- if (auth.eas) {
- const results = await printExpoWorkflowsStatus(startDate, endDate);
- allResults.push(...results);
- }
-
- if (allResults.length > 0) {
- printSummary(allResults);
- printWeekTrend(allResults);
- }
-
- logger.info(chalk.green('Done\n'));
-}
-
-function printSummary(results: SectionResult[]): void {
- logger.log('─'.repeat(40));
- logger.info(`\n${chalk.bold('Summary')}\n`);
-
- // Overall health per section
- for (const result of results) {
- const healthIcon =
- result.successRate >= 90
- ? chalk.green('healthy')
- : result.successRate >= 75
- ? chalk.yellow('needs attention')
- : chalk.red('needs immediate attention');
- logger.log(
- ` ${chalk.bold(result.source)}: ${result.successRate.toFixed(1)}% success rate across ${result.totalRuns} runs — ${healthIcon}`
- );
- }
-
- // Combined stats using the same success rate logic (cancelled counted as success)
- const allWorkflows = results.flatMap((r) => r.workflows);
- const totalRuns = allWorkflows.reduce((s, w) => s + w.total, 0);
- const totalFailed = allWorkflows.reduce((s, w) => s + w.failed, 0);
- const totalSuccess = allWorkflows.reduce((s, w) => s + w.success, 0);
- const totalCancelled = allWorkflows.reduce((s, w) => s + w.cancelled, 0);
- const effectiveSuccess = totalSuccess + totalCancelled;
- const overallRate = totalRuns > 0 ? (effectiveSuccess / totalRuns) * 100 : 100;
-
- logger.log(
- `\n ${chalk.bold('Overall')}: ${totalRuns} total runs, ${chalk.red(`${totalFailed} failures`)}, ${successRateColor(overallRate)} success rate\n`
- );
-
- // Identify workflows that need attention (success rate < 75% with at least 2 runs)
- // Exclude workflows with only "other" runs (no success or failure conclusions yet)
- const troubleWorkflows = allWorkflows
- .filter((w) => w.successRate < 75 && w.total >= 2 && w.success + w.failed + w.cancelled > 0)
- .sort((a, b) => a.successRate - b.successRate);
-
- if (troubleWorkflows.length > 0) {
- logger.info(` ${chalk.bold('Workflows needing attention:')}\n`);
- for (const w of troubleWorkflows) {
- logger.log(
- ` ${chalk.red('→')} ${w.name}: ${chalk.red(`${w.failed}`)} failed out of ${w.total} runs (${successRateColor(w.successRate)} success rate)`
- );
- }
- logger.log('');
- }
-
- // Identify consistently failing workflows (0% success rate, with actual failures)
- const alwaysFailing = allWorkflows.filter((w) => w.successRate === 0 && w.failed >= 2);
- if (alwaysFailing.length > 0) {
- logger.warn(
- ` ${alwaysFailing.length} workflow(s) with 0% success rate — these may be broken and need investigation:`
- );
- for (const w of alwaysFailing) {
- logger.log(` ${chalk.red('✗')} ${w.name} (${w.total} runs, ${w.failed} failed)`);
- }
- logger.log('');
- }
-
- // Highlight workflows with high volume + moderate failure rate
- const highVolumeIssues = allWorkflows
- .filter((w) => w.total >= 10 && w.successRate < 90 && w.failed > 0)
- .sort((a, b) => b.failed - a.failed);
-
- if (highVolumeIssues.length > 0) {
- logger.info(` ${chalk.bold('High-volume workflows with elevated failure rates:')}\n`);
- for (const w of highVolumeIssues) {
- logger.log(
- ` ${chalk.yellow('!')} ${w.name}: ${w.total} runs, ${chalk.red(`${w.failed}`)} failures (${successRateColor(w.successRate)})`
- );
- }
- logger.log('');
- }
-
- // Final recommendation based on the consistent success rate calculation
- if (overallRate >= 90) {
- logger.info(chalk.green(' CI is healthy — no immediate action required.\n'));
- } else if (overallRate >= 75) {
- logger.info(chalk.yellow(' CI needs attention — review the flagged workflows above.\n'));
- } else {
- logger.info(
- chalk.red(
- ' CI needs immediate attention — significant failure rates detected. Prioritize investigating the workflows listed above.\n'
- )
- );
- }
-}
-
-export default (program: Command) => {
- program
- .command('ci-status')
- .alias('ci', 'cis')
- .description(
- 'Shows an overview of CI status for GitHub Actions and Expo Workflows on a branch. ' +
- 'Displays success rates, failure breakdowns, and weekly trends. ' +
- 'Use --inspect to deep-dive into a specific GitHub Actions workflow — ' +
- 'downloads failed job logs and extracts error output for analysis (works great with Claude Code).'
- )
-    .option('-b, --branch <branch>', 'Branch to check', 'main')
- .option(
-      '-w, --week <week>',
- 'ISO week number (1-53), or "last"/"prev" for previous week. Defaults to current week.'
- )
- .option(
-      '-i, --inspect <workflow>',
- 'Deep-inspect a workflow by name (partial match). ' +
- 'Searches GitHub Actions first, then Expo Workflows. ' +
- 'Fetches the 3 most recent failed runs, downloads job logs, and extracts error snippets. ' +
- 'Example: --inspect "iOS Unit Tests"'
- )
- .asyncAction(action);
-};