From bce8fc0b92582a9b370f4b03d8381c9c729ab1a7 Mon Sep 17 00:00:00 2001 From: jordanarldt Date: Fri, 20 Mar 2026 16:59:24 -0500 Subject: [PATCH] CATALYST-1718: feat(cli) - Add catalyst logs command --- packages/catalyst/AGENTS.md | 96 +++ packages/catalyst/README.md | 34 +- .../catalyst/src/cli/commands/logs.spec.ts | 548 ++++++++++++++++++ packages/catalyst/src/cli/commands/logs.ts | 279 +++++++++ packages/catalyst/src/cli/index.spec.ts | 7 + .../catalyst/src/cli/lib/shared-options.ts | 39 ++ packages/catalyst/src/cli/program.ts | 2 + packages/catalyst/tests/mocks/handlers.ts | 19 + 8 files changed, 1023 insertions(+), 1 deletion(-) create mode 100644 packages/catalyst/AGENTS.md create mode 100644 packages/catalyst/src/cli/commands/logs.spec.ts create mode 100644 packages/catalyst/src/cli/commands/logs.ts create mode 100644 packages/catalyst/src/cli/lib/shared-options.ts diff --git a/packages/catalyst/AGENTS.md b/packages/catalyst/AGENTS.md new file mode 100644 index 0000000000..1b968d9392 --- /dev/null +++ b/packages/catalyst/AGENTS.md @@ -0,0 +1,96 @@ +# Catalyst CLI (`@bigcommerce/catalyst`) + +CLI tool for Catalyst development and deployment — handles build, dev server, and deployment to Cloudflare Workers. 
+
+## Directory Structure
+
+```
+src/cli/
+├── index.ts      # Entry point (#!/usr/bin/env node)
+├── program.ts    # Commander program setup, registers all commands
+├── commands/     # CLI command implementations (auth, build, deploy, logs, project, start, telemetry, version)
+├── hooks/        # Pre/post action hooks (telemetry)
+└── lib/          # Utilities (auth, logger, project config, credentials, wrangler config, telemetry, deployment errors)
+templates/        # OpenNext config and public_headers template
+tests/mocks/      # MSW handlers and test mocks
+dist/cli.js       # Bundled output (single ESM file)
+```
+
+## CLI Commands
+
+| Command | Description |
+|---------|-------------|
+| `auth whoami/login/logout` | Manage authentication (device code OAuth flow, credential storage) |
+| `build` | Build Catalyst project using OpenNext/Cloudflare adapter |
+| `deploy` | Deploy to Cloudflare with bundle upload |
+| `logs` | View logs (`tail` default, `query` planned). Supports `--format` (default/json/pretty/short/request) |
+| `project create/list/link` | Manage BigCommerce infrastructure projects |
+| `start` | Start local preview using OpenNext Cloudflare adapter |
+| `telemetry` | Enable/disable/check telemetry |
+| `version` | Display version and platform info |
+
+## Development
+
+```bash
+pnpm dev          # Watch mode (rebuilds dist/cli.js on changes)
+pnpm build        # Production build via tsup
+pnpm test         # Run tests (vitest)
+pnpm test:watch   # Watch mode tests
+pnpm typecheck    # tsc --noEmit
+pnpm lint         # eslint with 0 warnings threshold
+```
+
+## Testing Changes
+
+To test CLI changes directly without a full publish cycle, build the CLI package first, then run the compiled output from inside the `core/` directory using its absolute path:
+
+```bash
+# From packages/catalyst — rebuild the CLI
+pnpm build
+
+# From core/ — run the CLI using the absolute path to the built output
+pnpm exec <path-to-catalyst>/packages/catalyst/dist/cli.js
+```
+
+For example: `pnpm exec <path-to-catalyst>/packages/catalyst/dist/cli.js project list`. 
+ +## Build + +- **Bundler**: tsup (`tsup.config.ts`) +- **Entry**: `src/cli/index.ts` → `dist/cli.js` (single ESM bundle with source maps) +- **Environment variables** injected at build time: `CLI_SEGMENT_WRITE_KEY`, `CONSOLA_LEVEL` + +## Testing + +- **Framework**: Vitest (`vitest.config.ts`) +- **Coverage threshold**: 100% (strict) +- **Mocking**: MSW for BigCommerce API calls; telemetry always disabled in tests (`vitest.setup.ts`) +- **Test files**: co-located as `*.spec.ts` next to source files + +## Key Dependencies + +- `commander` — CLI framework +- `execa` — process execution (next, pnpm, wrangler) +- `consola` — logging +- `zod` — API response validation +- `conf` — persistent config (`.bigcommerce/project.json`) +- `@segment/analytics-node` — telemetry +- `adm-zip` — bundle zipping for deploy + +## After Making Changes + +Always run `pnpm typecheck` and `pnpm lint` after making code changes, and fix any errors or warnings before considering the work done. The lint config enforces zero warnings (`--max-warnings 0`). Use `pnpm lint --fix` to auto-fix formatting and fixable lint issues before manually editing. + +## Code Style Notes + +- **No `let` when avoidable** — prefer `const` with `while (true)` + `break` over mutable flags. +- **No iterators/generators** — eslint `no-restricted-syntax` disallows `for...of` and generator functions. Use `.forEach()`, `.map()`, etc. +- **Consola for logging** — use `consola` (from `../lib/logger`) instead of `console`. Use `colorize` from `consola/utils` for colored output. +- **Shared CLI options** — commands that need `--store-hash`, `--access-token`, `--api-host`, and `--project-uuid` should use the shared option factories from `lib/shared-options.ts`. Chain `.makeOptionMandatory()` inline where needed to preserve commander's extra-typings inference. 
- **SSE stream pattern** — the fetch API's `ReadableStreamDefaultReader` doesn't support async iteration, so `while (true)` + `reader.read()` + `break` is the standard pattern. For long-lived streams, implement a TTL-based reconnect to free connection pool resources, and distinguish server-side disconnects (`TypeError: terminated`) from actual failures for retry logic.
+
+## Integration Points
+
+- **BigCommerce Infrastructure API**: `https://api.bigcommerce.com/stores/{storeHash}/v3/infrastructure/`
+- **Next.js**: dev/build/start
+- **OpenNext + Cloudflare**: serverless build and deploy via Wrangler
diff --git a/packages/catalyst/README.md b/packages/catalyst/README.md
index ed8f949d92..76eadf49fd 100644
--- a/packages/catalyst/README.md
+++ b/packages/catalyst/README.md
@@ -1,3 +1,35 @@
 # @bigcommerce/catalyst
 
-CLI
+CLI tool for Catalyst development and deployment.
+
+## Developing the CLI
+
+You'll need two terminal windows:
+
+### Terminal 1 — Watch mode (rebuilds on changes)
+
+```bash
+cd packages/catalyst
+pnpm dev
+```
+
+This runs `tsup --watch` and rebuilds `dist/cli.js` on every source change.
+
+### Terminal 2 — Run the CLI
+
+From the `core/` directory, run the CLI using the absolute path to the built executable:
+
+```bash
+cd core
+pnpm exec <path-to-catalyst>/packages/catalyst/dist/cli.js
+```
+
+For example:
+
+```bash
+pnpm exec <path-to-catalyst>/packages/catalyst/dist/cli.js project list
+pnpm exec <path-to-catalyst>/packages/catalyst/dist/cli.js logs tail
+pnpm exec <path-to-catalyst>/packages/catalyst/dist/cli.js deploy
+```
+
+Replace `<path-to-catalyst>` with the absolute path to your local clone of the `catalyst` repository. 
diff --git a/packages/catalyst/src/cli/commands/logs.spec.ts b/packages/catalyst/src/cli/commands/logs.spec.ts new file mode 100644 index 0000000000..cf695a303b --- /dev/null +++ b/packages/catalyst/src/cli/commands/logs.spec.ts @@ -0,0 +1,548 @@ +import { Command } from 'commander'; +import { http, HttpResponse } from 'msw'; +import { afterEach, beforeAll, describe, expect, MockInstance, test, vi } from 'vitest'; + +import { server } from '../../../tests/mocks/node'; +import { consola } from '../lib/logger'; +import { program } from '../program'; + +import { logs, parseSSEEvent, tailLogs } from './logs'; + +let exitMock: MockInstance; +let stdoutWriteMock: MockInstance; + +const projectUuid = '6b202364-10f3-11f1-8bc7-fe9b9d8b14ab'; +const storeHash = 'test-store'; +const accessToken = 'test-token'; +const apiHost = 'api.bigcommerce.com'; + +const encoder = new TextEncoder(); + +const validLogEvent = { + uuid: '0f258256-0a83-4704-a456-03e99b4445c2', + project_uuid: projectUuid, + request: { method: 'GET', url: 'https://example.com/test', status_code: 200 }, + logs: [{ timestamp: '2026-03-11T22:05:28.870Z', level: 'info', messages: ['hello world'] }], + exceptions: [], + timestamp: '2026-03-11T22:05:28.870Z', +}; + +const createSSEStream = (events: string[], closeDelay = 10) => + new ReadableStream({ + start(controller) { + events.forEach((event) => { + controller.enqueue(encoder.encode(event)); + }); + setTimeout(() => controller.close(), closeDelay); + }, + }); + +// Creates a handler that serves SSE events on the first request, +// then returns 404 on subsequent requests to break the reconnect loop. 
+const createOneShotLogHandler = (events: string[], closeDelay = 10) => {
+  let called = false;
+
+  return http.get(
+    'https://:apiHost/stores/:storeHash/v3/infrastructure/logs/:projectUuid/tail',
+    () => {
+      if (called) {
+        return new HttpResponse(null, { status: 404, statusText: 'Not Found' });
+      }
+
+      called = true;
+
+      return new HttpResponse(createSSEStream(events, closeDelay), {
+        status: 200,
+        headers: { 'Content-Type': 'text/event-stream' },
+      });
+    },
+  );
+};
+
+const callTailLogs = async (format: Parameters<typeof tailLogs>[4], events?: string[]) => {
+  const sseEvents = events ?? [`data: ${JSON.stringify(validLogEvent)}\n\n`];
+
+  server.use(createOneShotLogHandler(sseEvents));
+
+  await tailLogs(projectUuid, storeHash, accessToken, apiHost, format).catch(() => {
+    // Expected: tailLogs throws when the one-shot handler returns 404 on reconnect
+  });
+};
+
+beforeAll(() => {
+  consola.mockTypes(() => vi.fn());
+  // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
+  exitMock = vi.spyOn(process, 'exit').mockImplementation(() => null as never);
+  stdoutWriteMock = vi.spyOn(process.stdout, 'write').mockImplementation(() => true);
+});
+
+afterEach(() => {
+  vi.clearAllMocks();
+});
+
+describe('command configuration', () => {
+  test('logs is a properly configured Command with tail and query subcommands', () => {
+    expect(logs).toBeInstanceOf(Command);
+    expect(logs.name()).toBe('logs');
+    expect(logs.description()).toBe('View logs from your deployed application.');
+
+    const subcommands = logs.commands.map((c) => c.name());
+
+    expect(subcommands).toContain('tail');
+    expect(subcommands).toContain('query');
+  });
+
+  test('tail subcommand has correct options', () => {
+    const tail = logs.commands.find((c) => c.name() === 'tail');
+
+    expect(tail).toBeDefined();
+    expect(tail?.options).toEqual(
+      expect.arrayContaining([
+        expect.objectContaining({ flags: '--store-hash <hash>' }),
+        expect.objectContaining({ flags: '--access-token <token>' }),
+
        expect.objectContaining({
+          flags: '--api-host <host>',
+          defaultValue: 'api.bigcommerce.com',
+        }),
+        expect.objectContaining({ flags: '--project-uuid <uuid>' }),
+        expect.objectContaining({ flags: '--format <format>', defaultValue: 'default' }),
+      ]),
+    );
+  });
+
+  test('query subcommand has correct options', () => {
+    const query = logs.commands.find((c) => c.name() === 'query');
+
+    expect(query).toBeDefined();
+    expect(query?.options).toEqual(
+      expect.arrayContaining([
+        expect.objectContaining({ flags: '--store-hash <hash>' }),
+        expect.objectContaining({ flags: '--access-token <token>' }),
+        expect.objectContaining({ flags: '--api-host <host>' }),
+        expect.objectContaining({ flags: '--project-uuid <uuid>' }),
+      ]),
+    );
+  });
+});
+
+describe('parseSSEEvent', () => {
+  test('extracts data from a single data line', () => {
+    expect(parseSSEEvent('data: {"foo":"bar"}')).toBe('{"foo":"bar"}');
+  });
+
+  test('joins multiple data lines with newlines', () => {
+    expect(parseSSEEvent('data: line1\ndata: line2')).toBe('line1\nline2');
+  });
+
+  test('ignores non-data SSE fields', () => {
+    expect(parseSSEEvent('event: message\ndata: {"foo":"bar"}\nid: 123')).toBe('{"foo":"bar"}');
+  });
+
+  test('ignores SSE comments', () => {
+    expect(parseSSEEvent(': this is a comment\ndata: {"foo":"bar"}')).toBe('{"foo":"bar"}');
+  });
+
+  test('returns null for events with no data lines', () => {
+    expect(parseSSEEvent('event: ping')).toBeNull();
+    expect(parseSSEEvent(': comment only')).toBeNull();
+    expect(parseSSEEvent('')).toBeNull();
+  });
+
+  test('returns null for heartbeat events with empty data', () => {
+    expect(parseSSEEvent('data: ')).toBeNull();
+    expect(parseSSEEvent('data:')).toBeNull();
+  });
+});
+
+describe('format: default', () => {
+  test('logs timestamp, level, and message', async () => {
+    await callTailLogs('default');
+
+    expect(consola.info).toHaveBeenCalledWith('Tailing logs...');
+    expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('[2026-03-11T22:05:28.870Z]'));
+
expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('hello world')); + }); +}); + +describe('format: json', () => { + test('writes raw JSON to stdout', async () => { + await callTailLogs('json'); + + expect(stdoutWriteMock).toHaveBeenCalledWith( + expect.stringContaining('"uuid":"0f258256-0a83-4704-a456-03e99b4445c2"'), + ); + }); +}); + +describe('format: pretty', () => { + test('logs pretty-printed JSON', async () => { + await callTailLogs('pretty'); + + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('"uuid"')); + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('"hello world"')); + }); + + test('preserves unknown fields via loose schema', async () => { + const eventWithExtra = { ...validLogEvent, extra_field: 'should be preserved' }; + + await callTailLogs('pretty', [`data: ${JSON.stringify(eventWithExtra)}\n\n`]); + + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('should be preserved')); + }); +}); + +describe('format: short', () => { + test('logs only the message', async () => { + await callTailLogs('short'); + + expect(consola.log).toHaveBeenCalledWith('hello world'); + }); +}); + +describe('format: request', () => { + test('logs timestamp, level, request info, and message', async () => { + await callTailLogs('request'); + + expect(consola.log).toHaveBeenCalledWith( + expect.stringContaining('GET https://example.com/test'), + ); + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('(200)')); + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('hello world')); + }); +}); + +describe('log event processing', () => { + test.each([ + ['info', 'INFO'], + ['warn', 'WARN'], + ['error', 'ERROR'], + ['debug', 'DEBUG'], + ])('formats %s level as %s', async (level, expected) => { + const event = { ...validLogEvent, logs: [{ ...validLogEvent.logs[0], level }] }; + + await callTailLogs('default', [`data: ${JSON.stringify(event)}\n\n`]); + + 
expect(consola.log).toHaveBeenCalledWith(expect.stringContaining(expected)); + }); + + test('logs exceptions from the event', async () => { + const event = { + ...validLogEvent, + exceptions: [{ message: 'something broke', stack: 'Error: something broke' }], + }; + + await callTailLogs('default', [`data: ${JSON.stringify(event)}\n\n`]); + + expect(consola.error).toHaveBeenCalledWith( + expect.stringContaining('EXCEPTION'), + expect.objectContaining({ message: 'something broke' }), + ); + }); + + test('serializes non-string messages as JSON', async () => { + const event = { + ...validLogEvent, + logs: [{ ...validLogEvent.logs[0], messages: [{ nested: 'object' }, 42] }], + }; + + await callTailLogs('default', [`data: ${JSON.stringify(event)}\n\n`]); + + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('{"nested":"object"}')); + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('42')); + }); + + test('multiple events in a single chunk are all processed', async () => { + const event1 = { + ...validLogEvent, + logs: [{ ...validLogEvent.logs[0], messages: ['first'] }], + }; + const event2 = { + ...validLogEvent, + logs: [{ ...validLogEvent.logs[0], messages: ['second'] }], + }; + + await callTailLogs('default', [ + `data: ${JSON.stringify(event1)}\n\ndata: ${JSON.stringify(event2)}\n\n`, + ]); + + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('first')); + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('second')); + }); +}); + +describe('error handling', () => { + test('silently ignores heartbeat events', async () => { + await callTailLogs('default', [`data: \n\ndata: ${JSON.stringify(validLogEvent)}\n\n`]); + + expect(consola.warn).not.toHaveBeenCalled(); + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('hello world')); + }); + + test('warns on invalid JSON in stream', async () => { + await callTailLogs('default', [`data: {not valid json}\n\n`]); + + 
expect(consola.warn).toHaveBeenCalledWith(expect.stringContaining('Failed to parse log event')); + }); + + test('warns on valid JSON that does not match schema', async () => { + await callTailLogs('default', [`data: {"valid":"json","but":"wrong schema"}\n\n`]); + + expect(consola.warn).toHaveBeenCalledWith(expect.stringContaining('Failed to parse log event')); + }); + + test('throws on fatal 4xx status codes', async () => { + server.use( + http.get( + 'https://:apiHost/stores/:storeHash/v3/infrastructure/logs/:projectUuid/tail', + () => new HttpResponse(null, { status: 404, statusText: 'Not Found' }), + ), + ); + + await expect(tailLogs(projectUuid, storeHash, accessToken, apiHost, 'default')).rejects.toThrow( + 'Failed to open log stream: 404 Not Found', + ); + }); + + test('throws on fatal 401 unauthorized', async () => { + server.use( + http.get( + 'https://:apiHost/stores/:storeHash/v3/infrastructure/logs/:projectUuid/tail', + () => new HttpResponse(null, { status: 401, statusText: 'Unauthorized' }), + ), + ); + + await expect(tailLogs(projectUuid, storeHash, accessToken, apiHost, 'default')).rejects.toThrow( + 'Failed to open log stream: 401 Unauthorized', + ); + }); +}); + +describe('retry and reconnect', () => { + test('retries on 5xx errors and throws after max retries', async () => { + server.use( + http.get( + 'https://:apiHost/stores/:storeHash/v3/infrastructure/logs/:projectUuid/tail', + () => new HttpResponse(null, { status: 502, statusText: 'Bad Gateway' }), + ), + ); + + await expect(tailLogs(projectUuid, storeHash, accessToken, apiHost, 'default')).rejects.toThrow( + 'Failed to connect to log stream after 5 retries.', + ); + + // 4 warnings logged for attempts 1-4, then throw on attempt 5 + expect(consola.warn).toHaveBeenCalledTimes(4); + }); + + test('logs retry attempt number in warning messages', async () => { + server.use( + http.get( + 'https://:apiHost/stores/:storeHash/v3/infrastructure/logs/:projectUuid/tail', + () => new HttpResponse(null, 
{ status: 503, statusText: 'Service Unavailable' }), + ), + ); + + await expect( + tailLogs(projectUuid, storeHash, accessToken, apiHost, 'default'), + ).rejects.toThrow(); + + expect(consola.warn).toHaveBeenCalledWith( + expect.stringContaining('attempt 1/5'), + expect.anything(), + ); + expect(consola.warn).toHaveBeenCalledWith( + expect.stringContaining('attempt 4/5'), + expect.anything(), + ); + }); + + test('resets retry counter after a successful connection', async () => { + let requestCount = 0; + + server.use( + http.get( + 'https://:apiHost/stores/:storeHash/v3/infrastructure/logs/:projectUuid/tail', + () => { + requestCount += 1; + + // First 3 requests: 502 errors (builds up retries to 3) + if (requestCount <= 3) { + return new HttpResponse(null, { status: 502, statusText: 'Bad Gateway' }); + } + + // 4th request: successful stream (resets retries to 0) + if (requestCount === 4) { + return new HttpResponse( + createSSEStream([`data: ${JSON.stringify(validLogEvent)}\n\n`]), + { + status: 200, + headers: { 'Content-Type': 'text/event-stream' }, + }, + ); + } + + // 5th+ requests: 502 again until retries are exhausted a second time + return new HttpResponse(null, { status: 502, statusText: 'Bad Gateway' }); + }, + ), + ); + + await expect(tailLogs(projectUuid, storeHash, accessToken, apiHost, 'default')).rejects.toThrow( + 'Failed to connect to log stream after 5 retries.', + ); + + // 3 warnings before success + 4 warnings after success (throw on 5th retry) + expect(consola.warn).toHaveBeenCalledTimes(7); + }); + + test('server disconnect does not increment retry counter', async () => { + let requestCount = 0; + + server.use( + http.get( + 'https://:apiHost/stores/:storeHash/v3/infrastructure/logs/:projectUuid/tail', + () => { + requestCount += 1; + + // Return a stream that immediately errors to simulate server disconnect + if (requestCount <= 3) { + const stream = new ReadableStream({ + start(controller) { + controller.error(new TypeError('terminated')); 
+              },
+            });
+
+            return new HttpResponse(stream, {
+              status: 200,
+              headers: { 'Content-Type': 'text/event-stream' },
+            });
+          }
+
+          // After 3 server disconnects, return 404 to break the loop
+          return new HttpResponse(null, { status: 404, statusText: 'Not Found' });
+        },
+      ),
+    );
+
+    await expect(tailLogs(projectUuid, storeHash, accessToken, apiHost, 'default')).rejects.toThrow(
+      'Failed to open log stream: 404 Not Found',
+    );
+
+    // Server disconnect warnings should NOT contain "attempt X/5"
+    expect(consola.warn).toHaveBeenCalledTimes(3);
+    expect(consola.warn).toHaveBeenCalledWith('Log stream closed by server, reconnecting...');
+  });
+
+  test('reconnects when connection TTL is reached', { timeout: 3000 }, async () => {
+    let requestCount = 0;
+    const ttlMs = 200;
+
+    // Creates a stream that sends data every 30ms via setInterval and never closes.
+    // The cancel callback cleans up the interval so reader.cancel() resolves.
+    const createOpenEndedStream = () => {
+      let intervalId: ReturnType<typeof setInterval>;
+
+      return new ReadableStream({
+        start(controller) {
+          controller.enqueue(encoder.encode(`data: ${JSON.stringify(validLogEvent)}\n\n`));
+          intervalId = setInterval(() => {
+            controller.enqueue(encoder.encode(`data: ${JSON.stringify(validLogEvent)}\n\n`));
+          }, 30);
+        },
+        cancel() {
+          clearInterval(intervalId);
+        },
+      });
+    };
+
+    server.use(
+      http.get(
+        'https://:apiHost/stores/:storeHash/v3/infrastructure/logs/:projectUuid/tail',
+        () => {
+          requestCount += 1;
+
+          if (requestCount <= 2) {
+            return new HttpResponse(createOpenEndedStream(), {
+              status: 200,
+              headers: { 'Content-Type': 'text/event-stream' },
+            });
+          }
+
+          // 3rd request: return 404 to break the reconnect loop
+          return new HttpResponse(null, { status: 404, statusText: 'Not Found' });
+        },
+      ),
+    );
+
+    await expect(
+      tailLogs(projectUuid, storeHash, accessToken, apiHost, 'default', ttlMs),
+    ).rejects.toThrow('Failed to open log stream: 404 Not Found');
+
+    // Should have connected 3 times: 2 
TTL-triggered reconnects + final 404 + expect(requestCount).toBe(3); + + // Events from both successful connections should have been processed + expect(consola.log).toHaveBeenCalledWith(expect.stringContaining('hello world')); + }); +}); + +describe('query subcommand', () => { + test('exits with error as not yet implemented', async () => { + await program.parseAsync([ + 'node', + 'catalyst', + 'logs', + 'query', + '--store-hash', + storeHash, + '--access-token', + accessToken, + ]); + + expect(consola.error).toHaveBeenCalledWith('The query command is not yet implemented.'); + expect(exitMock).toHaveBeenCalledWith(1); + }); +}); + +describe('program integration', () => { + test('logs tail is the default subcommand', async () => { + server.use(createOneShotLogHandler([`data: ${JSON.stringify(validLogEvent)}\n\n`])); + + await program.parseAsync([ + 'node', + 'catalyst', + 'logs', + '--store-hash', + storeHash, + '--access-token', + accessToken, + '--project-uuid', + projectUuid, + ]); + + expect(consola.info).toHaveBeenCalledWith('Tailing logs...'); + expect(exitMock).toHaveBeenCalledWith(1); + }); + + test('logs tail with --format json', async () => { + server.use(createOneShotLogHandler([`data: ${JSON.stringify(validLogEvent)}\n\n`])); + + await program.parseAsync([ + 'node', + 'catalyst', + 'logs', + 'tail', + '--store-hash', + storeHash, + '--access-token', + accessToken, + '--project-uuid', + projectUuid, + '--format', + 'json', + ]); + + expect(consola.info).toHaveBeenCalledWith('Tailing logs...'); + expect(stdoutWriteMock).toHaveBeenCalled(); + }); +}); diff --git a/packages/catalyst/src/cli/commands/logs.ts b/packages/catalyst/src/cli/commands/logs.ts new file mode 100644 index 0000000000..bf2ad2371d --- /dev/null +++ b/packages/catalyst/src/cli/commands/logs.ts @@ -0,0 +1,279 @@ +import { Command, Option } from 'commander'; +import { colorize } from 'consola/utils'; +import { z } from 'zod'; + +import { consola } from '../lib/logger'; +import { + 
  accessTokenOption,
+  apiHostOption,
+  projectUuidOption,
+  resolveProjectUuid,
+  storeHashOption,
+} from '../lib/shared-options';
+import { Telemetry } from '../lib/telemetry';
+
+type LogFormat = 'json' | 'pretty' | 'default' | 'short' | 'request';
+
+const telemetry = new Telemetry();
+
+const DEFAULT_CONNECTION_TTL_MS = 1 * 60 * 1000; // 1 minute
+const MAX_RETRIES = 5;
+
+const isFatalStatusCode = (status: number) => status >= 400 && status < 500;
+
+const LEVEL_COLORS: Record<string, Parameters<typeof colorize>[0]> = {
+  INFO: 'green',
+  WARN: 'yellow',
+  ERROR: 'red',
+  DEBUG: 'gray',
+};
+
+const LogEventSchema = z
+  .object({
+    uuid: z.string(),
+    project_uuid: z.string(),
+    request: z.object({
+      method: z.string(),
+      url: z.string(),
+      status_code: z.number(),
+    }),
+    logs: z.array(
+      z.object({
+        timestamp: z.string(),
+        level: z.string(),
+        messages: z.array(z.unknown()),
+      }),
+    ),
+    exceptions: z.array(z.unknown()),
+    timestamp: z.string(),
+  })
+  .loose();
+
+class StreamError extends Error {
+  fatal: boolean;
+
+  constructor(message: string, fatal: boolean) {
+    super(message);
+    this.fatal = fatal;
+  }
+}
+
+const formatMessages = (messages: unknown[]) =>
+  messages.map((m) => (typeof m === 'string' ? m : JSON.stringify(m))).join(' ');
+
+const formatLogEvent = (
+  event: z.infer<typeof LogEventSchema>,
+  format: 'default' | 'short' | 'request',
+) => {
+  const { request, logs: logEntries, exceptions } = event;
+
+  logEntries.forEach((entry) => {
+    const msg = formatMessages(entry.messages);
+    const level = entry.level.toUpperCase();
+    const coloredLevel = colorize(LEVEL_COLORS[level] ?? 
'white', level); + + switch (format) { + case 'short': + consola.log(msg); + break; + + case 'request': + consola.log( + `[${entry.timestamp}] [${coloredLevel}] ${request.method} ${request.url}` + + ` (${request.status_code}) ${msg}`, + ); + break; + + default: + consola.log(`[${entry.timestamp}] [${coloredLevel}] ${msg}`); + break; + } + }); + + exceptions.forEach((exception) => { + consola.error(`[${event.timestamp}] EXCEPTION`, exception); + }); +}; + +export const parseSSEEvent = (raw: string): string | null => { + const joined = raw + .split('\n') + .flatMap((line) => (line.startsWith('data:') ? [line.slice(5).trim()] : [])) + .join('\n'); + + return joined.length > 0 ? joined : null; +}; + +const processLogEvent = (event: string, format: LogFormat) => { + if (format === 'json') { + process.stdout.write(`${event}\n`); + + return; + } + + try { + const parsed: unknown = JSON.parse(event); + const logEvent = LogEventSchema.parse(parsed); + + if (format === 'pretty') { + consola.log(JSON.stringify(logEvent, null, 2)); + } else { + formatLogEvent(logEvent, format); + } + } catch { + consola.warn(`Failed to parse log event: ${event}`); + } +}; + +const openLogStream = async ( + projectUuid: string, + storeHash: string, + accessToken: string, + apiHost: string, +) => { + const response = await fetch( + `https://${apiHost}/stores/${storeHash}/v3/infrastructure/logs/${projectUuid}/tail`, + { + method: 'GET', + headers: { + 'X-Auth-Token': accessToken, + Accept: 'text/event-stream', + Connection: 'keep-alive', + }, + }, + ); + + if (!response.ok) { + throw new StreamError( + `Failed to open log stream: ${response.status} ${response.statusText}`, + isFatalStatusCode(response.status), + ); + } + + const reader = response.body?.getReader(); + + if (!reader) { + throw new StreamError('Failed to read log stream.', true); + } + + return reader; +}; + +export const tailLogs = async ( + projectUuid: string, + storeHash: string, + accessToken: string, + apiHost: string, + 
format: LogFormat, + connectionTtlMs = DEFAULT_CONNECTION_TTL_MS, +) => { + consola.info('Tailing logs...'); + + let retries = 0; + + // eslint-disable-next-line no-constant-condition, @typescript-eslint/no-unnecessary-condition + while (true) { + try { + // eslint-disable-next-line no-await-in-loop + const reader = await openLogStream(projectUuid, storeHash, accessToken, apiHost); + const decoder = new TextDecoder(); + const connectTime = Date.now(); + let buffer = ''; + + retries = 0; + + // eslint-disable-next-line no-constant-condition, @typescript-eslint/no-unnecessary-condition + while (true) { + // eslint-disable-next-line no-await-in-loop + const { value, done: streamDone } = await reader.read(); + + if (value) { + buffer += decoder.decode(value, { stream: true }); + + const parts = buffer.split('\n\n'); + + // Last element is either empty (complete event) or a partial chunk to carry over + buffer = parts.pop() ?? ''; + + parts + .map((raw) => parseSSEEvent(raw)) + .filter((event): event is string => event !== null) + .forEach((event) => processLogEvent(event, format)); + } + + if (streamDone || Date.now() - connectTime >= connectionTtlMs) { + void reader.cancel(); + break; + } + } + } catch (error) { + if (error instanceof StreamError && error.fatal) { + throw error; + } + + const isServerDisconnect = error instanceof TypeError && error.message === 'terminated'; + + if (isServerDisconnect) { + consola.warn('Log stream closed by server, reconnecting...'); + } else { + retries += 1; + + if (retries >= MAX_RETRIES) { + throw new Error(`Failed to connect to log stream after ${MAX_RETRIES} retries.`); + } + + consola.warn( + `Log stream disconnected, reconnecting (attempt ${retries}/${MAX_RETRIES})...`, + error, + ); + } + } + } +}; + +const tail = new Command('tail') + .description('Tail live logs from your deployed application.') + .addOption(storeHashOption().makeOptionMandatory()) + .addOption(accessTokenOption().makeOptionMandatory()) + 
  .addOption(apiHostOption())
+  .addOption(projectUuidOption())
+  .addOption(
+    new Option('--format <format>', 'Output format for log events.')
+      .choices(['json', 'pretty', 'default', 'short', 'request'])
+      .default('default'),
+  )
+  .action(async (options) => {
+    try {
+      await telemetry.identify(options.storeHash);
+
+      const projectUuid = resolveProjectUuid(options);
+
+      await tailLogs(
+        projectUuid,
+        options.storeHash,
+        options.accessToken,
+        options.apiHost,
+        options.format,
+      );
+    } catch (error) {
+      consola.error(error);
+      process.exit(1);
+    }
+  });
+
+const query = new Command('query')
+  .description('Query historical logs from your deployed application.')
+  .addOption(storeHashOption().makeOptionMandatory())
+  .addOption(accessTokenOption().makeOptionMandatory())
+  .addOption(apiHostOption())
+  .addOption(projectUuidOption())
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  .action((_options) => {
+    consola.error('The query command is not yet implemented.');
+    process.exit(1);
+  });
+
+export const logs = new Command('logs')
+  .description('View logs from your deployed application.')
+  .addCommand(tail, { isDefault: true })
+  .addCommand(query);
diff --git a/packages/catalyst/src/cli/index.spec.ts b/packages/catalyst/src/cli/index.spec.ts
index d028081917..7eca4ec7da 100644
--- a/packages/catalyst/src/cli/index.spec.ts
+++ b/packages/catalyst/src/cli/index.spec.ts
@@ -29,6 +29,7 @@ describe('CLI program', () => {
     expect(commands).toContain('deploy');
     expect(commands).toContain('project');
     expect(commands).toContain('auth');
+    expect(commands).toContain('logs');
 
     const projectCmd = program.commands.find((cmd) => cmd.name() === 'project');
 
@@ -41,6 +42,12 @@ describe('CLI program', () => {
     expect(authCmd?.commands.map((c) => c.name())).toEqual(
       expect.arrayContaining(['whoami', 'login', 'logout']),
     );
+
+    const logsCmd = program.commands.find((cmd) => cmd.name() === 'logs');
+
+    expect(logsCmd?.commands.map((c) => c.name())).toEqual(
+
      expect.arrayContaining(['tail', 'query']),
+    );
+  });
 
   test('telemetry hooks are called when executing version command', async () => {
diff --git a/packages/catalyst/src/cli/lib/shared-options.ts b/packages/catalyst/src/cli/lib/shared-options.ts
new file mode 100644
index 0000000000..e0d29d802c
--- /dev/null
+++ b/packages/catalyst/src/cli/lib/shared-options.ts
@@ -0,0 +1,39 @@
+import { Option } from 'commander';
+
+import { getProjectConfig } from './project-config';
+
+export const storeHashOption = () =>
+  new Option(
+    '--store-hash <hash>',
+    'BigCommerce store hash. Can be found in the URL of your store Control Panel.',
+  ).env('BIGCOMMERCE_STORE_HASH');
+
+export const accessTokenOption = () =>
+  new Option(
+    '--access-token <token>',
+    'BigCommerce access token. Can be found after creating a store-level API account.',
+  ).env('BIGCOMMERCE_ACCESS_TOKEN');
+
+export const apiHostOption = () =>
+  new Option('--api-host <host>', 'BigCommerce API host. The default is api.bigcommerce.com.')
+    .env('BIGCOMMERCE_API_HOST')
+    .default('api.bigcommerce.com');
+
+export const projectUuidOption = () =>
+  new Option(
+    '--project-uuid <uuid>',
+    'BigCommerce infrastructure project UUID. Can be found via the BigCommerce API (GET /v3/infrastructure/projects).',
+  ).env('BIGCOMMERCE_PROJECT_UUID');
+
+export const resolveProjectUuid = (options: { projectUuid?: string }) => {
+  const config = getProjectConfig();
+  const projectUuid = options.projectUuid ?? config.get('projectUuid');
+
+  if (!projectUuid) {
+    throw new Error(
+      'Project UUID is required. 
Please run either `catalyst project link` or `catalyst project create` or this command again with --project-uuid <uuid>.',
+    );
+  }
+
+  return projectUuid;
+};
diff --git a/packages/catalyst/src/cli/program.ts b/packages/catalyst/src/cli/program.ts
index ed451a3c24..782be2d3d3 100644
--- a/packages/catalyst/src/cli/program.ts
+++ b/packages/catalyst/src/cli/program.ts
@@ -9,6 +9,7 @@ import PACKAGE_INFO from '../../package.json';
 import { auth } from './commands/auth';
 import { build } from './commands/build';
 import { deploy } from './commands/deploy';
+import { logs } from './commands/logs';
 import { project } from './commands/project';
 import { start } from './commands/start';
 import { telemetry } from './commands/telemetry';
@@ -60,6 +61,7 @@ program
   .addCommand(start)
   .addCommand(build)
   .addCommand(deploy)
+  .addCommand(logs)
   .addCommand(project)
   .addCommand(auth)
   .addCommand(telemetry)
diff --git a/packages/catalyst/tests/mocks/handlers.ts b/packages/catalyst/tests/mocks/handlers.ts
index a63e8113e6..c16ea054db 100644
--- a/packages/catalyst/tests/mocks/handlers.ts
+++ b/packages/catalyst/tests/mocks/handlers.ts
@@ -108,6 +108,25 @@ export const handlers = [
     });
   }),
 
+  // Handler for log tailing
+  http.get('https://:apiHost/stores/:storeHash/v3/infrastructure/logs/:projectUuid/tail', () => {
+    const stream = new ReadableStream({
+      start(controller) {
+        controller.enqueue(
+          encoder.encode(
+            'data: {"uuid":"0f258256-0a83-4704-a456-03e99b4445c2","project_uuid":"6b202364-10f3-11f1-8bc7-fe9b9d8b14ab","request":{"method":"GET","url":"https://example.com/test","status_code":200},"logs":[{"timestamp":"2026-03-11T22:05:28.870Z","level":"info","messages":["hello world"]}],"exceptions":[],"timestamp":"2026-03-11T22:05:28.870Z"}\n\n',
+          ),
+        );
+        setTimeout(() => controller.close(), 10);
+      },
+    });
+
+    return new HttpResponse(stream, {
+      status: 200,
+      headers: { 'Content-Type': 'text/event-stream' },
+    });
+  }),
+
   // Handle for createProjects
http.post('https://:apiHost/stores/:storeHash/v3/infrastructure/projects', () => HttpResponse.json({