File detail
utils/plugins/officialMarketplaceGcs.ts
Use case
This file lives under `utils/`, which covers cross-cutting helpers (shell, tempfiles, settings, messages, process input, …). On the API surface it exposes fetchOfficialMarketplaceFromGcs and classifyGcsError — mainly functions, hooks, or classes. Dependencies touch HTTP client, Node filesystem, and Node path helpers. It composes internal code from bootstrap, services, debug, dxt, and errors (relative imports). What the file header says: inc-5046: fetch the official marketplace from a GCS mirror instead of git-cloning GitHub on every startup. Backend (anthropic#317037) publishes a marketplace-only zip alongside the titanium squashfs, keyed by base repo SHA. This module fetches the `latest` pointer, compares against a local sentinel, and downloads+extracts the zip when there's a new SHA.
Generated from folder role, exports, dependency roots, and inline comments — not hand-reviewed for every path.
Inline summary
inc-5046: fetch the official marketplace from a GCS mirror instead of git-cloning GitHub on every startup. Backend (anthropic#317037) publishes a marketplace-only zip alongside the titanium squashfs, keyed by base repo SHA. This module fetches the `latest` pointer, compares against a local sentinel, and downloads+extracts the zip when there's a new SHA. Callers decide fallback behavior on failure.
Exports (heuristic)
fetchOfficialMarketplaceFromGcs, classifyGcsError
External import roots
Package roots from `from "…"` import specifiers (relative paths omitted).
axios, fs, path
Source preview
/**
* inc-5046: fetch the official marketplace from a GCS mirror instead of
* git-cloning GitHub on every startup.
*
* Backend (anthropic#317037) publishes a marketplace-only zip alongside the
* titanium squashfs, keyed by base repo SHA. This module fetches the `latest`
* pointer, compares against a local sentinel, and downloads+extracts the zip
* when there's a new SHA. Callers decide fallback behavior on failure.
*/
import axios from 'axios'
import { chmod, mkdir, readFile, rename, rm, writeFile } from 'fs/promises'
import { dirname, join, resolve, sep } from 'path'
import { waitForScrollIdle } from '../../bootstrap/state.js'
import type { AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS } from '../../services/analytics/index.js'
import { logEvent } from '../../services/analytics/index.js'
import { logForDebugging } from '../debug.js'
import { parseZipModes, unzipFile } from '../dxt/zip.js'
import { errorMessage, getErrnoCode } from '../errors.js'
// Alias for the analytics metadata brand type — strings logged below are cast
// through this to assert they are static enums or a git SHA, never
// code/filepaths/PII (see the logEvent call's comment in this file).
type SafeString = AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS

// CDN-fronted domain for the public GCS bucket (same bucket the native
// binary ships from — nativeInstaller/download.ts:24 uses the raw GCS URL).
// `{sha}.zip` is content-addressed so CDN can cache it indefinitely;
// `latest` has Cache-Control: max-age=300 so CDN staleness is bounded.
// Backend (anthropic#317037) populates this prefix.
const GCS_BASE =
  'https://downloads.claude.ai/claude-code-releases/plugins/claude-plugins-official'

// Zip arc paths are seed-dir-relative (marketplaces/claude-plugins-official/…)
// so the titanium seed machinery can use the same zip. Strip this prefix when
// extracting for a laptop install.
const ARC_PREFIX = 'marketplaces/claude-plugins-official/'
/**
 * Fetch the official marketplace from GCS and extract to installLocation.
 * Idempotent — checks a `.gcs-sha` sentinel before downloading the ~3.5MB zip.
 *
 * @param installLocation where to extract (must be inside marketplacesCacheDir)
 * @param marketplacesCacheDir the plugins marketplace cache root — passed in
 *   by callers (rather than imported from pluginDirectories) to break a
 *   circular-dep edge through marketplaceManager
 * @returns the fetched SHA on success (including no-op), null on any failure
 *   (network, 404, zip parse). Caller decides whether to fall through to git.
 */
export async function fetchOfficialMarketplaceFromGcs(
  installLocation: string,
  marketplacesCacheDir: string,
): Promise<string | null> {
  // Defense in depth: this function does `rm(installLocation, {recursive})`
  // during the atomic swap. A corrupted known_marketplaces.json (gh-32793 —
  // Windows path read on WSL, literal tilde, manual edit) could point at the
  // user's project. Refuse any path outside the marketplaces cache dir.
  // Same guard as refreshMarketplace() at marketplaceManager.ts:~2392 but
  // inside the function so ALL callers are covered.
  const cacheDir = resolve(marketplacesCacheDir)
  const resolvedLoc = resolve(installLocation)
  if (resolvedLoc !== cacheDir && !resolvedLoc.startsWith(cacheDir + sep)) {
    logForDebugging(
      `fetchOfficialMarketplaceFromGcs: refusing path outside cache dir: ${installLocation}`,
      { level: 'error' },
    )
    return null
  }
  // Network + zip extraction competes for the event loop with scroll frames.
  // This is a fire-and-forget startup call — delaying by a few hundred ms
  // until scroll settles is invisible to the user.
  await waitForScrollIdle()
  const start = performance.now()
  let outcome: 'noop' | 'updated' | 'failed' = 'failed'
  let sha: string | undefined
  let bytes: number | undefined
  let errKind: string | undefined
  try {
    // 1. Latest pointer — ~40 bytes, backend sets Cache-Control: no-cache,
    //    max-age=300. Cheap enough to hit every startup.
    const latest = await axios.get(`${GCS_BASE}/latest`, {
      responseType: 'text',
      timeout: 10_000,
    })
    sha = String(latest.data).trim()
    if (!sha) {
      // Empty /latest body — backend misconfigured. Bail (null), don't
      // lock into a permanently-broken empty-sentinel state.
      throw new Error('latest pointer returned empty body')
    }
    // 2. Sentinel check — `.gcs-sha` at the install root holds the last
    //    extracted SHA. Matching means we already have this content.
    const sentinelPath = join(installLocation, '.gcs-sha')
    const currentSha = await readFile(sentinelPath, 'utf8').then(
      s => s.trim(),
      () => null, // ENOENT — first fetch, proceed to download
    )
    if (currentSha === sha) {
      outcome = 'noop'
      return sha
    }
    // 3. Download zip and extract to a staging dir, then atomic-swap into
    //    place. Crash mid-extract leaves a .staging dir (next run rm's it)
    //    rather than a half-written installLocation.
    const zipResp = await axios.get(`${GCS_BASE}/${sha}.zip`, {
      responseType: 'arraybuffer',
      timeout: 60_000,
    })
    const zipBuf = Buffer.from(zipResp.data)
    bytes = zipBuf.length
    const files = await unzipFile(zipBuf)
    // fflate doesn't surface external_attr, so parse the central directory
    // ourselves to recover exec bits. Without this, hooks/scripts extract as
    // 0644 and `sh -c "/path/script.sh"` (hooks.ts:~1002) fails with EACCES
    // on Unix. Git-clone preserves +x natively; this keeps GCS at parity.
    const modes = parseZipModes(zipBuf)
    const staging = `${installLocation}.staging`
    const stagingRoot = resolve(staging)
    await rm(staging, { recursive: true, force: true })
    await mkdir(staging, { recursive: true })
    for (const [arcPath, data] of Object.entries(files)) {
      if (!arcPath.startsWith(ARC_PREFIX)) continue
      const rel = arcPath.slice(ARC_PREFIX.length)
      if (!rel || rel.endsWith('/')) continue // prefix dir entry or subdir entry
      const dest = join(staging, rel)
      // Zip-slip guard: an archive entry like `…/../../x` would resolve
      // outside the staging dir and turn extraction into an arbitrary file
      // write. The bucket is ours, but a CDN/mirror compromise shouldn't get
      // that primitive — skip any entry that escapes staging.
      if (!resolve(dest).startsWith(stagingRoot + sep)) continue
      await mkdir(dirname(dest), { recursive: true })
      await writeFile(dest, data)
      const mode = modes[arcPath]
      if (mode && mode & 0o111) {
        // Only chmod when an exec bit is set — skip plain files to save syscalls.
        // Swallow EPERM/ENOTSUP (NFS root_squash, some FUSE mounts) — losing +x
        // is the pre-PR behavior and better than aborting mid-extraction.
        await chmod(dest, mode & 0o777).catch(() => {})
      }
    }
    await writeFile(join(staging, '.gcs-sha'), sha)
    // Atomic swap: rm old, rename staging. Brief window where installLocation
    // doesn't exist — acceptable for a background refresh (caller retries next
    // startup if it crashes here).
    await rm(installLocation, { recursive: true, force: true })
    await rename(staging, installLocation)
    outcome = 'updated'
    return sha
  } catch (e) {
    errKind = classifyGcsError(e)
    logForDebugging(
      `Official marketplace GCS fetch failed: ${errorMessage(e)}`,
      { level: 'warn' },
    )
    return null
  } finally {
    // tengu_plugin_remote_fetch schema shared with the telemetry PR
    // (.daisy/inc-5046/index.md) — adds source:'marketplace_gcs'. All string
    // values below are static enums or a git SHA — not code/filepaths/PII.
    logEvent('tengu_plugin_remote_fetch', {
      source: 'marketplace_gcs' as SafeString,
      host: 'downloads.claude.ai' as SafeString,
      is_official: true,
      outcome: outcome as SafeString,
      duration_ms: Math.round(performance.now() - start),
      ...(bytes !== undefined && { bytes }),
      ...(sha && { sha: sha as SafeString }),
      ...(errKind && { error_kind: errKind as SafeString }),
    })
  }
}
// Errno codes that get their own telemetry bucket, sorted alphabetically.
// Anything outside this set collapses into fs_other so dashboard cardinality
// stays tractable.
const KNOWN_FS_CODES: ReadonlySet<string> = new Set([
  'EACCES',
  'EBUSY',
  'EMFILE',
  'ENAMETOOLONG',
  'ENOENT',
  'ENOSPC',
  'ENOTDIR',
  'EPERM',
  'EROFS',
  'EXDEV',
])
/**
 * Classify a GCS fetch error into a stable telemetry bucket.
 *
 * Telemetry from v2.1.83+ showed 50% of failures landing in 'other' — and
 * 99.99% of those had both sha+bytes set, meaning the download succeeded but
 * extraction/fs failed. Splitting that bucket shows whether the failures are
 * fixable (wrong staging dir, cross-device rename) or inherent (disk full,
 * permission denied) before flipping the git-fallback kill switch.
 */
export function classifyGcsError(e: unknown): string {
  // Network-layer errors first: timeout, HTTP status, or generic network.
  if (axios.isAxiosError(e)) {
    if (e.code === 'ECONNABORTED') return 'timeout'
    return e.response ? `http_${e.response.status}` : 'network'
  }
  // Node fs errno codes are E<UPPERCASE> (ENOSPC, EACCES). Axios also sets
  // .code (ERR_NETWORK, ERR_BAD_OPTION, EPROTO) — those must not land in the
  // fs buckets. (The underscore in ERR_* already fails the regex; the
  // explicit startsWith check keeps the intent readable.)
  const errno = getErrnoCode(e)
  if (errno && !errno.startsWith('ERR_') && /^E[A-Z]+$/.test(errno)) {
    return KNOWN_FS_CODES.has(errno) ? `fs_${errno}` : 'fs_other'
  }
  // fflate sets a numeric .code (0-14) on inflate/unzip errors — catches
  // deflate-level corruption ("unexpected EOF", "invalid block type") that
  // the message regex below misses.
  const rawCode = (e as { code?: unknown })?.code
  if (typeof rawCode === 'number') return 'zip_parse'
  const message = errorMessage(e)
  if (/unzip|invalid zip|central directory/i.test(message)) return 'zip_parse'
  return /empty body/.test(message) ? 'empty_latest' : 'other'
}