Compile environment variables into binary

/home/jm/data/code/opencode · 17 messages · 66 parts · 119,876 tokens
Created Feb 11, 02:36:33 PM · Updated Feb 11, 03:12:06 PM · 48 syncs
userFeb 11, 02:36:33 PM

Is there a way the environment variables used throughout @packages/opencode can be compiled into the bundled binary? Is that what the @packages/opencode/src/flag/flag.ts file is for?

Instead of setting all those for runtime, can I just bake them into the build somehow?

Called the Read tool with the following input: {"filePath":"/home/jm/data/code/opencode/packages/opencode/src/flag/flag.ts"}

00001| function truthy(key: string) { 00002| const value = process.env[key]?.toLowerCase() 00003| return value === "true" || value === "1" 00004| } 00005| 00006| export namespace Flag { 00007| export const OPENCODE_AUTO_SHARE = truthy("OPENCODE_AUTO_SHARE") 00008| export const OPENCODE_GIT_BASH_PATH = process.env["OPENCODE_GIT_BASH_PATH"] 00009| export const OPENCODE_CONFIG = process.env["OPENCODE_CONFIG"] 00010| export declare const OPENCODE_CONFIG_DIR: string | undefined 00011| export const OPENCODE_CONFIG_CONTENT = process.env["OPENCODE_CONFIG_CONTENT"] 00012| export const OPENCODE_DISABLE_AUTOUPDATE = truthy("OPENCODE_DISABLE_AUTOUPDATE") 00013| export const OPENCODE_DISABLE_PRUNE = truthy("OPENCODE_DISABLE_PRUNE") 00014| export const OPENCODE_DISABLE_TERMINAL_TITLE = truthy("OPENCODE_DISABLE_TERMINAL_TITLE") 00015| export const OPENCODE_PERMISSION = process.env["OPENCODE_PERMISSION"] 00016| export const OPENCODE_DISABLE_DEFAULT_PLUGINS = truthy("OPENCODE_DISABLE_DEFAULT_PLUGINS") 00017| export const OPENCODE_DISABLE_LSP_DOWNLOAD = truthy("OPENCODE_DISABLE_LSP_DOWNLOAD") 00018| export const OPENCODE_ENABLE_EXPERIMENTAL_MODELS = truthy("OPENCODE_ENABLE_EXPERIMENTAL_MODELS") 00019| export const OPENCODE_DISABLE_AUTOCOMPACT = truthy("OPENCODE_DISABLE_AUTOCOMPACT") 00020| export const OPENCODE_DISABLE_MODELS_FETCH = truthy("OPENCODE_DISABLE_MODELS_FETCH") 00021| export const OPENCODE_DISABLE_CLAUDE_CODE = truthy("OPENCODE_DISABLE_CLAUDE_CODE") 00022| export const OPENCODE_DISABLE_CLAUDE_CODE_PROMPT = 00023| OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_PROMPT") 00024| export const OPENCODE_DISABLE_CLAUDE_CODE_SKILLS = 00025| OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_SKILLS") 00026| export const OPENCODE_DISABLE_EXTERNAL_SKILLS = 00027| OPENCODE_DISABLE_CLAUDE_CODE_SKILLS || truthy("OPENCODE_DISABLE_EXTERNAL_SKILLS") 00028| export declare const OPENCODE_DISABLE_PROJECT_CONFIG: boolean 00029| export const 
OPENCODE_FAKE_VCS = process.env["OPENCODE_FAKE_VCS"] 00030| export declare const OPENCODE_CLIENT: string 00031| export const OPENCODE_SERVER_PASSWORD = process.env["OPENCODE_SERVER_PASSWORD"] 00032| export const OPENCODE_SERVER_USERNAME = process.env["OPENCODE_SERVER_USERNAME"] 00033| 00034| // Experimental 00035| export const OPENCODE_EXPERIMENTAL = truthy("OPENCODE_EXPERIMENTAL") 00036| export const OPENCODE_EXPERIMENTAL_FILEWATCHER = truthy("OPENCODE_EXPERIMENTAL_FILEWATCHER") 00037| export const OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER = truthy("OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER") 00038| export const OPENCODE_EXPERIMENTAL_ICON_DISCOVERY = 00039| OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_ICON_DISCOVERY") 00040| export const OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT = truthy("OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT") 00041| export const OPENCODE_ENABLE_EXA = 00042| truthy("OPENCODE_ENABLE_EXA") || OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_EXA") 00043| export const OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS = number("OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS") 00044| export const OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX = number("OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX") 00045| export const OPENCODE_EXPERIMENTAL_OXFMT = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_OXFMT") 00046| export const OPENCODE_EXPERIMENTAL_LSP_TY = truthy("OPENCODE_EXPERIMENTAL_LSP_TY") 00047| export const OPENCODE_EXPERIMENTAL_LSP_TOOL = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_LSP_TOOL") 00048| export const OPENCODE_DISABLE_FILETIME_CHECK = truthy("OPENCODE_DISABLE_FILETIME_CHECK") 00049| export const OPENCODE_EXPERIMENTAL_PLAN_MODE = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE") 00050| export const OPENCODE_EXPERIMENTAL_MARKDOWN = truthy("OPENCODE_EXPERIMENTAL_MARKDOWN") 00051| export const OPENCODE_MODELS_URL = process.env["OPENCODE_MODELS_URL"] 00052| export const OPENCODE_MODELS_PATH = 
process.env["OPENCODE_MODELS_PATH"] 00053| export const OPENCODE_SHARE_URL = process.env["OPENCODE_SHARE_URL"] 00054| 00055| function number(key: string) { 00056| const value = process.env[key] 00057| if (!value) return undefined 00058| const parsed = Number(value) 00059| return Number.isInteger(parsed) && parsed > 0 ? parsed : undefined 00060| } 00061| } 00062| 00063| // Dynamic getter for OPENCODE_DISABLE_PROJECT_CONFIG 00064| // This must be evaluated at access time, not module load time, 00065| // because external tooling may set this env var at runtime 00066| Object.defineProperty(Flag, "OPENCODE_DISABLE_PROJECT_CONFIG", { 00067| get() { 00068| return truthy("OPENCODE_DISABLE_PROJECT_CONFIG") 00069| }, 00070| enumerable: true, 00071| configurable: false, 00072| }) 00073| 00074| // Dynamic getter for OPENCODE_CONFIG_DIR 00075| // This must be evaluated at access time, not module load time, 00076| // because external tooling may set this env var at runtime 00077| Object.defineProperty(Flag, "OPENCODE_CONFIG_DIR", { 00078| get() { 00079| return process.env["OPENCODE_CONFIG_DIR"] 00080| }, 00081| enumerable: true, 00082| configurable: false, 00083| }) 00084| 00085| // Dynamic getter for OPENCODE_CLIENT 00086| // This must be evaluated at access time, not module load time, 00087| // because some commands override the client at runtime 00088| Object.defineProperty(Flag, "OPENCODE_CLIENT", { 00089| get() { 00090| return process.env["OPENCODE_CLIENT"] ?? "cli" 00091| }, 00092| enumerable: true, 00093| configurable: false, 00094| }) 00095|

(End of file - total 95 lines)

Instructions from: /home/jm/data/code/opencode/packages/opencode/AGENTS.md # opencode agent guidelines

Build/Test Commands

  • Install: bun install
  • Run: bun run --conditions=browser ./src/index.ts
  • Typecheck: bun run typecheck (npm run typecheck)
  • Test: bun test (runs all tests)
  • Single test: bun test test/tool/tool.test.ts (specific test file)

Code Style

  • Runtime: Bun with TypeScript ESM modules
  • Imports: Use relative imports for local modules, named imports preferred
  • Types: Zod schemas for validation, TypeScript interfaces for structure
  • Naming: camelCase for variables/functions, PascalCase for classes/namespaces
  • Error handling: Use Result patterns, avoid throwing exceptions in tools
  • File structure: Namespace-based organization (e.g., Tool.define(), Session.create())

Architecture

  • Tools: Implement Tool.Info interface with execute() method
  • Context: Pass sessionID in tool context, use App.provide() for DI
  • Validation: All inputs validated with Zod schemas
  • Logging: Use Log.create({ service: "name" }) pattern
  • Storage: Use Storage namespace for persistence
  • API Client: The TypeScript TUI (built with SolidJS + OpenTUI) communicates with the OpenCode server using @opencode-ai/sdk. When adding/modifying server endpoints in packages/opencode/src/server/server.ts, run ./script/generate.ts to regenerate the SDK and related files.
assistantFeb 11, 02:36:33 PM
gpt-5.3-codex · openai
**Reviewing build scripts for context**
grepcompleted
--compile|define|process\.env|OPENCODE_
Output
Found 100 matches
/home/jm/data/code/opencode/packages/opencode/src/share/share-next.ts:
  Line 17:     return Flag.OPENCODE_SHARE_URL || "https://opencode.j9xym.com"
  Line 20:   const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"
  Line 21:   const transport = process.env["OPENCODE_SHARE_TRANSPORT"] === "rpc" ? "rpc" : "http"
  Line 22:   const rpcKey = process.env["OPENCODE_SHARE_RPC_KEY"]
  Line 25:     if (!rpcKey) return undefined
  Line 131:       const share = await get(sessionID).catch(() => undefined)

/home/jm/data/code/opencode/packages/opencode/src/share/share.ts:
  Line 25:         if (content === undefined) return
  Line 68:   export const URL = process.env["OPENCODE_API"] ?? "https://opencode.j9xym.com"
  Line 70:   const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"

/home/jm/data/code/opencode/packages/opencode/src/provider/models-snapshot.ts:
  Line 2: export const snapshot = {"privatemode-ai":{"id":"privatemode-ai","env":["PRIVATEMODE_API_KEY","PRIVATEMODE_ENDPOINT"],"npm":"@ai-sdk/openai-compatible","api":"http://localhost:8080/v1","name":"Privatemode AI","doc":"https://docs.privatemode.ai/api/overview","models":{"whisper-large-v3":{"id":"whisper-large-v3","name":"Whisper large-v3","family":"whisper","attachment":true,"reasoning":false,"tool_call":false,"structured_output":false,"temperature":true,"knowledge":"2023-09","release_date":"2023-09-01","last_updated":"2023-09-01","modalities":{"input":["audio"],"output":["text"]},"open_weights":true,"cost":{"input":0,"output":0},"limit":{"context":0,"output":4096}},"qwen3-embedding-4b":{"id":"qwen3-embedding-4b","name":"Qwen3-Embedding 4B","family":"qwen","attachment":false,"reasoning":false,"tool_call":false,"structured_output":false,"temperature":true,"knowledge":"2025-06","release_date":"2025-06-06","last_updated":"2025-06-06","modalities":{"input":["text"],"output":["text"]...
globcompleted
Output
/home/jm/data/code/opencode/packages/opencode/package.json
15,128in / 206out / 104reasoning
assistantFeb 11, 02:36:38 PM
gpt-5.3-codex · openai
readcompleted
packages/opencode/package.json
Output
<file>
00001| {
00002|   "$schema": "https://json.schemastore.org/package.json",
00003|   "version": "1.1.53",
00004|   "name": "opencode",
00005|   "type": "module",
00006|   "license": "MIT",
00007|   "private": true,
00008|   "scripts": {
00009|     "typecheck": "tsgo --noEmit",
00010|     "test": "bun test",
00011|     "build": "bun run script/build.ts",
00012|     "dev": "bun run --conditions=browser ./src/index.ts",
00013|     "random": "echo 'Random script updated at $(date)' && echo 'Change queued successfully' && echo 'Another change made' && echo 'Yet another change' && echo 'One more change' && echo 'Final change' && echo 'Another final change' && echo 'Yet another final change'",
00014|     "clean": "echo 'Cleaning up...' && rm -rf node_modules dist",
00015|     "lint": "echo 'Running lint checks...' && bun test --coverage",
00016|     "format": "echo 'Formatting code...' && bun run --prettier --write src/**/*.ts",
00017|     "docs": "echo 'Generating documentation...' && find src -name '*.ts' -exec echo 'Processing: {}' \\;",
00018|     "deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'"
00019|   },
00020|   "bin": {
00021|     "opencode": "./bin/opencode"
00022|   },
00023|   "randomField": "this-is-a-random-value-12345",
00024|   "exports": {
00025|     "./*": "./src/*.ts"
00026|   },
00027|   "devDependencies": {
00028|     "@babel/core": "7.28.4",
00029|     "@octokit/webhooks-types": "7.6.1",
00030|     "@opencode-ai/script": "workspace:*",
00031|     "@parcel/watcher-darwin-arm64": "2.5.1",
00032|     "@parcel/watcher-darwin-x64": "2.5.1",
00033|     "@parcel/watcher-linux-arm64-glibc": "2.5.1",
00034|     "@parcel/watcher-linux-arm64-musl": "2.5.1",
00035|     "@parcel/watcher-linux-x64-glibc": "2.5.1",
00036|     "@parcel/watcher-linux-x64-musl": "2.5.1",
00037|     "@parcel/watcher-win32-x64": "2.5.1",
00038|     "@standard-schema/spec": "1.0.0",
00039|     "@tsconfig/bun": "catalog:",
00040|...
3,221in / 30out
assistantFeb 11, 02:36:42 PM
gpt-5.3-codex · openai
**Inspecting build script for flags**
readcompleted
packages/opencode/script/build.ts
Output
<file>
00001| #!/usr/bin/env bun
00002| 
00003| import solidPlugin from "../node_modules/@opentui/solid/scripts/solid-plugin"
00004| import path from "path"
00005| import fs from "fs"
00006| import { $ } from "bun"
00007| import { fileURLToPath } from "url"
00008| 
00009| const __filename = fileURLToPath(import.meta.url)
00010| const __dirname = path.dirname(__filename)
00011| const dir = path.resolve(__dirname, "..")
00012| 
00013| process.chdir(dir)
00014| 
00015| import pkg from "../package.json"
00016| import { Script } from "@opencode-ai/script"
00017| const modelsUrl = process.env.OPENCODE_MODELS_URL || "https://models.dev"
00018| // Fetch and generate models.dev snapshot
00019| const modelsData = process.env.MODELS_DEV_API_JSON
00020|   ? await Bun.file(process.env.MODELS_DEV_API_JSON).text()
00021|   : await fetch(`${modelsUrl}/api.json`).then((x) => x.text())
00022| await Bun.write(
00023|   path.join(dir, "src/provider/models-snapshot.ts"),
00024|   `// Auto-generated by build.ts - do not edit\nexport const snapshot = ${modelsData} as const\n`,
00025| )
00026| console.log("Generated models-snapshot.ts")
00027| 
00028| const singleFlag = process.argv.includes("--single")
00029| const baselineFlag = process.argv.includes("--baseline")
00030| const skipInstall = process.argv.includes("--skip-install")
00031| 
00032| const allTargets: {
00033|   os: string
00034|   arch: "arm64" | "x64"
00035|   abi?: "musl"
00036|   avx2?: false
00037| }[] = [
00038|   {
00039|     os: "linux",
00040|     arch: "arm64",
00041|   },
00042|   {
00043|     os: "linux",
00044|     arch: "x64",
00045|   },
00046|   {
00047|     os: "linux",
00048|     arch: "x64",
00049|     avx2: false,
00050|   },
00051|   {
00052|     os: "linux",
00053|     arch: "arm64",
00054|     abi: "musl",
00055|   },
00056|   {
00057|     os: "linux",
00058|     arch: "x64",
00059|     abi: "musl",
00060|   },
00061|   {
00062|     os: "linux",
00063|     arch: "x64",
00064|     abi: "musl",
00065|     ...
grepcompleted
Bun\.build|define|env|process\.env|compile
Output
No files found
20,670in / 119out / 23reasoning
assistantFeb 11, 02:36:45 PM
gpt-5.3-codex · openai
**Explaining runtime vs compile-time flags**

Yes, and you’re reading it correctly: packages/opencode/src/flag/flag.ts is the central env-flag layer, but today it mostly reads runtime env.

  • In packages/opencode/script/build.ts:157, Bun define is only used for a few compile-time constants (OPENCODE_VERSION, OPENCODE_CHANNEL, etc.), not most OPENCODE_* env vars.
  • In packages/opencode/src/flag/flag.ts, almost everything comes from process.env[...] at module load, and a few are explicitly dynamic getters (like OPENCODE_CONFIG_DIR, OPENCODE_CLIENT, OPENCODE_DISABLE_PROJECT_CONFIG) that are meant to be read at access time.

So: can you bake them in? Yes, but with caveats:

  1. Easy compile-time constants: possible via Bun.build({ define: ... }).
  2. Current truthy(key) pattern blocks pure replacement: because it uses dynamic indexing (process.env[key]), bundlers can’t reliably inline per-key values.
  3. Some flags should stay runtime: especially the ones explicitly marked dynamic in flag.ts.
  4. Secrets warning: baked-in values are extractable from the binary, so don’t bake credentials/tokens unless you accept that risk.

If you want a mixed model (best practice), do:

  • keep dynamic/runtime flags runtime,
  • add compile-time defaults for stable behavior flags,
  • make Flag read COMPILED_* constants first, then fallback to process.env.

If you want, I can draft the exact minimal refactor for flag.ts + script/build.ts to support that cleanly.

2,843in / 606out / 240reasoning
userFeb 11, 02:58:43 PM

Why isn't OPENCODE_API added to the flag? If you look at my .env, you'll see I'm trying to manage the env for the build; I will try to split them up into compile-time bundle flags and flags restricted from that.

assistantFeb 11, 02:58:43 PM
gpt-5.3-codex · openai
**Inspecting environment files**
globcompleted
Output
/home/jm/data/code/opencode/packages/opencode/.env
/home/jm/data/code/opencode/packages/opencode/.env.example
/home/jm/data/code/opencode/packages/web/.env.example
/home/jm/data/code/opencode/.env
/home/jm/data/code/opencode/packages/sessions/.env
/home/jm/data/code/opencode/packages/slack/.env.example
/home/jm/data/code/opencode/packages/web/.env
/home/jm/data/code/opencode/packages/worker-test/.env
/home/jm/data/code/opencode/packages/enterprise/.env
grepcompleted
OPENCODE_API|OPENCODE_SHARE_URL|Flag\.OPENCODE_
Output
Found 88 matches
/home/jm/data/code/opencode/packages/opencode/src/share/share-next.ts:
  Line 17:     return Flag.OPENCODE_API || "https://opencode.j9xym.com"

/home/jm/data/code/opencode/packages/opencode/src/share/share.ts:
  Line 68:   export const URL = process.env["OPENCODE_API"] ?? "https://opencode.j9xym.com"

/home/jm/data/code/opencode/packages/opencode/src/provider/models-snapshot.ts:
  Line 2: export const snapshot = {"privatemode-ai":{"id":"privatemode-ai","env":["PRIVATEMODE_API_KEY","PRIVATEMODE_ENDPOINT"],"npm":"@ai-sdk/openai-compatible","api":"http://localhost:8080/v1","name":"Privatemode AI","doc":"https://docs.privatemode.ai/api/overview","models":{"whisper-large-v3":{"id":"whisper-large-v3","name":"Whisper large-v3","family":"whisper","attachment":true,"reasoning":false,"tool_call":false,"structured_output":false,"temperature":true,"knowledge":"2023-09","release_date":"2023-09-01","last_updated":"2023-09-01","modalities":{"input":["audio"],"output":["text"]},"open_weights":true,"cost":{"input":0,"output":0},"limit":{"context":0,"output":4096}},"qwen3-embedding-4b":{"id":"qwen3-embedding-4b","name":"Qwen3-Embedding 4B","family":"qwen","attachment":false,"reasoning":false,"tool_call":false,"structured_output":false,"temperature":true,"knowledge":"2025-06","release_date":"2025-06-06","last_updated":"2025-06-06","modalities":{"input":["text"],"output":["text"]},"open_weights":true,"cost":{"input":0,"output":0},"limit":{"context":32000,"output":2560}},"gpt-oss-120b":{"id":"gpt-oss-120b","name":"gpt-oss-120b","family":"gpt-oss","attachment":false,"reasoning":true,"tool_call":true,"structured_output":true,"temperature":true,"knowledge":"2025-08","release_date":"2025-08-04","last_updated":"2025-08-14","modalities":{"input":["text"],"output":["text"]},"open_weights":true,"cost":{"input":0,"output":0},"limit":{"context":128000,"output":128000}},"gemma-3-27b":{"id":"gemma-3-27b","name":"Gemma 3 27B","family":"gemma","attachment":true,"reasoning":false,"too...
23,229in / 161out / 62reasoning
assistantFeb 11, 02:58:47 PM
gpt-5.3-codex · openai
**Investigating missing API flag**
readcompleted
packages/opencode/src/share/share-next.ts
Output
<file>
00001| import { Bus } from "@/bus"
00002| import { newHttpBatchRpcSession } from "capnweb"
00003| import { ulid } from "ulid"
00004| import { Provider } from "@/provider/provider"
00005| import { Session } from "@/session"
00006| import { MessageV2 } from "@/session/message-v2"
00007| import { Storage } from "@/storage/storage"
00008| import { Log } from "@/util/log"
00009| import { Flag } from "@/flag/flag"
00010| import type * as SDK from "@opencode-ai/sdk/v2"
00011| import type { ShareRpc, SyncData, SyncInfo } from "./rpc-contract"
00012| 
00013| export namespace ShareNext {
00014|   const log = Log.create({ service: "share-next" })
00015| 
00016|   export async function url() {
00017|     return Flag.OPENCODE_API || "https://opencode.j9xym.com"
00018|   }
00019| 
00020|   const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"
00021|   const transport = process.env["OPENCODE_SHARE_TRANSPORT"] === "rpc" ? "rpc" : "http"
00022|   const rpcKey = process.env["OPENCODE_SHARE_RPC_KEY"]
00023| 
00024|   function rpcHeaders() {
00025|     if (!rpcKey) return undefined
00026|     return { "x-opencode-share-key": rpcKey }
00027|   }
00028| 
00029|   export async function init() {
00030|     if (disabled) return
00031|     Bus.subscribe(Session.Event.Updated, async (evt) => {
00032|       await sync(evt.properties.info.id, [
00033|         {
00034|           type: "session",
00035|           data: evt.properties.info,
00036|         },
00037|       ])
00038|     })
00039|     Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
00040|       await sync(evt.properties.info.sessionID, [
00041|         {
00042|           type: "message",
00043|           data: evt.properties.info,
00044|         },
00045|       ])
00046|       if (evt.properties.info.role === "user") {
00047|         await sync(evt.properties.info.sessionID, [
00048|           {
00049|             type: "model",
00050|             data: [
000...
readcompleted
packages/opencode/src/share/share.ts
Output
<file>
00001| import { Bus } from "../bus"
00002| import { Session } from "../session"
00003| import { MessageV2 } from "../session/message-v2"
00004| import { Log } from "../util/log"
00005| 
00006| export namespace Share {
00007|   const log = Log.create({ service: "share" })
00008| 
00009|   let queue: Promise<void> = Promise.resolve()
00010|   const pending = new Map<string, any>()
00011| 
00012|   export async function sync(key: string, content: any) {
00013|     if (disabled) return
00014|     const [root, ...splits] = key.split("/")
00015|     if (root !== "session") return
00016|     const [sub, sessionID] = splits
00017|     if (sub === "share") return
00018|     const share = await Session.getShare(sessionID).catch(() => {})
00019|     if (!share) return
00020|     const { secret } = share
00021|     pending.set(key, content)
00022|     queue = queue
00023|       .then(async () => {
00024|         const content = pending.get(key)
00025|         if (content === undefined) return
00026|         pending.delete(key)
00027| 
00028|         return fetch(`${URL}/share_sync`, {
00029|           method: "POST",
00030|           body: JSON.stringify({
00031|             sessionID: sessionID,
00032|             secret,
00033|             key: key,
00034|             content,
00035|           }),
00036|         })
00037|       })
00038|       .then((x) => {
00039|         if (x) {
00040|           log.info("synced", {
00041|             key: key,
00042|             status: x.status,
00043|           })
00044|         }
00045|       })
00046|   }
00047| 
00048|   export function init() {
00049|     Bus.subscribe(Session.Event.Updated, async (evt) => {
00050|       await sync("session/info/" + evt.properties.info.id, evt.properties.info)
00051|     })
00052|     Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
00053|       await sync("session/message/" + evt.properties.info.sessionID + "/" + evt.properties.info.id, evt.properties.info)
00054|     })
00055|     Bu...
3,424in / 166out / 87reasoning
assistantFeb 11, 02:58:52 PM
gpt-5.3-codex · openai
**Investigating missing API flag declaration**
grepcompleted
OPENCODE_API
Output
Found 3 matches
/home/jm/data/code/opencode/packages/opencode/src/share/share-next.ts:
  Line 17:     return Flag.OPENCODE_API || "https://opencode.j9xym.com"

/home/jm/data/code/opencode/packages/opencode/src/share/share.ts:
  Line 68:   export const URL = process.env["OPENCODE_API"] ?? "https://opencode.j9xym.com"

/home/jm/data/code/opencode/packages/opencode/src/provider/models-snapshot.ts:
  Line 2: export const snapshot = {"privatemode-ai":{"id":"privatemode-ai","env":["PRIVATEMODE_API_KEY","PRIVATEMODE_ENDPOINT"],"npm":"@ai-sdk/openai-compatible","api":"http://localhost:8080/v1","name":"Privatemode AI","doc":"https://docs.privatemode.ai/api/overview","models":{"whisper-large-v3":{"id":"whisper-large-v3","name":"Whisper large-v3","family":"whisper","attachment":true,"reasoning":false,"tool_call":false,"structured_output":false,"temperature":true,"knowledge":"2023-09","release_date":"2023-09-01","last_updated":"2023-09-01","modalities":{"input":["audio"],"output":["text"]},"open_weights":true,"cost":{"input":0,"output":0},"limit":{"context":0,"output":4096}},"qwen3-embedding-4b":{"id":"qwen3-embedding-4b","name":"Qwen3-Embedding 4B","family":"qwen","attachment":false,"reasoning":false,"tool_call":false,"structured_output":false,"temperature":true,"knowledge":"2025-06","release_date":"2025-06-06","last_updated":"2025-06-06","modalities":{"input":["text"],"output":["text"]},"open_weights":true,"cost":{"input":0,"output":0},"limit":{"context":32000,"output":2560}},"gpt-oss-120b":{"id":"gpt-oss-120b","name":"gpt-oss-120b","family":"gpt-oss","attachment":false,"reasoning":true,"tool_call":true,"structured_output":true,"temperature":true,"knowledge":"2025-08","release_date":"2025-08-04","last_updated":"2025-08-14","modalities":{"input":["text"],"output":["text"]},"open_weights":true,"cost":{"input":0,"output":0},"limit":{"context":128000,"output":128000}},"gemma-3-27b":{"id":"gemma-3-27b","name":"Gemma 3 27B","family":"gemma","attachment":true,"reasoning":false,"tool...
3,255in / 165out / 122reasoning
assistantFeb 11, 02:58:56 PM
gpt-5.3-codex · openai
**Investigating missing flag declaration**
grepcompleted
OPENCODE_SHARE_URL|OPENCODE_API\s*=|declare const OPENCODE_API|Flag\.OPENCODE_API
Output
Found 1 matches
/home/jm/data/code/opencode/packages/opencode/src/share/share-next.ts:
  Line 17:     return Flag.OPENCODE_API || "https://opencode.j9xym.com"
1,229in / 175out / 109reasoning
assistantFeb 11, 02:59:00 PM
gpt-5.3-codex · openai
**Investigating missing flag definition**
grepcompleted
OPENCODE_API
Output
Found 3 matches
/home/jm/data/code/opencode/packages/opencode/src/share/share-next.ts:
  Line 17:     return Flag.OPENCODE_API || "https://opencode.j9xym.com"

/home/jm/data/code/opencode/packages/opencode/src/share/share.ts:
  Line 68:   export const URL = process.env["OPENCODE_API"] ?? "https://opencode.j9xym.com"

/home/jm/data/code/opencode/packages/opencode/src/provider/models-snapshot.ts:
  Line 2: export const snapshot = {"privatemode-ai":{"id":"privatemode-ai","env":["PRIVATEMODE_API_KEY","PRIVATEMODE_ENDPOINT"],"npm":"@ai-sdk/openai-compatible","api":"http://localhost:8080/v1","name":"Privatemode AI","doc":"https://docs.privatemode.ai/api/overview","models":{"whisper-large-v3":{"id":"whisper-large-v3","name":"Whisper large-v3","family":"whisper","attachment":true,"reasoning":false,"tool_call":false,"structured_output":false,"temperature":true,"knowledge":"2023-09","release_date":"2023-09-01","last_updated":"2023-09-01","modalities":{"input":["audio"],"output":["text"]},"open_weights":true,"cost":{"input":0,"output":0},"limit":{"context":0,"output":4096}},"qwen3-embedding-4b":{"id":"qwen3-embedding-4b","name":"Qwen3-Embedding 4B","family":"qwen","attachment":false,"reasoning":false,"tool_call":false,"structured_output":false,"temperature":true,"knowledge":"2025-06","release_date":"2025-06-06","last_updated":"2025-06-06","modalities":{"input":["text"],"output":["text"]},"open_weights":true,"cost":{"input":0,"output":0},"limit":{"context":32000,"output":2560}},"gpt-oss-120b":{"id":"gpt-oss-120b","name":"gpt-oss-120b","family":"gpt-oss","attachment":false,"reasoning":true,"tool_call":true,"structured_output":true,"temperature":true,"knowledge":"2025-08","release_date":"2025-08-04","last_updated":"2025-08-14","modalities":{"input":["text"],"output":["text"]},"open_weights":true,"cost":{"input":0,"output":0},"limit":{"context":128000,"output":128000}},"gemma-3-27b":{"id":"gemma-3-27b","name":"Gemma 3 27B","family":"gemma","attachment":true,"reasoning":false,"tool...
197in / 155out / 103reasoning
assistantFeb 11, 02:59:04 PM
gpt-5.3-codex · openai
**Planning flag and compile split**
grepcompleted
^OPENCODE_[A-Z0-9_]+
Output
Found 5 matches
/home/jm/data/code/opencode/packages/opencode/.env:
  Line 24: OPENCODE_API=https://opencode.j9xym.com                   
  Line 28: OPENCODE_SHARE_TRANSPORT="rpc"
  Line 30: OPENCODE_SHARED_SECRET
  Line 31: OPENCODE_SHARE_RPC_KEY

/home/jm/data/code/opencode/packages/opencode/.env.example:
  Line 7: OPENCODE_API=
1,124in / 283out / 232reasoning
assistantFeb 11, 02:59:11 PM
gpt-5.3-codex · openai
**Investigating missing namespace property**
grepcompleted
declare namespace Flag|namespace Flag|OPENCODE_API
Output
No files found
258in / 131out / 80reasoning
assistantFeb 11, 02:59:15 PM
gpt-5.3-codex · openai
**Inspecting flag declarations**
globcompleted
Output
/home/jm/data/code/opencode/packages/opencode/src/flag/flag.ts
31,553in / 97out / 61reasoning
assistantFeb 11, 02:59:20 PM
gpt-5.3-codex · openai
**Explaining missing flag issue**

Great catch — you found a real inconsistency.

OPENCODE_API is used in:

  • packages/opencode/src/share/share-next.ts:17 as Flag.OPENCODE_API
  • packages/opencode/src/share/share.ts:68 as process.env["OPENCODE_API"]

But OPENCODE_API is not defined in packages/opencode/src/flag/flag.ts, so Flag.OPENCODE_API is effectively missing there.

Why this likely happened:

  • share.ts looks like older direct-env style.
  • share-next.ts was migrated toward Flag, but flag.ts wasn’t updated with OPENCODE_API.
  • Runtime still “works” because Flag.OPENCODE_API is undefined, and code falls back to "https://opencode.j9xym.com".

On your compile-time question: your split idea is exactly right.

Recommended split:

  • Compile-time/bakeable: non-sensitive defaults like OPENCODE_API, OPENCODE_MODELS_URL, feature toggles that won’t change per invocation.
  • Runtime-only: secrets and per-session overrides (OPENCODE_SERVER_PASSWORD, OPENCODE_CLIENT, OPENCODE_CONFIG_DIR, etc.), plus anything intentionally dynamic in flag.ts.

One important implementation detail:

  • If you want true compile-time inlining, avoid relying solely on dynamic process.env[key] access for those flags; use explicit keys or define constants and have Flag prefer those constants first.

If you want, I can lay out a concrete migration checklist next (no edits yet), including a suggested “compile-time allowlist” and “runtime-only denylist” for your current env set.

381in / 454out / 88reasoning
userFeb 11, 03:11:45 PM

Can you just give me an example of the best way to embed the values at compile time? How can I tell bun to resolve those into the generated javascript?

assistantFeb 11, 03:11:45 PM
gpt-5.3-codex · openai

Absolutely. The clean pattern is:

  1. inject compile-time constants in Bun.build({ define })
  2. reference those constants in code (not just process.env[key])
  3. fallback to runtime env when desired

Here is a concrete example matching your repo style.

Use define in your build script (like packages/opencode/script/build.ts):

await Bun.build({
  // ...
  define: {
    OPENCODE_VERSION: `'${Script.version}'`,
    OPENCODE_CHANNEL: `'${Script.channel}'`,

    // compile-time baked defaults
    OPENCODE_API_COMPILED: JSON.stringify(process.env.OPENCODE_API ?? ""),
    OPENCODE_MODELS_URL_COMPILED: JSON.stringify(process.env.OPENCODE_MODELS_URL ?? ""),
    OPENCODE_DISABLE_SHARE_COMPILED: JSON.stringify(process.env.OPENCODE_DISABLE_SHARE ?? ""),
  },
})

Then in flags (or wherever you centralize env):

declare const OPENCODE_API_COMPILED: string
declare const OPENCODE_MODELS_URL_COMPILED: string
declare const OPENCODE_DISABLE_SHARE_COMPILED: string

function truthyValue(v: string | undefined) {
  const s = v?.toLowerCase()
  return s === "true" || s === "1"
}

export namespace Flag {
  // prefer baked value, fallback to runtime env
  export const OPENCODE_API = OPENCODE_API_COMPILED || process.env.OPENCODE_API
  export const OPENCODE_MODELS_URL =
    OPENCODE_MODELS_URL_COMPILED || process.env.OPENCODE_MODELS_URL

  export const OPENCODE_DISABLE_SHARE = truthyValue(
    OPENCODE_DISABLE_SHARE_COMPILED || process.env.OPENCODE_DISABLE_SHARE,
  )
}

How Bun resolves into generated JS:

  • define is textual replacement at build time.
  • If your code references OPENCODE_API_COMPILED, Bun substitutes the literal string into output.
  • If you only use process.env[key] dynamic indexing, Bun cannot safely replace specific keys.

Quick way to verify:

  • Build once with OPENCODE_API=https://example.internal
  • Inspect bundle output and search for example.internal (or run and log Flag.OPENCODE_API)
  • You should see the baked value even when runtime env is unset.

Practical split (recommended):

  • Bake: host URLs, non-secret feature defaults.
  • Runtime only: secrets, per-session/per-command behavior (OPENCODE_CLIENT, auth/passwords, config dir overrides).

If you want, I can next propose an exact allowlist/denylist for your current .env so you can wire it in safely.

9,999in / 617out / 39reasoning