Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 6 additions & 3 deletions packages/opencode/src/cli/cmd/tui/thread.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import type { EventSource } from "./context/sdk"
import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
import { writeHeapSnapshot } from "v8"
import { TuiConfig } from "./config/tui"
import { OPENCODE_PROCESS_ROLE, OPENCODE_RUN_ID, ensureRunID, sanitizedProcessEnv } from "@/util/opencode-process"

declare global {
const OPENCODE_WORKER_PATH: string
Expand Down Expand Up @@ -129,11 +130,13 @@ export const TuiThreadCommand = cmd({
return
}
const cwd = Filesystem.resolve(process.cwd())
const env = sanitizedProcessEnv({
[OPENCODE_PROCESS_ROLE]: "worker",
[OPENCODE_RUN_ID]: ensureRunID(),
})

const worker = new Worker(file, {
env: Object.fromEntries(
Object.entries(process.env).filter((entry): entry is [string, string] => entry[1] !== undefined),
),
env,
})
worker.onerror = (e) => {
Log.Default.error("thread error", {
Expand Down
3 changes: 3 additions & 0 deletions packages/opencode/src/cli/cmd/tui/worker.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,9 @@ import { Flag } from "@/flag/flag"
import { writeHeapSnapshot } from "node:v8"
import { Heap } from "@/cli/heap"
import { AppRuntime } from "@/effect/app-runtime"
import { ensureProcessMetadata } from "@/util/opencode-process"

ensureProcessMetadata("worker")

await Log.init({
print: process.argv.includes("--print-logs"),
Expand Down
50 changes: 30 additions & 20 deletions packages/opencode/src/effect/observability.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,11 @@ import { OtlpLogger, OtlpSerialization } from "effect/unstable/observability"
import * as EffectLogger from "./logger"
import { Flag } from "@/flag/flag"
import { InstallationChannel, InstallationVersion } from "@/installation/version"
import { ensureProcessMetadata } from "@/util/opencode-process"

const base = Flag.OTEL_EXPORTER_OTLP_ENDPOINT
export const enabled = !!base
const processID = crypto.randomUUID()

const headers = Flag.OTEL_EXPORTER_OTLP_HEADERS
? Flag.OTEL_EXPORTER_OTLP_HEADERS.split(",").reduce(
Expand All @@ -19,26 +21,34 @@ const headers = Flag.OTEL_EXPORTER_OTLP_HEADERS
)
: undefined

const resource = {
serviceName: "opencode",
serviceVersion: InstallationVersion,
attributes: {
"deployment.environment.name": InstallationChannel,
"opencode.client": Flag.OPENCODE_CLIENT,
},
function resource() {
const processMetadata = ensureProcessMetadata("main")
return {
serviceName: "opencode",
serviceVersion: InstallationVersion,
attributes: {
"deployment.environment.name": InstallationChannel,
"opencode.client": Flag.OPENCODE_CLIENT,
"opencode.process_role": processMetadata.processRole,
"opencode.run_id": processMetadata.runID,
"service.instance.id": processID,
},
}
}

const logs = Logger.layer(
[
EffectLogger.logger,
OtlpLogger.make({
url: `${base}/v1/logs`,
resource,
headers,
}),
],
{ mergeWithExisting: false },
).pipe(Layer.provide(OtlpSerialization.layerJson), Layer.provide(FetchHttpClient.layer))
// Constructs the logger layer on demand so resource() — and therefore the
// process role / run id attributes — is resolved when the layer is built,
// not when this module is first imported.
function logs() {
  return Logger.layer(
    [
      EffectLogger.logger,
      OtlpLogger.make({
        url: `${base}/v1/logs`,
        resource: resource(),
        headers,
      }),
    ],
    // Replace any previously installed loggers rather than appending to them.
    { mergeWithExisting: false },
  ).pipe(Layer.provide(OtlpSerialization.layerJson), Layer.provide(FetchHttpClient.layer))
}

const traces = async () => {
const NodeSdk = await import("@effect/opentelemetry/NodeSdk")
Expand All @@ -58,7 +68,7 @@ const traces = async () => {
context.setGlobalContextManager(mgr)

return NodeSdk.layer(() => ({
resource,
resource: resource(),
spanProcessor: new SdkBase.BatchSpanProcessor(
new OTLP.OTLPTraceExporter({
url: `${base}/v1/traces`,
Expand All @@ -73,7 +83,7 @@ export const layer = !base
: Layer.unwrap(
Effect.gen(function* () {
const trace = yield* Effect.promise(traces)
return Layer.mergeAll(trace, logs)
return Layer.mergeAll(trace, logs())
}),
)

Expand Down
5 changes: 2 additions & 3 deletions packages/opencode/src/file/ripgrep.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import { ripgrep } from "ripgrep"

import { Filesystem } from "@/util"
import { Log } from "@/util"
import { sanitizedProcessEnv } from "@/util/opencode-process"

const log = Log.create({ service: "ripgrep" })

Expand Down Expand Up @@ -157,9 +158,7 @@ type WorkerError = {
}

function env() {
const env = Object.fromEntries(
Object.entries(process.env).filter((item): item is [string, string] => item[1] !== undefined),
)
const env = sanitizedProcessEnv()
delete env.RIPGREP_CONFIG_PATH
return env
}
Expand Down
5 changes: 2 additions & 3 deletions packages/opencode/src/file/ripgrep.worker.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
import { ripgrep } from "ripgrep"
import { sanitizedProcessEnv } from "@/util/opencode-process"

function env() {
const env = Object.fromEntries(
Object.entries(process.env).filter((item): item is [string, string] => item[1] !== undefined),
)
const env = sanitizedProcessEnv()
delete env.RIPGREP_CONFIG_PATH
return env
}
Expand Down
5 changes: 5 additions & 0 deletions packages/opencode/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,9 @@ import { errorMessage } from "./util/error"
import { PluginCommand } from "./cli/cmd/plug"
import { Heap } from "./cli/heap"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { ensureProcessMetadata } from "./util/opencode-process"

const processMetadata = ensureProcessMetadata("main")

process.on("unhandledRejection", (e) => {
Log.Default.error("rejection", {
Expand Down Expand Up @@ -108,6 +111,8 @@ const cli = yargs(args)
Log.Default.info("opencode", {
version: InstallationVersion,
args: process.argv.slice(2),
process_role: processMetadata.processRole,
run_id: processMetadata.runID,
})

const marker = path.join(Global.Path.data, "opencode.db")
Expand Down
14 changes: 7 additions & 7 deletions packages/opencode/src/provider/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -968,7 +968,7 @@ function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model
family: model.family,
api: {
id: model.id,
url: model.provider?.api ?? provider.api!,
url: model.provider?.api ?? provider.api ?? "",
npm: model.provider?.npm ?? provider.npm ?? "@ai-sdk/openai-compatible",
},
status: model.status ?? "active",
Expand All @@ -981,10 +981,10 @@ function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model
output: model.limit.output,
},
capabilities: {
temperature: model.temperature,
reasoning: model.reasoning,
attachment: model.attachment,
toolcall: model.tool_call,
temperature: model.temperature ?? false,
reasoning: model.reasoning ?? false,
attachment: model.attachment ?? false,
toolcall: model.tool_call ?? true,
input: {
text: model.modalities?.input?.includes("text") ?? false,
audio: model.modalities?.input?.includes("audio") ?? false,
Expand All @@ -1001,7 +1001,7 @@ function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model
},
interleaved: model.interleaved ?? false,
},
release_date: model.release_date,
release_date: model.release_date ?? "",
variants: {},
}

Expand Down Expand Up @@ -1143,7 +1143,7 @@ const layer: Layer.Layer<
existingModel?.api.npm ??
modelsDev[providerID]?.npm ??
"@ai-sdk/openai-compatible",
url: model.provider?.api ?? provider?.api ?? existingModel?.api.url ?? modelsDev[providerID]?.api,
url: model.provider?.api ?? provider?.api ?? existingModel?.api.url ?? modelsDev[providerID]?.api ?? "",
},
status: model.status ?? existingModel?.status ?? "active",
name,
Expand Down
24 changes: 24 additions & 0 deletions packages/opencode/src/util/opencode-process.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
// Environment variable names used to propagate process identity to child
// processes and workers.
export const OPENCODE_RUN_ID = "OPENCODE_RUN_ID"
export const OPENCODE_PROCESS_ROLE = "OPENCODE_PROCESS_ROLE"

/**
 * Returns the run id shared by every opencode process in this run.
 * Mints a UUID and stores it in the environment on first use so that
 * spawned children inherit the same id.
 */
export function ensureRunID() {
  const existing = process.env[OPENCODE_RUN_ID]
  if (existing !== undefined) return existing
  const minted = crypto.randomUUID()
  process.env[OPENCODE_RUN_ID] = minted
  return minted
}

/**
 * Returns this process's role, honoring a role already set in the
 * environment (e.g. by a parent process) and falling back to `fallback`
 * otherwise. The resolved role is written back to the environment.
 */
export function ensureProcessRole(fallback: "main" | "worker") {
  const current = process.env[OPENCODE_PROCESS_ROLE]
  if (current !== undefined) return current
  process.env[OPENCODE_PROCESS_ROLE] = fallback
  return fallback
}

/**
 * Resolves both pieces of process identity at once.
 * @returns the stable run id and the resolved process role.
 */
export function ensureProcessMetadata(fallback: "main" | "worker") {
  return { runID: ensureRunID(), processRole: ensureProcessRole(fallback) }
}

/**
 * Copies process.env, dropping entries whose value is undefined so the
 * result is a plain string-to-string map (suitable for Worker / spawn env
 * options), optionally overlaid with `overrides`.
 */
export function sanitizedProcessEnv(overrides?: Record<string, string>) {
  const env: Record<string, string> = {}
  for (const [key, value] of Object.entries(process.env)) {
    if (value !== undefined) env[key] = value
  }
  if (overrides) Object.assign(env, overrides)
  return env
}
34 changes: 33 additions & 1 deletion packages/opencode/test/provider/provider.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1916,7 +1916,7 @@ test("mode cost preserves over-200k pricing from base model", () => {
},
},
},
} as ModelsDev.Provider
} as unknown as ModelsDev.Provider

const model = Provider.fromModelsDevProvider(provider).models["gpt-5.4-fast"]
expect(model.cost.input).toEqual(5)
Expand All @@ -1934,6 +1934,38 @@ test("mode cost preserves over-200k pricing from base model", () => {
})
})

// A deliberately minimal models.dev payload: omits provider api/npm, the
// capability flags, and release_date so the normalization defaults in
// fromModelsDevProvider are exercised. The double cast is required because
// the literal intentionally lacks fields the ModelsDev.Provider type demands.
test("models.dev normalization fills required response fields", () => {
  const provider = {
    id: "gateway",
    name: "Gateway",
    env: [],
    models: {
      "gpt-5.4": {
        id: "gpt-5.4",
        name: "GPT-5.4",
        family: "gpt",
        cost: {
          input: 2.5,
          output: 15,
        },
        limit: {
          context: 1_050_000,
          input: 922_000,
          output: 128_000,
        },
      },
    },
  } as unknown as ModelsDev.Provider

  const model = Provider.fromModelsDevProvider(provider).models["gpt-5.4"]
  // Missing fields must normalize to their documented defaults: empty strings
  // for url/release_date, false for capabilities except toolcall (true).
  expect(model.api.url).toBe("")
  expect(model.capabilities.temperature).toBe(false)
  expect(model.capabilities.reasoning).toBe(false)
  expect(model.capabilities.attachment).toBe(false)
  expect(model.capabilities.toolcall).toBe(true)
  expect(model.release_date).toBe("")
})

test("model variants are generated for reasoning models", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
Expand Down
Loading