Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 36 additions & 6 deletions lib/prompts/codex.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@ import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
import type { CacheMetadata, GitHubRelease } from "../types.js";

// URLs used to resolve the latest Codex release tag from GitHub.
const GITHUB_API_RELEASES =
"https://api.github.com/repos/openai/codex/releases/latest";
// HTML fallback endpoint; GitHub redirects it to /releases/tag/<tag>.
const GITHUB_HTML_RELEASES =
"https://github.com/openai/codex/releases/latest";
// Cache directory under the user's home (~/.opencode/cache).
const CACHE_DIR = join(homedir(), ".opencode", "cache");

// ESM equivalent of CommonJS __filename, resolved from import.meta.url.
const __filename = fileURLToPath(import.meta.url);
Expand Down Expand Up @@ -61,11 +62,40 @@ export function getModelFamily(normalizedModel: string): ModelFamily {
* @returns Release tag name (e.g., "rust-v0.43.0")
*/
/**
 * Resolve the latest Codex release tag (e.g. "rust-v0.43.0").
 *
 * Strategy, in order:
 *  1. GitHub REST API (unauthenticated; may fail or be rate-limited).
 *  2. The /releases/latest HTML endpoint's post-redirect URL —
 *     GitHub redirects it to /releases/tag/<tag>.
 *  3. Scraping the release page HTML for a tag link.
 *
 * @returns The release tag name.
 * @throws Error when every strategy fails to yield a tag.
 */
async function getLatestReleaseTag(): Promise<string> {
  try {
    const response = await fetch(GITHUB_API_RELEASES);
    if (response.ok) {
      const data = (await response.json()) as GitHubRelease;
      if (data.tag_name) {
        return data.tag_name;
      }
    }
  } catch {
    // Network or JSON-parse failure — fall through to the HTML fallback.
  }

  const htmlResponse = await fetch(GITHUB_HTML_RELEASES);
  if (!htmlResponse.ok) {
    throw new Error(
      `Failed to fetch latest release: ${htmlResponse.status}`,
    );
  }

  // Preferred fallback: read the tag from the final (post-redirect) URL.
  const finalUrl = htmlResponse.url;
  if (finalUrl) {
    const marker = "/tag/";
    const markerIndex = finalUrl.lastIndexOf(marker);
    if (markerIndex !== -1) {
      // Strip any query string or fragment before validating the tag.
      const candidate = finalUrl
        .slice(markerIndex + marker.length)
        .split(/[?#]/, 1)[0];
      if (candidate && !candidate.includes("/")) {
        return candidate;
      }
    }
  }

  // Last resort: scrape the HTML body for a release-tag link.
  const html = await htmlResponse.text();
  const match = html.match(/\/openai\/codex\/releases\/tag\/([^"]+)/);
  if (match?.[1]) {
    return match[1];
  }

  throw new Error("Failed to determine latest release tag from GitHub");
}

/**
Expand Down
8 changes: 8 additions & 0 deletions lib/request/request-transformer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -440,6 +440,14 @@ export async function transformRequestBody(
// DEFAULT MODE: Keep original behavior with tool remap message
body.input = addToolRemapMessage(body.input, !!body.tools);
}

if (!body.tools && body.input) {
body.input = body.input.filter(
(item) =>
item.type !== "function_call" &&
item.type !== "function_call_output",
);
}
}

// Configure reasoning (use normalized model family + model-specific config)
Expand Down
18 changes: 18 additions & 0 deletions test/request-transformer.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -847,6 +847,24 @@ describe('Request Transformer Module', () => {
expect(result.reasoning?.effort).toBe('medium');
});

// Verifies that transformRequestBody strips function_call and
// function_call_output input items when the request declares no tools,
// leaving only the plain message items.
it('should drop function_call items when no tools present', async () => {
const body: RequestBody = {
model: 'gpt-5-codex',
input: [
{ type: 'message', role: 'user', content: 'hello' },
// 'as any': these fixture items omit fields the input item type requires.
{ type: 'function_call', role: 'assistant', name: 'write', arguments: '{}' } as any,
{ type: 'function_call_output', role: 'assistant', call_id: 'call_1', output: '{}' } as any,
],
};

const result = await transformRequestBody(body, codexInstructions);

// Only the user message should survive; tools stay undefined.
expect(result.tools).toBeUndefined();
expect(result.input).toHaveLength(1);
expect(result.input![0].type).toBe('message');
expect(result.input![0].role).toBe('user');
});

describe('CODEX_MODE parameter', () => {
it('should use bridge message when codexMode=true and tools present (default)', async () => {
const body: RequestBody = {
Expand Down