Node.js integration (fetch)
This is the most version-proof way: plain HTTP with fetch.
Requirements:
- `OPENAI_API_KEY` env var (your SU8 Codes key)
- Node.js 18+ (built-in fetch)
Minimal runnable example
// Minimal runnable example
// Calls the SU8 Codes Responses endpoint with plain HTTP via the built-in fetch
// (Node.js 18+) and streams the raw response bytes to stdout.
const baseURL = 'https://www.su8.codes/codex/v1';
const apiKey = process.env.OPENAI_API_KEY;
if (!apiKey) throw new Error('Missing OPENAI_API_KEY');

async function main() {
  const resp = await fetch(`${baseURL}/responses`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      model: 'gpt-5.2',
      reasoning_effort: 'high', // Optional: low / medium / high / xhigh (for reasoning models)
      instructions: 'You are a professional coding assistant', // Equivalent to System Prompt
      stream: true, // Recommended: use streaming to avoid Cloudflare 502 timeouts on long requests
      input: [
        {
          type: 'message',
          role: 'user',
          content: [{ type: 'input_text', text: 'Hello' }],
        },
      ],
    }),
  });

  // Surface HTTP-level failures with status and body for easier debugging.
  if (!resp.ok) {
    const text = await resp.text();
    throw new Error(`${resp.status} ${resp.statusText}\n${text}`);
  }

  // Stream the body chunk-by-chunk; { stream: true } keeps multi-byte
  // UTF-8 sequences that span chunk boundaries intact.
  const reader = resp.body.getReader();
  const decoder = new TextDecoder();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    process.stdout.write(decoder.decode(value, { stream: true }));
  }
  console.log('\n');
}

// Fix: don't leave the top-level promise floating — an unhandled rejection
// would previously surface as a raw UnhandledPromiseRejection. Report the
// error and exit nonzero instead.
main().catch((err) => {
  console.error(err);
  process.exit(1);
});

// Not sure about model ids? Copy from:
Python integration (requests)
Stable and straightforward: plain HTTP.
Minimal runnable example
# Minimal runnable example
# Calls the SU8 Codes Responses endpoint over plain HTTP with `requests`,
# streaming the server-sent lines to stdout as they arrive.
import os

import requests

base_url = "https://www.su8.codes/codex/v1"
api_key = os.environ.get("OPENAI_API_KEY")
if not api_key:
    raise RuntimeError("Missing OPENAI_API_KEY")

resp = requests.post(
    f"{base_url}/responses",
    headers={
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
    },
    json={
        "model": "gpt-5.2",
        "reasoning_effort": "high",  # Optional: low / medium / high / xhigh (for reasoning models)
        "instructions": "You are a professional coding assistant",  # Equivalent to System Prompt
        "stream": True,  # Recommended: use streaming to avoid Cloudflare 502 timeouts
        "input": [
            {
                "type": "message",
                "role": "user",
                "content": [{"type": "input_text", "text": "Hello"}],
            }
        ],
    },
    stream=True,  # keep the HTTP connection open and iterate the body lazily
    timeout=60,
)

resp.raise_for_status()
for line in resp.iter_lines():
    if line:  # skip SSE keep-alive blank lines
        print(line.decode('utf-8'))
# Model ids: copy from: