Remove the `--oss` flag completely

This commit is contained in:
Eva Ho
2026-03-25 12:24:06 -04:00
parent 03748eb2d7
commit b02fcc329e
3 changed files with 70 additions and 33 deletions

View File

@@ -19,7 +19,7 @@ func (c *Codex) String() string { return "Codex" }
const codexProfileName = "ollama-launch"
func (c *Codex) args(model string, extra []string) []string {
args := []string{"--oss", "--profile", codexProfileName}
args := []string{"--profile", codexProfileName}
if model != "" {
args = append(args, "-m", model)
}
@@ -63,40 +63,59 @@ func ensureCodexConfig() error {
return writeCodexProfile(configPath)
}
// writeCodexProfile ensures ~/.codex/config.toml has a [profiles.ollama-launch] section
// with the correct openai_base_url.
// writeCodexProfile ensures ~/.codex/config.toml has the ollama-launch profile
// and model provider sections with the correct base URL.
func writeCodexProfile(configPath string) error {
baseURL := envconfig.Host().String() + "/v1/"
header := fmt.Sprintf("[profiles.%s]", codexProfileName)
profileLines := []string{
header,
fmt.Sprintf("openai_base_url = %q", baseURL),
sections := []struct {
header string
lines []string
}{
{
header: fmt.Sprintf("[profiles.%s]", codexProfileName),
lines: []string{
fmt.Sprintf("openai_base_url = %q", baseURL),
`forced_login_method = "api"`,
fmt.Sprintf("model_provider = %q", codexProfileName),
},
},
{
header: fmt.Sprintf("[model_providers.%s]", codexProfileName),
lines: []string{
`name = "Ollama"`,
fmt.Sprintf("base_url = %q", baseURL),
},
},
}
profileBlock := strings.Join(profileLines, "\n") + "\n"
content, readErr := os.ReadFile(configPath)
if readErr != nil {
// File doesn't exist; create with just the profile.
return os.WriteFile(configPath, []byte(profileBlock), 0o644)
text := ""
if readErr == nil {
text = string(content)
}
text := string(content)
for _, s := range sections {
block := strings.Join(append([]string{s.header}, s.lines...), "\n") + "\n"
if idx := strings.Index(text, header); idx >= 0 {
// Replace the existing profile section up to the next section header.
rest := text[idx+len(header):]
if endIdx := strings.Index(rest, "\n["); endIdx >= 0 {
text = text[:idx] + profileBlock + rest[endIdx+1:]
if idx := strings.Index(text, s.header); idx >= 0 {
// Replace the existing section up to the next section header.
rest := text[idx+len(s.header):]
if endIdx := strings.Index(rest, "\n["); endIdx >= 0 {
text = text[:idx] + block + rest[endIdx+1:]
} else {
text = text[:idx] + block
}
} else {
text = text[:idx] + profileBlock
// Append the section.
if text != "" && !strings.HasSuffix(text, "\n") {
text += "\n"
}
if text != "" {
text += "\n"
}
text += block
}
} else {
// Append the profile section.
if !strings.HasSuffix(text, "\n") {
text += "\n"
}
text += "\n" + profileBlock
}
return os.WriteFile(configPath, []byte(text), 0o644)

View File

@@ -17,10 +17,10 @@ func TestCodexArgs(t *testing.T) {
args []string
want []string
}{
{"with model", "llama3.2", nil, []string{"--oss", "--profile", "ollama-launch", "-m", "llama3.2"}},
{"empty model", "", nil, []string{"--oss", "--profile", "ollama-launch"}},
{"with model and extra args", "qwen3.5", []string{"-p", "myprofile"}, []string{"--oss", "--profile", "ollama-launch", "-m", "qwen3.5", "-p", "myprofile"}},
{"with sandbox flag", "llama3.2", []string{"--sandbox", "workspace-write"}, []string{"--oss", "--profile", "ollama-launch", "-m", "llama3.2", "--sandbox", "workspace-write"}},
{"with model", "llama3.2", nil, []string{"--profile", "ollama-launch", "-m", "llama3.2"}},
{"empty model", "", nil, []string{"--profile", "ollama-launch"}},
{"with model and extra args", "qwen3.5", []string{"-p", "myprofile"}, []string{"--profile", "ollama-launch", "-m", "qwen3.5", "-p", "myprofile"}},
{"with sandbox flag", "llama3.2", []string{"--sandbox", "workspace-write"}, []string{"--profile", "ollama-launch", "-m", "llama3.2", "--sandbox", "workspace-write"}},
}
for _, tt := range tests {
@@ -57,6 +57,18 @@ func TestWriteCodexProfile(t *testing.T) {
if !strings.Contains(content, "/v1/") {
t.Error("missing /v1/ suffix in base URL")
}
if !strings.Contains(content, `forced_login_method = "api"`) {
t.Error("missing forced_login_method key")
}
if !strings.Contains(content, `model_provider = "ollama-launch"`) {
t.Error("missing model_provider key")
}
if !strings.Contains(content, "[model_providers.ollama-launch]") {
t.Error("missing [model_providers.ollama-launch] section")
}
if !strings.Contains(content, `name = "Ollama"`) {
t.Error("missing model provider name")
}
})
t.Run("appends profile to existing file without profile", func(t *testing.T) {
@@ -83,7 +95,7 @@ func TestWriteCodexProfile(t *testing.T) {
t.Run("replaces existing profile section", func(t *testing.T) {
tmpDir := t.TempDir()
configPath := filepath.Join(tmpDir, "config.toml")
existing := "[profiles.ollama-launch]\nopenai_base_url = \"http://old:1234/v1/\"\n"
existing := "[profiles.ollama-launch]\nopenai_base_url = \"http://old:1234/v1/\"\n\n[model_providers.ollama-launch]\nname = \"Ollama\"\nbase_url = \"http://old:1234/v1/\"\n"
os.WriteFile(configPath, []byte(existing), 0o644)
if err := writeCodexProfile(configPath); err != nil {
@@ -99,6 +111,9 @@ func TestWriteCodexProfile(t *testing.T) {
if strings.Count(content, "[profiles.ollama-launch]") != 1 {
t.Errorf("expected exactly one [profiles.ollama-launch] section, got %d", strings.Count(content, "[profiles.ollama-launch]"))
}
if strings.Count(content, "[model_providers.ollama-launch]") != 1 {
t.Errorf("expected exactly one [model_providers.ollama-launch] section, got %d", strings.Count(content, "[model_providers.ollama-launch]"))
}
})
t.Run("replaces profile while preserving following sections", func(t *testing.T) {
@@ -207,5 +222,8 @@ func TestEnsureCodexConfig(t *testing.T) {
if strings.Count(content, "[profiles.ollama-launch]") != 1 {
t.Errorf("expected exactly one [profiles.ollama-launch] section after two calls, got %d", strings.Count(content, "[profiles.ollama-launch]"))
}
if strings.Count(content, "[model_providers.ollama-launch]") != 1 {
t.Errorf("expected exactly one [model_providers.ollama-launch] section after two calls, got %d", strings.Count(content, "[model_providers.ollama-launch]"))
}
})
}

View File

@@ -29,10 +29,10 @@ ollama launch codex --config
### Manual setup
To use `codex` with Ollama, use the `--oss` flag:
To use `codex` with Ollama, use the `--profile` flag:
```
codex --oss
codex --profile ollama-launch
```
### Changing Models
@@ -40,13 +40,13 @@ codex --oss
By default, codex will use the local `gpt-oss:20b` model. However, you can specify a different model with the `-m` flag:
```
codex --oss -m gpt-oss:120b
codex --profile ollama-launch -m gpt-oss:120b
```
### Cloud Models
```
codex --oss -m gpt-oss:120b-cloud
codex --profile ollama-launch -m gpt-oss:120b-cloud
```