Skip to content

Commit 58ffdc6

Browse files
committed
Phase 1-2: Rename to HackCode, add Ollama as default provider
- Rename binary from "claw" to "hackcode"
- Set default model to Gemma-4-E4B-Uncensored-HauhauCS-Aggressive
- Add ProviderKind::Ollama with OpenAI-compatible client
- Add OpenAiCompatConfig::ollama() (localhost:11434/v1)
- Add from_env_optional_key() — Ollama needs no API key
- Make Ollama the default fallback provider (instead of Anthropic)
- Auto-detect Ollama models (hf.co/, llama, gemma, mistral, phi)
- Replace ASCII banner with HackCode banner
- Replace all "claw"/"Claw Code" branding with "hackcode"/"HackCode"
- Replace .claw/ config dir with .hackcode/
1 parent e1ed30a commit 58ffdc6

5 files changed

Lines changed: 168 additions & 122 deletions

File tree

rust/crates/api/src/client.rs

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ pub enum ProviderClient {
1111
Anthropic(AnthropicClient),
1212
Xai(OpenAiCompatClient),
1313
OpenAi(OpenAiCompatClient),
14+
Ollama(OpenAiCompatClient),
1415
}
1516

1617
impl ProviderClient {
@@ -43,6 +44,9 @@ impl ProviderClient {
4344
};
4445
Ok(Self::OpenAi(OpenAiCompatClient::from_env(config)?))
4546
}
47+
ProviderKind::Ollama => Ok(Self::Ollama(
48+
OpenAiCompatClient::from_env_optional_key(OpenAiCompatConfig::ollama())?,
49+
)),
4650
}
4751
}
4852

@@ -52,6 +56,7 @@ impl ProviderClient {
5256
Self::Anthropic(_) => ProviderKind::Anthropic,
5357
Self::Xai(_) => ProviderKind::Xai,
5458
Self::OpenAi(_) => ProviderKind::OpenAi,
59+
Self::Ollama(_) => ProviderKind::Ollama,
5560
}
5661
}
5762

@@ -67,15 +72,15 @@ impl ProviderClient {
6772
pub fn prompt_cache_stats(&self) -> Option<PromptCacheStats> {
6873
match self {
6974
Self::Anthropic(client) => client.prompt_cache_stats(),
70-
Self::Xai(_) | Self::OpenAi(_) => None,
75+
Self::Xai(_) | Self::OpenAi(_) | Self::Ollama(_) => None,
7176
}
7277
}
7378

7479
#[must_use]
7580
pub fn take_last_prompt_cache_record(&self) -> Option<PromptCacheRecord> {
7681
match self {
7782
Self::Anthropic(client) => client.take_last_prompt_cache_record(),
78-
Self::Xai(_) | Self::OpenAi(_) => None,
83+
Self::Xai(_) | Self::OpenAi(_) | Self::Ollama(_) => None,
7984
}
8085
}
8186

@@ -85,7 +90,9 @@ impl ProviderClient {
8590
) -> Result<MessageResponse, ApiError> {
8691
match self {
8792
Self::Anthropic(client) => client.send_message(request).await,
88-
Self::Xai(client) | Self::OpenAi(client) => client.send_message(request).await,
93+
Self::Xai(client) | Self::OpenAi(client) | Self::Ollama(client) => {
94+
client.send_message(request).await
95+
}
8996
}
9097
}
9198

@@ -98,7 +105,7 @@ impl ProviderClient {
98105
.stream_message(request)
99106
.await
100107
.map(MessageStream::Anthropic),
101-
Self::Xai(client) | Self::OpenAi(client) => client
108+
Self::Xai(client) | Self::OpenAi(client) | Self::Ollama(client) => client
102109
.stream_message(request)
103110
.await
104111
.map(MessageStream::OpenAiCompat),

rust/crates/api/src/providers/mod.rs

Lines changed: 21 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ pub enum ProviderKind {
3333
Anthropic,
3434
Xai,
3535
OpenAi,
36+
Ollama,
3637
}
3738

3839
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -144,7 +145,7 @@ pub fn resolve_model_alias(model: &str) -> String {
144145
"grok-2" => "grok-2",
145146
_ => trimmed,
146147
},
147-
ProviderKind::OpenAi => trimmed,
148+
ProviderKind::OpenAi | ProviderKind::Ollama => trimmed,
148149
})
149150
})
150151
.map_or_else(|| trimmed.to_string(), ToOwned::to_owned)
@@ -194,6 +195,23 @@ pub fn metadata_for_model(model: &str) -> Option<ProviderMetadata> {
194195
default_base_url: openai_compat::DEFAULT_DASHSCOPE_BASE_URL,
195196
});
196197
}
198+
// Ollama local models — HuggingFace models pulled via `ollama pull hf.co/...`
199+
// and any model containing a colon (e.g. "llama3.2:1b", "gemma:7b")
200+
if canonical.starts_with("hf.co/")
201+
|| canonical.contains(':')
202+
|| canonical.starts_with("llama")
203+
|| canonical.starts_with("gemma")
204+
|| canonical.starts_with("mistral")
205+
|| canonical.starts_with("phi")
206+
|| canonical.starts_with("codellama")
207+
{
208+
return Some(ProviderMetadata {
209+
provider: ProviderKind::Ollama,
210+
auth_env: "OLLAMA_API_KEY",
211+
base_url_env: "OLLAMA_BASE_URL",
212+
default_base_url: openai_compat::DEFAULT_OLLAMA_BASE_URL,
213+
});
214+
}
197215
None
198216
}
199217

@@ -211,7 +229,8 @@ pub fn detect_provider_kind(model: &str) -> ProviderKind {
211229
if openai_compat::has_api_key("XAI_API_KEY") {
212230
return ProviderKind::Xai;
213231
}
214-
ProviderKind::Anthropic
232+
// Default to Ollama (local inference) — HackCode is local-first
233+
ProviderKind::Ollama
215234
}
216235

217236
#[must_use]

rust/crates/api/src/providers/openai_compat.rs

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ use super::{preflight_message_request, Provider, ProviderFuture};
1919
pub const DEFAULT_XAI_BASE_URL: &str = "https://api.x.ai/v1";
2020
pub const DEFAULT_OPENAI_BASE_URL: &str = "https://api.openai.com/v1";
2121
pub const DEFAULT_DASHSCOPE_BASE_URL: &str = "https://dashscope.aliyuncs.com/compatible-mode/v1";
22+
pub const DEFAULT_OLLAMA_BASE_URL: &str = "http://localhost:11434/v1";
2223
const REQUEST_ID_HEADER: &str = "request-id";
2324
const ALT_REQUEST_ID_HEADER: &str = "x-request-id";
2425
const DEFAULT_INITIAL_BACKOFF: Duration = Duration::from_secs(1);
@@ -36,6 +37,7 @@ pub struct OpenAiCompatConfig {
3637
const XAI_ENV_VARS: &[&str] = &["XAI_API_KEY"];
3738
const OPENAI_ENV_VARS: &[&str] = &["OPENAI_API_KEY"];
3839
const DASHSCOPE_ENV_VARS: &[&str] = &["DASHSCOPE_API_KEY"];
40+
const OLLAMA_ENV_VARS: &[&str] = &["OLLAMA_API_KEY"];
3941

4042
impl OpenAiCompatConfig {
4143
#[must_use]
@@ -72,12 +74,25 @@ impl OpenAiCompatConfig {
7274
}
7375
}
7476

77+
/// Ollama local inference backend. No API key required.
78+
/// Speaks the OpenAI-compatible REST shape at /v1.
79+
#[must_use]
80+
pub const fn ollama() -> Self {
81+
Self {
82+
provider_name: "Ollama",
83+
api_key_env: "OLLAMA_API_KEY",
84+
base_url_env: "OLLAMA_BASE_URL",
85+
default_base_url: DEFAULT_OLLAMA_BASE_URL,
86+
}
87+
}
88+
7589
#[must_use]
7690
pub fn credential_env_vars(self) -> &'static [&'static str] {
7791
match self.provider_name {
7892
"xAI" => XAI_ENV_VARS,
7993
"OpenAI" => OPENAI_ENV_VARS,
8094
"DashScope" => DASHSCOPE_ENV_VARS,
95+
"Ollama" => OLLAMA_ENV_VARS,
8196
_ => &[],
8297
}
8398
}
@@ -126,6 +141,14 @@ impl OpenAiCompatClient {
126141
Ok(Self::new(api_key, config))
127142
}
128143

144+
/// Like `from_env` but uses a dummy key when the env var is unset.
145+
/// Used for Ollama which does not require API key authentication.
146+
pub fn from_env_optional_key(config: OpenAiCompatConfig) -> Result<Self, ApiError> {
147+
let api_key = read_env_non_empty(config.api_key_env)?
148+
.unwrap_or_else(|| "ollama".to_string());
149+
Ok(Self::new(api_key, config))
150+
}
151+
129152
#[must_use]
130153
pub fn with_base_url(mut self, base_url: impl Into<String>) -> Self {
131154
self.base_url = base_url.into();

rust/crates/rusty-claude-cli/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ license.workspace = true
66
publish.workspace = true
77

88
[[bin]]
9-
name = "claw"
9+
name = "hackcode"
1010
path = "src/main.rs"
1111

1212
[dependencies]

0 commit comments

Comments (0)