From d0654bf640857d051ce2af63e96c665db8c3bbd9 Mon Sep 17 00:00:00 2001
From: Binlogo
Date: Sat, 31 May 2025 00:14:18 +0800
Subject: [PATCH] feat: support local model integration with Ollama

---
 README.md                 | 22 +++++++++++++++++++++-
 aiscript-vm/src/ai/mod.rs | 34 +++++++++++++++++++++++++++++++---
 examples/ollama.ai        |  5 +++++
 examples/project.toml     |  4 ++++
 4 files changed, 61 insertions(+), 4 deletions(-)
 create mode 100644 examples/ollama.ai

diff --git a/README.md b/README.md
index d311e88..8e163b5 100644
--- a/README.md
+++ b/README.md
@@ -117,9 +117,10 @@ Check out the [examples](./examples) directory for more sample code.
 
 AIScript supports the following AI models:
 
-- [x] OpenAI ((uses `OPENAI_API_KEY` environment variable by default))
+- [x] OpenAI (uses `OPENAI_API_KEY` environment variable by default, or local models via Ollama)
 - [x] DeepSeek
 - [x] Anthropic
+- [x] Ollama (100+ local models from various providers)
 
 Configuration by `project.toml`:
 
@@ -138,8 +139,27 @@ model = "deepseek-chat"
 [ai.anthropic]
 api_key = "YOUR_API_KEY"
 model = "claude-3-5-sonnet-latest"
+
+# or use Ollama (local models)
+[ai.ollama]
+api_endpoint = "http://localhost:11434/v1" # Default Ollama endpoint
+model = "llama3.2" # or any other model installed in your Ollama instance
 ```
 
+### Using Ollama
+
+[Ollama](https://ollama.ai/) allows you to run local AI models on your own hardware. To use Ollama with AIScript:
+
+1. Install Ollama from [ollama.ai](https://ollama.ai/)
+2. Pull your desired models (e.g., `ollama pull llama3.2`)
+3. Make sure Ollama is running locally
+4. Configure AIScript to use Ollama as shown above, or set the `OLLAMA_API_ENDPOINT` environment variable
+
+Ollama provides access to 100+ models, ranging from small 135M-parameter models to massive 671B-parameter models, including:
+- Llama family (llama4, llama3.2, codellama)
+- DeepSeek models (deepseek-r1, deepseek-v3)
+- And [many more specialized models](https://ollama.com/search)
+
 ## Roadmap
 
 See our [roadmap](https://aiscript.dev/guide/contribution/roadmap) for upcoming features and improvements.
diff --git a/aiscript-vm/src/ai/mod.rs b/aiscript-vm/src/ai/mod.rs
index b7e43cd..1103e6d 100644
--- a/aiscript-vm/src/ai/mod.rs
+++ b/aiscript-vm/src/ai/mod.rs
@@ -22,11 +22,16 @@ const DEEPSEEK_DEFAULT_MODEL: &str = "deepseek-chat";
 const ANTHROPIC_API_ENDPOINT: &str = "https://api.anthropic.com/v1";
 const ANTHROPIC_DEFAULT_MODEL: &str = "claude-3-5-sonnet-latest";
 
+// Ollama
+const OLLAMA_DEFAULT_API_ENDPOINT: &str = "http://localhost:11434/v1";
+const OLLAMA_DEFAULT_MODEL: &str = "llama3";
+
 #[derive(Debug, Clone, Deserialize)]
 pub struct AiConfig {
     pub openai: Option<ModelConfig>,
     pub anthropic: Option<ModelConfig>,
     pub deepseek: Option<ModelConfig>,
+    pub ollama: Option<ModelConfig>,
 }
 
 impl Default for AiConfig {
@@ -47,6 +52,17 @@ impl Default for AiConfig {
                 api_endpoint: Some(DEEPSEEK_API_ENDPOINT.into()),
                 model: Some(DEEPSEEK_DEFAULT_MODEL.into()),
             }),
+            ollama: env::var("OLLAMA_API_ENDPOINT")
+                .ok()
+                .map(|endpoint| ModelConfig {
+                    api_key: EnvString(String::default()), // Ollama does not require an API key
+                    api_endpoint: endpoint
+                        .parse()
+                        .ok()
+                        .map(|url: String| url.into())
+                        .or(Some(OLLAMA_DEFAULT_API_ENDPOINT.into())),
+                    model: Some(OLLAMA_DEFAULT_MODEL.into()),
+                }),
         }
     }
 }
@@ -64,9 +80,7 @@ impl Default for ModelConfig {
             #[cfg(feature = "ai_test")]
             api_key: "".into(),
             #[cfg(not(feature = "ai_test"))]
-            api_key: env::var("OPENAI_API_KEY")
-                .expect("Expect `OPENAI_API_KEY` environment variable.")
-                .into(),
+            api_key: EnvString(env::var("OPENAI_API_KEY").unwrap_or_default()),
             api_endpoint: Some(OPENAI_API_ENDPOINT.into()),
             model: Some(OPENAI_DEFAULT_MODEL.into()),
         }
     }
@@ -78,6 +92,12 @@ impl AiConfig {
         &self,
         model_name: Option<String>,
     ) -> Result<ModelConfig, String> {
+        if let Some(ollama) = self.ollama.as_ref() {
+            let model = model_name.as_deref().unwrap_or(OLLAMA_DEFAULT_MODEL);
+            let mut config = ollama.clone();
+            config.model = Some(EnvString(model.to_string()));
+            return Ok(config);
+        }
         if let Some(model) = model_name {
             match model {
                 m if m.starts_with("gpt") => {
@@ -121,6 +141,14 @@
                 }
                 m => Err(format!("Unsupported model '{m}'.")),
             }
+        } else if let Some(ollama) = self.ollama.as_ref() {
+            if let Some(model) = model_name {
+                let mut config = ollama.clone();
+                config.model = Some(EnvString(model));
+                return Ok(config);
+            } else {
+                return Ok(ollama.clone());
+            }
         } else {
             // Default is OpenAI model
             Ok(ModelConfig::default())
diff --git a/examples/ollama.ai b/examples/ollama.ai
new file mode 100644
index 0000000..b0a2ea5
--- /dev/null
+++ b/examples/ollama.ai
@@ -0,0 +1,5 @@
+let a = prompt {
+    input: "What is rust?",
+    model: "llama3.2"
+};
+print(a);
\ No newline at end of file
diff --git a/examples/project.toml b/examples/project.toml
index e656afe..e7c0a16 100644
--- a/examples/project.toml
+++ b/examples/project.toml
@@ -27,3 +27,7 @@ scopes = ["email"]
 [ai.anthropic]
 api_key = "$CLAUDE_API_KEY"
 model = "claude-3-5-sonnet-latest"
+
+[ai.ollama]
+api_endpoint = "http://localhost:11434/v1"
+model = "llama3.2"
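
For anyone reviewing the endpoint/model resolution introduced above, here is a minimal standalone sketch of the same idea: prefer the `OLLAMA_API_ENDPOINT` environment variable, fall back to the local default endpoint, and let a per-prompt model name override the default model. The `OllamaConfig` struct and `resolve_ollama_config` function are illustrative names only, not part of the aiscript-vm API; the patch itself expresses this with `ModelConfig`, `EnvString`, and `AiConfig::get_model_config`.

```rust
// Minimal, self-contained sketch (not the aiscript-vm implementation):
// resolve an Ollama endpoint and model the way the patch's defaults do.
use std::env;

const OLLAMA_DEFAULT_API_ENDPOINT: &str = "http://localhost:11434/v1";
const OLLAMA_DEFAULT_MODEL: &str = "llama3";

#[derive(Debug)]
struct OllamaConfig {
    api_endpoint: String,
    model: String,
}

fn resolve_ollama_config(model_override: Option<&str>) -> OllamaConfig {
    // Prefer the environment variable; otherwise assume a local Ollama instance.
    let api_endpoint = env::var("OLLAMA_API_ENDPOINT")
        .unwrap_or_else(|_| OLLAMA_DEFAULT_API_ENDPOINT.to_string());
    // A model requested in the prompt (e.g. "llama3.2") overrides the default.
    let model = model_override.unwrap_or(OLLAMA_DEFAULT_MODEL).to_string();
    OllamaConfig { api_endpoint, model }
}

fn main() {
    println!("{:?}", resolve_ollama_config(None));
    println!("{:?}", resolve_ollama_config(Some("llama3.2")));
}
```

No API key is involved in this path, which matches the patch's use of `EnvString(String::default())` as a placeholder for the Ollama provider.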