uni_xervo/provider/gemini.rs

use crate::api::{ModelAliasSpec, ModelTask};
use crate::error::{Result, RuntimeError};
use crate::provider::remote_common::{
    RemoteProviderBase, build_google_generate_payload, check_http_status, resolve_api_key,
};
use crate::traits::{
    EmbeddingModel, GenerationOptions, GenerationResult, GeneratorModel, LoadedModelHandle,
    ModelProvider, ProviderCapabilities, ProviderHealth,
};
use async_trait::async_trait;
use reqwest::Client;
use serde_json::json;
use std::sync::Arc;

/// Remote provider that calls the [Google Gemini API](https://ai.google.dev/api)
/// for embedding (`batchEmbedContents`) and text generation (`generateContent`).
///
/// Requires the `GEMINI_API_KEY` environment variable (or a custom env var name
/// via the `api_key_env` option).
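///
/// # Example
///
/// A minimal usage sketch, run inside an async context; the crate paths and the
/// `ModelAliasSpec` fields below are assumptions based on this module's imports
/// and its test helper, not a stable public API:
///
/// ```ignore
/// use uni_xervo::api::{ModelAliasSpec, ModelTask, WarmupPolicy};
/// use uni_xervo::provider::gemini::RemoteGeminiProvider;
/// use uni_xervo::traits::ModelProvider;
///
/// // GEMINI_API_KEY must be set in the environment.
/// let provider = RemoteGeminiProvider::new();
/// let spec = ModelAliasSpec {
///     alias: "chat/default".to_string(),
///     task: ModelTask::Generate,
///     provider_id: "remote/gemini".to_string(),
///     model_id: "gemini-pro".to_string(),
///     revision: None,
///     warmup: WarmupPolicy::Lazy,
///     required: false,
///     timeout: None,
///     load_timeout: None,
///     retry: None,
///     options: serde_json::Value::Null,
/// };
/// let handle = provider.load(&spec).await?;
/// ```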
pub struct RemoteGeminiProvider {
    base: RemoteProviderBase,
}

impl RemoteGeminiProvider {
    pub fn new() -> Self {
        Self::default()
    }

    #[cfg(test)]
    fn insert_test_breaker(&self, key: crate::api::ModelRuntimeKey, age: std::time::Duration) {
        self.base.insert_test_breaker(key, age);
    }

    #[cfg(test)]
    fn breaker_count(&self) -> usize {
        self.base.breaker_count()
    }

    #[cfg(test)]
    fn force_cleanup_now_for_test(&self) {
        self.base.force_cleanup_now_for_test();
    }
}

impl Default for RemoteGeminiProvider {
    fn default() -> Self {
        Self {
            base: RemoteProviderBase::new(),
        }
    }
}

#[async_trait]
impl ModelProvider for RemoteGeminiProvider {
    fn provider_id(&self) -> &'static str {
        "remote/gemini"
    }

    fn capabilities(&self) -> ProviderCapabilities {
        ProviderCapabilities {
            supported_tasks: vec![ModelTask::Embed, ModelTask::Generate],
        }
    }

    async fn load(&self, spec: &ModelAliasSpec) -> Result<LoadedModelHandle> {
        let cb = self.base.circuit_breaker_for(spec);
        let api_key = resolve_api_key(&spec.options, "api_key_env", "GEMINI_API_KEY")?;

        match spec.task {
            ModelTask::Embed => {
                let model = GeminiEmbeddingModel {
                    client: self.base.client.clone(),
                    cb: cb.clone(),
                    model_id: spec.model_id.clone(),
                    api_key,
                };
                let handle: Arc<dyn EmbeddingModel> = Arc::new(model);
                Ok(Arc::new(handle) as LoadedModelHandle)
            }
            ModelTask::Generate => {
                let model = GeminiGeneratorModel {
                    client: self.base.client.clone(),
                    cb,
                    model_id: spec.model_id.clone(),
                    api_key,
                };
                let handle: Arc<dyn GeneratorModel> = Arc::new(model);
                Ok(Arc::new(handle) as LoadedModelHandle)
            }
            _ => Err(RuntimeError::CapabilityMismatch(format!(
                "Gemini provider does not support task {:?}",
                spec.task
            ))),
        }
    }

    async fn health(&self) -> ProviderHealth {
        ProviderHealth::Healthy
    }
}

/// Embedding model backed by the Gemini batch embedding API.
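///
/// Each `embed` call posts one `batchEmbedContents` request containing an entry
/// per input text and reads back `embeddings[].values` as `f32` vectors in the
/// order the API returns them. A minimal call sketch, assuming an
/// `embedding_model` value that implements [`EmbeddingModel`]:
///
/// ```ignore
/// let vectors: Vec<Vec<f32>> = embedding_model.embed(vec!["hello", "world"]).await?;
/// assert_eq!(vectors.len(), 2);
/// ```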
pub struct GeminiEmbeddingModel {
    client: Client,
    cb: crate::reliability::CircuitBreakerWrapper,
    model_id: String,
    api_key: String,
}

#[async_trait]
impl EmbeddingModel for GeminiEmbeddingModel {
    async fn embed(&self, texts: Vec<&str>) -> Result<Vec<Vec<f32>>> {
        let texts: Vec<String> = texts.iter().map(|s| s.to_string()).collect();

        self.cb
            .call(move || async move {
                let url = format!(
                    "https://generativelanguage.googleapis.com/v1beta/models/{}:batchEmbedContents?key={}",
                    self.model_id, self.api_key
                );

                let requests: Vec<_> = texts
                    .iter()
                    .map(|t| {
                        json!({
                            "model": format!("models/{}", self.model_id),
                            "content": { "parts": [{ "text": t }] }
                        })
                    })
                    .collect();

                let response = self
                    .client
                    .post(&url)
                    .json(&json!({ "requests": requests }))
                    .send()
                    .await
                    .map_err(|e| RuntimeError::ApiError(e.to_string()))?;

                let body: serde_json::Value = check_http_status("Gemini", response)?
                    .json()
                    .await
                    .map_err(|e| RuntimeError::ApiError(e.to_string()))?;

                let embeddings_json = body
                    .get("embeddings")
                    .and_then(|v| v.as_array())
                    .ok_or_else(|| {
                        RuntimeError::ApiError("Invalid response format".to_string())
                    })?;

                let mut result = Vec::new();
                for item in embeddings_json {
                    let values = item
                        .get("values")
                        .and_then(|v| v.as_array())
                        .ok_or_else(|| {
                            RuntimeError::ApiError("Missing values in embedding".to_string())
                        })?;

                    let vec: Vec<f32> = values
                        .iter()
                        .filter_map(|v| v.as_f64().map(|f| f as f32))
                        .collect();
                    result.push(vec);
                }
                Ok(result)
            })
            .await
    }

    fn dimensions(&self) -> u32 {
        // Gemini's embedding-001 and text-embedding-004 models return 768-dimensional vectors.
        768
    }

    fn model_id(&self) -> &str {
        &self.model_id
    }
}

/// Text generation model backed by the Gemini `generateContent` API.
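///
/// Prompt strings are mapped to alternating `user`/`model` roles (starting with
/// `user`, as exercised by the role-alternation test below), and the result is
/// the first text part of the first candidate; `usage` is currently always
/// `None`. A minimal call sketch, assuming a `generator` value that implements
/// [`GeneratorModel`]:
///
/// ```ignore
/// let result = generator
///     .generate(
///         &["Summarize this file.".to_string()],
///         GenerationOptions {
///             max_tokens: Some(64),
///             temperature: Some(0.7),
///             top_p: Some(0.9),
///         },
///     )
///     .await?;
/// println!("{}", result.text);
/// ```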
pub struct GeminiGeneratorModel {
    client: Client,
    cb: crate::reliability::CircuitBreakerWrapper,
    model_id: String,
    api_key: String,
}

#[async_trait]
impl GeneratorModel for GeminiGeneratorModel {
    async fn generate(
        &self,
        messages: &[String],
        options: GenerationOptions,
    ) -> Result<GenerationResult> {
        let messages: Vec<String> = messages.iter().map(|s| s.to_string()).collect();

        self.cb
            .call(move || async move {
                let url = format!(
                    "https://generativelanguage.googleapis.com/v1beta/models/{}:generateContent?key={}",
                    self.model_id, self.api_key
                );

                let payload = build_google_generate_payload(&messages, &options);

                let response = self
                    .client
                    .post(&url)
                    .json(&payload)
                    .send()
                    .await
                    .map_err(|e| RuntimeError::ApiError(e.to_string()))?;

                let body: serde_json::Value = check_http_status("Gemini", response)?
                    .json()
                    .await
                    .map_err(|e| RuntimeError::ApiError(e.to_string()))?;

                let candidates = body
                    .get("candidates")
                    .and_then(|v| v.as_array())
                    .ok_or_else(|| RuntimeError::ApiError("No candidates returned".to_string()))?;

                let first_candidate = candidates
                    .first()
                    .ok_or_else(|| RuntimeError::ApiError("Empty candidates".to_string()))?;

                let content_parts = first_candidate
                    .get("content")
                    .and_then(|c| c.get("parts"))
                    .and_then(|p| p.as_array())
                    .ok_or_else(|| RuntimeError::ApiError("Invalid content format".to_string()))?;

                let text = content_parts
                    .first()
                    .and_then(|p| p.get("text"))
                    .and_then(|t| t.as_str())
                    .unwrap_or("")
                    .to_string();

                Ok(GenerationResult {
                    text,
                    usage: None,
                })
            })
            .await
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::api::ModelRuntimeKey;
    use crate::provider::remote_common::RemoteProviderBase;
    use crate::traits::ModelProvider;
    use std::time::Duration;

    static ENV_LOCK: tokio::sync::Mutex<()> = tokio::sync::Mutex::const_new(());

    fn spec(alias: &str, task: ModelTask, model_id: &str) -> ModelAliasSpec {
        ModelAliasSpec {
            alias: alias.to_string(),
            task,
            provider_id: "remote/gemini".to_string(),
            model_id: model_id.to_string(),
            revision: None,
            warmup: crate::api::WarmupPolicy::Lazy,
            required: false,
            timeout: None,
            load_timeout: None,
            retry: None,
            options: serde_json::Value::Null,
        }
    }

    #[tokio::test]
    async fn breaker_reused_for_same_runtime_key() {
        let _lock = ENV_LOCK.lock().await;
        // SAFETY: protected by ENV_LOCK
        unsafe { std::env::set_var("GEMINI_API_KEY", "test-key") };

        let provider = RemoteGeminiProvider::new();
        let s1 = spec("embed/a", ModelTask::Embed, "embedding-001");
        let s2 = spec("embed/b", ModelTask::Embed, "embedding-001");

        let _ = provider.load(&s1).await.unwrap();
        let _ = provider.load(&s2).await.unwrap();

        assert_eq!(provider.breaker_count(), 1);

        // SAFETY: protected by ENV_LOCK
        unsafe { std::env::remove_var("GEMINI_API_KEY") };
    }

    #[tokio::test]
    async fn breaker_isolated_by_task_and_model() {
        let _lock = ENV_LOCK.lock().await;
        // SAFETY: protected by ENV_LOCK
        unsafe { std::env::set_var("GEMINI_API_KEY", "test-key") };

        let provider = RemoteGeminiProvider::new();
        let embed = spec("embed/a", ModelTask::Embed, "embedding-001");
        let gen_spec = spec("chat/a", ModelTask::Generate, "gemini-pro");

        let _ = provider.load(&embed).await.unwrap();
        let _ = provider.load(&gen_spec).await.unwrap();

        assert_eq!(provider.breaker_count(), 2);

        // SAFETY: protected by ENV_LOCK
        unsafe { std::env::remove_var("GEMINI_API_KEY") };
    }

    #[tokio::test]
    async fn breaker_cleanup_evicts_stale_entries() {
        let _lock = ENV_LOCK.lock().await;
        // SAFETY: protected by ENV_LOCK
        unsafe { std::env::set_var("GEMINI_API_KEY", "test-key") };

        let provider = RemoteGeminiProvider::new();
        let stale = spec("embed/stale", ModelTask::Embed, "embedding-001");
        let fresh = spec("embed/fresh", ModelTask::Embed, "embedding-002");
        provider.insert_test_breaker(
            ModelRuntimeKey::new(&stale),
            RemoteProviderBase::BREAKER_TTL + Duration::from_secs(5),
        );
        provider.insert_test_breaker(ModelRuntimeKey::new(&fresh), Duration::from_secs(1));
        assert_eq!(provider.breaker_count(), 2);

        provider.force_cleanup_now_for_test();
        let _ = provider.load(&fresh).await.unwrap();

        assert_eq!(provider.breaker_count(), 1);

        // SAFETY: protected by ENV_LOCK
        unsafe { std::env::remove_var("GEMINI_API_KEY") };
    }

    #[test]
    fn generation_payload_alternates_roles() {
        let messages = vec![
            "user question".to_string(),
            "assistant answer".to_string(),
            "user follow-up".to_string(),
        ];
        let payload = build_google_generate_payload(&messages, &GenerationOptions::default());
        let contents = payload["contents"].as_array().unwrap();

        assert_eq!(contents[0]["role"], "user");
        assert_eq!(contents[1]["role"], "model");
        assert_eq!(contents[2]["role"], "user");
    }

    #[test]
    fn generation_payload_includes_generation_options() {
        let messages = vec!["hello".to_string()];
        let payload = build_google_generate_payload(
            &messages,
            &GenerationOptions {
                max_tokens: Some(64),
                temperature: Some(0.7),
                top_p: Some(0.9),
            },
        );

        assert_eq!(payload["generationConfig"]["maxOutputTokens"], 64);
        let temperature = payload["generationConfig"]["temperature"].as_f64().unwrap();
        let top_p = payload["generationConfig"]["topP"].as_f64().unwrap();
        assert!((temperature - 0.7).abs() < 1e-6);
        assert!((top_p - 0.9).abs() < 1e-6);
    }
}