// Continue.dev configuration: cloud "agent" models routed through OpenRouter
// plus free local models served by Ollama.
// NOTE(review): Ollama and OpenAI are imported but unused below — kept in case
// another build step relies on them; consider removing.
import { PartialConfig, Ollama, OpenAI } from "@continuedev/config-types";
import {
  DiffContextProvider,
  FileContextProvider,
  FolderContextProvider,
  GitHubIssuesContextProvider,
  TerminalContextProvider,
  URLContextProvider,
} from "@continuedev/core";

// --- REQUIRED STEP: paste your OpenRouter API key here ---
// SECURITY NOTE(review): never commit a real API key to version control.
// Load it from an environment variable or a secrets manager before use,
// and rotate the key if it has ever been committed.
const OPENROUTER_API_KEY = "sk-or-TUA_CHIAVE_OPENROUTER_QUI";

// OpenRouter exposes an OpenAI-compatible endpoint, hence provider: "openai".
const OPENROUTER_API_BASE = "https://openrouter.ai/api/v1";

// Base URL of the LAN Ollama server that hosts the free local models.
const OLLAMA_API_BASE = "http://192.168.100.1:11434";

// Context providers shared by both cloud agent models. A factory (rather than
// a shared array) keeps the original behavior of giving each model its own
// provider instances, while ensuring the two lists can never drift apart.
const makeAgentContextProviders = () => [
  new DiffContextProvider(),
  new FileContextProvider(),
  new FolderContextProvider(),
  new GitHubIssuesContextProvider(),
  new TerminalContextProvider(),
  new URLContextProvider(),
];

const config: PartialConfig = {
  models: [
    // --- Cloud models (agents) — for complex tasks and codebase analysis ---
    {
      title: "Gemini 1.5 Pro (Agent)",
      provider: "openai", // "openai" provider for OpenRouter compatibility
      model: "google/gemini-1.5-pro",
      apiBase: OPENROUTER_API_BASE,
      apiKey: OPENROUTER_API_KEY,
      contextProviders: makeAgentContextProviders(),
    },
    {
      title: "Claude 3.5 Sonnet (Agent)",
      provider: "openai",
      model: "anthropic/claude-3.5-sonnet",
      apiBase: OPENROUTER_API_BASE,
      apiKey: OPENROUTER_API_KEY,
      contextProviders: makeAgentContextProviders(),
    },
    // --- Local models (free) — for quick, direct requests ---
    {
      title: "CodeLlama 13b (local)",
      provider: "ollama",
      model: "codellama:13b",
      apiBase: OLLAMA_API_BASE,
    },
    {
      title: "DeepSeek Coder v2 (local)",
      provider: "ollama",
      model: "deepseek-coder-v2:16b",
      apiBase: OLLAMA_API_BASE,
    },
    // Add your other local models here following this pattern.
    {
      title: "Llama 3.1 8b (local)",
      provider: "ollama",
      model: "llama3.1:8b",
      apiBase: OLLAMA_API_BASE,
    },
  ],
  // Model used for inline tab-completion suggestions.
  tabCompleteModel: {
    provider: "ollama",
    model: "codellama:13b",
    apiBase: OLLAMA_API_BASE,
  },
};

export default config;