Compare commits
1 Commit
| Author | SHA1 | Date |
|---|---|---|
|  | 935f2418e2 |  |

.continue/config.ts (Normal file, 78 lines added)
@@ -0,0 +1,78 @@
import { PartialConfig, Ollama, OpenAI } from "@continuedev/config-types";
import {
  DiffContextProvider,
  FileContextProvider,
  FolderContextProvider,
  GitHubIssuesContextProvider,
  TerminalContextProvider,
  URLContextProvider,
} from "@continuedev/core";

// --- ESSENTIAL STEP: paste your OpenRouter API key here ---
const OPENROUTER_API_KEY = "sk-or-YOUR_OPENROUTER_KEY_HERE";

const config: PartialConfig = {
  models: [
    // --- Cloud models (agents): for complex tasks and codebase analysis ---
    {
      title: "Gemini 1.5 Pro (Agent)",
      provider: "openai", // Use the "openai" provider for compatibility with OpenRouter
      model: "google/gemini-1.5-pro",
      apiBase: "https://openrouter.ai/api/v1",
      apiKey: OPENROUTER_API_KEY,
      contextProviders: [
        new DiffContextProvider(),
        new FileContextProvider(),
        new FolderContextProvider(),
        new GitHubIssuesContextProvider(),
        new TerminalContextProvider(),
        new URLContextProvider(),
      ],
    },
    {
      title: "Claude 3.5 Sonnet (Agent)",
      provider: "openai",
      model: "anthropic/claude-3.5-sonnet",
      apiBase: "https://openrouter.ai/api/v1",
      apiKey: OPENROUTER_API_KEY,
      contextProviders: [
        new DiffContextProvider(),
        new FileContextProvider(),
        new FolderContextProvider(),
        new GitHubIssuesContextProvider(),
        new TerminalContextProvider(),
        new URLContextProvider(),
      ],
    },

    // --- Local models (free): for quick, direct requests ---
    {
      title: "CodeLlama 13b (local)",
      provider: "ollama",
      model: "codellama:13b",
      apiBase: "http://192.168.100.1:11434",
    },
    {
      title: "DeepSeek Coder v2 (local)",
      provider: "ollama",
      model: "deepseek-coder-v2:16b",
      apiBase: "http://192.168.100.1:11434",
    },
    // Add your other local models here following this pattern
    {
      title: "Llama 3.1 8b (local)",
      provider: "ollama",
      model: "llama3.1:8b",
      apiBase: "http://192.168.100.1:11434",
    },
  ],

  // Model used for code autocompletion
  tabCompleteModel: {
    provider: "ollama",
    model: "codellama:13b",
    apiBase: "http://192.168.100.1:11434",
  },
};

export default config;
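All of the local entries above point at an Ollama server at `http://192.168.100.1:11434`. As a quick sanity check (a minimal sketch, not part of this commit; it assumes the server address from the config and Ollama's standard `/api/tags` model-listing endpoint), you can list the models the server actually serves and compare them with the names the config expects:

```python
# Sketch: verify that the Ollama server referenced in .continue/config.ts
# is reachable and serves the models the config expects.
# The base URL and expected model names are taken from the config above.
import json
import urllib.request

OLLAMA_BASE = "http://192.168.100.1:11434"  # apiBase used in the config
EXPECTED = {"codellama:13b", "deepseek-coder-v2:16b", "llama3.1:8b"}

with urllib.request.urlopen(f"{OLLAMA_BASE}/api/tags", timeout=5) as resp:
    available = {m["name"] for m in json.load(resp)["models"]}

missing = EXPECTED - available
print("available:", sorted(available))
print("missing:", sorted(missing) if missing else "none")
```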
CLAUDE.md (Normal file, 17 lines added)
@@ -0,0 +1,17 @@
# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

## Commands

- To run the application: `python -m pyhasher`

## Architecture

This is a desktop GUI application built with Python and Tkinter for calculating file hashes.

The core logic is decoupled from the GUI:

- **`pyhasher/core/core.py`**: Contains the `calculate_hashes_for_file` function, which handles all file reading and hash computations. It processes files in chunks to efficiently handle large files (see the sketch below). This module has no dependency on the GUI.

- **`pyhasher/gui/gui.py`**: Implements the user interface using Tkinter. It gets the file path from the user, calls the core function to perform the calculations, and displays the results.
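The `calculate_hashes_for_file` function described above reads the file in chunks instead of loading it into memory all at once. A minimal sketch of that chunked approach using Python's standard `hashlib` follows; the signature, chunk size, and chosen algorithms are assumptions for illustration, not the repository's actual implementation:

```python
# Sketch of chunked hashing as described in CLAUDE.md: read the file in
# fixed-size blocks and feed each block to every hash object, so large
# files never have to fit in memory at once. Parameters are illustrative.
import hashlib

def calculate_hashes_for_file(path, algorithms=("md5", "sha1", "sha256"), chunk_size=1 << 20):
    hashers = {name: hashlib.new(name) for name in algorithms}
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):  # read 1 MiB at a time
            for h in hashers.values():
                h.update(chunk)
    return {name: h.hexdigest() for name, h in hashers.items()}

# Example usage:
# print(calculate_hashes_for_file("some_large_file.iso"))
```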