Add gitignore

This commit is contained in:
2026-04-07 22:06:19 +02:00
commit 49802d89d5
7 changed files with 630 additions and 0 deletions

42
.gitignore vendored Normal file
View File

@@ -0,0 +1,42 @@
# Environnement
.env
*.env
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
*.egg-info/
.eggs/
pip-wheel-metadata/
# Virtualenv
venv/
.venv/
env/
.env/
# IDE
.idea/
.vscode/
*.swp
*.swo
# Audio temporaire
*.mp3
*.wav
*.pcm
/tmp/
# macOS
.DS_Store
# Logs
*.log
# MCP servers — dépendances Node
mcp_servers/*/node_modules/
.github/

196
assistant/cli.py Normal file
View File

@@ -0,0 +1,196 @@
import asyncio
import sys
from . import llm, tts, audio, config
HELP_TEXT = """
Commandes disponibles :
exit / quit Quitter l'assistant
reset Effacer l'historique de conversation
voice <id> Changer la voix Voxtral (voice_id)
voice clear Revenir à la voix par défaut
mode texte Passer en mode saisie texte (défaut)
mode vocal Passer en mode entrée microphone
profiles Lister les profils de personnalité disponibles
profile <slug> Charger un profil (ex: profile traveller_scout)
mcp Lister les serveurs MCP connectés
mcp tools Lister tous les outils MCP disponibles
help Afficher ce message
Mode vocal : appuyez sur Entrée (sans rien écrire) pour commencer à parler,
puis Entrée à nouveau pour envoyer.
"""
def _set_voice(parts: list[str]) -> None:
    """Handle the 'voice <id>' / 'voice clear' command.

    `parts` is the whitespace-split user input; parts[0] is "voice".
    """
    if len(parts) < 2:
        print("Usage : voice <id> ou voice clear")
        return
    choice = parts[1]
    if choice == "clear":
        config.VOICE_ID = None
        print("Voix réinitialisée (défaut).")
        return
    config.VOICE_ID = choice
    print(f"Voix définie sur : {config.VOICE_ID}")
def _process_message(user_input: str) -> None:
    """Send a message to the LLM, print the reply and read it aloud.

    LLM errors abort the turn; TTS/audio errors are reported after the
    textual reply has already been printed (voice output is best-effort).
    """
    # Plain string literal: was an f-string with no placeholders.
    print("Arioch > ", end="", flush=True)
    try:
        reply = llm.chat(user_input)
    except Exception as e:
        # Network/API failure: report it and keep the REPL alive.
        print(f"\n[Erreur LLM] {e}")
        return
    print(reply)
    try:
        audio_bytes = tts.text_to_speech(reply)
        audio.play_audio(audio_bytes)
    except Exception as e:
        print(f"[Erreur TTS/Audio] {e}")
def _handle_command(user_input: str) -> bool:
    """Dispatch special REPL commands; return True when the input was one."""
    from .profile import list_profiles, apply_profile
    lower = user_input.lower()
    parts = user_input.split()
    if lower in ("exit", "quit"):
        print("Au revoir !")
        sys.exit(0)
    if lower == "reset":
        llm.reset_history()
        print("Historique effacé.\n")
        return True
    if lower == "help":
        print(HELP_TEXT)
        return True
    if lower.startswith("voice"):
        _set_voice(parts)
        return True
    if lower in ("mode texte", "mode text"):
        # Mode switches are acted upon by the caller; just flag the command.
        return True
    if lower in ("mode vocal", "mode voix", "mode voice"):
        return True
    if lower == "profiles":
        _list_profiles(list_profiles())
        return True
    if lower.startswith("mcp"):
        _handle_mcp(parts)
        return True
    if lower.startswith("profile ") and len(parts) >= 2:
        _load_profile(parts[1], apply_profile)
        return True
    return False
def _handle_mcp(parts: list[str]) -> None:
from . import mcp_client
manager = mcp_client.get_manager()
servers = manager.summary()
if len(parts) >= 2 and parts[1] == "tools":
tools = manager.get_mistral_tools()
if not tools:
print("Aucun outil MCP disponible.")
return
print(f"\n{len(tools)} outil(s) MCP disponible(s) :")
for t in tools:
fn = t["function"]
desc = fn.get("description", "")
print(f" {fn['name']:<45} {desc[:60]}")
print()
return
if not servers:
print("Aucun serveur MCP connecté. Configurez 'mcp_servers' dans un profil YAML.\n")
return
print("\nServeurs MCP connectés :")
for name, count in servers:
print(f" {name:<30} {count} outil(s)")
total = sum(c for _, c in servers)
print(f"\nTotal : {total} outil(s). Tapez 'mcp tools' pour les lister.\n")
if not profiles:
print("Aucun profil disponible dans profiles/")
return
print("\nProfils disponibles :")
for slug, name, desc in profiles:
print(f" {slug:<25} {name}" + (f"{desc}" if desc else ""))
print("\nUsage : profile <slug>\n")
def _load_profile(slug: str, apply_fn) -> None:
try:
profile = apply_fn(slug)
print(f"✅ Profil chargé : {profile.name}")
if profile.description:
print(f" {profile.description}")
print()
except FileNotFoundError as e:
print(f"[Profil] {e}")
def run() -> None:
    """Interactive REPL driving the assistant (text or microphone input).

    Loops forever; exits on 'exit'/'quit' (via _handle_command), EOF, or
    Ctrl-C. In vocal mode, an empty line triggers microphone capture.
    """
    print("🎙️ Arioch — Assistant vocal (Mistral Large + Voxtral)")
    print("Commandes : 'profiles' pour voir les personnalités, 'mode vocal' pour parler, 'help' pour l'aide.\n")
    vocal_mode = False  # False = typed input; True = empty line starts mic capture
    while True:
        try:
            if vocal_mode:
                prompt = "🎤 [vocal] Entrée pour parler > "
            else:
                prompt = "Vous > "
            user_input = input(prompt).strip()
        except (EOFError, KeyboardInterrupt):
            print("\nAu revoir !")
            sys.exit(0)
        if not user_input:
            if vocal_mode:
                # Empty line in vocal mode -> start microphone capture
                try:
                    from .stt import transcribe_from_mic
                    user_input = asyncio.run(transcribe_from_mic())
                except Exception as e:
                    print(f"[Erreur STT] {e}")
                    continue
                if not user_input:
                    print("(rien capturé)")
                    continue
                print(f"Vous (transcrit) : {user_input}")
                _process_message(user_input)
            # Empty line in text mode: just reprompt.
            continue
        lower = user_input.lower()
        # Mode switches are handled here (not in _handle_command) because
        # they mutate the loop-local vocal_mode flag.
        if lower in ("mode vocal", "mode voix", "mode voice"):
            vocal_mode = True
            print("Mode vocal activé. Appuyez sur Entrée (sans rien écrire) pour parler.\n")
            continue
        elif lower in ("mode texte", "mode text"):
            vocal_mode = False
            print("Mode texte activé.\n")
            continue
        # Other commands (reset, voice, profiles, mcp, profile <slug>, ...)
        if _handle_command(user_input):
            continue
        # Regular (text) message
        _process_message(user_input)

84
assistant/llm.py Normal file
View File

@@ -0,0 +1,84 @@
import json

# mistralai v1.x (see requirements.txt: mistralai>=1.7.0) exports the client
# from the package root; `mistralai.client` held the deprecated v0 MistralClient.
from mistralai import Mistral

from . import config

# Module-level shared client and in-memory conversation history.
_client = Mistral(api_key=config.MISTRAL_API_KEY)
_history: list[dict] = []
def reset_history() -> None:
    """Drop every stored conversation turn (the list object is kept)."""
    del _history[:]
def chat(user_message: str) -> str:
    """Send a message to the LLM, run MCP tool calls, return the final reply.

    The user message and every intermediate assistant/tool message are
    appended to the module-level `_history`. The loop re-queries the model
    after each round of tool executions until it answers with plain text.
    """
    from . import mcp_client
    _history.append({"role": "user", "content": user_message})
    manager = mcp_client.get_manager()
    # Only advertise tools when at least one MCP server is connected.
    tools = manager.get_mistral_tools() if manager.has_tools else None
    while True:
        # The system prompt is prepended on every turn (never stored in
        # history) so a profile switch takes effect immediately.
        messages = [{"role": "system", "content": config.SYSTEM_PROMPT}] + _history
        kwargs: dict = {"model": config.LLM_MODEL, "messages": messages}
        if tools:
            kwargs["tools"] = tools
        response = _client.chat.complete(**kwargs)
        choice = response.choices[0]
        msg = choice.message
        if msg.tool_calls:
            # 1. Append the assistant message (with its tool calls) to history
            _history.append({
                "role": "assistant",
                "content": msg.content or "",
                "tool_calls": [
                    {
                        "id": tc.id,
                        "type": "function",
                        "function": {
                            "name": tc.function.name,
                            "arguments": tc.function.arguments,
                        },
                    }
                    for tc in msg.tool_calls
                ],
            })
            # 2. Execute each tool and append the results
            for tc in msg.tool_calls:
                tool_name = tc.function.name
                try:
                    # Arguments may arrive as a JSON string or a parsed dict.
                    args = (
                        json.loads(tc.function.arguments)
                        if isinstance(tc.function.arguments, str)
                        else tc.function.arguments
                    )
                    print(f" 🔧 [MCP] {tool_name}({_short_args(args)})")
                    result = manager.call_tool(tool_name, args)
                except Exception as e:
                    # Feed the failure back to the model instead of crashing.
                    result = f"Erreur lors de l'appel à {tool_name} : {e}"
                _history.append({
                    "role": "tool",
                    "content": result,
                    "tool_call_id": tc.id,
                })
            # 3. Loop again to get the final answer
        else:
            reply = msg.content or ""
            _history.append({"role": "assistant", "content": reply})
            return reply
def _short_args(args: dict) -> str:
text = json.dumps(args, ensure_ascii=False)
return text[:80] + "" if len(text) > 80 else text

188
assistant/mcp_client.py Normal file
View File

@@ -0,0 +1,188 @@
"""Support des serveurs MCP (Model Context Protocol) pour l'assistant.
Configure les serveurs dans un profil YAML sous la clé `mcp_servers` :
mcp_servers:
- name: filesystem
command: npx
args: ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"]
- name: my_sse_server
url: "http://localhost:3000/sse"
"""
from __future__ import annotations
import asyncio
import re
import threading
from contextlib import AsyncExitStack
from dataclasses import dataclass, field
from typing import Any
@dataclass
class MCPServerConfig:
    """Connection settings for one MCP server (stdio or SSE transport)."""
    # Display/lookup name; also prefixes the Mistral-facing tool names.
    name: str
    # Stdio transport: executable to spawn (e.g. "npx"); None for SSE servers.
    command: str | None = None
    # Extra CLI arguments for the stdio command.
    args: list[str] = field(default_factory=list)
    # Optional environment variables for the spawned process.
    env: dict[str, str] | None = None
    # SSE transport: endpoint URL; used when `command` is not set.
    url: str | None = None
def _sanitize_name(name: str) -> str:
"""Transforme un nom en identifiant valide pour l'API Mistral (^[a-zA-Z0-9_-]{1,64}$)."""
return re.sub(r"[^a-zA-Z0-9_-]", "_", name)[:64]
class MCPManager:
    """Manages connections to MCP servers and executes their tools.

    All async MCP I/O runs on a private event loop hosted by a background
    daemon thread; the public methods are synchronous wrappers that submit
    coroutines to that loop and block until they complete (or time out).
    """
    def __init__(self) -> None:
        # Private loop + daemon thread: lets synchronous callers drive
        # async MCP sessions without owning an event loop themselves.
        self._loop = asyncio.new_event_loop()
        self._thread = threading.Thread(target=self._run_loop, daemon=True)
        self._thread.start()
        # server name -> live ClientSession
        self._sessions: dict[str, Any] = {}
        # server name -> raw tool objects from list_tools()
        self._raw_tools: dict[str, list] = {}
        # mistral_name -> (server_name, original_tool_name)
        self._tool_map: dict[str, tuple[str, str]] = {}
        # server name -> AsyncExitStack owning its transport/session contexts
        self._exit_stacks: dict[str, AsyncExitStack] = {}

    def _run_loop(self) -> None:
        # Thread target: pin the private loop to this thread, run forever.
        asyncio.set_event_loop(self._loop)
        self._loop.run_forever()

    def _run(self, coro, timeout: int = 30) -> Any:
        # Sync->async bridge: schedule on the background loop, block on result.
        future = asyncio.run_coroutine_threadsafe(coro, self._loop)
        return future.result(timeout=timeout)

    # ------------------------------------------------------------------ #
    #  Public API                                                        #
    # ------------------------------------------------------------------ #
    def load_servers(self, configs: list[MCPServerConfig]) -> None:
        """Connect to every configured server (60 s overall timeout)."""
        self._run(self._load_servers_async(configs), timeout=60)

    def get_mistral_tools(self) -> list[dict]:
        """Return all discovered tools in Mistral function-calling format."""
        tools = []
        for mistral_name, (server_name, tool_name) in self._tool_map.items():
            raw = {t.name: t for t in self._raw_tools.get(server_name, [])}
            tool = raw.get(tool_name)
            if tool is None:
                # Tool disappeared from the server's list; skip it.
                continue
            tools.append({
                "type": "function",
                "function": {
                    "name": mistral_name,
                    "description": tool.description or "",
                    "parameters": tool.inputSchema or {"type": "object", "properties": {}},
                },
            })
        return tools

    def call_tool(self, mistral_name: str, arguments: dict) -> str:
        """Execute a tool by its Mistral-facing name; errors come back as strings."""
        if mistral_name not in self._tool_map:
            return f"Outil inconnu : {mistral_name}"
        server_name, tool_name = self._tool_map[mistral_name]
        if server_name not in self._sessions:
            return f"Serveur MCP '{server_name}' non disponible"
        return self._run(self._call_tool_async(server_name, tool_name, arguments))

    def shutdown(self) -> None:
        """Close every server connection; best-effort, never raises."""
        try:
            self._run(self._shutdown_async(), timeout=10)
        except Exception:
            pass

    def summary(self) -> list[tuple[str, int]]:
        """Return [(server_name, tool_count), ...] for connected servers."""
        return [(name, len(self._raw_tools.get(name, []))) for name in self._sessions]

    @property
    def has_tools(self) -> bool:
        # True once at least one tool has been registered.
        return bool(self._tool_map)

    # ------------------------------------------------------------------ #
    #  Async internals                                                   #
    # ------------------------------------------------------------------ #
    async def _load_servers_async(self, configs: list[MCPServerConfig]) -> None:
        for cfg in configs:
            try:
                await self._connect_server(cfg)
            except Exception as e:
                # One bad server must not prevent the others from connecting.
                print(f"[MCP] ❌ Connexion {cfg.name} impossible : {e}")

    async def _connect_server(self, cfg: MCPServerConfig) -> None:
        # Imported lazily so this module loads even without the `mcp` package.
        from mcp import ClientSession, StdioServerParameters
        from mcp.client.stdio import stdio_client
        stack = AsyncExitStack()
        if cfg.command:
            # Stdio transport: spawn the server as a subprocess.
            params = StdioServerParameters(
                command=cfg.command,
                args=cfg.args or [],
                env=cfg.env,
            )
            read, write = await stack.enter_async_context(stdio_client(params))
        else:
            # SSE transport: connect to an already-running HTTP endpoint.
            from mcp.client.sse import sse_client
            read, write = await stack.enter_async_context(sse_client(cfg.url))
        session = await stack.enter_async_context(ClientSession(read, write))
        await session.initialize()
        self._sessions[cfg.name] = session
        self._exit_stacks[cfg.name] = stack
        tools_resp = await session.list_tools()
        self._raw_tools[cfg.name] = tools_resp.tools
        # Expose each tool as "<server>__<tool>", sanitized for the Mistral API.
        server_safe = _sanitize_name(cfg.name)
        for tool in tools_resp.tools:
            tool_safe = _sanitize_name(tool.name)
            mistral_name = f"{server_safe}__{tool_safe}"
            self._tool_map[mistral_name] = (cfg.name, tool.name)
        # NOTE(review): a separator between the server name and the count
        # appears to be missing in this message — confirm against the original.
        print(f"[MCP] ✅ {cfg.name}{len(tools_resp.tools)} outil(s) disponible(s)")

    async def _call_tool_async(self, server_name: str, tool_name: str, arguments: dict) -> str:
        session = self._sessions[server_name]
        result = await session.call_tool(tool_name, arguments)
        # Flatten result content to text; non-text items fall back to str().
        parts = []
        for item in result.content:
            if hasattr(item, "text"):
                parts.append(item.text)
            else:
                parts.append(str(item))
        return "\n".join(parts) if parts else "(aucun résultat)"

    async def _shutdown_async(self) -> None:
        # Close each exit stack (session + transport), then drop all state.
        for stack in list(self._exit_stacks.values()):
            try:
                await stack.aclose()
            except Exception:
                pass
        self._sessions.clear()
        self._raw_tools.clear()
        self._tool_map.clear()
        self._exit_stacks.clear()
# Process-wide singleton manager; creation is serialized by _lock.
_manager: MCPManager | None = None
_lock = threading.Lock()


def get_manager() -> MCPManager:
    """Return the shared MCPManager, creating it on first use (thread-safe)."""
    global _manager
    with _lock:
        if _manager is None:
            _manager = MCPManager()
    return _manager
def reset_manager() -> MCPManager:
    """Shut down the current manager (if any) and install a fresh one.

    Returns the new manager so call sites that assign the result
    (e.g. `manager = mcp_client.reset_manager()`) actually receive it —
    previously this returned None.
    """
    global _manager
    with _lock:
        if _manager is not None:
            _manager.shutdown()
        _manager = MCPManager()
        return _manager

100
assistant/profile.py Normal file
View File

@@ -0,0 +1,100 @@
"""
Gestion des profils de personnalité pour l'assistant.
Chaque profil est un fichier YAML dans le dossier profiles/ contenant :
- name : nom affiché
- description : courte description
- system_prompt : prompt système (obligatoire)
- documents : liste de fichiers texte/markdown à injecter dans le contexte (optionnel)
- voice_language : surcharge de VOICE_LANGUAGE (optionnel)
"""
from __future__ import annotations
from dataclasses import dataclass, field
from pathlib import Path
import yaml
# Profiles live in <repo root>/profiles, one level above the assistant package.
PROFILES_DIR = Path(__file__).parent.parent / "profiles"
@dataclass
class Profile:
    """A personality profile loaded from a YAML file in profiles/."""
    name: str
    system_prompt: str
    description: str = ""
    documents: list[str] = field(default_factory=list)
    voice_language: str | None = None
    mcp_servers: list[dict] = field(default_factory=list)

    def build_system_prompt(self) -> str:
        """Return the system prompt with every context document appended."""
        sections = [self.system_prompt.strip()]
        for doc_path in self.documents:
            full_path = PROFILES_DIR / doc_path
            if not full_path.exists():
                print(f"[Profile] Document introuvable : {full_path}")
                continue
            content = full_path.read_text(encoding="utf-8").strip()
            sections.append(
                f"\n---\n## Contexte : {full_path.stem}\n\n{content}"
            )
        return "\n".join(sections)
def list_profiles() -> list[tuple[str, str, str]]:
    """Return [(slug, name, description), ...] for every available profile.

    Unreadable YAML files are reported on stdout and skipped.
    """
    found: list[tuple[str, str, str]] = []
    for yaml_file in sorted(PROFILES_DIR.glob("*.yaml")):
        slug = yaml_file.stem
        try:
            data = yaml.safe_load(yaml_file.read_text(encoding="utf-8"))
            found.append((slug, data.get("name", slug), data.get("description", "")))
        except Exception as e:
            print(f"[Profile] Erreur lecture {yaml_file.name} : {e}")
    return found
def load_profile(slug: str) -> Profile:
    """Load a profile by slug (file name without extension).

    Raises FileNotFoundError when no matching YAML file exists.
    """
    yaml_file = PROFILES_DIR / f"{slug}.yaml"
    if not yaml_file.exists():
        raise FileNotFoundError(f"Profil introuvable : {slug} (cherché dans {PROFILES_DIR})")
    data = yaml.safe_load(yaml_file.read_text(encoding="utf-8"))
    get = data.get
    return Profile(
        name=get("name", slug),
        description=get("description", ""),
        system_prompt=get("system_prompt", ""),
        documents=get("documents", []),
        voice_language=get("voice_language"),
        mcp_servers=get("mcp_servers", []),
    )
def apply_profile(slug: str) -> Profile:
    """Load a profile and apply it to the active configuration.

    Side effects: replaces config.SYSTEM_PROMPT (and VOICE_LANGUAGE when the
    profile overrides it), resets the cached TTS voice, clears the LLM
    history, and reconnects the MCP servers declared by the profile.
    """
    from . import config, llm, mcp_client, tts

    profile = load_profile(slug)
    config.SYSTEM_PROMPT = profile.build_system_prompt()
    if profile.voice_language:
        config.VOICE_LANGUAGE = profile.voice_language
        # Reset the cached default voice so the new language takes effect.
        # (Relative import replaces the inconsistent `import assistant.tts`.)
        tts._default_voice_id = None
    llm.reset_history()
    # Drop previously connected MCP servers, then load this profile's own.
    # (The old `manager = ...` assignment captured None and was unused.)
    mcp_client.reset_manager()
    if profile.mcp_servers:
        configs = [mcp_client.MCPServerConfig(**s) for s in profile.mcp_servers]
        mcp_client.get_manager().load_servers(configs)
    return profile

14
profiles/default.yaml Normal file
View File

@@ -0,0 +1,14 @@
name: Arioch — Assistant général
description: Assistant vocal généraliste, poli et concis
system_prompt: |
Tu es Arioch, un assistant vocal intelligent, concis et sympathique.
Réponds toujours en français sauf si l'utilisateur parle une autre langue.
# Exemple de configuration de serveurs MCP (décommentez pour activer) :
# mcp_servers:
# - name: filesystem
# command: npx
# args: ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"]
# - name: brave_search
# url: "http://localhost:3000/sse"

6
requirements.txt Normal file
View File

@@ -0,0 +1,6 @@
mistralai[realtime]>=1.7.0
python-dotenv>=1.0.0
sounddevice>=0.5.0
numpy>=1.26.0
pyyaml>=6.0
mcp>=1.0.0