39 lines
904 B
Python
39 lines
904 B
Python
# gemini.py
|
|
import tomllib
|
|
from pathlib import Path
|
|
import google.generativeai as genai
|
|
|
|
# Lazily-initialized module-level singletons shared by all callers.
_cache = None  # NOTE(review): only ever written by reset_session(); appears unused — confirm before removing
_model = None  # genai.GenerativeModel, created on first use by _ensure_model()
_chat = None   # chat session from _model.start_chat(); carries history across send() calls
|
|
|
|
def _load_key() -> str:
|
|
with open("credentials.toml", "rb") as f:
|
|
return tomllib.load(f)["gemini"]["api_key"]
|
|
|
|
def _ensure_model():
    """Configure the SDK and build the shared model on first call; no-op after."""
    global _model
    if _model is not None:
        return
    # Key is read from disk only once, on the first model construction.
    genai.configure(api_key=_load_key())
    _model = genai.GenerativeModel("gemini-1.5-pro")
|
|
|
|
def _ensure_chat():
    """Create the module-level chat session (and model) on first use."""
    global _chat
    if _chat is not None:
        return
    _ensure_model()
    # Fresh session: history accumulates across subsequent send() calls.
    _chat = _model.start_chat(history=[])
|
|
|
|
def send(md_content: str, user_message: str) -> str:
    """Send *user_message* to the chat session with *md_content* as context.

    Args:
        md_content: Reference material (markdown) injected before the question.
        user_message: The user's actual prompt.

    Returns:
        The model's reply text.
    """
    # Fix: the original declared ``global _cache, _chat`` but assigned
    # neither name — the dead declaration (and phantom _cache) is removed.
    _ensure_chat()

    # Wrap the context in explicit tags so the model can distinguish
    # reference material from the question itself.
    full_message = f"<context>\n{md_content}\n</context>\n\n{user_message}"
    response = _chat.send_message(full_message)
    return response.text
|
|
|
|
def reset_session():
    """Drop all cached client state so the next send() starts a fresh chat."""
    global _cache, _model, _chat
    _cache = _model = _chat = None
|