Commit message: progress
This commit modifies gemini.py (27 lines shown twice: before and after).
Diff hunk header: @@ -1,38 +1,35 @@ — migrates the module from the legacy
google.generativeai SDK to the new google-genai client API.
|
||||
# gemini.py
|
||||
import tomllib
|
||||
from pathlib import Path
|
||||
import google.generativeai as genai
|
||||
from google import genai
|
||||
from google.genai import types
|
||||
|
||||
# Module-level session state, created lazily on first use.
_cache = None   # legacy cached-content handle (old google.generativeai path) — presumably unused on the new path; TODO confirm
_model = None   # legacy GenerativeModel (old google.generativeai path)
_client = None  # google-genai Client (new API path)
_chat = None    # active chat session (either API path)
|
||||
|
||||
def _load_key() -> str:
|
||||
with open("credentials.toml", "rb") as f:
|
||||
return tomllib.load(f)["gemini"]["api_key"]
|
||||
|
||||
def _ensure_model():
    """Lazily construct the module-level GenerativeModel (legacy SDK path)."""
    global _model
    if _model is not None:
        return
    # Configure the legacy SDK with the key from credentials.toml, then
    # build the model exactly once; later calls reuse the cached instance.
    genai.configure(api_key=_load_key())
    _model = genai.GenerativeModel("gemini-1.5-pro")
|
||||
def _ensure_client():
    """Create the shared google-genai Client on first use (idempotent)."""
    global _client
    if _client is not None:
        return
    _client = genai.Client(api_key=_load_key())
|
||||
|
||||
def _ensure_chat():
    """Lazily create the shared chat session via the google-genai client.

    The merged diff showed both the legacy initialization
    (``_model.start_chat(history=[])``) and the new one; the legacy
    assignment was immediately overwritten, so only the new-API path
    is kept, matching the commit's migration direction.
    """
    global _chat
    if _chat is None:
        _ensure_client()
        # NOTE(review): model name is hard-coded; confirm "gemini-2.0-flash"
        # is the intended default for this application.
        _chat = _client.chats.create(model="gemini-2.0-flash")
|
||||
|
||||
def send(md_content: str, user_message: str) -> str:
    """Send one message to the Gemini chat session and return the reply text.

    ``md_content`` is wrapped in a ``<context>`` tag and prepended to
    ``user_message`` so the model sees the document alongside the question.

    The merged diff carried two global statements
    (``global _cache, _chat`` and ``global _chat``); ``_cache`` is never
    read or written here, so only ``_chat`` is declared.
    """
    global _chat
    _ensure_chat()
    full_message = f"<context>\n{md_content}\n</context>\n\n{user_message}"
    response = _chat.send_message(full_message)
    return response.text
|
||||
|
||||
def reset_session():
    """Drop all cached module state so the next send() starts fresh.

    The merged diff had two separate global statements
    (``global _cache, _model, _chat`` and ``global _client, _chat``);
    they are consolidated here, clearing every lazily-created handle
    from both API paths.
    """
    global _cache, _model, _client, _chat
    _cache = None
    _model = None
    _client = None
    _chat = None
|
||||
|
||||
Reference in New Issue
Block a user