Gemini adjustments
@@ -3,8 +3,10 @@ from __future__ import annotations
 import json
 import logging
 from pathlib import Path
-from typing import Dict, List
+from typing import Any, Dict, List, Optional
 
+from google import genai
+from google.genai import types as genai_types
 import requests
 
 from video_render.config import BASE_DIR, Settings
@@ -12,7 +14,6 @@ from video_render.transcription import TranscriptionResult
 
 logger = logging.getLogger(__name__)
 
-GEMINI_ENDPOINT_TEMPLATE = "https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent"
 OPENROUTER_ENDPOINT = "https://openrouter.ai/api/v1/chat/completions"
 
 
@@ -31,6 +32,7 @@ class GeminiHighlighter:
 
         self.prompt_template = prompt_path.read_text(encoding="utf-8")
         self.settings = settings
+        self.client = genai.Client()
 
     def generate_highlights(self, transcription: TranscriptionResult) -> List[Dict]:
         payload = {
@@ -45,45 +47,13 @@ class GeminiHighlighter:
             ],
         }
 
-        body = {
-            "contents": [
-                {
-                    "role": "user",
-                    "parts": [
-                        {"text": self.prompt_template},
-                        {"text": json.dumps(payload, ensure_ascii=False)},
-                    ],
-                }
-            ]
-        }
+        try:
+            response = self._call_gemini(payload)
+        except Exception as exc:
+            logger.error("Gemini API request falhou: %s", exc)
+            raise RuntimeError("Gemini API request falhou") from exc
 
-        if self.settings.gemini.temperature is not None:
-            body["generationConfig"] = {
-                "temperature": self.settings.gemini.temperature,
-            }
-        if self.settings.gemini.top_p is not None:
-            body["generationConfig"]["topP"] = self.settings.gemini.top_p
-        if self.settings.gemini.top_k is not None:
-            body["generationConfig"]["topK"] = self.settings.gemini.top_k
-
-        url = GEMINI_ENDPOINT_TEMPLATE.format(model=self.settings.gemini.model)
-        params = {"key": self.settings.gemini.api_key}
-
-        response = requests.post(url, params=params, json=body, timeout=120)
-        response.raise_for_status()
-        data = response.json()
-
-        candidates = data.get("candidates") or []
-        if not candidates:
-            raise RuntimeError("Gemini nao retornou candidatos")
-
-        text_parts = candidates[0].get("content", {}).get("parts", [])
-        if not text_parts:
-            raise RuntimeError("Resposta do Gemini sem conteudo")
-
-        raw_text = text_parts[0].get("text")
-        if not raw_text:
-            raise RuntimeError("Resposta do Gemini sem texto")
+        raw_text = self._extract_response_text(response)
 
         parsed = self._extract_json(raw_text)
         highlights = parsed.get("highlights")
@@ -91,6 +61,61 @@ class GeminiHighlighter:
             raise ValueError("Resposta do Gemini invalida: campo 'highlights' ausente")
         return highlights
 
+    def _call_gemini(self, payload: Dict[str, Any]) -> Any:
+        contents = [
+            {
+                "role": "user",
+                "parts": [
+                    {"text": self.prompt_template},
+                    {"text": json.dumps(payload, ensure_ascii=False)},
+                ],
+            }
+        ]
+
+        request_kwargs: Dict[str, Any] = {
+            "model": self.settings.gemini.model,
+            "contents": contents,
+        }
+
+        config = self._build_generation_config()
+        if config is not None:
+            request_kwargs["config"] = config
+
+        return self.client.models.generate_content(**request_kwargs)
+
+    def _build_generation_config(self) -> Optional[genai_types.GenerateContentConfig]:
+        config_kwargs: Dict[str, Any] = {}
+        if self.settings.gemini.temperature is not None:
+            config_kwargs["temperature"] = self.settings.gemini.temperature
+        if self.settings.gemini.top_p is not None:
+            config_kwargs["top_p"] = self.settings.gemini.top_p
+        if self.settings.gemini.top_k is not None:
+            config_kwargs["top_k"] = self.settings.gemini.top_k
+
+        if not config_kwargs:
+            return None
+
+        return genai_types.GenerateContentConfig(**config_kwargs)
+
+    @staticmethod
+    def _extract_response_text(response: Any) -> str:
+        text = getattr(response, "text", None)
+        if text:
+            return str(text).strip()
+
+        candidates = getattr(response, "candidates", None) or []
+        for candidate in candidates:
+            content = getattr(candidate, "content", None)
+            if not content:
+                continue
+            parts = getattr(content, "parts", None) or []
+            for part in parts:
+                part_text = getattr(part, "text", None)
+                if part_text:
+                    return str(part_text).strip()
+
+        raise RuntimeError("Resposta do Gemini sem texto")
+
     @staticmethod
     def _extract_json(response_text: str) -> Dict:
         try:
@@ -160,10 +185,6 @@ class OpenRouterCopywriter:
         response.raise_for_status()
         data = response.json()
         choices = data.get("choices") or []
-        print("Data:")
-        print(data)
-        print("Choices:")
-        print(choices)
 
         if not choices:
            raise RuntimeError("OpenRouter nao retornou escolhas")
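For reference, a minimal standalone sketch of the google-genai SDK call path this commit switches to. The model id, sampling values, and prompt text below are illustrative placeholders, not values taken from this project's Settings:

    from google import genai
    from google.genai import types as genai_types

    # genai.Client() reads the API key from the GEMINI_API_KEY / GOOGLE_API_KEY
    # environment variables by default, so no key is passed explicitly here.
    client = genai.Client()

    # Optional sampling config, mirroring _build_generation_config;
    # the values are placeholders.
    config = genai_types.GenerateContentConfig(temperature=0.2, top_p=0.95, top_k=40)

    response = client.models.generate_content(
        model="gemini-2.0-flash",  # placeholder model id
        contents=[
            {
                "role": "user",
                "parts": [{"text": "Return a JSON object with a 'highlights' list."}],
            }
        ],
        config=config,
    )

    # response.text is the same attribute _extract_response_text checks first,
    # before falling back to candidates/content/parts.
    print(response.text)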