Initial commit: BLIGHT: CUE

Webhook listener that monitors Gitea repos for BLIGHT: triggers in
markdown files, processes them via Gemini 2.5 Flash-Lite, and writes
results back in-place.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-14 20:17:31 -05:00
commit 4c9fecda16
12 changed files with 523 additions and 0 deletions

4
ai/__init__.py Normal file
View File

@@ -0,0 +1,4 @@
# Package surface of `ai`: re-export the abstract provider interface and the
# default Gemini implementation so callers can write `from ai import GeminiProvider`.
from .base import AIProvider
from .gemini import GeminiProvider
# Explicit public API of the package (names exported by `from ai import *`).
__all__ = ["AIProvider", "GeminiProvider"]

21
ai/base.py Normal file
View File

@@ -0,0 +1,21 @@
from abc import ABC, abstractmethod
class AIProvider(ABC):
    """Abstract interface that every AI backend must satisfy.

    A concrete provider wraps one model API behind a single `complete`
    call. To plug in a different backend, subclass this and construct
    your subclass in `processor.py` in place of GeminiProvider.
    """

    @abstractmethod
    def complete(self, document: str, instruction: str) -> str:
        """Execute *instruction* in the context of *document*.

        Args:
            document: Entire markdown document, passed for context.
            instruction: Instruction text taken from the BLIGHT trigger line.

        Returns:
            The text that will replace the trigger line in the document.
        """
        ...

29
ai/gemini.py Normal file
View File

@@ -0,0 +1,29 @@
import google.generativeai as genai
import config
from .base import AIProvider
# System prompt attached to every Gemini request: constrains the model to emit
# ONLY the bare replacement text (no preamble, explanation, or code fences),
# since the output is spliced directly into the markdown document in-place.
_SYSTEM_PROMPT = (
    "You are an inline document assistant. "
    "The user will provide a markdown document and a specific instruction. "
    "Your response must contain ONLY the text to be inserted into the document — "
    "no preamble, no explanation, no meta-commentary, no markdown code fences unless "
    "the instruction specifically asks for them. "
    "Respond as if your output will be dropped directly into the middle of a document."
)
class GeminiProvider(AIProvider):
    """AIProvider implementation backed by Google's Gemini 2.5 Flash-Lite."""

    def __init__(self) -> None:
        # Configure the SDK with the project-level API key, then build one
        # model handle carrying the shared system prompt for all requests.
        genai.configure(api_key=config.GEMINI_API_KEY)
        self._model = genai.GenerativeModel(
            model_name="gemini-2.5-flash-lite",
            system_instruction=_SYSTEM_PROMPT,
        )

    def complete(self, document: str, instruction: str) -> str:
        """Run *instruction* against *document* via Gemini.

        Returns:
            The model's reply with leading/trailing whitespace stripped,
            ready for in-place insertion into the document.
        """
        request = "DOCUMENT:\n\n" + document + "\n\nINSTRUCTION: " + instruction
        reply = self._model.generate_content(request)
        return reply.text.strip()