Replace standalone Telegram bot with full CodeAnywhere framework fork. BetterBot shares all framework code and customizes only: - instance.py: BetterBot identity, system prompt, feature flags - tools/site_editing/: list_files, read_file, write_file with auto git push - .env: model defaults and site directory paths - compose/: Docker setup with betterlifesg + memoraiz mounts - deploy script: RackNerd with Infisical secrets
451 lines
15 KiB
Python
451 lines
15 KiB
Python
from __future__ import annotations
|
|
|
|
from dataclasses import dataclass
|
|
from typing import Any, Final
|
|
|
|
from config import settings
|
|
|
|
# Endpoints that count as "official" OpenAI; hosted web search is only
# offered when the configured base URL is one of these.
OFFICIAL_OPENAI_BASE_URLS: Final[set[str]] = {
    "https://api.openai.com",
    "https://api.openai.com/v1",
}

# Canonical provider ids accepted throughout this module.
SUPPORTED_PROVIDERS: Final[tuple[str, ...]] = ("copilot", "openai", "openrouter", "vercel", "huggingface")

# Model-name prefixes that unambiguously identify an OpenAI model.
OPENAI_PREFIXES: Final[tuple[str, ...]] = (
    "gpt-",
    "chatgpt-",
    "o1",
    "o3",
    "o4",
    "computer-use-",
)

# (model-name prefix, vendor) pairs used to add a `vendor/` qualifier for
# gateway providers that require fully-qualified model names.
OPENROUTER_VENDOR_HINTS: Final[tuple[tuple[str, str], ...]] = (
    ("gpt-", "openai"),
    ("chatgpt-", "openai"),
    ("o1", "openai"),
    ("o3", "openai"),
    ("o4", "openai"),
    ("claude-", "anthropic"),
    ("gemini-", "google"),
    ("deepseek-", "deepseek"),
)

# Vercel applies the same vendor-qualification rules as OpenRouter; share the
# table instead of maintaining two identical copies that could drift apart.
VERCEL_VENDOR_HINTS: Final[tuple[tuple[str, str], ...]] = OPENROUTER_VENDOR_HINTS

# Vendor names users may type as a "provider"; all of them are served
# through the Vercel gateway (see _normalize_vendor_alias).
VENDOR_PROVIDER_ALIASES: Final[dict[str, str]] = {
    "anthropic": "anthropic",
    "claude": "anthropic",
    "google": "google",
    "gemini": "google",
    "deepseek": "deepseek",
}

# Human-readable provider labels for UI listings.
PROVIDER_DISPLAY_NAMES: Final[dict[str, str]] = {
    "copilot": "GitHub Copilot",
    "openai": "OpenAI",
    "openrouter": "OpenRouter",
    "vercel": "Vercel AI Gateway",
    "huggingface": "HuggingFace",
}

# Model name fragments known NOT to support vision input.
_NO_VISION_FRAGMENTS: Final[tuple[str, ...]] = (
    "gemma-2",
    "gemma-1",
    "llama",
    "mistral",
    "mixtral",
    "phi-",
    "qwen",
    "deepseek",
    "command-r",
    "codestral",
    "wizardlm",
    "yi-",
    "dbrx",
    "o1-mini",
    "o1-preview",
    "o3-mini",
    "gpt-3.5",
)

# Curated (model id, description) suggestions shown by /models, per provider.
CURATED_MODELS: Final[dict[str, tuple[tuple[str, str], ...]]] = {
    "copilot": (
        ("claude-sonnet-4.5", "Claude Sonnet 4.5 via Copilot"),
        ("claude-sonnet-4.6", "Claude Sonnet 4.6 via Copilot"),
        ("claude-opus-4.6", "Claude Opus 4.6 via Copilot (premium)"),
        ("gpt-4o", "GPT-4o via Copilot"),
        ("o3-mini", "o3-mini via Copilot"),
        ("gemini-2.5-pro", "Gemini 2.5 Pro via Copilot"),
    ),
    "openai": (
        ("gpt-5.4", "default orchestrator model"),
        ("gpt-5.4-mini", "fast subagent / background model"),
        ("gpt-5.4-nano", "smallest low-latency model"),
    ),
    "openrouter": (
        ("anthropic/claude-haiku-4.5", "fast Claude"),
        ("anthropic/claude-sonnet-4.5", "strong Claude"),
        ("anthropic/claude-sonnet-4.6", "newer Claude Sonnet"),
        ("google/gemini-2.5-flash", "fast Gemini"),
        ("google/gemini-2.5-pro", "strong Gemini"),
    ),
    "vercel": (
        ("anthropic/claude-sonnet-4.5", "strong Claude via Vercel"),
        ("anthropic/claude-sonnet-4.6", "newer Claude Sonnet via Vercel"),
        ("anthropic/claude-haiku-4.5", "fast Claude via Vercel"),
        ("google/gemini-2.5-pro", "strong Gemini via Vercel"),
        ("google/gemini-2.5-flash", "fast Gemini via Vercel"),
        ("openai/gpt-4o", "OpenAI via Vercel gateway"),
        ("xai/grok-2", "Grok via Vercel gateway"),
        ("google/gemma-4-31b-it", "Gemma 4 via Vercel (background agent)"),
    ),
    "huggingface": (
        ("Qwen/QwQ-32B", "QwQ 32B reasoning model"),
        ("Qwen/Qwen3-235B-A22B", "Qwen3 235B MoE"),
        ("mistralai/Mistral-Small-3.1-24B-Instruct-2503", "Mistral Small 3.1 24B"),
        ("google/gemma-3-27b-it", "Gemma 3 27B"),
    ),
}
|
|
|
|
|
|
def provider_configuration_error(provider: str) -> str | None:
    """Return a user-facing message explaining why *provider* cannot be used.

    Returns None when the provider is supported and fully configured.

    Fix: `normalize_provider_name` raises ModelSelectionError for unknown
    names, so the original `if normalized is None` branch could never report
    them and the exception leaked to callers such as build_provider_options.
    This function's contract is to return problems as text, so the exception
    is caught and its message returned instead.
    """
    try:
        normalized = normalize_provider_name(provider)
    except ModelSelectionError as exc:
        return str(exc)
    if normalized is None:
        # Blank/None input: report it with the full supported list.
        supported = ", ".join(SUPPORTED_PROVIDERS)
        return f"Unsupported provider `{provider}`. Supported providers: {supported}."

    # Credential each provider needs, plus the message shown when it is missing.
    requirements: dict[str, tuple[Any, str]] = {
        "copilot": (settings.GITHUB_TOKEN, "GitHub Copilot is not configured. Set GITHUB_TOKEN in Infisical and redeploy."),
        "openai": (settings.OPENAI_API_KEY, "OpenAI is not configured. Set OPENAI_API_KEY in Infisical and redeploy."),
        "openrouter": (settings.OPENROUTER_API_KEY, "OpenRouter is not configured. Add OPENROUTER_API_KEY in Infisical and redeploy."),
        "vercel": (settings.VERCEL_API_KEY, "Vercel AI Gateway is not configured. Add VERCEL_API_KEY in Infisical and redeploy."),
        "huggingface": (settings.HUGGINGFACE_API_KEY, "HuggingFace is not configured. Add HUGGINGFACE_API_KEY in Infisical and redeploy."),
    }
    credential, message = requirements[normalized]
    return None if credential else message
|
|
|
|
|
|
def provider_is_configured(provider: str) -> bool:
    """True when *provider* has every credential it needs to be used."""
    error = provider_configuration_error(provider)
    return error is None
|
|
|
|
|
|
def build_provider_options() -> list[dict[str, Any]]:
    """Describe every supported provider for UI menus.

    Each entry carries the provider id, its display label, whether it is
    currently usable, and (when it is not) the reason string.
    """
    options: list[dict[str, Any]] = []
    for name in SUPPORTED_PROVIDERS:
        issue = provider_configuration_error(name)
        options.append(
            {
                "id": name,
                "label": PROVIDER_DISPLAY_NAMES[name],
                "available": issue is None,
                "reason": issue,
            }
        )
    return options
|
|
|
|
|
|
def build_curated_model_options() -> dict[str, list[dict[str, str]]]:
    """Expand CURATED_MODELS into UI-ready dicts, keyed by provider id.

    Each model entry has `id`, `label` (same as id), `description`, and a
    `ref` in the `provider:model` form accepted by /model.
    """
    catalog: dict[str, list[dict[str, str]]] = {}
    for provider_name in SUPPORTED_PROVIDERS:
        entries: list[dict[str, str]] = []
        for model_id, blurb in CURATED_MODELS[provider_name]:
            entries.append(
                {
                    "id": model_id,
                    "label": model_id,
                    "description": blurb,
                    "ref": f"{provider_name}:{model_id}",
                }
            )
        catalog[provider_name] = entries
    return catalog
|
|
|
|
|
|
class ModelSelectionError(ValueError):
    """Raised when a provider/model selection cannot be resolved or used."""
|
|
|
|
|
|
@dataclass(frozen=True)
class ModelSelection:
    """Immutable (provider, model) pair representing the active choice."""

    # Canonical provider id (one of SUPPORTED_PROVIDERS).
    provider: str
    # Model name in the form this provider expects.
    model: str

    @property
    def ref(self) -> str:
        """Canonical `provider:model` reference string."""
        return f"{self.provider}:{self.model}"

    @property
    def supports_hosted_web_search(self) -> bool:
        """Hosted web search is only offered on official OpenAI endpoints."""
        if self.provider != "openai":
            return False
        return is_official_openai_base_url(settings.OPENAI_BASE_URL)

    @property
    def likely_supports_vision(self) -> bool:
        """Heuristic: return False for model families known not to accept images."""
        lowered = self.model.lower()
        # Strip vendor prefix (e.g. "google/gemma-4-31b-it" → "gemma-4-31b-it")
        head, sep, tail = lowered.partition("/")
        bare = tail if sep else head
        return all(fragment not in bare for fragment in _NO_VISION_FRAGMENTS)
|
|
|
|
|
|
def default_selection() -> ModelSelection:
    """The selection used before the user picks anything: openai + DEFAULT_MODEL."""
    return resolve_selection(provider="openai", model=settings.DEFAULT_MODEL)
|
|
|
|
|
|
def resolve_selection(
    *,
    model: str | None,
    provider: str | None = None,
    current: ModelSelection | None = None,
) -> ModelSelection:
    """Resolve user input into a validated ModelSelection.

    Provider resolution order: an explicit `provider:` prefix inside *model*,
    then the *provider* argument, then inference from the model name, then
    the *current* selection's provider, then the "openai" fallback.

    Raises ModelSelectionError for empty models, unsupported providers,
    missing credentials, or models that don't belong to the provider.
    """
    fallback_model = current.model if current else settings.DEFAULT_MODEL
    model_name = (model or fallback_model).strip()
    provider_name = (provider or "").strip().lower()

    if not model_name:
        raise ModelSelectionError("Model cannot be empty.")

    # An explicit "provider:model" string overrides the provider argument.
    prefix_provider, prefix_model = _split_explicit_provider(model_name)
    if prefix_provider:
        provider_name, model_name = prefix_provider, prefix_model

    if not provider_name:
        provider_name = (
            _infer_provider(model_name)
            or (current.provider if current else None)
            or "openai"
        )

    provider_name, model_name = _normalize_vendor_alias(provider_name, model_name)

    if provider_name not in SUPPORTED_PROVIDERS:
        supported = ", ".join(SUPPORTED_PROVIDERS)
        raise ModelSelectionError(f"Unsupported provider `{provider_name}`. Supported providers: {supported}.")

    resolved_model = _normalize_model_for_provider(model_name, provider_name)
    _validate_provider_configuration(provider_name)
    _validate_model_for_provider(provider_name, resolved_model)

    return ModelSelection(provider=provider_name, model=resolved_model)
|
|
|
|
|
|
def build_provider_config(selection: ModelSelection) -> dict[str, Any] | None:
    """Build a Copilot SDK ProviderConfig dict for the given selection.

    Returns None for the copilot provider — the SDK uses the built-in
    GitHub Copilot model catalog when no custom provider is passed.

    Raises ModelSelectionError for providers this module does not know.

    Refactor: the four non-copilot branches were structurally identical
    (`type: "openai"` plus a base URL and key), so they are collapsed into
    a single endpoint lookup table.
    """
    if selection.provider == "copilot":
        return None

    # Every non-copilot backend speaks the OpenAI wire protocol; only the
    # endpoint and credential differ.
    endpoints: dict[str, tuple[Any, Any]] = {
        "openai": (settings.OPENAI_BASE_URL, settings.OPENAI_API_KEY),
        "openrouter": (settings.OPENROUTER_BASE_URL, settings.OPENROUTER_API_KEY),
        "vercel": (settings.VERCEL_BASE_URL, settings.VERCEL_API_KEY),
        "huggingface": (settings.HUGGINGFACE_BASE_URL, settings.HUGGINGFACE_API_KEY),
    }
    try:
        base_url, api_key = endpoints[selection.provider]
    except KeyError:
        raise ModelSelectionError(f"Unsupported provider `{selection.provider}`.") from None

    return {
        "type": "openai",
        "base_url": base_url,
        "api_key": api_key,
    }
|
|
|
|
|
|
def format_selection(selection: ModelSelection) -> str:
    """Two-line Markdown summary of the active provider and model."""
    parts = (f"Provider: `{selection.provider}`", f"Model: `{selection.model}`")
    return "\n".join(parts)
|
|
|
|
|
|
def normalize_provider_name(provider: str | None) -> str | None:
    """Map user input to a canonical provider id.

    Returns None for missing/blank input. "hf" is shorthand for huggingface;
    vendor names (anthropic, claude, google, ...) route through the vercel
    gateway.

    Raises ModelSelectionError for names that are not recognized at all.
    """
    if provider is None:
        return None

    candidate = provider.strip().lower()
    if not candidate:
        return None
    if candidate == "hf":
        return "huggingface"
    if candidate in VENDOR_PROVIDER_ALIASES:
        return "vercel"
    if candidate in SUPPORTED_PROVIDERS:
        return candidate

    supported = ", ".join(SUPPORTED_PROVIDERS)
    raise ModelSelectionError(f"Unsupported provider `{candidate}`. Supported providers: {supported}.")
|
|
|
|
|
|
def format_known_models(*, current: ModelSelection | None = None, provider: str | None = None) -> str:
    """Render the curated model list as user-facing text.

    When *provider* is given, only that provider's suggestions are listed;
    otherwise every supported provider is shown. Appends usage hints and any
    provider configuration problems.

    Refactor: the four copy-pasted configuration-error paragraphs (copilot,
    openrouter, vercel, huggingface) are collapsed into one loop.
    """
    requested_provider = normalize_provider_name(provider)
    provider_names = (requested_provider,) if requested_provider else SUPPORTED_PROVIDERS

    lines: list[str] = []
    if current is not None:
        lines.append("Current selection")
        lines.append(format_selection(current))
        lines.append("")

    lines.append("Suggested models")
    for provider_name in provider_names:
        lines.append("")
        lines.append(PROVIDER_DISPLAY_NAMES[provider_name])
        for model_name, description in CURATED_MODELS[provider_name]:
            lines.append(f"- `{model_name}` - {description}")

    lines.append("")
    lines.append("Usage")
    lines.append("- `/models` - show the curated list")
    lines.append("- `/provider copilot` - switch to GitHub Copilot subscription models")
    lines.append("- `/provider openrouter` - switch backend provider")
    lines.append("- `/provider vercel` - switch backend provider")
    lines.append("- `/provider huggingface` - switch to HuggingFace serverless inference")
    lines.append("- `/model copilot:anthropic/claude-opus-4.6` - switch provider and model at once")
    lines.append("- `/model huggingface:Qwen/QwQ-32B` - use HuggingFace with specific model (shorthand: `hf:`)")
    lines.append("- `/model anthropic/claude-sonnet-4.5` - switch model inside the current provider")

    # Surface configuration problems for providers the user might pick.
    # NOTE: "openai" is deliberately excluded, matching the original behavior.
    for name in ("copilot", "openrouter", "vercel", "huggingface"):
        if requested_provider not in (None, name):
            continue
        issue = provider_configuration_error(name)
        if issue:
            lines.append("")
            lines.append(issue)

    return "\n".join(lines).strip()
|
|
|
|
|
|
def is_official_openai_base_url(base_url: str) -> bool:
    """True when *base_url* points at api.openai.com (trailing slashes ignored)."""
    trimmed = base_url.rstrip("/")
    return trimmed in OFFICIAL_OPENAI_BASE_URLS
|
|
|
|
|
|
def _split_explicit_provider(model: str) -> tuple[str | None, str]:
|
|
provider, separator, remainder = model.partition(":")
|
|
if not separator:
|
|
return None, model
|
|
normalized = provider.strip().lower()
|
|
if normalized == "hf":
|
|
normalized = "huggingface"
|
|
if normalized in SUPPORTED_PROVIDERS or normalized in VENDOR_PROVIDER_ALIASES:
|
|
return normalized, remainder.strip()
|
|
return None, model
|
|
|
|
|
|
def _infer_provider(model: str) -> str | None:
    """Return the provider a model clearly belongs to, or *None* for ambiguous names."""
    candidate = model.strip().lower()

    if candidate.startswith(OPENAI_PREFIXES):
        return "openai"

    # Vendor-branded names and vendor-qualified paths all route to Vercel.
    vercel_prefixes = (
        "claude-", "gemini-", "deepseek-",
        "anthropic/", "google/", "deepseek/",
        "openai/", "xai/",
    )
    if candidate.startswith(vercel_prefixes):
        return "vercel"

    # Any vendor/model format (e.g. zai/glm-5.1) defaults to Vercel gateway
    if "/" in candidate:
        return "vercel"

    return None
|
|
|
|
|
|
def _normalize_copilot_model(model: str) -> str:
|
|
"""Strip vendor prefix (e.g. anthropic/claude-opus-4.6 -> claude-opus-4.6) for Copilot models."""
|
|
if "/" in model:
|
|
return model.split("/", 1)[1]
|
|
return model
|
|
|
|
|
|
def _normalize_vendor_alias(provider: str, model: str) -> tuple[str, str]:
    """Rewrite vendor-named "providers" to the vercel gateway.

    When *provider* is a vendor alias (anthropic, claude, google, ...), the
    returned provider is "vercel" and the model gains a `vendor/` prefix if
    it does not already have one. Otherwise both values pass through
    (stripped/lowercased).
    """
    alias_key = provider.strip().lower()
    cleaned_model = model.strip()

    vendor = VENDOR_PROVIDER_ALIASES.get(alias_key)
    if vendor is None:
        return alias_key, cleaned_model

    if not cleaned_model.startswith(f"{vendor}/"):
        cleaned_model = f"{vendor}/{cleaned_model}"
    return "vercel", cleaned_model
|
|
|
|
|
|
def _prefix_vendor(model: str, hints: tuple[tuple[str, str], ...]) -> str:
    """Qualify a bare model name with `vendor/` using prefix *hints*.

    Names that already contain a vendor path are passed through unchanged.
    """
    if "/" in model:
        return model
    lowered = model.lower()
    for prefix, vendor in hints:
        if lowered.startswith(prefix):
            return f"{vendor}/{model}"
    return model


def _normalize_model_for_provider(model: str, provider: str) -> str:
    """Rewrite *model* into the naming scheme *provider* expects.

    Refactor: the openrouter and vercel branches were identical except for
    the hint table; the shared logic now lives in _prefix_vendor.
    """
    normalized = model.strip()

    if provider == "copilot":
        # Copilot's catalog uses bare names without vendor prefixes.
        return _normalize_copilot_model(normalized)

    if provider == "openai":
        # Accept gateway-style "openai/gpt-4o" and strip the vendor.
        if normalized.lower().startswith("openai/"):
            return normalized.split("/", 1)[1]
        return normalized

    if provider == "openrouter":
        return _prefix_vendor(normalized, OPENROUTER_VENDOR_HINTS)

    if provider == "vercel":
        return _prefix_vendor(normalized, VERCEL_VENDOR_HINTS)

    # huggingface models are always org/model format — pass through as-is,
    # which is also the fallback for any other provider.
    return normalized
|
|
|
|
|
|
def _validate_provider_configuration(provider: str) -> None:
    """Raise ModelSelectionError when *provider* is missing credentials."""
    message = provider_configuration_error(provider)
    if message is not None:
        raise ModelSelectionError(message)
|
|
|
|
|
|
def _validate_model_for_provider(provider: str, model: str) -> None:
|
|
lowered = model.lower()
|
|
if provider != "openai":
|
|
return
|
|
|
|
if lowered.startswith(("anthropic/", "google/", "deepseek/", "claude-", "gemini-", "deepseek-")):
|
|
raise ModelSelectionError(
|
|
"That model is not an OpenAI model. `/model` only switches models inside the active provider. For Claude or Gemini, use `/provider vercel` or `/model vercel:<vendor/model>`."
|
|
)
|