- Replace 6 compound Likert questions with 12 atomic ones grouped by dimension (syntax, expressiveness, data/IO, errors, overall); drop free-form question. Responses now stored as ints, not strings. - Back-compat layer maps legacy keys to new dimensions so existing results still render. - Parallelize run-all with ThreadPoolExecutor (configurable workers) and add a thread-safe min-request-interval rate limiter to the Anthropic provider. - Add new tasks: path_normalizer, todo_manager, currency_converter, locale_weather_url, network_info_parser, url_normalizer.
47 lines · 1.6 KiB · Python
from __future__ import annotations
|
|
|
|
import os
|
|
import threading
|
|
import time
|
|
from typing import Any
|
|
|
|
import anthropic
|
|
|
|
from .base import Message
|
|
|
|
|
|
class AnthropicProvider:
    """LLM provider backed by the Anthropic Messages API.

    Safe for concurrent ``send`` calls from multiple threads: a lock
    serializes the rate-limiting bookkeeping so at most one request is
    *started* per ``min_request_interval`` seconds, while the API calls
    themselves may overlap across threads.
    """

    def __init__(self, config: dict[str, Any]) -> None:
        """Build an Anthropic client from *config*.

        Recognized keys (all optional):
            api_key_env: name of the env var holding the API key
                (default ``"ANTHROPIC_API_KEY"``).
            model: model identifier (default ``"claude-sonnet-4-20250514"``).
            max_tokens: response token cap (default 4096).
            min_request_interval: minimum seconds between request starts
                (default 0.1).

        Raises:
            RuntimeError: if the API-key environment variable is unset or empty.
        """
        api_key_env = config.get("api_key_env", "ANTHROPIC_API_KEY")
        api_key = os.environ.get(api_key_env)
        if not api_key:
            raise RuntimeError(f"Set {api_key_env} environment variable")
        self._client = anthropic.Anthropic(api_key=api_key)
        self._model = config.get("model", "claude-sonnet-4-20250514")
        self._max_tokens = config.get("max_tokens", 4096)
        # Minimum spacing between request *starts*, in seconds.
        self._min_request_interval = config.get("min_request_interval", 0.1)
        self._last_request_time = 0.0
        # Guards _last_request_time; see send() for the locking protocol.
        self._lock = threading.Lock()

    def send(self, messages: list[Message], system: str = "") -> str:
        """Send *messages* (plus optional *system* prompt); return reply text.

        Args:
            messages: conversation turns; each must expose ``role`` and
                ``content`` attributes.
            system: optional system prompt; omitted from the request when empty.

        Returns:
            The concatenation of all text blocks in the response ("" if none).
        """
        # Rate limiting: the lock is held across the sleep on purpose, so
        # concurrent callers queue up and are released one per interval.
        with self._lock:
            elapsed = time.monotonic() - self._last_request_time
            if elapsed < self._min_request_interval:
                time.sleep(self._min_request_interval - elapsed)
            self._last_request_time = time.monotonic()

        api_messages = [{"role": m.role, "content": m.content} for m in messages]
        kwargs: dict[str, Any] = {
            "model": self._model,
            "max_tokens": self._max_tokens,
            "messages": api_messages,
        }
        if system:
            kwargs["system"] = system
        response = self._client.messages.create(**kwargs)
        # Fix: the original returned response.content[0].text, which raises
        # IndexError on empty content and AttributeError when the first block
        # is not a text block (responses may interleave non-text blocks).
        # Join every text block instead.
        return "".join(
            block.text for block in response.content if hasattr(block, "text")
        )

    @property
    def model_name(self) -> str:
        """Model identifier sent to the API (useful for labeling results)."""
        return self._model
|