Coverage for aipyapp/llm/client_claude.py: 30%
44 statements
« prev ^ index » next coverage.py v7.10.3, created at 2025-08-11 12:02 +0200
1#! /usr/bin/env python3
2# -*- coding: utf-8 -*-
4from collections import Counter
6from . import BaseClient, ChatMessage
8# https://docs.anthropic.com/en/api/messages
class ClaudeClient(BaseClient):
    """LLM client for Anthropic's Claude Messages API.

    Unlike OpenAI-style APIs, Claude takes the system prompt as a top-level
    ``system`` parameter rather than as a message in the conversation, so it
    is stored separately and injected on every ``messages.create`` call.
    """

    MODEL = "claude-sonnet-4-20250514"
    ENV_API_KEY = "ANTHROPIC_API_KEY"
    #PARAMS = {'thinking': {'type': 'enabled', 'budget_tokens': 1024}}

    def __init__(self, config):
        super().__init__(config)
        # Captured by add_system_prompt(); passed as the API's `system` param.
        self._system_prompt = None

    def _get_client(self):
        # Imported lazily so the `anthropic` package is only required when
        # this provider is actually used.
        import anthropic
        return anthropic.Anthropic(api_key=self._api_key, timeout=self._timeout)

    def usable(self):
        """Return True when base checks pass and an API key is configured.

        Fix: coerce to bool — previously this returned the API-key string
        itself when truthy, which leaks the credential to any caller that
        logs or serializes the result.
        """
        return super().usable() and bool(self._api_key)

    def _parse_usage(self, response):
        """Extract token usage from a non-streaming response as a plain dict."""
        usage = response.usage
        ret = {'input_tokens': usage.input_tokens, 'output_tokens': usage.output_tokens}
        ret['total_tokens'] = ret['input_tokens'] + ret['output_tokens']
        return ret

    def _parse_stream_response(self, response, stream_processor):
        """Consume a streaming response, feeding text deltas to the processor.

        Token counts arrive on different event shapes (message-start events
        carry ``event.message.usage``; delta events carry ``event.usage``),
        so both are accumulated into a single Counter.
        """
        usage = Counter()
        with stream_processor as lm:
            for event in response:
                if hasattr(event, 'delta') and hasattr(event.delta, 'text') and event.delta.text:
                    content = event.delta.text
                    lm.process_chunk(content)
                elif hasattr(event, 'message') and hasattr(event.message, 'usage') and event.message.usage:
                    usage['input_tokens'] += getattr(event.message.usage, 'input_tokens', 0)
                    usage['output_tokens'] += getattr(event.message.usage, 'output_tokens', 0)
                elif hasattr(event, 'usage') and event.usage:
                    usage['input_tokens'] += getattr(event.usage, 'input_tokens', 0)
                    usage['output_tokens'] += getattr(event.usage, 'output_tokens', 0)
        usage['total_tokens'] = usage['input_tokens'] + usage['output_tokens']
        return ChatMessage(role="assistant", content=lm.content, usage=usage)

    def _parse_response(self, response):
        """Convert a non-streaming API response into a ChatMessage."""
        # NOTE(review): assumes the first content block is a text block; with
        # extended thinking enabled the first block may be a thinking block —
        # confirm before re-enabling the commented-out PARAMS above.
        content = response.content[0].text
        role = response.role
        return ChatMessage(role=role, content=content, usage=self._parse_usage(response))

    def add_system_prompt(self, history, system_prompt):
        """Remember the system prompt; `history` is unused because Claude
        passes the system prompt outside the message list."""
        self._system_prompt = system_prompt

    def get_completion(self, messages):
        """Call the Messages API; returns a stream iterator when streaming
        is enabled, otherwise the full response object."""
        if not self._client:
            self._client = self._get_client()
        message = self._client.messages.create(
            model = self._model,
            messages = messages,
            stream=self._stream,
            system=self._system_prompt,
            max_tokens = self.max_tokens,
            temperature = self._temperature,
            **self._params
        )
        return message