-
-
Notifications
You must be signed in to change notification settings - Fork 345
Add basic anthropic sdk module - draft PR #771
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
d6d2241
843ab0d
5c33b1a
286ac57
3327eb4
fde438d
46752d1
3d22914
90d81c9
7647b36
11c2876
1d9eca0
48f8d07
6850115
0c3d1ca
0366023
1e91cb9
5e1b73e
d49eb2d
59857ce
6eb4760
a85d128
5ce375d
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -3,4 +3,4 @@ repos: | |
# Ruff version. | ||
rev: v0.11.4 | ||
hooks: | ||
- id: ruff | ||
- id: ruff |
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -9,6 +9,7 @@ | |
from langroid.agent.tool_message import ToolMessage | ||
from langroid.agent.xml_tool_message import XMLToolMessage | ||
from langroid.language_models.base import ( | ||
AnthropicToolCall, | ||
LLMFunctionCall, | ||
LLMMessage, | ||
LLMResponse, | ||
|
@@ -123,6 +124,8 @@ class ChatDocument(Document): | |
oai_tool_id2result: Optional[OrderedDict[str, str]] = None | ||
oai_tool_choice: ToolChoiceTypes | Dict[str, Dict[str, str] | str] = "auto" | ||
function_call: Optional[LLMFunctionCall] = None | ||
ant_tool_calls: Optional[List[AnthropicToolCall]] = None | ||
ant_tool_id2result: Optional[OrderedDict[str, str]] = None | ||
# tools that are explicitly added by agent response/handler, | ||
# or tools recognized in the ChatDocument as handle-able tools | ||
tool_messages: List[ToolMessage] = [] | ||
|
@@ -296,12 +299,16 @@ def from_LLMResponse( | |
# there must be at least one if it's not None | ||
for oai_tc in response.oai_tool_calls: | ||
ChatDocument._clean_fn_call(oai_tc.function) | ||
if response.ant_tool_calls: | ||
for ant_tc in response.ant_tool_calls: | ||
ChatDocument._clean_fn_call(ant_tc.function) | ||
return ChatDocument( | ||
content=message, | ||
reasoning=response.reasoning, | ||
content_any=message, | ||
oai_tool_calls=response.oai_tool_calls, | ||
function_call=response.function_call, | ||
ant_tool_calls=response.ant_tool_calls, | ||
metadata=ChatDocMetaData( | ||
source=Entity.LLM, | ||
sender=Entity.LLM, | ||
|
@@ -334,6 +341,7 @@ def from_str(msg: str) -> "ChatDocument": | |
def to_LLMMessage( | ||
message: Union[str, "ChatDocument"], | ||
oai_tools: Optional[List[OpenAIToolCall]] = None, | ||
anthropic_tools: Optional[List[AnthropicToolCall]] = None, | ||
) -> List[LLMMessage]: | ||
""" | ||
Convert to list of LLMMessage, to incorporate into msg-history sent to LLM API. | ||
|
@@ -352,12 +360,14 @@ def to_LLMMessage( | |
sender_role = Role.USER | ||
fun_call = None | ||
oai_tool_calls = None | ||
ant_tool_calls = None | ||
tool_id = "" # for OpenAI Assistant | ||
chat_document_id: str = "" | ||
if isinstance(message, ChatDocument): | ||
content = message.content or to_string(message.content_any) or "" | ||
fun_call = message.function_call | ||
oai_tool_calls = message.oai_tool_calls | ||
ant_tool_calls = message.ant_tool_calls | ||
if message.metadata.sender == Entity.USER and fun_call is not None: | ||
# This may happen when a (parent agent's) LLM generates a | ||
# Function-call, and it ends up being sent to the current task's | ||
|
@@ -372,6 +382,9 @@ def to_LLMMessage( | |
# same reasoning as for function-call above | ||
content += " " + "\n\n".join(str(tc) for tc in oai_tool_calls) | ||
oai_tool_calls = None | ||
if message.metadata.sender == Entity.USER and ant_tool_calls is not None: | ||
content += " " + "\n\n".join(str(tc) for tc in ant_tool_calls) | ||
ant_tool_calls = None | ||
sender_name = message.metadata.sender_name | ||
tool_ids = message.metadata.tool_ids | ||
tool_id = tool_ids[-1] if len(tool_ids) > 0 else "" | ||
|
@@ -445,7 +458,7 @@ def to_LLMMessage( | |
tool_id=tool_id, # for OpenAI Assistant | ||
content=content, | ||
function_call=fun_call, | ||
tool_calls=oai_tool_calls, | ||
tool_calls=oai_tool_calls if oai_tool_calls else ant_tool_calls, | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. [nitpick] The merging logic for tool_calls prioritizes oai_tool_calls over ant_tool_calls outright. Consider explicitly handling the scenario where both lists are populated (e.g. merging them) to avoid unintentionally dropping valid Anthropic tool calls. Copilot uses AI. Check for mistakes. Positive Feedback / Negative Feedback |
||
name=sender_name, | ||
chat_document_id=chat_document_id, | ||
) | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
[nitpick] Using a fallback that checks only one collection may lead to overlooking cases where both oai_tool_calls and ant_tool_calls are present. It would be beneficial to review and possibly merge both lists or clearly document the intended priority.
Copilot uses AI. Check for mistakes.