Source code for langchain_core.messages.ai

from typing import Any, List, Literal

from langchain_core.messages.base import (
    BaseMessage,
    BaseMessageChunk,
    merge_content,
)
from langchain_core.utils._merge import merge_dicts


class AIMessage(BaseMessage):
    """Message from an AI."""

    example: bool = False
    """Whether this Message is being passed in to the model as part of an example
    conversation.
    """

    type: Literal["ai"] = "ai"

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "schema", "messages"]


AIMessage.update_forward_refs()
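
A minimal usage sketch (not part of the module source; the content string is illustrative only):

    from langchain_core.messages.ai import AIMessage

    msg = AIMessage(content="The capital of France is Paris.")
    assert msg.type == "ai"
    assert msg.example is False
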
class AIMessageChunk(AIMessage, BaseMessageChunk):
    """Message chunk from an AI."""

    # Ignoring mypy re-assignment here since we're overriding the value
    # to make sure that the chunk variant can be discriminated from the
    # non-chunk variant.
    type: Literal["AIMessageChunk"] = "AIMessageChunk"  # type: ignore[assignment] # noqa: E501

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "schema", "messages"]

    def __add__(self, other: Any) -> BaseMessageChunk:  # type: ignore
        if isinstance(other, AIMessageChunk):
            if self.example != other.example:
                raise ValueError(
                    "Cannot concatenate AIMessageChunks with different example values."
                )

            return self.__class__(
                example=self.example,
                content=merge_content(self.content, other.content),
                additional_kwargs=merge_dicts(
                    self.additional_kwargs, other.additional_kwargs
                ),
                response_metadata=merge_dicts(
                    self.response_metadata, other.response_metadata
                ),
            )

        return super().__add__(other)
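
A minimal sketch of how chunk concatenation behaves (not part of the module source; the strings are illustrative only). Adding two AIMessageChunks merges their content, additional_kwargs, and response_metadata into a single chunk:

    from langchain_core.messages.ai import AIMessageChunk

    left = AIMessageChunk(content="Hello, ")
    right = AIMessageChunk(content="world!")
    merged = left + right
    assert isinstance(merged, AIMessageChunk)
    assert merged.content == "Hello, world!"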