```python
from langchain.chat_models import init_chat_model
from langchain.messages import HumanMessage, AIMessage, SystemMessage

model = init_chat_model("gpt-5-nano")

system_msg = SystemMessage("You are a helpful assistant.")
human_msg = HumanMessage("Hello, how are you?")

# Use with chat models
messages = [system_msg, human_msg]
response = model.invoke(messages)  # Returns AIMessage
```
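The returned `AIMessage` carries the model's reply. A minimal sketch of inspecting it (the exact text depends on the model):

```python
# Inspect the reply (a sketch; the actual text depends on the model)
print(response.content)
print(response.usage_metadata)  # token counts, if the provider reports them
```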
```python
from langchain.messages import SystemMessage, HumanMessage, AIMessage

messages = [
    SystemMessage("You are a poetry expert"),
    HumanMessage("Write a haiku about spring"),
    AIMessage("Cherry blossoms bloom..."),
]
response = model.invoke(messages)
```
```python
system_msg = SystemMessage("You are a helpful coding assistant.")

messages = [
    system_msg,
    HumanMessage("How do I create a REST API?"),
]
response = model.invoke(messages)
```
Detailed persona
```python
from langchain.messages import SystemMessage, HumanMessage

system_msg = SystemMessage(
    """You are a senior Python developer with expertise in web frameworks.
Always provide code examples and explain your reasoning.
Be concise but thorough in your explanations."""
)

messages = [
    system_msg,
    HumanMessage("How do I create a REST API?"),
]
response = model.invoke(messages)
```
```python
from langchain.messages import AIMessage, SystemMessage, HumanMessage

# Create an AI message manually (e.g., for conversation history)
ai_msg = AIMessage("I'd be happy to help you with that question!")

# Add to conversation history
messages = [
    SystemMessage("You are a helpful assistant"),
    HumanMessage("Can you help me?"),
    ai_msg,  # Insert as if it came from the model
    HumanMessage("Great! What's 2+2?"),
]
response = model.invoke(messages)
```
```python
from langchain.chat_models import init_chat_model

model = init_chat_model("gpt-5-nano")

def get_weather(location: str) -> str:
    """Get the weather at a location."""
    ...

model_with_tools = model.bind_tools([get_weather])

response = model_with_tools.invoke("What's the weather in Paris?")

for tool_call in response.tool_calls:
    print(f"Tool: {tool_call['name']}")
    print(f"Args: {tool_call['args']}")
    print(f"ID: {tool_call['id']}")
```
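To actually answer the question, each tool call has to be executed and its result handed back to the model as a `ToolMessage`. A minimal sketch of that loop, reusing `get_weather` and `model_with_tools` from above (the next example constructs the same messages by hand):

```python
from langchain.messages import HumanMessage, ToolMessage

messages = [HumanMessage("What's the weather in Paris?")]
ai_msg = model_with_tools.invoke(messages)
messages.append(ai_msg)

# Run each requested tool and report its result back to the model
for tool_call in ai_msg.tool_calls:
    result = get_weather(**tool_call["args"])
    messages.append(ToolMessage(content=result, tool_call_id=tool_call["id"]))

final_response = model_with_tools.invoke(messages)
```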
```python
from langchain.messages import AIMessage, HumanMessage, ToolMessage

# After a model makes a tool call
ai_message = AIMessage(
    content=[],
    tool_calls=[{
        "name": "get_weather",
        "args": {"location": "San Francisco"},
        "id": "call_123",
    }],
)

# Execute tool and create result message
weather_result = "Sunny, 72°F"
tool_message = ToolMessage(
    content=weather_result,
    tool_call_id="call_123",  # Must match the call ID
)

# Continue conversation
messages = [
    HumanMessage("What's the weather in San Francisco?"),
    ai_message,    # Model's tool call
    tool_message,  # Tool execution result
]
response = model.invoke(messages)  # Model processes the result
```
```python
from langchain.messages import ToolMessage

# Sent to model
message_content = "It was the best of times, it was the worst of times."

# Artifact available downstream
artifact = {"document_id": "doc_123", "page": 0}

tool_message = ToolMessage(
    content=message_content,
    tool_call_id="call_123",
    name="search_books",
    artifact=artifact,
)
```
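Only the `content` is sent to the model; the `artifact` stays attached to the message so later steps can use it, for example:

```python
# The artifact never reaches the model, but downstream code can still read it
doc_id = tool_message.artifact["document_id"]  # "doc_123"
```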
```python
from langchain.messages import HumanMessage

# String content
human_message = HumanMessage("Hello, how are you?")

# Provider-native format (e.g., OpenAI)
human_message = HumanMessage(content=[
    {"type": "text", "text": "Hello, how are you?"},
    {"type": "image_url", "image_url": {"url": "https://example.com/image.jpg"}},
])

# List of standard content blocks
human_message = HumanMessage(content_blocks=[
    {"type": "text", "text": "Hello, how are you?"},
    {"type": "image", "url": "https://example.com/image.jpg"},
])
```
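A minimal sketch of sending the multimodal message, assuming the underlying model accepts image inputs:

```python
response = model.invoke([human_message])

# Standardized view of the reply, independent of the provider-native format
for block in response.content_blocks:
    if block["type"] == "text":
        print(block["text"])
```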