# Querying chat models with Together AI
from langchain_together import ChatTogether

# Choose from the 50+ hosted models listed at:
# https://docs.together.ai/docs/inference-models
chat = ChatTogether(
    # together_api_key="YOUR_API_KEY",  # or set the TOGETHER_API_KEY env var
    model="meta-llama/Llama-3-70b-chat-hf",
)

# Stream the response back from the model chunk-by-chunk;
# flush so partial output appears immediately.
for m in chat.stream("Tell me fun things to do in NYC"):
    print(m.content, end="", flush=True)

# If you don't want streaming, use the blocking invoke method instead:
# chat.invoke("Tell me fun things to do in NYC")
Copy
Ask AI
# Querying code and language models with Together AI
from langchain_together import Together

llm = Together(
    model="codellama/CodeLlama-70b-Python-hf",
    # together_api_key="...",  # or set the TOGETHER_API_KEY env var
)

# Blocking completion call: send the prompt and print the full response.
print(llm.invoke("def bubble_sort(): "))