import os

os.environ["YI_API_KEY"] = "YOUR_API_KEY"

from langchain_community.llms import YiLLM

# Load the model
llm = YiLLM(model="yi-large")

# You can specify the region if needed (default is "auto")
# llm = YiLLM(model="yi-large", region="domestic")  # or "international"

# Basic usage
res = llm.invoke("What's your name?")
print(res)
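Because YiLLM is exposed as a standard LangChain LLM, it should also compose with prompt templates through the Runnable (LCEL) interface. The sketch below is illustrative; the template text and input keys are assumptions, not part of the Yi integration itself.

from langchain_core.prompts import PromptTemplate

# Build a simple prompt -> model chain; the dict key matches the template variable.
prompt = PromptTemplate.from_template("Summarize {topic} in two sentences.")
chain = prompt | llm

print(chain.invoke({"topic": "the Yi model family"}))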
# Generate method
res = llm.generate(
    prompts=[
        "Explain the concept of large language models.",
        "What are the potential applications of AI in healthcare?",
    ]
)
print(res)
# Streaming
for chunk in llm.stream("Describe the key features of the Yi language model series."):
    print(chunk, end="", flush=True)
# Asynchronous streaming
import asyncio


async def run_aio_stream():
    async for chunk in llm.astream(
        "Write a brief on the future of AI according to Dr. Kai-Fu Lee's vision."
    ):
        print(chunk, end="", flush=True)


asyncio.run(run_aio_stream())
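The Runnable interface also provides a plain (non-streaming) asynchronous call via ainvoke. A minimal sketch follows; the prompt text is illustrative.

import asyncio


async def run_aio_invoke():
    # Await the full completion instead of iterating over chunks.
    res = await llm.ainvoke("Summarize the Yi model family in one sentence.")
    print(res)


asyncio.run(run_aio_invoke())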
# Adjusting parameters
llm_with_params = YiLLM(
    model="yi-large",
    temperature=0.7,
    top_p=0.9,
)
res = llm_with_params.invoke(
    "Propose an innovative AI application that could benefit society."
)
print(res)
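Since the configured model object is a Runnable, several prompts can also be run in one call with batch, which returns a list of completions in the same order. A minimal sketch, with illustrative prompt text:

# Run several prompts in one call (Runnable.batch); returns a list of strings.
prompts = [
    "Name one benefit of open-weight language models.",
    "Name one risk of large language models.",
]
for out in llm_with_params.batch(prompts):
    print(out)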