In some cases, you need to access the current run (span) inside a traced function. This is useful for extracting the run's UUID, tags, or other information from the current run. You can access the current run by calling the get_current_run_tree / getCurrentRunTree function in the Python or TypeScript SDK, respectively. For a full list of the attributes available on the RunTree object, see this reference.
from langsmith import traceable
from langsmith.run_helpers import get_current_run_tree
from openai import Client

openai = Client()

@traceable
def format_prompt(subject):
    # Access the current run (span) for this traced function.
    run = get_current_run_tree()
    print(f"format_prompt Run Id: {run.id}")
    print(f"format_prompt Trace Id: {run.trace_id}")
    print(f"format_prompt Parent Run Id: {run.parent_run.id}")
    return [
        {
            "role": "system",
            "content": "You are a helpful assistant.",
        },
        {
            "role": "user",
            "content": f"What's a good name for a store that sells {subject}?"
        }
    ]

@traceable(run_type="llm")
def invoke_llm(messages):
    run = get_current_run_tree()
    print(f"invoke_llm Run Id: {run.id}")
    print(f"invoke_llm Trace Id: {run.trace_id}")
    print(f"invoke_llm Parent Run Id: {run.parent_run.id}")
    return openai.chat.completions.create(
        messages=messages, model="gpt-4o-mini", temperature=0
    )

@traceable
def parse_output(response):
    run = get_current_run_tree()
    print(f"parse_output Run Id: {run.id}")
    print(f"parse_output Trace Id: {run.trace_id}")
    print(f"parse_output Parent Run Id: {run.parent_run.id}")
    return response.choices[0].message.content

@traceable
def run_pipeline():
    # Top-level run: it has no parent run, so only the run and trace ids are printed.
    run = get_current_run_tree()
    print(f"run_pipeline Run Id: {run.id}")
    print(f"run_pipeline Trace Id: {run.trace_id}")
    messages = format_prompt("colorful socks")
    response = invoke_llm(messages)
    return parse_output(response)

run_pipeline()
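
Beyond the run and trace IDs printed above, the same RunTree object exposes the other attributes mentioned earlier, such as tags and metadata. The following is a minimal sketch of reading them; it assumes the tags and metadata arguments to @traceable and the tags / extra attributes on RunTree, so check the RunTree reference for the exact attribute names in your SDK version.

from langsmith import traceable
from langsmith.run_helpers import get_current_run_tree

# Hypothetical step: tags and metadata set here are expected to surface on the RunTree.
@traceable(tags=["example"], metadata={"variant": "A"})
def tagged_step(text: str) -> str:
    run = get_current_run_tree()
    if run is not None:  # returns None when called outside a traced context
        print(f"Run name: {run.name}")
        print(f"Run tags: {run.tags}")
        # Metadata is typically nested under the run's `extra` dict (assumption).
        print(f"Run metadata: {run.extra.get('metadata')}")
    return text.upper()

tagged_step("hello")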
