# examples/langchain_lcel_pipeline.py (58 lines, 1.41 KB)
"""
LangChain LCEL Pipeline Example (Python)
Demonstrates:
- Prompt -> cascadeflow -> parser composition (LCEL-style)
- LangSmith tags/metadata passed through cascadeflow nested runs
Setup:
export OPENAI_API_KEY="sk-..."
pip install -U langchain-core langchain-openai
python examples/langchain_lcel_pipeline.py
"""
import os
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from cascadeflow.langchain import CascadeFlow
def main() -> None:
    """Compose a prompt -> CascadeFlow -> parser chain and print the answer.

    Demonstrates LCEL-style composition with LangSmith tags/metadata
    forwarded through the cascade's nested runs via ``config``.
    """
    # Fail fast when credentials are missing — both OpenAI models need the key.
    if not os.environ.get("OPENAI_API_KEY"):
        raise SystemExit("Set OPENAI_API_KEY first.")

    # A cheap model drafts; a stronger model verifies when quality is low.
    draft_llm = ChatOpenAI(model="gpt-4o-mini", temperature=0.2)
    verify_llm = ChatOpenAI(model="gpt-4o", temperature=0.2)
    cascade = CascadeFlow(
        drafter=draft_llm,
        verifier=verify_llm,
        quality_threshold=0.7,
        cost_tracking_provider="langsmith",
    )

    message_spec = [
        ("system", "You are a concise engineer."),
        ("human", "{question}"),
    ]
    prompt = ChatPromptTemplate.from_messages(message_spec)

    # LCEL pipe: template -> cascading model -> plain-string output parser.
    pipeline = prompt | cascade | StrOutputParser()

    # Tags/metadata in `config` propagate to every nested run for tracing.
    run_config = {
        "tags": ["example", "lcel"],
        "metadata": {"example": "lcel-pipeline"},
    }
    answer = pipeline.invoke(
        {"question": "List 3 pitfalls when designing agent tool loops."},
        config=run_config,
    )
    print(answer)
# Script entry point: run the example only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()