Commit 451f266

Amna Mubashar (Amnah199) authored

Replaces occurrences of dynamic prompt builders (#250)
Co-authored-by: Amna Mubashar <amna.mubashar@Amnas-MBP.fritz.box>

1 parent e3a4916 · commit 451f266

5 files changed: +14 -13 lines
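
Every file in this commit gets the same migration: the import and component swap from DynamicChatPromptBuilder to ChatPromptBuilder, the `prompt_source` run argument becomes `template`, and the `runtime_variables` init argument is dropped because ChatPromptBuilder infers its variables from the template it receives. A minimal sketch of the new component in isolation (the `location` variable is borrowed from the langfuse example below):

```python
from haystack.components.builders import ChatPromptBuilder
from haystack.dataclasses import ChatMessage

builder = ChatPromptBuilder()

# The template is a list of ChatMessage objects with Jinja2 placeholders.
messages = [ChatMessage.from_user("Tell me about {{location}}")]

# `template` replaces the old `prompt_source` argument; `template_variables`
# fills the placeholders at render time.
result = builder.run(template=messages, template_variables={"location": "Berlin"})
print(result["prompt"])  # the rendered list of ChatMessage, exposed on the `prompt` output socket
```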

integrations/anthropic.md

Lines changed: 4 additions & 4 deletions
@@ -58,7 +58,7 @@ Below is an example RAG Pipeline where we answer a predefined question using the

 ```python
 from haystack import Pipeline
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
 from haystack.components.converters import HTMLToDocument
 from haystack.components.fetchers import LinkContentFetcher
 from haystack.components.generators.utils import print_streaming_chunk
@@ -75,7 +75,7 @@ messages = [
 rag_pipeline = Pipeline()
 rag_pipeline.add_component("fetcher", LinkContentFetcher())
 rag_pipeline.add_component("converter", HTMLToDocument())
-rag_pipeline.add_component("prompt_builder", DynamicChatPromptBuilder(runtime_variables=["documents"]))
+rag_pipeline.add_component("prompt_builder", ChatPromptBuilder())
 rag_pipeline.add_component(
     "llm",
     AnthropicChatGenerator(
@@ -88,13 +88,13 @@ rag_pipeline.add_component(

 rag_pipeline.connect("fetcher", "converter")
 rag_pipeline.connect("converter", "prompt_builder")
-rag_pipeline.connect("prompt_builder", "llm")
+rag_pipeline.connect("prompt_builder.prompt", "llm.messages")

 question = "What are the best practices in prompt engineering?"
 rag_pipeline.run(
     data={
         "fetcher": {"urls": ["https://docs.anthropic.com/claude/docs/prompt-engineering"]},
-        "prompt_builder": {"template_variables": {"query": question}, "prompt_source": messages},
+        "prompt_builder": {"template_variables": {"query": question}, "template": messages},
     }
 )
 ```
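
The one change here that is more than a rename is the last connection: ChatPromptBuilder publishes its rendered messages on a `prompt` output socket, so the edge to the generator is now named explicitly on both ends. A small sketch of what travels over that edge, assuming an illustrative one-message template:

```python
from haystack.components.builders import ChatPromptBuilder
from haystack.dataclasses import ChatMessage

builder = ChatPromptBuilder()
out = builder.run(
    template=[ChatMessage.from_user("{{query}}")],
    template_variables={"query": "What are the best practices in prompt engineering?"},
)

# `prompt` is the output socket named in
# rag_pipeline.connect("prompt_builder.prompt", "llm.messages"):
print(out["prompt"])  # a list of ChatMessage, exactly what llm.messages receives
```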

integrations/cohere.md

Lines changed: 2 additions & 2 deletions
@@ -117,13 +117,13 @@ Similar to the above example, you can also use [`CohereChatGenerator`](https://d

 ```python
 from haystack import Pipeline
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
 from haystack.dataclasses import ChatMessage
 from haystack_integrations.components.generators.cohere.chat import CohereChatGenerator


 pipe = Pipeline()
-pipe.add_component("prompt_builder", DynamicChatPromptBuilder())
+pipe.add_component("prompt_builder", ChatPromptBuilder())
 pipe.add_component("llm", CohereChatGenerator())
 pipe.connect("prompt_builder", "llm")

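The Cohere snippet stops at the wiring, so as a hedged continuation: invoking the updated pipeline follows the same `template`/`template_variables` pattern as the other files in this commit. The question text is illustrative, and a valid COHERE_API_KEY is assumed:

```python
# Continues the pipeline built in the diff above.
messages = [ChatMessage.from_user("Answer briefly: {{question}}")]

result = pipe.run(
    data={"prompt_builder": {"template_variables": {"question": "What is RAG?"}, "template": messages}}
)
print(result["llm"]["replies"][0])
```
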
integrations/context-ai.md

Lines changed: 2 additions & 2 deletions
@@ -59,7 +59,7 @@ import uuid
 import os

 from haystack.components.generators.chat import OpenAIChatGenerator
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
 from haystack import Pipeline
 from haystack.dataclasses import ChatMessage

@@ -70,7 +70,7 @@ model = "gpt-3.5-turbo"
 os.environ["GETCONTEXT_TOKEN"] = "GETCONTEXT_TOKEN"
 os.environ["OPENAI_API_KEY"] = "OPENAI_API_KEY"

-prompt_builder = DynamicChatPromptBuilder()
+prompt_builder = ChatPromptBuilder()
 llm = OpenAIChatGenerator(model=model)
 prompt_analytics = ContextAIAnalytics()
 assistant_analytics = ContextAIAnalytics()

integrations/langfuse.md

Lines changed: 3 additions & 3 deletions
@@ -132,14 +132,14 @@ Once you've run these code samples, you can also [use the Langfuse dashboard to

 ```python
 from haystack import Pipeline
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
 from haystack.components.generators.chat import OpenAIChatGenerator
 from haystack.dataclasses import ChatMessage
 from haystack_integrations.components.connectors.langfuse import LangfuseConnector

 pipe = Pipeline()
 pipe.add_component("tracer", LangfuseConnector("Chat example"))
-pipe.add_component("prompt_builder", DynamicChatPromptBuilder())
+pipe.add_component("prompt_builder", ChatPromptBuilder())
 pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))

 pipe.connect("prompt_builder.prompt", "llm.messages")
@@ -149,7 +149,7 @@ messages = [
 ]

 response = pipe.run(
-    data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "prompt_source": messages}}
+    data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "template": messages}}
 )
 print(response["llm"]["replies"][0])
 print(response["tracer"]["trace_url"])

integrations/mistral.md

Lines changed: 3 additions & 2 deletions
@@ -125,6 +125,7 @@ from haystack.document_stores.in_memory import InMemoryDocumentStore
 from haystack_integrations.components.embedders.mistral.document_embedder import MistralDocumentEmbedder
 from haystack_integrations.components.embedders.mistral.text_embedder import MistralTextEmbedder
 from haystack.components.retrievers.in_memory import InMemoryEmbeddingRetriever
+from haystack.components.builders import ChatPromptBuilder

 os.environ["MISTRAL_API_KEY"] = "YOUR_MISTRAL_API_KEY"

@@ -140,7 +141,7 @@ document_store.write_documents(documents)

 text_embedder = MistralTextEmbedder()
 retriever = InMemoryEmbeddingRetriever(document_store=document_store)
-prompt_builder = DynamicChatPromptBuilder(runtime_variables=["documents"])
+prompt_builder = ChatPromptBuilder()
 llm = MistralChatGenerator(streaming_callback=print_streaming_chunk)

 messages = [ChatMessage.from_user("Here are some the documents: {{documents}} \\n Answer: {{query}}")]
@@ -161,7 +162,7 @@ question = "Who lives in Berlin?"
 result = rag_pipeline.run(
     {
         "text_embedder": {"text": question},
-        "prompt_builder": {"template_variables": {"query": question}, "prompt_source": messages},
+        "prompt_builder": {"template_variables": {"query": question}, "template": messages},
         "llm": {"generation_kwargs": {"max_tokens": 165}},
     }
 )
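
The Mistral example previously needed `runtime_variables=["documents"]` so the builder would accept documents from the retriever; ChatPromptBuilder instead derives its inputs from the template, so the `{{documents}}` placeholder alone is enough. A standalone sketch of that behaviour, with a made-up document for illustration:

```python
from haystack import Document
from haystack.components.builders import ChatPromptBuilder
from haystack.dataclasses import ChatMessage

builder = ChatPromptBuilder()
template = [ChatMessage.from_user("Here are the documents: {{documents}} \n Answer: {{query}}")]

# Extra keyword arguments are merged into the template variables, so
# `documents` needs no prior declaration -- the placeholder suffices.
result = builder.run(
    template=template,
    template_variables={"query": "Who lives in Berlin?"},
    documents=[Document(content="My name is Jean and I live in Paris.")],
)
print(result["prompt"])
```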
