# Haystack
## 1.1 Usage
### Method 1: OpenAIChatGenerator (Recommended)
# Chat-style generation against the OpenAI-compatible endpoint.
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage
from haystack.utils import Secret

# Point the generator at the custom base URL; the key is wrapped in a
# Secret so it is never serialized in plain text.
chat_generator = OpenAIChatGenerator(
    model="minimax/minimax-m2.5",
    api_key=Secret.from_token("your-hpc-ai-api-key"),
    api_base_url="https://api.hpc-ai.com/inference/v1",
)

user_message = ChatMessage.from_user("Hello!")
response = chat_generator.run(messages=[user_message])

# `replies` holds the generated ChatMessage objects.
print(response["replies"])
### Method 2: OpenAIGenerator
# Plain prompt-string generation (non-chat) against the same endpoint.
from haystack.components.generators import OpenAIGenerator
from haystack.utils import Secret

llm = OpenAIGenerator(
    model="minimax/minimax-m2.5",
    api_key=Secret.from_token("your-hpc-ai-api-key"),
    api_base_url="https://api.hpc-ai.com/inference/v1",
)

# `run` takes a raw prompt string; `replies` holds the generated text.
result = llm.run(prompt="Hello, how are you?")
print(result["replies"])
### Method 3: Full RAG Pipeline Example
# Full RAG pipeline: BM25 retrieval -> prompt templating -> generation.
#
# NOTE: PromptBuilder emits a plain `prompt` string, so the generator must
# be the prompt-based OpenAIGenerator. OpenAIChatGenerator expects a
# `messages` list of ChatMessage objects, and `pipe.connect("prompt_builder",
# "llm")` cannot match its sockets — the pipeline would fail to build.
from haystack import Pipeline
from haystack.utils import Secret
from haystack.components.retrievers.in_memory import InMemoryBM25Retriever
from haystack.components.builders.prompt_builder import PromptBuilder
from haystack.components.generators import OpenAIGenerator
from haystack.document_stores.in_memory import InMemoryDocumentStore
from haystack import Document

# Initialize the document store and index a sample document.
docstore = InMemoryDocumentStore()
docstore.write_documents([Document(content="Your document content here")])

# Jinja2 prompt template: retrieved documents are rendered as context.
template = """
Given the following information, answer the question.
Context:
{% for document in documents %}
{{ document.content }}
{% endfor %}
Question: {{ query }}?
"""

# Create pipeline: retriever feeds documents into the prompt builder,
# whose rendered prompt feeds the LLM.
pipe = Pipeline()
pipe.add_component("retriever", InMemoryBM25Retriever(document_store=docstore))
pipe.add_component("prompt_builder", PromptBuilder(template=template))
pipe.add_component("llm", OpenAIGenerator(
    api_key=Secret.from_token("your-hpc-ai-api-key"),
    model="minimax/minimax-m2.5",
    api_base_url="https://api.hpc-ai.com/inference/v1"
))
pipe.connect("retriever", "prompt_builder.documents")
pipe.connect("prompt_builder", "llm")

# Run the query: the query string goes to both the retriever (for BM25
# matching) and the prompt builder (for the template's {{ query }}).
result = pipe.run({
    "prompt_builder": {"query": "Your question?"},
    "retriever": {"query": "Your question?"}
})
### Method 4: Embedding Model
# Text embedding via the OpenAI-compatible /embeddings endpoint.
from haystack.components.embedders import OpenAITextEmbedder
from haystack.utils import Secret

embedder = OpenAITextEmbedder(
    api_key=Secret.from_token("your-hpc-ai-api-key"),
    # Use an embedding model id here. "minimax/minimax-m2.5" (as used in
    # the generator examples above) is a chat/LLM model and cannot serve
    # the embeddings endpoint — substitute the provider's embedding model.
    model="your-embedding-model-id",
    api_base_url="https://api.hpc-ai.com/inference/v1"
)
# `result` contains the embedding vector under the "embedding" key.
result = embedder.run(text="Your text to embed")