from raglight.rag.simple_rag_api import RAGPipeline
from raglight.config.settings import Settings
from raglight.config.rag_config import RAGConfig
from raglight.models.data_source_model import FolderSource
def main() -> None:
    """Build a local RAG pipeline over ./data and answer one question.

    Uses Ollama with the ``llama3`` model by default; switch the provider/llm
    in the RAGConfig below to use OpenAI/Mistral instead. Building the
    pipeline ingests the documents under ./data and creates a local ChromaDB.
    """
    # 1. Initialize Settings — configures logging for the raglight package.
    Settings.setup_logging()

    # 2. Configure the pipeline.
    config = RAGConfig(
        provider=Settings.OLLAMA,
        llm="llama3",
        knowledge_base=[FolderSource(path="./data")],
    )

    # 3. Build the pipeline (document ingestion + local ChromaDB creation —
    # this is the slow, side-effecting step).
    pipeline = RAGPipeline(config)
    pipeline.build()

    # 4. Ask a question against the indexed documents.
    response = pipeline.generate("What are the key takeaways from these documents?")
    print("\n🤖 Answer:", response)


# Guard the entry point so importing this module does not trigger ingestion,
# DB creation, or an LLM call as a side effect.
if __name__ == "__main__":
    main()