# Legacy Haystack 1.x import path was `haystack.document_stores.PineconeDocumentStore`;
# the 2.x integration package below replaces it.
from haystack_integrations.document_stores.pinecone import PineconeDocumentStore

from haystack.nodes import DenseRetriever, RAGenerator
from haystack.pipelines import GenerativeQAPipeline
from haystack.nodes import OpenAI

from dotenv import load_dotenv
import os
load_dotenv()


# Vector store backing the retriever.
# NOTE(review): the import above is the Haystack 2.x Pinecone integration, but
# these keyword arguments (environment/index/embedding_dim/similarity) follow
# the 1.x constructor — confirm against the installed haystack version.
_store_config = {
    "api_key": os.getenv('PINECONE_API_KEY'),
    "environment": os.getenv('PINECONE_ENVIRONMENT'),
    "index": 'user-index-svetoslav',
    "embedding_dim": 1024,      # must agree with the retriever's embedding size
    "similarity": "cosine",
}
document_store = PineconeDocumentStore(**_store_config)

# Dense retriever over the Pinecone store; presumably "multilingual-e5-large"
# emits 1024-dim vectors to match the store's embedding_dim — verify.
retriever = DenseRetriever(
    document_store=document_store,
    embedding_model="multilingual-e5-large",
)
# Seq2seq RAG generator. NOTE(review): nothing in this file uses it — the
# pipeline below is built with the OpenAI LLM instead; confirm it is needed,
# otherwise this triggers a pointless model download at import time.
generator = RAGenerator(model_name_or_path="facebook/rag-token-nq")

# LLM that produces the final answer text.
# NOTE(review): `haystack.nodes` does not export an `OpenAI` class in 1.x
# (that era used PromptNode / OpenAIAnswerGenerator) — confirm this import
# actually resolves with the installed haystack version.
llm = OpenAI(
    model_name="gpt-4o",
    api_key=os.getenv('OPENAI_API_KEY'),
    temperature=0.7,   # mildly creative sampling
    max_tokens=500,    # cap on generated answer length
)

# Retrieval-augmented QA pipeline: retriever fetches context, llm answers.
pipeline = GenerativeQAPipeline(generator=llm, retriever=retriever)

def answer_question(query, top_k_retriever=10, top_k_generator=5):
    """Run *query* through the retrieval-augmented QA pipeline.

    Args:
        query: Natural-language question to answer.
        top_k_retriever: Number of documents the retriever fetches.
        top_k_generator: Number of candidate answers the generator returns.

    Returns:
        The raw pipeline prediction dict (includes an "answers" list).
    """
    node_params = {
        "Retriever": {"top_k": top_k_retriever},
        "Generator": {"top_k": top_k_generator},
    }
    return pipeline.run(query=query, params=node_params)

if __name__ == "__main__":
    # Fixed typo/grammar in the query ("What was last job possition").
    user_query = "What was the last job position of Svetoslav Trushev?"
    result = answer_question(user_query)
    # Guard against an empty answers list — indexing [0] unconditionally
    # would raise IndexError when the pipeline finds nothing.
    answers = result.get("answers") or []
    if answers:
        print("Answer:", answers[0].answer)
    else:
        print("No answer returned for query:", user_query)