diff --git a/cookbook/rag_with_faiss.ipynb b/cookbook/rag_with_faiss.ipynb
index ef48b0e558710..a066129c878d6 100644
--- a/cookbook/rag_with_faiss.ipynb
+++ b/cookbook/rag_with_faiss.ipynb
@@ -89,7 +89,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "#Import all necessary libraries\n",
+    "# Import all necessary libraries\n",
     "import requests\n",
     "from langchain_openai import ChatOpenAI\n",
     "from langchain_community.vectorstores import FAISS\n",
@@ -123,7 +123,7 @@
     "import os\n",
     "\n",
     "if not os.environ.get(\"OPENAI_API_KEY\"):\n",
-    "    os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"Enter API key for OpenAI: \")\n"
+    "    os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"Enter API key for OpenAI: \")\n"
    ]
   },
   {
@@ -158,7 +158,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "#Write the helper functions\n",
+    "# Write the helper functions\n",
     "def process_documents(file_paths):\n",
     "    print(file_paths)\n",
     "    \"\"\"Processes and updates the vector store with new documents.\"\"\"\n",
@@ -262,7 +262,7 @@
     }
    ],
    "source": [
-    "#Create Vector embeddings\n",
+    "# Create Vector embeddings\n",
     "process_documents(filepaths)"
    ]
   },
@@ -282,8 +282,8 @@
     }
    ],
    "source": [
-    "qa_chain = get_qa_chain()\n",
-    "# "
+    "# Build QA Chain\n",
+    "qa_chain = get_qa_chain()"
    ]
   },
   {
@@ -301,7 +301,7 @@
     }
    ],
    "source": [
-    "#query the documents\n",
+    "# query the documents\n",
     "question = 'Which company is performing better in between Nike and Puma'\n",
     "response = qa_chain.invoke(question)\n",
     "print(response)"