Langchain - Question and Answer

I’m replicating the langchain courses on my computer - when i run this index = VectorstoreIndexCreator(
vectorstore_cls=DocArrayInMemorySearch
).from_loaders([loader]) i get this ---------------------------------------------------------------------------
ValidationError Traceback (most recent call last)
Cell In[10], line 1
----> 1 index = VectorstoreIndexCreator(
2 vectorstore_cls=DocArrayInMemorySearch
3 ).from_loaders([loader])

File ~/anaconda3/envs/chatenv/lib/python3.11/site-packages/pydantic/main.py:339, in pydantic.main.BaseModel.init()

File ~/anaconda3/envs/chatenv/lib/python3.11/site-packages/pydantic/main.py:1066, in pydantic.main.validate_model()

File ~/anaconda3/envs/chatenv/lib/python3.11/site-packages/pydantic/fields.py:439, in pydantic.fields.ModelField.get_default()

File ~/anaconda3/envs/chatenv/lib/python3.11/site-packages/pydantic/main.py:341, in pydantic.main.BaseModel.init()

ValidationError: 1 validation error for OpenAIEmbeddings
root
Did not find openai_api_key, please add an environment variable OPENAI_API_KEY which contains it, or pass openai_api_key as a named parameter. (type=value_error)
But all my previous notebooks run OK — I get the problem only with this code.

Hi!

OpenAI (the company) runs the API that grants access to the GPT models.
The error is saying that you need to get an API key from OpenAI and then set an environment variable to the value of that key.

This can be done in this way:

import os
os.environ["OPENAI_API_KEY"] = "Your API key"

One thing to remember is that using the API costs money; you can find GPT pricing on OpenAI's website:
OpenAI Platform – Pricing

Hope this helps!

Sam

In your previous notebooks, did you use the direct string assignment method key="my_openai_key" and then call OpenAI functions directly? That's how I did it in the prompt engineering course and it worked fine. It doesn't work with LangChain, however. My experience was that you have to use the set-environment-variable approach. HTH

I’m facing a similar issue with the langchain short course:

from langchain.embeddings import OpenAIEmbeddings
embeddings = OpenAIEmbeddings()

gives an error below


TypeError Traceback (most recent call last)
Input In [1], in
----> 1 from langchain.embeddings import OpenAIEmbeddings
2 embeddings = OpenAIEmbeddings()

File /opt/conda/lib/python3.8/site-packages/langchain/init.py:6, in
3 from importlib import metadata
4 from typing import Optional
----> 6 from langchain.agents import MRKLChain, ReActChain, SelfAskWithSearchChain
7 from langchain.cache import BaseCache
8 from langchain.chains import (
9 ConversationChain,
10 LLMBashChain,
(…)
18 VectorDBQAWithSourcesChain,
19 )

File /opt/conda/lib/python3.8/site-packages/langchain/agents/init.py:2, in
1 “”“Interface for agents.”“”
----> 2 from langchain.agents.agent import (
3 Agent,
4 AgentExecutor,
5 AgentOutputParser,
6 BaseMultiActionAgent,
7 BaseSingleActionAgent,
8 LLMSingleActionAgent,
9 )
10 from langchain.agents.agent_toolkits import (
11 create_csv_agent,
12 create_json_agent,
(…)
21 create_vectorstore_router_agent,
22 )
23 from langchain.agents.agent_types import AgentType

File /opt/conda/lib/python3.8/site-packages/langchain/agents/agent.py:16, in
13 from pydantic import BaseModel, root_validator
15 from langchain.agents.agent_types import AgentType
----> 16 from langchain.agents.tools import InvalidTool
17 from langchain.base_language import BaseLanguageModel
18 from langchain.callbacks.base import BaseCallbackManager

File /opt/conda/lib/python3.8/site-packages/langchain/agents/tools.py:8, in
2 from typing import Optional
4 from langchain.callbacks.manager import (
5 AsyncCallbackManagerForToolRun,
6 CallbackManagerForToolRun,
7 )
----> 8 from langchain.tools.base import BaseTool, Tool, tool
11 class InvalidTool(BaseTool):
12 “”“Tool that is run when invalid tool name is encountered by agent.”“”

File /opt/conda/lib/python3.8/site-packages/langchain/tools/init.py:3, in
1 “”“Core toolkit implementations.”“”
----> 3 from langchain.tools.arxiv.tool import ArxivQueryRun
4 from langchain.tools.azure_cognitive_services import (
5 AzureCogsFormRecognizerTool,
6 AzureCogsImageAnalysisTool,
7 AzureCogsSpeech2TextTool,
8 AzureCogsText2SpeechTool,
9 )
10 from langchain.tools.base import BaseTool, StructuredTool, Tool, tool

File /opt/conda/lib/python3.8/site-packages/langchain/tools/arxiv/tool.py:12, in
7 from langchain.callbacks.manager import (
8 AsyncCallbackManagerForToolRun,
9 CallbackManagerForToolRun,
10 )
11 from langchain.tools.base import BaseTool
----> 12 from langchain.utilities.arxiv import ArxivAPIWrapper
15 class ArxivQueryRun(BaseTool):
16 “”“Tool that adds the capability to search using the Arxiv API.”“”

File /opt/conda/lib/python3.8/site-packages/langchain/utilities/init.py:3, in
1 “”“General utilities.”“”
2 from langchain.requests import TextRequestsWrapper
----> 3 from langchain.utilities.apify import ApifyWrapper
4 from langchain.utilities.arxiv import ArxivAPIWrapper
5 from langchain.utilities.awslambda import LambdaWrapper

File /opt/conda/lib/python3.8/site-packages/langchain/utilities/apify.py:5, in
1 from typing import Any, Callable, Dict, Optional
3 from pydantic import BaseModel, root_validator
----> 5 from langchain.document_loaders import ApifyDatasetLoader
6 from langchain.document_loaders.base import Document
7 from langchain.utils import get_from_dict_or_env

File /opt/conda/lib/python3.8/site-packages/langchain/document_loaders/init.py:42, in
37 from langchain.document_loaders.duckdb_loader import DuckDBLoader
38 from langchain.document_loaders.email import (
39 OutlookMessageLoader,
40 UnstructuredEmailLoader,
41 )
----> 42 from langchain.document_loaders.embaas import EmbaasBlobLoader, EmbaasLoader
43 from langchain.document_loaders.epub import UnstructuredEPubLoader
44 from langchain.document_loaders.evernote import EverNoteLoader

File /opt/conda/lib/python3.8/site-packages/langchain/document_loaders/embaas.py:54, in
50 bytes: str
51 “”“The base64 encoded bytes of the document to extract text from.”“”
—> 54 class BaseEmbaasLoader(BaseModel):
55 “”“Base class for embedding a model into an Embaas document extraction API.”“”
57 embaas_api_key: Optional[str] = None

File /opt/conda/lib/python3.8/site-packages/pydantic/main.py:299, in pydantic.main.ModelMetaclass.new()

File /opt/conda/lib/python3.8/site-packages/pydantic/fields.py:411, in pydantic.fields.ModelField.infer()

File /opt/conda/lib/python3.8/site-packages/pydantic/fields.py:342, in pydantic.fields.ModelField.init()

File /opt/conda/lib/python3.8/site-packages/pydantic/fields.py:456, in pydantic.fields.ModelField.prepare()

File /opt/conda/lib/python3.8/site-packages/pydantic/fields.py:670, in pydantic.fields.ModelField.populate_validators()

File /opt/conda/lib/python3.8/site-packages/pydantic/validators.py:688, in find_validators()

File /opt/conda/lib/python3.8/site-packages/pydantic/validators.py:582, in pydantic.validators.make_typeddict_validator()

File /opt/conda/lib/python3.8/site-packages/pydantic/annotated_types.py:35, in pydantic.annotated_types.create_model_from_typeddict()

File /opt/conda/lib/python3.8/site-packages/pydantic/main.py:990, in pydantic.main.create_model()

File /opt/conda/lib/python3.8/site-packages/pydantic/main.py:299, in pydantic.main.ModelMetaclass.new()

File /opt/conda/lib/python3.8/site-packages/pydantic/fields.py:411, in pydantic.fields.ModelField.infer()

File /opt/conda/lib/python3.8/site-packages/pydantic/fields.py:342, in pydantic.fields.ModelField.init()

File /opt/conda/lib/python3.8/site-packages/pydantic/fields.py:451, in pydantic.fields.ModelField.prepare()

File /opt/conda/lib/python3.8/site-packages/pydantic/fields.py:550, in pydantic.fields.ModelField._type_analysis()

File /opt/conda/lib/python3.8/typing.py:774, in _GenericAlias.subclasscheck(self, cls)
772 if self._special:
773 if not isinstance(cls, _GenericAlias):
--> 774 return issubclass(cls, self.origin)
775 if cls._special:
776 return issubclass(cls.origin, self.origin)

TypeError: issubclass() arg 1 must be a class

1 Like