I get an error when running
customer_response = chat(customer_messages)
I'm not sure what is wrong — everything up to this point worked fine.
Response: Retrying langchain.chat_models.openai.ChatOpenAI.completion_with_retry.<locals>._completion_with_retry in 1.0 seconds as it raised APIConnectionError: Error communicating with OpenAI: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')).
AuthenticationError Traceback (most recent call last)
Cell In[69], line 2
1 # Call the LLM to translate to the style of the customer message
----> 2 customer_response = chat(customer_messages)
File ~\AppData\Roaming\Python\Python311\site-packages\langchain\chat_models\base.py:183, in BaseChatModel.__call__(self, messages, stop, callbacks)
177 def __call__(
178 self,
179 messages: List[BaseMessage],
180 stop: Optional[List[str]] = None,
181 callbacks: Callbacks = None,
182 ) -> BaseMessage:
--> 183 generation = self.generate(
184 [messages], stop=stop, callbacks=callbacks
185 ).generations[0][0]
186 if isinstance(generation, ChatGeneration):
187 return generation.message
File ~\AppData\Roaming\Python\Python311\site-packages\langchain\chat_models\base.py:92, in BaseChatModel.generate(self, messages, stop, callbacks)
90 except (KeyboardInterrupt, Exception) as e:
91 run_manager.on_llm_error(e)
---> 92 raise e
93 llm_output = self._combine_llm_outputs([res.llm_output for res in results])
94 generations = [res.generations for res in results]
File ~\AppData\Roaming\Python\Python311\site-packages\langchain\chat_models\base.py:84, in BaseChatModel.generate(self, messages, stop, callbacks)
80 new_arg_supported = inspect.signature(self._generate).parameters.get(
81 "run_manager"
82 )
83 try:
---> 84 results = [
85 self._generate(m, stop=stop, run_manager=run_manager)
86 if new_arg_supported
87 else self._generate(m, stop=stop)
88 for m in messages
89 ]
90 except (KeyboardInterrupt, Exception) as e:
91 run_manager.on_llm_error(e)
File ~\AppData\Roaming\Python\Python311\site-packages\langchain\chat_models\base.py:85, in <listcomp>(.0)
80 new_arg_supported = inspect.signature(self._generate).parameters.get(
81 "run_manager"
82 )
83 try:
84 results = [
---> 85 self._generate(m, stop=stop, run_manager=run_manager)
86 if new_arg_supported
87 else self._generate(m, stop=stop)
88 for m in messages
89 ]
90 except (KeyboardInterrupt, Exception) as e:
91 run_manager.on_llm_error(e)
File ~\AppData\Roaming\Python\Python311\site-packages\langchain\chat_models\openai.py:323, in ChatOpenAI._generate(self, messages, stop, run_manager)
319 message = _convert_dict_to_message(
320 {"content": inner_completion, "role": role}
321 )
322 return ChatResult(generations=[ChatGeneration(message=message)])
--> 323 response = self.completion_with_retry(messages=message_dicts, **params)
324 return self._create_chat_result(response)
File ~\AppData\Roaming\Python\Python311\site-packages\langchain\chat_models\openai.py:284, in ChatOpenAI.completion_with_retry(self, **kwargs)
280 @retry_decorator
281 def _completion_with_retry(**kwargs: Any) -> Any:
282 return self.client.create(**kwargs)
--> 284 return _completion_with_retry(**kwargs)
File ~\AppData\Roaming\Python\Python311\site-packages\tenacity\__init__.py:289, in BaseRetrying.wraps.<locals>.wrapped_f(*args, **kw)
287 @functools.wraps(f)
288 def wrapped_f(*args: t.Any, **kw: t.Any) -> t.Any:
--> 289 return self(f, *args, **kw)
File ~\AppData\Roaming\Python\Python311\site-packages\tenacity\__init__.py:379, in Retrying.__call__(self, fn, *args, **kwargs)
377 retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs)
378 while True:
--> 379 do = self.iter(retry_state=retry_state)
380 if isinstance(do, DoAttempt):
381 try:
File ~\AppData\Roaming\Python\Python311\site-packages\tenacity\__init__.py:314, in BaseRetrying.iter(self, retry_state)
312 is_explicit_retry = fut.failed and isinstance(fut.exception(), TryAgain)
313 if not (is_explicit_retry or self.retry(retry_state)):
--> 314 return fut.result()
316 if self.after is not None:
317 self.after(retry_state)
File c:\Program Files\Python311\Lib\concurrent\futures\_base.py:449, in Future.result(self, timeout)
447 raise CancelledError()
448 elif self._state == FINISHED:
--> 449 return self.__get_result()
451 self._condition.wait(timeout)
453 if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
File c:\Program Files\Python311\Lib\concurrent\futures\_base.py:401, in Future.__get_result(self)
399 if self._exception:
400 try:
--> 401 raise self._exception
402 finally:
403 # Break a reference cycle with the exception in self._exception
404 self = None
File ~\AppData\Roaming\Python\Python311\site-packages\tenacity\__init__.py:382, in Retrying.__call__(self, fn, *args, **kwargs)
380 if isinstance(do, DoAttempt):
381 try:
--> 382 result = fn(*args, **kwargs)
383 except BaseException: # noqa: B902
384 retry_state.set_exception(sys.exc_info()) # type: ignore[arg-type]
File ~\AppData\Roaming\Python\Python311\site-packages\langchain\chat_models\openai.py:282, in ChatOpenAI.completion_with_retry.<locals>._completion_with_retry(**kwargs)
280 @retry_decorator
281 def _completion_with_retry(**kwargs: Any) -> Any:
--> 282 return self.client.create(**kwargs)
File ~\AppData\Roaming\Python\Python311\site-packages\openai\api_resources\chat_completion.py:25, in ChatCompletion.create(cls, *args, **kwargs)
23 while True:
24 try:
---> 25 return super().create(*args, **kwargs)
26 except TryAgain as e:
27 if timeout is not None and time.time() > start + timeout:
File ~\AppData\Roaming\Python\Python311\site-packages\openai\api_resources\abstract\engine_api_resource.py:153, in EngineAPIResource.create(cls, api_key, api_base, api_type, request_id, api_version, organization, **params)
127 @classmethod
128 def create(
129 cls,
(...)
136 **params,
137 ):
138 (
139 deployment_id,
140 engine,
(...)
150 api_key, api_base, api_type, api_version, organization, **params
151 )
--> 153 response, _, api_key = requestor.request(
154 "post",
155 url,
156 params=params,
157 headers=headers,
158 stream=stream,
159 request_id=request_id,
160 request_timeout=request_timeout,
161 )
163 if stream:
164 # must be an iterator
165 assert not isinstance(response, OpenAIResponse)
File ~\AppData\Roaming\Python\Python311\site-packages\openai\api_requestor.py:230, in APIRequestor.request(self, method, url, params, headers, files, stream, request_id, request_timeout)
209 def request(
210 self,
211 method,
(...)
218 request_timeout: Optional[Union[float, Tuple[float, float]]] = None,
219 ) -> Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], bool, str]:
220 result = self.request_raw(
221 method.lower(),
222 url,
(...)
228 request_timeout=request_timeout,
229 )
--> 230 resp, got_stream = self._interpret_response(result, stream)
231 return resp, got_stream, self.api_key
File ~\AppData\Roaming\Python\Python311\site-packages\openai\api_requestor.py:624, in APIRequestor._interpret_response(self, result, stream)
616 return (
617 self._interpret_response_line(
618 line, result.status_code, result.headers, stream=True
619 )
620 for line in parse_stream(result.iter_lines())
621 ), True
622 else:
623 return (
--> 624 self._interpret_response_line(
625 result.content.decode("utf-8"),
626 result.status_code,
627 result.headers,
628 stream=False,
629 ),
630 False,
631 )
File ~\AppData\Roaming\Python\Python311\site-packages\openai\api_requestor.py:687, in APIRequestor._interpret_response_line(self, rbody, rcode, rheaders, stream)
685 stream_error = stream and "error" in resp.data
686 if stream_error or not 200 <= rcode < 300:
--> 687 raise self.handle_error_response(
688 rbody, rcode, resp.data, rheaders, stream_error=stream_error
689 )
690 return resp
AuthenticationError: