L4 Error on running the last command


KeyError Traceback (most recent call last)
Cell In[55], line 1
----> 1 chain.invoke(doc.page_content)

File /usr/local/lib/python3.9/site-packages/langchain/schema/runnable/base.py:1113, in RunnableSequence.invoke(self, input, config)
1111 try:
1112 for i, step in enumerate(self.steps):
---> 1113 input = step.invoke(
1114 input,
1115 # mark each step as a child run
1116 patch_config(
1117 config, callbacks=run_manager.get_child(f"seq:step:{i+1}")
1118 ),
1119 )
1120 # finish the root run
1121 except BaseException as e:

File /usr/local/lib/python3.9/site-packages/langchain/schema/runnable/base.py:2263, in RunnableEach.invoke(self, input, config)
2260 def invoke(
2261 self, input: List[Input], config: Optional[RunnableConfig] = None
2262 ) -> List[Output]:
---> 2263 return self._call_with_config(self._invoke, input, config)

File /usr/local/lib/python3.9/site-packages/langchain/schema/runnable/base.py:633, in Runnable._call_with_config(self, func, input, config, run_type, **kwargs)
626 run_manager = callback_manager.on_chain_start(
627 dumpd(self),
628 input,
629 run_type=run_type,
630 name=config.get("run_name"),
631 )
632 try:
---> 633 output = call_func_with_variable_args(
634 func, input, run_manager, config, **kwargs
635 )
636 except BaseException as e:
637 run_manager.on_chain_error(e)

File /usr/local/lib/python3.9/site-packages/langchain/schema/runnable/config.py:173, in call_func_with_variable_args(func, input, run_manager, config, **kwargs)
171 if accepts_run_manager(func):
172 kwargs["run_manager"] = run_manager
---> 173 return func(input, **kwargs)

File /usr/local/lib/python3.9/site-packages/langchain/schema/runnable/base.py:2256, in RunnableEach._invoke(self, inputs, run_manager, config)
2250 def _invoke(
2251 self,
2252 inputs: List[Input],
2253 run_manager: CallbackManagerForChainRun,
2254 config: RunnableConfig,
2255 ) -> List[Output]:
---> 2256 return self.bound.batch(
2257 inputs, patch_config(config, callbacks=run_manager.get_child())
2258 )

File /usr/local/lib/python3.9/site-packages/langchain/schema/runnable/base.py:1247, in RunnableSequence.batch(self, inputs, config, return_exceptions, **kwargs)
1245 else:
1246 for i, step in enumerate(self.steps):
---> 1247 inputs = step.batch(
1248 inputs,
1249 [
1250 # each step a child run of the corresponding root run
1251 patch_config(
1252 config, callbacks=rm.get_child(f"seq:step:{i+1}")
1253 )
1254 for rm, config in zip(run_managers, configs)
1255 ],
1256 )
1258 # finish the root runs
1259 except BaseException as e:

File /usr/local/lib/python3.9/site-packages/langchain/schema/runnable/base.py:309, in Runnable.batch(self, inputs, config, return_exceptions, **kwargs)
306 return cast(List[Output], [invoke(inputs[0], configs[0])])
308 with get_executor_for_config(configs[0]) as executor:
---> 309 return cast(List[Output], list(executor.map(invoke, inputs, configs)))

File /usr/local/lib/python3.9/concurrent/futures/_base.py:609, in Executor.map.<locals>.result_iterator()
606 while fs:
607 # Careful not to keep a reference to the popped future
608 if timeout is None:
---> 609 yield fs.pop().result()
610 else:
611 yield fs.pop().result(end_time - time.monotonic())

File /usr/local/lib/python3.9/concurrent/futures/_base.py:439, in Future.result(self, timeout)
437 raise CancelledError()
438 elif self._state == FINISHED:
---> 439 return self.__get_result()
441 self._condition.wait(timeout)
443 if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:

File /usr/local/lib/python3.9/concurrent/futures/_base.py:391, in Future.__get_result(self)
389 if self._exception:
390 try:
---> 391 raise self._exception
392 finally:
393 # Break a reference cycle with the exception in self._exception
394 self = None

File /usr/local/lib/python3.9/concurrent/futures/thread.py:58, in _WorkItem.run(self)
55 return
57 try:
---> 58 result = self.fn(*self.args, **self.kwargs)
59 except BaseException as exc:
60 self.future.set_exception(exc)

File /usr/local/lib/python3.9/site-packages/langchain/schema/runnable/base.py:302, in Runnable.batch.<locals>.invoke(input, config)
300 return e
301 else:
---> 302 return self.invoke(input, config, **kwargs)

File /usr/local/lib/python3.9/site-packages/langchain/schema/output_parser.py:173, in BaseOutputParser.invoke(self, input, config)
169 def invoke(
170 self, input: Union[str, BaseMessage], config: Optional[RunnableConfig] = None
171 ) -> T:
172 if isinstance(input, BaseMessage):
---> 173 return self._call_with_config(
174 lambda inner_input: self.parse_result(
175 [ChatGeneration(message=inner_input)]
176 ),
177 input,
178 config,
179 run_type="parser",
180 )
181 else:
182 return self._call_with_config(
183 lambda inner_input: self.parse_result([Generation(text=inner_input)]),
184 input,
185 config,
186 run_type="parser",
187 )

File /usr/local/lib/python3.9/site-packages/langchain/schema/runnable/base.py:633, in Runnable._call_with_config(self, func, input, config, run_type, **kwargs)
626 run_manager = callback_manager.on_chain_start(
627 dumpd(self),
628 input,
629 run_type=run_type,
630 name=config.get("run_name"),
631 )
632 try:
---> 633 output = call_func_with_variable_args(
634 func, input, run_manager, config, **kwargs
635 )
636 except BaseException as e:
637 run_manager.on_chain_error(e)

File /usr/local/lib/python3.9/site-packages/langchain/schema/runnable/config.py:173, in call_func_with_variable_args(func, input, run_manager, config, **kwargs)
171 if accepts_run_manager(func):
172 kwargs["run_manager"] = run_manager
---> 173 return func(input, **kwargs)

File /usr/local/lib/python3.9/site-packages/langchain/schema/output_parser.py:174, in BaseOutputParser.invoke.<locals>.<lambda>(inner_input)
169 def invoke(
170 self, input: Union[str, BaseMessage], config: Optional[RunnableConfig] = None
171 ) -> T:
172 if isinstance(input, BaseMessage):
173 return self._call_with_config(
---> 174 lambda inner_input: self.parse_result(
175 [ChatGeneration(message=inner_input)]
176 ),
177 input,
178 config,
179 run_type="parser",
180 )
181 else:
182 return self._call_with_config(
183 lambda inner_input: self.parse_result([Generation(text=inner_input)]),
184 input,
185 config,
186 run_type="parser",
187 )

File /usr/local/lib/python3.9/site-packages/langchain/output_parsers/openai_functions.py:134, in JsonKeyOutputFunctionsParser.parse_result(self, result, partial)
132 def parse_result(self, result: List[Generation], *, partial: bool = False) -> Any:
133 res = super().parse_result(result)
---> 134 return res.get(self.key_name) if partial else res[self.key_name]

KeyError: 'papers'


NVM, it fixed itself when I ran it a couple of times. Could be a timeout issue on the ChatGPT side.

I faced the same issue. I ended up limiting the size of the doc to at least see it working:
chain.invoke(doc.page_content[:30000])

By incrementing that number until it failed, then reducing it again, I could see that the error was most likely in the call to the LLM.
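
If it helps, here is a rough sketch of that probing approach. It assumes chain and doc are the objects already defined in the notebook; the 10,000-character step size is arbitrary:

# Probe progressively larger slices of the document to find where the chain starts failing.
for limit in range(10_000, len(doc.page_content) + 10_000, 10_000):
    try:
        chain.invoke(doc.page_content[:limit])
        print(f"ok at {limit} characters")
    except KeyError as err:
        print(f"failed at {limit} characters: {err!r}")
        break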

Hi, I looked into this a moment ago and at first I didn't understand why the parser was failing. Going through the chain piece by piece, I found that the key papers probably just doesn't exist, exactly as the trace says: the template instruction says "Just return an empty list", so the model returns [ ] and the parser complains that there is no papers key to extract. Testing the chain in parts confirmed that the model returns only [ ] in the cases where it can't extract any papers.

So I added this to the template: "…you don't need to extract any! Just return an empty list with the key papers." This ensures the model returns "{'papers': [ ]}" whenever it can't extract any papers, so the parser can always find the papers key in the model's response. I still don't understand why map applies the function to each part of the extraction chain instead of applying the whole extraction chain to each part of the document, but this is a reasonable way to solve the problem.
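
For reference, here is a minimal sketch of the extraction chain with that amended template. The class names (Paper, Info), variable names, and exact wording are assumptions based on the lesson rather than the notebook's exact code; the important part is the added sentence asking for an empty list with the key papers, so that JsonKeyOutputFunctionsParser(key_name="papers") always finds the key:

from typing import List, Optional

from langchain.chat_models import ChatOpenAI
from langchain.output_parsers.openai_functions import JsonKeyOutputFunctionsParser
from langchain.prompts import ChatPromptTemplate
from langchain.pydantic_v1 import BaseModel
from langchain.utils.openai_functions import convert_pydantic_to_openai_function

class Paper(BaseModel):
    """A paper mentioned in the article."""
    title: str
    author: Optional[str]

class Info(BaseModel):
    """Papers to extract from the article."""
    papers: List[Paper]

# The second paragraph's last sentence is the fix described above: it nudges
# the model to always emit the 'papers' key, even when the list is empty.
template = """An article will be passed to you. Extract from it all papers that are mentioned.

Do not extract the name of the article itself. If no papers are mentioned, that's fine -- \
you don't need to extract any! Just return an empty list with the key papers.

Do not make up or guess ANY extra information. Only extract exactly what is in the text."""

prompt = ChatPromptTemplate.from_messages([("system", template), ("human", "{input}")])
model = ChatOpenAI(temperature=0)
extraction_functions = [convert_pydantic_to_openai_function(Info)]

extraction_chain = (
    prompt
    | model.bind(functions=extraction_functions, function_call={"name": "Info"})
    | JsonKeyOutputFunctionsParser(key_name="papers")
)

# Usage (requires OPENAI_API_KEY): extraction_chain.invoke({"input": some_page_text})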

From my side, I can confirm that there really is a KeyError traceback in the last code cell of the notebook on the DeepLearning.AI platform.

However, in the video the output is displayed correctly. Also, if you run the notebook locally on your own computer, you do not have to change any code.

Hope this helps.