> Entering new MultiPromptChain chain...
---------------------------------------------------------------------------
OutputParserException Traceback (most recent call last)
File /usr/local/lib/python3.9/site-packages/langchain/chains/router/llm_router.py:80, in RouterOutputParser.parse(self, text)
79 expected_keys = ["destination", "next_inputs"]
---> 80 parsed = parse_json_markdown(text, expected_keys)
81 if not isinstance(parsed["destination"], str):
File /usr/local/lib/python3.9/site-packages/langchain/output_parsers/structured.py:27, in parse_json_markdown(text, expected_keys)
26 if "```json" not in text:
---> 27 raise OutputParserException(
28 f"Got invalid return object. Expected markdown code snippet with JSON "
29 f"object, but got:\n{text}"
30 )
32 json_string = text.split("```json")[1].strip().strip("```").strip()
OutputParserException: Got invalid return object. Expected markdown code snippet with JSON object, but got:
{
"destination": "physics",
"next_inputs": "What is black body radiation?"
}
During handling of the above exception, another exception occurred:
OutputParserException Traceback (most recent call last)
Cell In[33], line 1
----> 1 chain.run("What is black body radiation?")
File /usr/local/lib/python3.9/site-packages/langchain/chains/base.py:236, in Chain.run(self, callbacks, *args, **kwargs)
234 if len(args) != 1:
235 raise ValueError("`run` supports only one positional argument.")
--> 236 return self(args[0], callbacks=callbacks)[self.output_keys[0]]
238 if kwargs and not args:
239 return self(kwargs, callbacks=callbacks)[self.output_keys[0]]
File /usr/local/lib/python3.9/site-packages/langchain/chains/base.py:140, in Chain.__call__(self, inputs, return_only_outputs, callbacks)
138 except (KeyboardInterrupt, Exception) as e:
139 run_manager.on_chain_error(e)
--> 140 raise e
141 run_manager.on_chain_end(outputs)
142 return self.prep_outputs(inputs, outputs, return_only_outputs)
File /usr/local/lib/python3.9/site-packages/langchain/chains/base.py:134, in Chain.__call__(self, inputs, return_only_outputs, callbacks)
128 run_manager = callback_manager.on_chain_start(
129 {"name": self.__class__.__name__},
130 inputs,
131 )
132 try:
133 outputs = (
--> 134 self._call(inputs, run_manager=run_manager)
135 if new_arg_supported
136 else self._call(inputs)
137 )
138 except (KeyboardInterrupt, Exception) as e:
139 run_manager.on_chain_error(e)
File /usr/local/lib/python3.9/site-packages/langchain/chains/router/base.py:72, in MultiRouteChain._call(self, inputs, run_manager)
70 _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager()
71 callbacks = _run_manager.get_child()
---> 72 route = self.router_chain.route(inputs, callbacks=callbacks)
74 _run_manager.on_text(
75 str(route.destination) + ": " + str(route.next_inputs), verbose=self.verbose
76 )
77 if not route.destination:
File /usr/local/lib/python3.9/site-packages/langchain/chains/router/base.py:26, in RouterChain.route(self, inputs, callbacks)
25 def route(self, inputs: Dict[str, Any], callbacks: Callbacks = None) -> Route:
---> 26 result = self(inputs, callbacks=callbacks)
27 return Route(result["destination"], result["next_inputs"])
File /usr/local/lib/python3.9/site-packages/langchain/chains/base.py:140, in Chain.__call__(self, inputs, return_only_outputs, callbacks)
138 except (KeyboardInterrupt, Exception) as e:
139 run_manager.on_chain_error(e)
--> 140 raise e
141 run_manager.on_chain_end(outputs)
142 return self.prep_outputs(inputs, outputs, return_only_outputs)
File /usr/local/lib/python3.9/site-packages/langchain/chains/base.py:134, in Chain.__call__(self, inputs, return_only_outputs, callbacks)
128 run_manager = callback_manager.on_chain_start(
129 {"name": self.__class__.__name__},
130 inputs,
131 )
132 try:
133 outputs = (
--> 134 self._call(inputs, run_manager=run_manager)
135 if new_arg_supported
136 else self._call(inputs)
137 )
138 except (KeyboardInterrupt, Exception) as e:
139 run_manager.on_chain_error(e)
File /usr/local/lib/python3.9/site-packages/langchain/chains/router/llm_router.py:57, in LLMRouterChain._call(self, inputs, run_manager)
53 _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager()
54 callbacks = _run_manager.get_child()
55 output = cast(
56 Dict[str, Any],
---> 57 self.llm_chain.predict_and_parse(callbacks=callbacks, **inputs),
58 )
59 return output
File /usr/local/lib/python3.9/site-packages/langchain/chains/llm.py:238, in LLMChain.predict_and_parse(self, callbacks, **kwargs)
236 result = self.predict(callbacks=callbacks, **kwargs)
237 if self.prompt.output_parser is not None:
--> 238 return self.prompt.output_parser.parse(result)
239 else:
240 return result
File /usr/local/lib/python3.9/site-packages/langchain/chains/router/llm_router.py:97, in RouterOutputParser.parse(self, text)
95 return parsed
96 except Exception as e:
---> 97 raise OutputParserException(
98 f"Parsing text\n{text}\n raised following error:\n{e}"
99 )
OutputParserException: Parsing text
{
"destination": "physics",
"next_inputs": "What is black body radiation?"
}
raised following error:
Got invalid return object. Expected markdown code snippet with JSON object, but got:
{
"destination": "physics",
"next_inputs": "What is black body radiation?"
}
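What the traceback boils down to: in this LangChain version the router's output parser requires the model's reply to be wrapped in a ```json fence, and here the model returned bare JSON, so the very first check in parse_json_markdown fails. A simplified paraphrase of the check in langchain/output_parsers/structured.py shown above (the expected-keys validation is omitted):

import json

def parse_json_markdown(text: str) -> dict:
    # Simplified from langchain/output_parsers/structured.py (see traceback above).
    if "```json" not in text:
        raise ValueError("Expected markdown code snippet with JSON object")
    json_string = text.split("```json")[1].strip().strip("```").strip()
    return json.loads(json_string)

bare = '{"destination": "physics", "next_inputs": "What is black body radiation?"}'
fenced = "```json\n" + bare + "\n```"

# parse_json_markdown(bare)   # raises, which is exactly the OutputParserException above
parse_json_markdown(fenced)   # parses fine once the reply is wrapped in a ```json fence

The suggested template edits below are mainly about coaxing the model into producing exactly that fenced format.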
Modify the template as below: change "DEFAULT" to lowercase "default", and add the missing ``` after ```json in the << OUTPUT >> line at the end.
MULTI_PROMPT_ROUTER_TEMPLATE =
(…)
{{{{
    "destination": string \ name of the prompt to use or "default"
    "next_inputs": string \ a potentially modified version of the original input
}}}}
REMEMBER: "destination" MUST be one of the candidate prompt names specified below OR it can be "default" if the input is not well suited for any of the candidate prompts.
REMEMBER: "next_inputs" can just be the original input if you don't think any modifications are needed.
(…)
<< OUTPUT (remember to include the ```json```)>>"""
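One more thing to keep in mind after editing the template: the router prompt and the chain have to be rebuilt from the new string, or the old prompt keeps being used. A minimal sketch of the wiring, assuming the variables from the course notebook (llm, destinations_str, destination_chains, default_chain) are already defined:

from langchain.prompts import PromptTemplate
from langchain.chains.router import MultiPromptChain
from langchain.chains.router.llm_router import LLMRouterChain, RouterOutputParser

# Rebuild the router prompt from the edited template so the fix actually takes effect.
router_template = MULTI_PROMPT_ROUTER_TEMPLATE.format(destinations=destinations_str)
router_prompt = PromptTemplate(
    template=router_template,
    input_variables=["input"],
    output_parser=RouterOutputParser(),
)
router_chain = LLMRouterChain.from_llm(llm, router_prompt)

chain = MultiPromptChain(
    router_chain=router_chain,
    destination_chains=destination_chains,
    default_chain=default_chain,
    verbose=True,
)
chain.run("What is black body radiation?")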
The above certainly works, and there will be many other ways to make it work too.
One such option is to change the following line:
Return a markdown code snippet with a JSON object formatted to look like:
to
Return a string snippet enclosed by triple backticks with a JSON object formatted to look like below:
and to change "DEFAULT" to lowercase "default".
Here’s the entire prompt
Tried the above solution but I'm still getting an error: Got invalid JSON object. Error: Extra data: line 5 column 1 (char 77)
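As an aside, that "Extra data" error comes from json.loads: this time the parser did find a ```json fence, but the text it extracted contained a valid JSON object followed by something else, typically trailing commentary or the closing fence when the model keeps writing after it, which the naive strip("```") in structured.py does not remove. A small reproduction with a hypothetical reply:

import json

# Hypothetical model reply that keeps writing after the closing fence.
text = (
    "```json\n"
    '{\n"destination": "physics",\n"next_inputs": "What is black body radiation?"\n}\n'
    "```\nHope this helps!"
)
json_string = text.split("```json")[1].strip().strip("```").strip()
json.loads(json_string)   # json.decoder.JSONDecodeError: Extra data: line 5 column 1 ...

Checking the raw LLM output for anything after the closing } is a good first step.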
Try again; there was a formatting error in my earlier paste. I have pasted the entire prompt again.
This is getting stranger by the minute.
Now it works on the first chain.run("What is black body radiation?").
But it still fails on the other two ("What is 2+2" and "Why does every cell in our body contain DNA?").
It could also be because I'm working in Azure AI Studio; I've stumbled on more minor differences in the course materials, too.
Tip:
Be careful with curly double quotes; they can also cause errors.
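If the template was copied from a web page, smart quotes can sneak in without being obvious. A quick way to normalize them, assuming the edited template string is in MULTI_PROMPT_ROUTER_TEMPLATE:

# Replace curly quotes with plain ASCII ones before building the router prompt.
MULTI_PROMPT_ROUTER_TEMPLATE = (
    MULTI_PROMPT_ROUTER_TEMPLATE
    .replace("\u201c", '"').replace("\u201d", '"')   # curly double quotes
    .replace("\u2018", "'").replace("\u2019", "'")   # curly single quotes
)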
Make sure there is an indent as shown in the course:
{{{{
    "destination": string \ name of the prompt to use or "default"
    "next_inputs": string \ a potentially modified version of the original input
}}}}
and not:
{{{{
"destination": string \ name of the prompt to use or "default"
"next_inputs": string \ a potentially modified version of the original input
}}}}
That indent seems to do the trick for the first chain.run, but I'm still lost on the other two.