Cannot use get_router_query_engine() with a Llama model on Bedrock

I'm trying to use this function from the utils module:

def get_router_query_engine(file_path: str, llm = None, embed_model = None):
    """Get router query engine."""
    llm = llm or OpenAI(model="gpt-3.5-turbo")
    embed_model = embed_model or OpenAIEmbedding(model="text-embedding-ada-002")
    
    # load documents
    documents = SimpleDirectoryReader(input_files=[file_path]).load_data()
    
    splitter = SentenceSplitter(chunk_size=1024)
    nodes = splitter.get_nodes_from_documents(documents)
    
    summary_index = SummaryIndex(nodes)
    vector_index = VectorStoreIndex(nodes, embed_model=embed_model)
    
    summary_query_engine = summary_index.as_query_engine(
        response_mode="tree_summarize",
        use_async=True,
        llm=llm
    )
    vector_query_engine = vector_index.as_query_engine(llm=llm)
    
    summary_tool = QueryEngineTool.from_defaults(
        query_engine=summary_query_engine,
        description=(
            "Useful for summarization questions related to MetaGPT"
        ),
    )
    
    vector_tool = QueryEngineTool.from_defaults(
        query_engine=vector_query_engine,
        description=(
            "Useful for retrieving specific context from the MetaGPT paper."
        ),
    )
    
    query_engine = RouterQueryEngine(
        selector=LLMSingleSelector.from_defaults(),
        query_engine_tools=[
            summary_tool,
            vector_tool,
        ],
        verbose=True
    )
    return query_engine
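
For reference, these are the imports the function seems to rely on (exact module paths may vary with the llama-index version installed):

from llama_index.core import SimpleDirectoryReader, SummaryIndex, VectorStoreIndex
from llama_index.core.node_parser import SentenceSplitter
from llama_index.core.tools import QueryEngineTool
from llama_index.core.query_engine import RouterQueryEngine
from llama_index.core.selectors import LLMSingleSelector
from llama_index.llms.openai import OpenAI
from llama_index.embeddings.openai import OpenAIEmbedding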

I call it with the following parameters:

llm = Bedrock(
    model="meta.llama3-3-70b-instruct-v1:0",
    aws_access_key_id=os.getenv("AWS_ACCESS_KEY"),
    aws_secret_access_key=os.getenv("AWS_SECRET_KEY"),
    region_name="us-east-2",
    context_size=128_000,
    temperature=0.0,
)

embed_model = HuggingFaceEmbedding(
    model_name="BAAI/bge-small-en-v1.5",
    device="cpu",
)
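
I then call the function roughly like this (the file path and question below are placeholders, not my exact values):

from utils import get_router_query_engine  # the module the function above comes from

query_engine = get_router_query_engine(
    "metagpt.pdf",  # hypothetical path to my local copy of the MetaGPT paper
    llm=llm,
    embed_model=embed_model,
)
response = query_engine.query("What results does the MetaGPT paper report?")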

But I get the following error:

raise OutputParserException(
llama_index.core.output_parsers.base.OutputParserException: Got invalid JSON object. Error: Expecting property name enclosed in double quotes: line 3 column 9 (char 16) expected '<document start>', but found '['
  in "<unicode string>", line 6, column 3:
    ] [/SYS] [/INST] [/SYS] [/INST] [/ ...
      ^. Got JSON string: [
    {
        choice: 2,
        reason: "The question asks about specific results from the MetaGPT paper, which implies the need to retrieve specific context from the paper."
    }
] [/SYS] [/INST] [/SYS] [/INST] [/SYS] [/INST] ... (the [/SYS] [/INST] pair repeats many more times)

The selector output from the Llama model isn't valid JSON (the keys aren't quoted and the trailing [/SYS] [/INST] tokens are appended), so LLMSingleSelector fails to parse it. Is there a way to make get_router_query_engine() work with this Llama model on Bedrock?