I’m getting 2 errors just running this Lab - Introduction to the Weaviate API. I have tried updating the Lab, rebooting the server, and reverting to checkpoint, all with the same results.
Under adding elements to a collection, this code:
# Set up a batch process with specified fixed size and concurrency
with collection.batch.fixed_size(batch_size=1, concurrent_requests=1) as batch:
    # Iterate over a subset of the dataset
    for document in tqdm(data):  # tqdm is a library to show progress bars
        # Generate a UUID based on the article_content text for unique identification
        uuid = generate_uuid5(document)
        # Add the object to the batch with properties and UUID.
        # properties expects a dictionary with the keys being the properties.
        batch.add_object(
            properties=document,
            uuid=uuid,
        )
produces an error (the error output did not get captured in this post — I can re-run and paste it if needed).
It does still result in a collection of vectors with length 17.
Then, under section 3.2 Semantic Search, it produces a fatal error when trying to invoke the .near_text method:
result = collection.query.near_text(query = 'I want suggestions to travel during Winter. I want cheap places.', limit = 4)
---------------------------------------------------------------------------
_InactiveRpcError Traceback (most recent call last)
File /usr/local/lib/python3.10/dist-packages/weaviate/connect/v4.py:985, in ConnectionSync.grpc_search(self, request)
984 assert self.grpc_stub is not None
--> 985 res = _Retry(4).with_exponential_backoff(
986 0,
987 f"Searching in collection {request.collection}",
988 self.grpc_stub.Search,
989 request,
990 metadata=self.grpc_headers(),
991 timeout=self.timeout_config.query,
992 )
993 return cast(search_get_pb2.SearchReply, res)
File /usr/local/lib/python3.10/dist-packages/weaviate/retry.py:54, in _Retry.with_exponential_backoff(self, count, error, f, *args, **kwargs)
53 if err.code() != StatusCode.UNAVAILABLE:
---> 54 raise e
55 logger.info(
56 f"{error} received exception: {e}. Retrying with exponential backoff in {2**count} seconds"
57 )
File /usr/local/lib/python3.10/dist-packages/weaviate/retry.py:50, in _Retry.with_exponential_backoff(self, count, error, f, *args, **kwargs)
49 try:
---> 50 return f(*args, **kwargs)
51 except RpcError as e:
File /usr/local/lib/python3.10/dist-packages/grpc/_channel.py:1181, in _UnaryUnaryMultiCallable.__call__(self, request, timeout, metadata, credentials, wait_for_ready, compression)
1175 (
1176 state,
1177 call,
1178 ) = self._blocking(
1179 request, timeout, metadata, credentials, wait_for_ready, compression
1180 )
-> 1181 return _end_unary_response_blocking(state, call, False, None)
File /usr/local/lib/python3.10/dist-packages/grpc/_channel.py:1006, in _end_unary_response_blocking(state, call, with_call, deadline)
1005 else:
-> 1006 raise _InactiveRpcError(state)
_InactiveRpcError: <_InactiveRpcError of RPC that terminated with:
status = StatusCode.UNKNOWN
details = "explorer: get class: concurrentTargetVectorSearch): explorer: get class: vectorize search vector: vectorize params: vectorize params: vectorize keywords: remote client vectorize: fail with status 500: Failed to get correct output from LLM call.
Exception: Error code: 400 - {'id': 'oZBJEha-2kFHot-9d4fbd771fc5df0a', 'error': {'message': 'Unable to access non-serverless model BAAI/bge-base-en-v1.5. Please visit https://api.together.ai/models/BAAI/bge-base-en-v1.5 to create and start a new dedicated endpoint for the model.', 'type': 'invalid_request_error', 'param': None, 'code': 'model_not_available'}}"
debug_error_string = "UNKNOWN:Error received from peer {grpc_status:2, grpc_message:"explorer: get class: concurrentTargetVectorSearch): explorer: get class: vectorize search vector: vectorize params: vectorize params: vectorize keywords: remote client vectorize: fail with status 500: Failed to get correct output from LLM call.\nException: Error code: 400 - {\'id\': \'oZBJEha-2kFHot-9d4fbd771fc5df0a\', \'error\': {\'message\': \'Unable to access non-serverless model BAAI/bge-base-en-v1.5. Please visit https://api.together.ai/models/BAAI/bge-base-en-v1.5 to create and start a new dedicated endpoint for the model.\', \'type\': \'invalid_request_error\', \'param\': None, \'code\': \'model_not_available\'}}"}"
>
During handling of the above exception, another exception occurred:
WeaviateQueryError Traceback (most recent call last)
Cell In[17], line 1
----> 1 result = collection.query.near_text(query = 'I want suggestions to travel during Winter. I want cheap places.', limit = 4)
File /usr/local/lib/python3.10/dist-packages/weaviate/collections/queries/near_text/query/executor.py:424, in _NearTextQueryExecutor.near_text(self, query, certainty, distance, move_to, move_away, limit, offset, auto_limit, filters, group_by, rerank, target_vector, include_vector, return_metadata, return_properties, return_references)
391 return cast(
392 Any,
393 self._result_to_query_or_groupby_return(
(...)
404 ),
405 )
407 request = self._query.near_text(
408 near_text=query,
409 certainty=certainty,
(...)
422 return_references=self._parse_return_references(return_references),
423 )
--> 424 return executor.execute(
425 response_callback=resp,
426 method=self._connection.grpc_search,
427 request=request,
428 )
File /usr/local/lib/python3.10/dist-packages/weaviate/connect/executor.py:99, in execute(method, response_callback, exception_callback, *args, **kwargs)
97 return resp_call
98 except Exception as e:
---> 99 return cast(T, exception_callback(e))
File /usr/local/lib/python3.10/dist-packages/weaviate/connect/executor.py:38, in raise_exception(e)
37 def raise_exception(e: Exception) -> Any:
---> 38 raise e
File /usr/local/lib/python3.10/dist-packages/weaviate/connect/executor.py:80, in execute(method, response_callback, exception_callback, *args, **kwargs)
71 def execute(
72 method: SyncOrAsyncMethod[P, R],
73 response_callback: SyncOrAsyncCallback[R, T, A],
(...)
77 ) -> Union[T, Awaitable[T], Awaitable[A]]:
78 # wrap method call in try-except to catch exceptions for sync method
79 try:
---> 80 call = method(*args, **kwargs)
81 if isinstance(call, Awaitable):
83 async def _execute() -> T:
84 # wrap await in try-except to catch exceptions for async method
File /usr/local/lib/python3.10/dist-packages/weaviate/connect/v4.py:998, in ConnectionSync.grpc_search(self, request)
996 if error.code() == StatusCode.PERMISSION_DENIED:
997 raise InsufficientPermissionsError(error)
--> 998 raise WeaviateQueryError(str(error.details()), "GRPC search") # pyright: ignore
999 except WeaviateRetryError as e:
1000 raise WeaviateQueryError(str(e), "GRPC search")
WeaviateQueryError: Query call with protocol GRPC search failed with message explorer: get class: concurrentTargetVectorSearch): explorer: get class: vectorize search vector: vectorize params: vectorize params: vectorize keywords: remote client vectorize: fail with status 500: Failed to get correct output from LLM call.
Exception: Error code: 400 - {'id': 'oZBJEha-2kFHot-9d4fbd771fc5df0a', 'error': {'message': 'Unable to access non-serverless model BAAI/bge-base-en-v1.5. Please visit https://api.together.ai/models/BAAI/bge-base-en-v1.5 to create and start a new dedicated endpoint for the model.', 'type': 'invalid_request_error', 'param': None, 'code': 'model_not_available'}}.
