Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
c01b0ee
feat: Making grounding with parallel.ai available on SDK
gcf-owl-bot[bot] Apr 29, 2026
14bdea4
feat: Making grounding with parallel.ai available on SDK
gcf-owl-bot[bot] Apr 29, 2026
2a5cfb9
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Apr 29, 2026
31bc33f
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Apr 29, 2026
64e776d
Merge branch 'owl-bot-copy' of https://github.com/googleapis/python-a…
gcf-owl-bot[bot] Apr 29, 2026
6390c82
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Apr 29, 2026
553eaf4
Merge branch 'owl-bot-copy' of https://github.com/googleapis/python-a…
gcf-owl-bot[bot] Apr 29, 2026
1e869eb
chore(aiplatform/v1beta1): add missing go_package
gcf-owl-bot[bot] Apr 29, 2026
02b2bf1
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] Apr 29, 2026
5ae76cd
feat: add ReasoningEngineRuntimeRevisionService and update related fi…
gcf-owl-bot[bot] May 5, 2026
c1139f5
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] May 5, 2026
e45664d
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] May 5, 2026
579021e
Merge branch 'owl-bot-copy' of https://github.com/googleapis/python-a…
gcf-owl-bot[bot] May 5, 2026
b896f6c
feat: Release ReasoningEngineExecutionService.CancelAsyncQueryReasoni…
gcf-owl-bot[bot] May 7, 2026
168b62c
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] May 7, 2026
fda8aec
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] May 7, 2026
c11bc96
Merge branch 'owl-bot-copy' of https://github.com/googleapis/python-a…
gcf-owl-bot[bot] May 7, 2026
a50675b
feat: Release ReasoningEngineExecutionService.CancelAsyncQueryReasoni…
gcf-owl-bot[bot] May 7, 2026
dafa004
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] May 7, 2026
701f6da
🦉 Updates from OwlBot post-processor
gcf-owl-bot[bot] May 7, 2026
e0f8488
Merge branch 'owl-bot-copy' of https://github.com/googleapis/python-a…
gcf-owl-bot[bot] May 7, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 40 additions & 0 deletions .kokoro/samples/python3.9/common.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# Format: //devtools/kokoro/config/proto/build.proto

# Build logs will be here
action {
define_artifacts {
regex: "**/*sponge_log.xml"
}
}

# Specify which tests to run
env_vars: {
key: "RUN_TESTS_SESSION"
value: "py-3.9"
}

# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
value: "python-docs-samples-tests-py39"
}

env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-aiplatform/.kokoro/test-samples.sh"
}

# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
}

# Download secrets for samples
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"

# Download trampoline resources.
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"

# Use the trampoline script to run in docker.
build_file: "python-aiplatform/.kokoro/trampoline_v2.sh"
6 changes: 6 additions & 0 deletions .kokoro/samples/python3.9/continuous.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Format: //devtools/kokoro/config/proto/build.proto

env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "True"
}
11 changes: 11 additions & 0 deletions .kokoro/samples/python3.9/periodic-head.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Format: //devtools/kokoro/config/proto/build.proto

env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "True"
}

env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-aiplatform/.kokoro/test-samples-against-head.sh"
}
6 changes: 6 additions & 0 deletions .kokoro/samples/python3.9/periodic.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Format: //devtools/kokoro/config/proto/build.proto

env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
}
6 changes: 6 additions & 0 deletions .kokoro/samples/python3.9/presubmit.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Format: //devtools/kokoro/config/proto/build.proto

env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "True"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
ReasoningEngineRuntimeRevisionService
-------------------------------------------------------

.. automodule:: google.cloud.aiplatform_v1beta1.services.reasoning_engine_runtime_revision_service
:members:
:inherited-members:

.. automodule:: google.cloud.aiplatform_v1beta1.services.reasoning_engine_runtime_revision_service.pagers
:members:
:inherited-members:
1 change: 1 addition & 0 deletions docs/aiplatform_v1beta1/services_.rst
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ Services for Google Cloud Aiplatform v1beta1 API
pipeline_service
prediction_service
reasoning_engine_execution_service
reasoning_engine_runtime_revision_service
reasoning_engine_service
schedule_service
session_service
Expand Down
8 changes: 8 additions & 0 deletions google/cloud/aiplatform_v1/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -900,6 +900,12 @@
)
from .types.reasoning_engine_execution_service import AsyncQueryReasoningEngineRequest
from .types.reasoning_engine_execution_service import AsyncQueryReasoningEngineResponse
from .types.reasoning_engine_execution_service import (
CancelAsyncQueryReasoningEngineRequest,
)
from .types.reasoning_engine_execution_service import (
CancelAsyncQueryReasoningEngineResponse,
)
from .types.reasoning_engine_execution_service import QueryReasoningEngineRequest
from .types.reasoning_engine_execution_service import QueryReasoningEngineResponse
from .types.reasoning_engine_execution_service import StreamQueryReasoningEngineRequest
Expand Down Expand Up @@ -1338,6 +1344,8 @@ def _get_version(dependency_name):
"BlurBaselineConfig",
"BoolArray",
"CachedContent",
"CancelAsyncQueryReasoningEngineRequest",
"CancelAsyncQueryReasoningEngineResponse",
"CancelBatchPredictionJobRequest",
"CancelCustomJobRequest",
"CancelDataLabelingJobRequest",
Expand Down
15 changes: 15 additions & 0 deletions google/cloud/aiplatform_v1/gapic_metadata.json
Original file line number Diff line number Diff line change
Expand Up @@ -4315,6 +4315,11 @@
"async_query_reasoning_engine"
]
},
"CancelAsyncQueryReasoningEngine": {
"methods": [
"cancel_async_query_reasoning_engine"
]
},
"QueryReasoningEngine": {
"methods": [
"query_reasoning_engine"
Expand All @@ -4335,6 +4340,11 @@
"async_query_reasoning_engine"
]
},
"CancelAsyncQueryReasoningEngine": {
"methods": [
"cancel_async_query_reasoning_engine"
]
},
"QueryReasoningEngine": {
"methods": [
"query_reasoning_engine"
Expand All @@ -4355,6 +4365,11 @@
"async_query_reasoning_engine"
]
},
"CancelAsyncQueryReasoningEngine": {
"methods": [
"cancel_async_query_reasoning_engine"
]
},
"QueryReasoningEngine": {
"methods": [
"query_reasoning_engine"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -271,40 +271,40 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
@staticmethod
def dataset_path(
project: str,
location: str,
dataset: str,
) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
return "projects/{project}/datasets/{dataset}".format(
project=project,
location=location,
dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
return m.groupdict() if m else {}

@staticmethod
def dataset_path(
project: str,
location: str,
dataset: str,
) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/datasets/{dataset}".format(
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project,
location=location,
dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
return m.groupdict() if m else {}

@staticmethod
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -674,6 +674,103 @@ async def sample_async_query_reasoning_engine():
# Done; return the response.
return response

async def cancel_async_query_reasoning_engine(
    self,
    request: Optional[
        Union[
            reasoning_engine_execution_service.CancelAsyncQueryReasoningEngineRequest,
            dict,
        ]
    ] = None,
    *,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> reasoning_engine_execution_service.CancelAsyncQueryReasoningEngineResponse:
    r"""Cancels an AsyncQueryReasoningEngine operation.

    .. code-block:: python

        # This snippet has been automatically generated and should be regarded as a
        # code template only.
        # It will require modifications to work:
        # - It may require correct/in-range values for request initialization.
        # - It may require specifying regional endpoints when creating the service
        #   client as shown in:
        #   https://googleapis.dev/python/google-api-core/latest/client_options.html
        from google.cloud import aiplatform_v1

        async def sample_cancel_async_query_reasoning_engine():
            # Create a client
            client = aiplatform_v1.ReasoningEngineExecutionServiceAsyncClient()

            # Initialize request argument(s)
            request = aiplatform_v1.CancelAsyncQueryReasoningEngineRequest(
                name="name_value",
                operation_name="operation_name_value",
            )

            # Make the request
            response = await client.cancel_async_query_reasoning_engine(request=request)

            # Handle the response
            print(response)

    Args:
        request (Optional[Union[google.cloud.aiplatform_v1.types.CancelAsyncQueryReasoningEngineRequest, dict]]):
            The request object. Request message for
            [ReasoningEngineExecutionService.CancelAsyncQueryReasoningEngine][google.cloud.aiplatform.v1.ReasoningEngineExecutionService.CancelAsyncQueryReasoningEngine].
        retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
            sent along with the request as metadata. Normally, each value must be of type `str`,
            but for metadata keys ending with the suffix `-bin`, the corresponding values must
            be of type `bytes`.

    Returns:
        google.cloud.aiplatform_v1.types.CancelAsyncQueryReasoningEngineResponse:
            Response message for
            [ReasoningEngineExecutionService.CancelAsyncQueryReasoningEngine][google.cloud.aiplatform.v1.ReasoningEngineExecutionService.CancelAsyncQueryReasoningEngine].

    """
    request_type = (
        reasoning_engine_execution_service.CancelAsyncQueryReasoningEngineRequest
    )
    # Coerce a dict (or None) into the proto-plus request type.  An
    # already-constructed request instance is used as-is — this RPC has no
    # flattened fields, so there is no risk of mutating the caller's input.
    if not isinstance(request, request_type):
        request = request_type(request)

    # Look up the pre-wrapped RPC, which carries the default retry and
    # timeout configuration plus friendly error handling.
    wrapped_rpc = self._client._transport._wrapped_methods[
        self._client._transport.cancel_async_query_reasoning_engine
    ]

    # Attach the routing header expected by the backend: the resource
    # name is carried in request metadata.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
    )

    # Validate the universe domain before issuing the call.
    self._client._validate_universe_domain()

    # Send the request and hand the response straight back to the caller.
    return await wrapped_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

async def list_operations(
self,
request: Optional[operations_pb2.ListOperationsRequest] = None,
Expand Down
Loading
Loading