feat: Add ai_generate_bool to the bigframes.bigquery package #2060
@@ -18,6 +18,7 @@
```python
import sys

from bigframes.bigquery._operations.ai import ai_generate_bool
from bigframes.bigquery._operations.approx_agg import approx_top_count
from bigframes.bigquery._operations.array import (
    array_agg,
```

@@ -57,6 +58,8 @@
```python
from bigframes.core import log_adapter

_functions = [
    # ai ops
    ai_generate_bool,
```
Suggested change: replace `ai_generate_bool,` with `ai,`.

Reviewer: "Note, if we do this, I think we'll want to add the `ai` submodule to the docs."

Author: "Updated. I'm not familiar with how the docs are set up. Please take a look to make sure I got everything correct. Thanks!"
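With the new function exported from `bigframes.bigquery`, a minimal usage sketch (mirroring the docstring example in the new module below; it assumes a configured BigQuery session with a default connection, and the column names are placeholders) looks roughly like this:

```python
import bigframes.pandas as bpd
import bigframes.bigquery as bbq

# Toy data; any string columns can serve as prompt parts.
df = bpd.DataFrame({"col_1": ["apple", "bear"], "col_2": ["fruit", "animal"]})

# The prompt mixes Series with string literals; the call returns a struct Series.
result = bbq.ai_generate_bool((df["col_1"], " is a ", df["col_2"]))

# Pull the boolean verdict out of the struct.
print(result.struct.field("result"))
```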
@@ -0,0 +1,165 @@
```python
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import json
from typing import Any, List, Literal, Mapping, Tuple

from bigframes import clients, dtypes, series
from bigframes.operations import ai_ops


def ai_generate_bool(
    prompt: series.Series | List[str | series.Series] | Tuple[str | series.Series, ...],
    *,
    connection_id: str | None = None,
    endpoint: str | None = None,
    request_type: Literal["dedicated", "shared", "unspecified"] = "unspecified",
    model_params: Mapping[Any, Any] | None = None,
) -> series.Series:
    """
    Returns the AI analysis based on the prompt, which can be any combination of text and unstructured data.

    **Examples:**

        >>> import bigframes.pandas as bpd
        >>> import bigframes.bigquery as bbq
        >>> bpd.options.display.progress_bar = None

        >>> df = bpd.DataFrame({
        ...     "col_1": ["apple", "bear", "pear"],
        ...     "col_2": ["fruit", "animal", "animal"]
        ... })
        >>> bbq.ai_generate_bool((df["col_1"], " is a ", df["col_2"]))
        0    {'result': True, 'full_response': '{"candidate...
        1    {'result': True, 'full_response': '{"candidate...
        2    {'result': False, 'full_response': '{"candidat...
        dtype: struct<result: bool, full_response: string, status: string>[pyarrow]

        >>> bbq.ai_generate_bool((df["col_1"], " is a ", df["col_2"])).struct.field("result")
        0     True
        1     True
        2    False
        Name: result, dtype: boolean

        >>> model_params = {
        ...     "generation_config": {
        ...         "thinking_config": {
        ...             "thinking_budget": 0
        ...         }
        ...     }
        ... }
        >>> bbq.ai_generate_bool(
        ...     (df["col_1"], " is a ", df["col_2"]),
        ...     endpoint="gemini-2.5-pro",
        ...     model_params=model_params,
        ... ).struct.field("result")
        0     True
        1     True
        2    False
        Name: result, dtype: boolean

    Args:
        prompt (series.Series | List[str|series.Series] | Tuple[str|series.Series, ...]):
            A mixture of Series and string literals that specifies the prompt to send to the model.
        connection_id (str, optional):
            Specifies the connection to use to communicate with the model. For example, `myproject.us.myconnection`.
            If not provided, the connection from the current session will be used.
        endpoint (str, optional):
            Specifies the Vertex AI endpoint to use for the model. For example, `"gemini-2.5-flash"`. You can specify
            any generally available or preview Gemini model. If you specify the model name, BigQuery ML automatically
            identifies and uses the full endpoint of the model. If you don't specify an ENDPOINT value, BigQuery ML
            selects a recent stable version of Gemini to use.
        request_type (Literal["dedicated", "shared", "unspecified"]):
            Specifies the type of inference request to send to the Gemini model. The request type determines what
            quota the request uses.
            * "dedicated": the function only uses Provisioned Throughput quota. The function returns the error
              `Provisioned throughput is not purchased or is not active` if Provisioned Throughput quota isn't
              available.
            * "shared": the function only uses dynamic shared quota (DSQ), even if you have purchased Provisioned
              Throughput quota.
            * "unspecified": if you haven't purchased Provisioned Throughput quota, the function uses DSQ quota.
              If you have purchased Provisioned Throughput quota, the function uses the Provisioned Throughput quota
              first. If requests exceed the Provisioned Throughput quota, the overflow traffic uses DSQ quota.
        model_params (Mapping[Any, Any]):
            Provides additional parameters to the model. The MODEL_PARAMS value must conform to the generateContent
            request body format.

    Returns:
        bigframes.series.Series: A new struct Series with the result data. The struct contains these fields:
            * "result": a BOOL value containing the model's response to the prompt. The result is None if the request
              fails or is filtered by responsible AI.
            * "full_response": a STRING value containing the JSON response from the
              projects.locations.endpoints.generateContent call to the model. The generated text is in the text
              element.
            * "status": a STRING value that contains the API response status for the corresponding row. This value is
              empty if the operation was successful.
    """

    prompt_context, series_list = _separate_context_and_series(prompt)
    assert len(series_list) > 0

    operator = ai_ops.AIGenerateBool(
        prompt_context=tuple(prompt_context),
        connection_id=_resolve_connection_id(series_list[0], connection_id),
        endpoint=endpoint,
        request_type=request_type,
        model_params=json.dumps(model_params) if model_params else None,
    )

    return series_list[0]._apply_nary_op(operator, series_list[1:])


def _separate_context_and_series(
    prompt: series.Series | List[str | series.Series] | Tuple[str | series.Series, ...],
) -> Tuple[List[str | None], List[series.Series]]:
    """
    Returns two values. The first is the prompt with every Series replaced by None. The second is the list of all
    Series in the prompt. The original item order is kept.

    For example:
        Input: ("str1", series1, "str2", "str3", series2)
        Output: ["str1", None, "str2", "str3", None], [series1, series2]
    """
    if not isinstance(prompt, (list, tuple, series.Series)):
```
Inline review thread on this line:

Collaborator: "Could we call the …"

Author: "Will do that in a separate PR."
```python
        raise ValueError(f"Unsupported prompt type: {type(prompt)}")

    if isinstance(prompt, series.Series):
        if prompt.dtype == dtypes.OBJ_REF_DTYPE:
            # Multi-model support
            return [None], [prompt.blob.read_url()]
        return [None], [prompt]

    prompt_context: List[str | None] = []
    series_list: List[series.Series] = []

    for item in prompt:
        if isinstance(item, str):
            prompt_context.append(item)
        elif isinstance(item, series.Series):
            prompt_context.append(None)
            if item.dtype == dtypes.OBJ_REF_DTYPE:
                # Multi-model support
                item = item.blob.read_url()
            series_list.append(item)
        else:
            raise TypeError(f"Unsupported type in prompt: {type(item)}")

    if not series_list:
        raise ValueError("Please provide at least one Series in the prompt")

    return prompt_context, series_list


def _resolve_connection_id(series: series.Series, connection_id: str | None):
    return clients.get_canonical_bq_connection_id(
        connection_id or series._session._bq_connection,
        series._session._project,
        series._session._location,
    )
```
@@ -0,0 +1,47 @@
```python
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import dataclasses
from typing import ClassVar, Literal, Tuple

import pandas as pd
import pyarrow as pa

from bigframes import dtypes
from bigframes.operations import base_ops


@dataclasses.dataclass(frozen=True)
class AIGenerateBool(base_ops.NaryOp):
    name: ClassVar[str] = "ai_generate_bool"

    # None values are placeholders for column references.
    prompt_context: Tuple[str | None, ...]
    connection_id: str
    endpoint: str | None
    request_type: Literal["dedicated", "shared", "unspecified"]
    model_params: str | None

    def output_type(self, *input_types: dtypes.ExpressionType) -> dtypes.ExpressionType:
        return pd.ArrowDtype(
            pa.struct(
                (
                    pa.field("result", pa.bool_()),
                    pa.field("full_response", pa.string()),
                    pa.field("status", pa.string()),
                )
            )
        )
```
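To see what this output type looks like without running the operator, the same ArrowDtype can be built by hand. A standalone sketch (it only needs pandas and pyarrow, and the `.struct` accessor assumes a recent pandas, 2.2 or later):

```python
import pandas as pd
import pyarrow as pa

# The same struct dtype the operator reports for its output column.
dtype = pd.ArrowDtype(
    pa.struct(
        [
            pa.field("result", pa.bool_()),
            pa.field("full_response", pa.string()),
            pa.field("status", pa.string()),
        ]
    )
)

# A toy row shaped like one ai_generate_bool result.
s = pd.Series([{"result": True, "full_response": "{}", "status": ""}], dtype=dtype)

# Extract a single struct field, as callers do on the real result Series.
print(s.struct.field("result"))
```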
@@ -0,0 +1,13 @@
```python
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
```
@@ -0,0 +1,35 @@
```python
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pandas as pd
import pandas.testing

import bigframes.bigquery as bbq


def test_ai_generate_bool_multi_model(session):
    df = session.from_glob_path(
        "gs://bigframes-dev-testing/a_multimodel/images/*", name="image"
    )

    result = bbq.ai_generate_bool((df["image"], " contains an animal")).struct.field(
        "result"
    )

    pandas.testing.assert_series_equal(
        result.to_pandas(),
        pd.Series([True, True, False, False, False], name="result"),
        check_dtype=False,
        check_index=False,
    )
```
@@ -0,0 +1,37 @@
```python
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pandas as pd
import pandas.testing

import bigframes.bigquery as bbq
import bigframes.pandas as bpd


def test_ai_generate_bool(session):
    s1 = bpd.Series(["apple", "bear"], session=session)
    s2 = bpd.Series(["fruit", "tree"], session=session)
    prompt = (s1, " is a ", s2)
    model_params = {"generation_config": {"thinking_config": {"thinking_budget": 0}}}

    result = bbq.ai_generate_bool(
        prompt, endpoint="gemini-2.5-flash", model_params=model_params
    ).struct.field("result")

    pandas.testing.assert_series_equal(
        result.to_pandas(),
        pd.Series([True, False], name="result"),
        check_dtype=False,
        check_index=False,
    )
```