diff --git a/bigframes/core/blocks.py b/bigframes/core/blocks.py
index 07d7e4c45b..74caff515c 100644
--- a/bigframes/core/blocks.py
+++ b/bigframes/core/blocks.py
@@ -30,6 +30,7 @@ import textwrap
 import typing
 from typing import (
+    Callable,
     Iterable,
     Iterator,
     List,
@@ -679,6 +680,7 @@ def to_pandas_batches(
         page_size: Optional[int] = None,
         max_results: Optional[int] = None,
         allow_large_results: Optional[bool] = None,
+        callback: Callable = lambda _: None,
     ) -> Iterator[pd.DataFrame]:
         """Download results one message at a time.
 
@@ -696,6 +698,7 @@ def to_pandas_batches(
                 promise_under_10gb=under_10gb,
                 ordered=True,
             ),
+            callback=callback,
         )
 
         # To reduce the number of edge cases to consider when working with the
diff --git a/bigframes/dataframe.py b/bigframes/dataframe.py
index f9de117b29..8f4e0d7319 100644
--- a/bigframes/dataframe.py
+++ b/bigframes/dataframe.py
@@ -1885,6 +1885,21 @@ def to_pandas_batches(
             allow_large_results=allow_large_results,
         )
 
+    def _to_pandas_batches_colab(
+        self,
+        page_size: Optional[int] = None,
+        max_results: Optional[int] = None,
+        *,
+        allow_large_results: Optional[bool] = None,
+        callback: Callable = lambda _: None,
+    ) -> Iterable[pandas.DataFrame]:
+        return self._block.to_pandas_batches(
+            page_size=page_size,
+            max_results=max_results,
+            allow_large_results=allow_large_results,
+            callback=callback,
+        )
+
     def _compute_dry_run(self) -> bigquery.QueryJob:
         _, query_job = self._block._compute_dry_run()
         return query_job
diff --git a/bigframes/display/anywidget.py b/bigframes/display/anywidget.py
index 5a20ddcb7f..b79085f85a 100644
--- a/bigframes/display/anywidget.py
+++ b/bigframes/display/anywidget.py
@@ -23,6 +23,7 @@ import pandas as pd
 
 import bigframes
+import bigframes.dataframe
 import bigframes.display.html
 
 # anywidget and traitlets are optional dependencies. We don't want the import of this
@@ -73,7 +74,7 @@ def __init__(self, dataframe: bigframes.dataframe.DataFrame):
             initial_page_size = bigframes.options.display.max_rows
 
         # Initialize data fetching attributes.
-        self._batches = dataframe.to_pandas_batches(page_size=initial_page_size)
+        self._batches = dataframe._to_pandas_batches_colab(page_size=initial_page_size, callback=self._update_progress)
 
         # set traitlets properties that trigger observers
         self.page_size = initial_page_size
@@ -100,6 +101,7 @@ def _css(self):
     page = traitlets.Int(0).tag(sync=True)
     page_size = traitlets.Int(25).tag(sync=True)
     row_count = traitlets.Int(0).tag(sync=True)
+    progress_html = traitlets.Unicode().tag(sync=True)
     table_html = traitlets.Unicode().tag(sync=True)
 
     @traitlets.validate("page")
@@ -145,6 +147,10 @@ def _validate_page_size(self, proposal: Dict[str, Any]) -> int:
         max_page_size = 1000
         return min(value, max_page_size)
 
+    def _update_progress(self, event):
+        # TODO: use formatting helpers here.
+        self.progress_html = repr(event)
+
     def _get_next_batch(self) -> bool:
         """
         Gets the next batch of data from the generator and appends to cache.
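For review context, a minimal sketch of the callback contract the new `_to_pandas_batches_colab` plumbing expects. The `log_progress` name is illustrative only and not part of this change:

```python
# Hypothetical caller-side callback: any callable taking a single event
# argument works, and the default `lambda _: None` makes it a no-op.
def log_progress(event) -> None:
    # Events are whatever the BigQuery client emits; the widget above
    # currently renders repr(event) into progress_html (see its TODO about
    # switching to formatting helpers).
    print(f"progress: {event!r}")

# Widget-internal usage (sketch):
# batches = dataframe._to_pandas_batches_colab(page_size=25, callback=log_progress)
```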
@@ -180,7 +186,9 @@ def _cached_data(self) -> pd.DataFrame: def _reset_batches_for_new_page_size(self): """Reset the batch iterator when page size changes.""" - self._batches = self._dataframe.to_pandas_batches(page_size=self.page_size) + self._batches = self._dataframe._to_pandas_batches_colab( + page_size=self.page_size, callback=self._update_progress + ) self._cached_batches = [] self._batch_iter = None self._all_data_loaded = False diff --git a/bigframes/display/table_widget.js b/bigframes/display/table_widget.js index 6b4d99ff28..4356c79faa 100644 --- a/bigframes/display/table_widget.js +++ b/bigframes/display/table_widget.js @@ -19,11 +19,13 @@ const ModelProperty = { PAGE_SIZE: "page_size", ROW_COUNT: "row_count", TABLE_HTML: "table_html", + PROGRESS_HTML: "progress_html", }; const Event = { CHANGE: "change", CHANGE_TABLE_HTML: `change:${ModelProperty.TABLE_HTML}`, + CHANGE_PROGRESS_HTML: `change:${ModelProperty.PROGRESS_HTML}`, CLICK: "click", }; @@ -39,6 +41,7 @@ function render({ model, el }) { el.classList.add("bigframes-widget"); // Structure + const progressContainer = document.createElement("div"); const tableContainer = document.createElement("div"); const footer = document.createElement("div"); @@ -57,6 +60,7 @@ function render({ model, el }) { const pageSizeSelect = document.createElement("select"); // Add CSS classes + progressContainer.classList.add("progress-container"); tableContainer.classList.add("table-container"); footer.classList.add("footer"); paginationContainer.classList.add("pagination"); @@ -119,6 +123,13 @@ function render({ model, el }) { } } + /** Updates the HTML in the progress container. */ + function handleProgressHTMLChange() { + // Note: Using innerHTML is safe here because the content is generated + // by a trusted backend (formatting_helpers). + progressContainer.innerHTML = model.get(ModelProperty.PROGRESS_HTML); + } + /** Updates the HTML in the table container and refreshes button states. */ function handleTableHTMLChange() { // Note: Using innerHTML is safe here because the content is generated @@ -137,6 +148,7 @@ function render({ model, el }) { } }); model.on(Event.CHANGE_TABLE_HTML, handleTableHTMLChange); + model.on(Event.CHANGE_PROGRESS_HTML, handleProgressHTMLChange); // Assemble the DOM paginationContainer.appendChild(prevPage); @@ -150,6 +162,7 @@ function render({ model, el }) { footer.appendChild(paginationContainer); footer.appendChild(pageSizeContainer); + el.appendChild(progressContainer); el.appendChild(tableContainer); el.appendChild(footer); diff --git a/bigframes/formatting_helpers.py b/bigframes/formatting_helpers.py index 48afb4fdbd..2c2318dfed 100644 --- a/bigframes/formatting_helpers.py +++ b/bigframes/formatting_helpers.py @@ -13,15 +13,17 @@ # limitations under the License. 
"""Shared helper functions for formatting jobs related info.""" -# TODO(orrbradford): cleanup up typings and documenttion in this file + +from __future__ import annotations import datetime import random -from typing import Any, Optional, Type, Union +from typing import Any, Callable, Optional, Type, Union import bigframes_vendored.constants as constants import google.api_core.exceptions as api_core_exceptions import google.cloud.bigquery as bigquery +import google.cloud.bigquery._job_helpers import humanize import IPython import IPython.display as display @@ -40,6 +42,45 @@ } +def create_progress_bar_callback( + *, + progress_bar: Optional[str] = None, + callback: Callable = lambda _: None, +) -> Callable: + if progress_bar == "auto": + progress_bar = "notebook" if in_ipython() else "terminal" + + if progress_bar == "notebook": + loading_bar = display.HTML("") + display_id = str(random.random()) + display.display(loading_bar, display_id=display_id) + + def outer_callback(event): + callback(event) + display.update_display( + display.HTML(get_query_job_loading_html(event)), + display_id=display_id, + ) + + elif progress_bar == "terminal": + previous_bar_text = "" + + def outer_callback(event): + nonlocal previous_bar_text + + callback(event) + + bar_text = get_query_job_loading_string(event) + if bar_text != previous_bar_text: + print(bar_text) + previous_bar_text = bar_text + + else: + outer_callback = callback + + return outer_callback + + def add_feedback_link( exception: Union[ api_core_exceptions.RetryError, api_core_exceptions.GoogleAPICallError @@ -123,7 +164,7 @@ def wait_for_query_job( query_job: bigquery.QueryJob, max_results: Optional[int] = None, page_size: Optional[int] = None, - progress_bar: Optional[str] = None, + callback: Callable = lambda _: None, ) -> bigquery.table.RowIterator: """Return query results. Displays a progress bar while the query is running Args: @@ -138,46 +179,57 @@ def wait_for_query_job( Returns: A row iterator over the query results. """ - if progress_bar == "auto": - progress_bar = "notebook" if in_ipython() else "terminal" - try: - if progress_bar == "notebook": - display_id = str(random.random()) - loading_bar = display.HTML(get_query_job_loading_html(query_job)) - display.display(loading_bar, display_id=display_id) - query_result = query_job.result( - max_results=max_results, page_size=page_size + callback( + # DONOTSUBMIT: we should create our own events. + google.cloud.bigquery._job_helpers.QueryReceivedEvent( + billing_project=query_job.project, + location=query_job.location, + job_id=query_job.job_id, + statement_type=query_job.statement_type, + state=query_job.state, + query_plan=query_job.query_plan, + created=query_job.created, + started=query_job.started, + ended=query_job.ended, ) - query_job.reload() - display.update_display( - display.HTML(get_query_job_loading_html(query_job)), - display_id=display_id, - ) - elif progress_bar == "terminal": - initial_loading_bar = get_query_job_loading_string(query_job) - print(initial_loading_bar) - query_result = query_job.result( - max_results=max_results, page_size=page_size - ) - query_job.reload() - if initial_loading_bar != get_query_job_loading_string(query_job): - print(get_query_job_loading_string(query_job)) - else: - # No progress bar. - query_result = query_job.result( - max_results=max_results, page_size=page_size + ) + # TODO(tswast): Add a timeout so that progress bars can make updates as + # the query stats come int. 
+ # TODO(tswast): Listen for cancellation on the callback (or maybe + # callbacks should just raise KeyboardInterrupt like IPython does?). + query_results = query_job.result( + page_size=page_size, + max_results=max_results, + ) + callback( + # DONOTSUBMIT: we should create our own events. + google.cloud.bigquery._job_helpers.QueryFinishedEvent( + billing_project=query_job.project, + location=query_results.location, + query_id=query_results.query_id, + job_id=query_results.job_id, + total_rows=query_results.total_rows, + total_bytes_processed=query_results.total_bytes_processed, + slot_millis=query_results.slot_millis, + destination=query_job.destination, + created=query_job.created, + started=query_job.started, + ended=query_job.ended, ) - query_job.reload() - return query_result + ) + return query_results except api_core_exceptions.RetryError as exc: + # TODO: turn this into a callback event, too. add_feedback_link(exc) raise except api_core_exceptions.GoogleAPICallError as exc: + # TODO: turn this into a callback event, too. add_feedback_link(exc) raise except KeyboardInterrupt: query_job.cancel() + # TODO: turn this into a callback event, too. print( f"Requested cancellation for {query_job.job_type.capitalize()}" f" job {query_job.job_id} in location {query_job.location}..." diff --git a/bigframes/pandas/io/api.py b/bigframes/pandas/io/api.py index 483bc5e530..d2e4e6658f 100644 --- a/bigframes/pandas/io/api.py +++ b/bigframes/pandas/io/api.py @@ -273,7 +273,8 @@ def _try_read_gbq_colab_sessionless_dry_run( with _default_location_lock: if not config.options.bigquery._session_started: return _run_read_gbq_colab_sessionless_dry_run( - query, pyformat_args=pyformat_args + query, + pyformat_args=pyformat_args, ) # Explicitly return None to indicate that we didn't run the dry run query. @@ -305,6 +306,7 @@ def _read_gbq_colab( *, pyformat_args: Optional[Dict[str, Any]] = None, dry_run: bool = False, + callback: Callable = lambda _: None, ) -> bigframes.dataframe.DataFrame | pandas.Series: """A Colab-specific version of read_gbq. @@ -319,6 +321,8 @@ def _read_gbq_colab( dry_run (bool): If True, estimates the query results size without returning data. The return will be a pandas Series with query metadata. + callback (Callable): + A callback function used by bigframes to report query progress. Returns: Union[bigframes.dataframe.DataFrame, pandas.Series]: @@ -364,6 +368,7 @@ def _read_gbq_colab( query_or_table, pyformat_args=pyformat_args, dry_run=dry_run, + callback=callback, ) diff --git a/bigframes/session/__init__.py b/bigframes/session/__init__.py index df67e64e9e..42526a0800 100644 --- a/bigframes/session/__init__.py +++ b/bigframes/session/__init__.py @@ -503,6 +503,7 @@ def _read_gbq_colab( *, pyformat_args: Optional[Dict[str, Any]] = None, dry_run: bool = False, + callback: Callable = lambda _: None, ) -> Union[dataframe.DataFrame, pandas.Series]: """A version of read_gbq that has the necessary default values for use in colab integrations. @@ -519,6 +520,11 @@ def _read_gbq_colab( instead. Note: unlike read_gbq / read_gbq_query, even if set to None, this function always assumes {var} refers to a variable that is supposed to be supplied in this dictionary. + dry_run (bool): + If True, estimates the query results size without returning data. + The return will be a pandas Series with query metadata. + callback (Callable): + A callback function used by bigframes to report query progress. 
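+            Events passed to the callback are currently the query lifecycle
+            events defined by the google-cloud-bigquery client (for example,
+            QuerySentEvent and QueryFinishedEvent); see the DONOTSUBMIT notes
+            in formatting_helpers.py about replacing these with
+            bigframes-owned event types.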
""" if pyformat_args is None: pyformat_args = {} @@ -538,6 +544,7 @@ def _read_gbq_colab( force_total_order=False, dry_run=typing.cast(Union[Literal[False], Literal[True]], dry_run), allow_large_results=allow_large_results, + callback=callback, ) @overload diff --git a/bigframes/session/_io/bigquery/__init__.py b/bigframes/session/_io/bigquery/__init__.py index 83f63e8b9a..64e968c0ed 100644 --- a/bigframes/session/_io/bigquery/__init__.py +++ b/bigframes/session/_io/bigquery/__init__.py @@ -22,7 +22,17 @@ import textwrap import types import typing -from typing import Dict, Iterable, Literal, Mapping, Optional, overload, Tuple, Union +from typing import ( + Callable, + Dict, + Iterable, + Literal, + Mapping, + Optional, + overload, + Tuple, + Union, +) import bigframes_vendored.google_cloud_bigquery.retry as third_party_gcb_retry import bigframes_vendored.pandas.io.gbq as third_party_pandas_gbq @@ -268,6 +278,38 @@ def start_query_with_client( ... +@overload +def start_query_with_client( + bq_client: bigquery.Client, + sql: str, + *, + job_config: bigquery.QueryJobConfig, + location: Optional[str], + project: Optional[str], + timeout: Optional[float], + metrics: Optional[bigframes.session.metrics.ExecutionMetrics], + query_with_job: Literal[False], + callback: Callable = ..., +) -> Tuple[bigquery.table.RowIterator, Optional[bigquery.QueryJob]]: + ... + + +@overload +def start_query_with_client( + bq_client: bigquery.Client, + sql: str, + *, + job_config: bigquery.QueryJobConfig, + location: Optional[str], + project: Optional[str], + timeout: Optional[float], + metrics: Optional[bigframes.session.metrics.ExecutionMetrics], + query_with_job: Literal[True], + callback: Callable = ..., +) -> Tuple[bigquery.table.RowIterator, bigquery.QueryJob]: + ... + + @overload def start_query_with_client( bq_client: bigquery.Client, @@ -315,23 +357,32 @@ def start_query_with_client( # https://github.com/googleapis/python-bigquery/pull/2256 merged, likely # version 3.36.0 or later. job_retry: google.api_core.retry.Retry = third_party_gcb_retry.DEFAULT_JOB_RETRY, + callback: Callable = lambda _: None, ) -> Tuple[bigquery.table.RowIterator, Optional[bigquery.QueryJob]]: """ Starts query job and waits for results. """ + opts = bigframes.options.display + progress_callback = formatting_helpers.create_progress_bar_callback( + progress_bar=opts.progress_bar, + callback=callback, + ) + try: # Note: Ensure no additional labels are added to job_config after this # point, as `add_and_trim_labels` ensures the label count does not # exceed MAX_LABELS_COUNT. add_and_trim_labels(job_config) if not query_with_job: - results_iterator = bq_client.query_and_wait( + # DONOTSUBMIT: we should create our own events for callback. 
+            results_iterator = bq_client._query_and_wait_bigframes(
                 sql,
                 job_config=job_config,
                 location=location,
                 project=project,
                 api_timeout=timeout,
                 job_retry=job_retry,
+                callback=progress_callback,
             )
             if metrics is not None:
                 metrics.count_job_stats(row_iterator=results_iterator)
@@ -350,11 +401,11 @@
                 ex.message += CHECK_DRIVE_PERMISSIONS
             raise
 
-    opts = bigframes.options.display
     if opts.progress_bar is not None and not query_job.configuration.dry_run:
         results_iterator = formatting_helpers.wait_for_query_job(
             query_job,
-            progress_bar=opts.progress_bar,
+            callback=progress_callback,
         )
     else:
         results_iterator = query_job.result()
diff --git a/bigframes/session/bq_caching_executor.py b/bigframes/session/bq_caching_executor.py
index b428cd646c..60de87e70e 100644
--- a/bigframes/session/bq_caching_executor.py
+++ b/bigframes/session/bq_caching_executor.py
@@ -16,7 +16,7 @@
 
 import math
 import threading
-from typing import Literal, Mapping, Optional, Sequence, Tuple
+from typing import Callable, Literal, Mapping, Optional, Sequence, Tuple
 import warnings
 import weakref
 
@@ -186,13 +186,18 @@ def execute(
         self,
         array_value: bigframes.core.ArrayValue,
         execution_spec: ex_spec.ExecutionSpec,
+        *,
+        callback: Callable = lambda _: None,
     ) -> executor.ExecuteResult:
         # TODO: Support export jobs in combination with semi executors
         if execution_spec.destination_spec is None:
             plan = self.prepare_plan(array_value.node, target="simplify")
             for exec in self._semi_executors:
                 maybe_result = exec.execute(
-                    plan, ordered=execution_spec.ordered, peek=execution_spec.peek
+                    plan,
+                    ordered=execution_spec.ordered,
+                    peek=execution_spec.peek,
+                    callback=callback,
                 )
                 if maybe_result:
                     return maybe_result
@@ -203,7 +208,9 @@ def execute(
                     "Ordering and peeking not supported for gbq export"
                 )
             # separate path for export_gbq, as it has all sorts of annoying logic, such as possibly running as dml
-            return self._export_gbq(array_value, execution_spec.destination_spec)
+            return self._export_gbq(
+                array_value, execution_spec.destination_spec, callback=callback
+            )
 
         result = self._execute_plan_gbq(
             array_value.node,
@@ -213,6 +220,7 @@ def execute(
             if isinstance(execution_spec.destination_spec, ex_spec.CacheSpec)
             else None,
             must_create_table=not execution_spec.promise_under_10gb,
+            callback=callback,
         )
         # post steps: export
         if isinstance(execution_spec.destination_spec, ex_spec.GcsOutputSpec):
@@ -221,7 +229,10 @@
         return result
 
     def _export_result_gcs(
-        self, result: executor.ExecuteResult, gcs_export_spec: ex_spec.GcsOutputSpec
+        self,
+        result: executor.ExecuteResult,
+        gcs_export_spec: ex_spec.GcsOutputSpec,
+        callback: Callable = lambda _: None,
     ):
         query_job = result.query_job
         assert query_job is not None
@@ -242,6 +253,7 @@ def _export_result_gcs(
             location=None,
             timeout=None,
             query_with_job=True,
+            callback=callback,
         )
 
     def _maybe_find_existing_table(
@@ -266,7 +278,10 @@ def _maybe_find_existing_table(
         return None
 
     def _export_gbq(
-        self, array_value: bigframes.core.ArrayValue, spec: ex_spec.TableOutputSpec
+        self,
+        array_value: bigframes.core.ArrayValue,
+        spec: ex_spec.TableOutputSpec,
+        callback: Callable = lambda _: None,
     ) -> executor.ExecuteResult:
         """
         Export the ArrayValue to an existing BigQuery table.
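The `callback` parameter threaded through these executor methods follows a single convention. A minimal sketch of why the no-op default works, with illustrative names only:

```python
from typing import Any, Callable

# Sketch: a no-op default callable lets every call site invoke the callback
# unconditionally, with no `if callback is not None` guards.
def execute(plan: Any, *, callback: Callable[[Any], None] = lambda _: None) -> str:
    callback({"stage": "started"})   # safe even when no callback was given
    result = f"ran {plan!r}"         # stand-in for real execution
    callback({"stage": "finished"})
    return result

print(execute("my-plan"))                  # no callback: events are dropped
print(execute("my-plan", callback=print))  # events printed as they occur
```

Making the parameter keyword-only (after `*`) mirrors the hunks above and keeps existing positional call sites valid.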
@@ -309,6 +324,7 @@ def _export_gbq( row_iter, query_job = self._run_execute_query( sql=sql, job_config=job_config, + callback=callback, ) has_timedelta_col = any( @@ -372,6 +388,7 @@ def _run_execute_query( sql: str, job_config: Optional[bq_job.QueryJobConfig] = None, query_with_job: bool = True, + callback: Callable = lambda _: None, ) -> Tuple[bq_table.RowIterator, Optional[bigquery.QueryJob]]: """ Starts BigQuery query job and waits for results. @@ -397,6 +414,7 @@ def _run_execute_query( location=None, timeout=None, query_with_job=True, + callback=callback, ) else: return bq_io.start_query_with_client( @@ -408,6 +426,7 @@ def _run_execute_query( location=None, timeout=None, query_with_job=False, + callback=callback, ) except google.api_core.exceptions.BadRequest as e: @@ -587,6 +606,7 @@ def _execute_plan_gbq( peek: Optional[int] = None, cache_spec: Optional[ex_spec.CacheSpec] = None, must_create_table: bool = True, + callback: Callable = lambda _: None, ) -> executor.ExecuteResult: """Just execute whatever plan as is, without further caching or decomposition.""" # TODO(swast): plumb through the api_name of the user-facing api that @@ -637,6 +657,7 @@ def _execute_plan_gbq( sql=compiled.sql, job_config=job_config, query_with_job=(destination_table is not None), + callback=callback, ) table_info: Optional[bigquery.Table] = None diff --git a/bigframes/session/direct_gbq_execution.py b/bigframes/session/direct_gbq_execution.py index ff91747a62..f09c31f92b 100644 --- a/bigframes/session/direct_gbq_execution.py +++ b/bigframes/session/direct_gbq_execution.py @@ -13,7 +13,7 @@ # limitations under the License. from __future__ import annotations -from typing import Literal, Optional, Tuple +from typing import Callable, Literal, Optional, Tuple from google.cloud import bigquery import google.cloud.bigquery.job as bq_job @@ -45,6 +45,8 @@ def execute( plan: nodes.BigFrameNode, ordered: bool, peek: Optional[int] = None, + *, + callback: Callable = lambda _: None, ) -> executor.ExecuteResult: """Just execute whatever plan as is, without further caching or decomposition.""" # TODO(swast): plumb through the api_name of the user-facing api that @@ -69,6 +71,7 @@ def _run_execute_query( self, sql: str, job_config: Optional[bq_job.QueryJobConfig] = None, + callback: Callable = lambda _: None, ) -> Tuple[bq_table.RowIterator, Optional[bigquery.QueryJob]]: """ Starts BigQuery query job and waits for results. @@ -82,4 +85,5 @@ def _run_execute_query( timeout=None, metrics=None, query_with_job=False, + callback=callback, ) diff --git a/bigframes/session/executor.py b/bigframes/session/executor.py index 748b10647a..3ac580e773 100644 --- a/bigframes/session/executor.py +++ b/bigframes/session/executor.py @@ -18,7 +18,7 @@ import dataclasses import functools import itertools -from typing import Iterator, Literal, Optional, Union +from typing import Callable, Iterator, Literal, Optional, Union from google.cloud import bigquery import pandas as pd @@ -153,6 +153,8 @@ def execute( self, array_value: bigframes.core.ArrayValue, execution_spec: ex_spec.ExecutionSpec, + *, + callback: Callable = lambda _: None, ) -> ExecuteResult: """ Execute the ArrayValue. 
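Before the loader changes, a sketch of how the wrapper from the earlier `formatting_helpers` hunk composes with a user callback. The `events` list is illustrative; the behavior follows the `create_progress_bar_callback` code above:

```python
from bigframes.formatting_helpers import create_progress_bar_callback

# With progress_bar=None the wrapper returns the user callback unchanged;
# with "notebook" or "terminal" it also renders a progress bar per event.
events = []
progress_callback = create_progress_bar_callback(
    progress_bar=None,
    callback=events.append,
)
progress_callback("query-sent")  # forwarded to the user callback as-is
assert events == ["query-sent"]
```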
diff --git a/bigframes/session/loader.py b/bigframes/session/loader.py index 49b1195235..d0c7ea346c 100644 --- a/bigframes/session/loader.py +++ b/bigframes/session/loader.py @@ -22,6 +22,7 @@ import os import typing from typing import ( + Callable, cast, Dict, Generator, @@ -899,6 +900,7 @@ def read_gbq_query( # type: ignore[overload-overlap] dry_run: Literal[False] = ..., force_total_order: Optional[bool] = ..., allow_large_results: bool, + callback: Callable = ..., ) -> dataframe.DataFrame: ... @@ -916,6 +918,7 @@ def read_gbq_query( dry_run: Literal[True] = ..., force_total_order: Optional[bool] = ..., allow_large_results: bool, + callback: Callable = ..., ) -> pandas.Series: ... @@ -932,6 +935,7 @@ def read_gbq_query( dry_run: bool = False, force_total_order: Optional[bool] = None, allow_large_results: bool, + callback: Callable = lambda _: None, ) -> dataframe.DataFrame | pandas.Series: configuration = _transform_read_gbq_configuration(configuration) @@ -1016,6 +1020,7 @@ def read_gbq_query( rows = self._start_query_with_job_optional( query, job_config=job_config, + callback=callback, ) # If there is a query job, fetch it so that we can get the @@ -1163,6 +1168,7 @@ def _start_query_with_job_optional( *, job_config: Optional[google.cloud.bigquery.QueryJobConfig] = None, timeout: Optional[float] = None, + callback: Callable = lambda _: None, ) -> google.cloud.bigquery.table.RowIterator: """ Starts BigQuery query with job optional and waits for results. @@ -1179,6 +1185,7 @@ def _start_query_with_job_optional( project=None, metrics=None, query_with_job=False, + callback=callback, ) return rows @@ -1188,6 +1195,7 @@ def _start_query_with_job( *, job_config: Optional[google.cloud.bigquery.QueryJobConfig] = None, timeout: Optional[float] = None, + callback: Callable = lambda _: None, ) -> bigquery.QueryJob: """ Starts BigQuery query job and waits for results. @@ -1204,6 +1212,7 @@ def _start_query_with_job( project=None, metrics=None, query_with_job=True, + callback=callback, ) return query_job diff --git a/bigframes/session/local_scan_executor.py b/bigframes/session/local_scan_executor.py index 65f088e8a1..d7618f1c7e 100644 --- a/bigframes/session/local_scan_executor.py +++ b/bigframes/session/local_scan_executor.py @@ -13,7 +13,7 @@ # limitations under the License. 
from __future__ import annotations -from typing import Optional +from typing import Callable, Optional from bigframes.core import bigframe_node, rewrite from bigframes.session import executor, semi_executor @@ -29,6 +29,8 @@ def execute( plan: bigframe_node.BigFrameNode, ordered: bool, peek: Optional[int] = None, + *, + callback: Callable = lambda _: None, ) -> Optional[executor.ExecuteResult]: reduced_result = rewrite.try_reduce_to_local_scan(plan) if not reduced_result: diff --git a/bigframes/session/polars_executor.py b/bigframes/session/polars_executor.py index d8df558fe4..d483a01d64 100644 --- a/bigframes/session/polars_executor.py +++ b/bigframes/session/polars_executor.py @@ -14,7 +14,7 @@ from __future__ import annotations import itertools -from typing import Optional, TYPE_CHECKING +from typing import Callable, Optional, TYPE_CHECKING import pyarrow as pa @@ -133,6 +133,8 @@ def execute( plan: bigframe_node.BigFrameNode, ordered: bool, peek: Optional[int] = None, + *, + callback: Callable = lambda _: None, ) -> Optional[executor.ExecuteResult]: if not self._can_execute(plan): return None diff --git a/bigframes/session/read_api_execution.py b/bigframes/session/read_api_execution.py index 037fde011f..0405a909dc 100644 --- a/bigframes/session/read_api_execution.py +++ b/bigframes/session/read_api_execution.py @@ -13,7 +13,7 @@ # limitations under the License. from __future__ import annotations -from typing import Any, Iterator, Optional +from typing import Any, Callable, Iterator, Optional from google.cloud import bigquery_storage_v1 import pyarrow as pa @@ -38,6 +38,8 @@ def execute( plan: bigframe_node.BigFrameNode, ordered: bool, peek: Optional[int] = None, + *, + callback: Callable = lambda _: None, ) -> Optional[executor.ExecuteResult]: adapt_result = self._try_adapt_plan(plan, ordered) if not adapt_result: diff --git a/bigframes/session/semi_executor.py b/bigframes/session/semi_executor.py index c41d7c96d3..126f41f637 100644 --- a/bigframes/session/semi_executor.py +++ b/bigframes/session/semi_executor.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import abc -from typing import Optional +from typing import Callable, Optional from bigframes.core import bigframe_node from bigframes.session import executor @@ -29,5 +29,7 @@ def execute( plan: bigframe_node.BigFrameNode, ordered: bool, peek: Optional[int] = None, + *, + callback: Callable = lambda _: None, ) -> Optional[executor.ExecuteResult]: raise NotImplementedError("execute not implemented for this executor") diff --git a/bigframes/testing/compiler_session.py b/bigframes/testing/compiler_session.py index 289b2600fd..77b218ef29 100644 --- a/bigframes/testing/compiler_session.py +++ b/bigframes/testing/compiler_session.py @@ -13,7 +13,7 @@ # limitations under the License. 
import dataclasses -import typing +from typing import Callable, Optional import bigframes.core import bigframes.core.compile.sqlglot as sqlglot @@ -29,7 +29,7 @@ class SQLCompilerExecutor(bigframes.session.executor.Executor): def to_sql( self, array_value: bigframes.core.ArrayValue, - offset_column: typing.Optional[str] = None, + offset_column: Optional[str] = None, ordered: bool = True, enable_cache: bool = False, ) -> str: @@ -46,5 +46,7 @@ def execute( self, array_value, execution_spec, + *, + callback: Callable = lambda _: None, ): raise NotImplementedError("SQLCompilerExecutor.execute not implemented") diff --git a/bigframes/testing/polars_session.py b/bigframes/testing/polars_session.py index 29eae20b7a..cd41881c33 100644 --- a/bigframes/testing/polars_session.py +++ b/bigframes/testing/polars_session.py @@ -13,7 +13,7 @@ # limitations under the License. import dataclasses -from typing import Union +from typing import Callable, Union import weakref import pandas @@ -37,6 +37,8 @@ def execute( self, array_value: bigframes.core.ArrayValue, execution_spec: bigframes.session.execution_spec.ExecutionSpec, + *, + callback: Callable = lambda _: None, ): """ Execute the ArrayValue, storing the result to a temporary session-owned table. diff --git a/notebooks/dataframes/anywidget_mode.ipynb b/notebooks/dataframes/anywidget_mode.ipynb index 617329ba65..e0622e383c 100644 --- a/notebooks/dataframes/anywidget_mode.ipynb +++ b/notebooks/dataframes/anywidget_mode.ipynb @@ -56,7 +56,8 @@ "outputs": [], "source": [ "bpd.options.bigquery.ordering_mode = \"partial\"\n", - "bpd.options.display.repr_mode = \"anywidget\"" + "bpd.options.display.repr_mode = \"anywidget\"\n", + "bpd.options.display.progress_bar = \"notebook\"" ] }, { @@ -75,9 +76,7 @@ "outputs": [ { "data": { - "text/html": [ - "Query job a643d120-4af9-44fc-ba3c-ed461cf1092b is DONE. 0 Bytes processed. Open Job" - ], + "text/html": [], "text/plain": [ "" ] @@ -86,15 +85,47 @@ "output_type": "display_data" }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "Computation deferred. 
Computation will process 171.4 MB\n" + "ename": "AttributeError", + "evalue": "'QuerySentEvent' object has no attribute 'state'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[4], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m df \u001b[38;5;241m=\u001b[39m \u001b[43mbpd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mread_gbq\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 2\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mSELECT state, name, number, year FROM bigquery-public-data.usa_names.usa_1910_2013\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\n\u001b[1;32m 3\u001b[0m \u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/src/github.com/googleapis/python-bigquery-dataframes-2/bigframes/core/log_adapter.py:175\u001b[0m, in \u001b[0;36mmethod_logger..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 172\u001b[0m _call_stack\u001b[38;5;241m.\u001b[39mappend(full_method_name)\n\u001b[1;32m 174\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 175\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mmethod\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 176\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mNotImplementedError\u001b[39;00m, \u001b[38;5;167;01mTypeError\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m 177\u001b[0m \u001b[38;5;66;03m# Log method parameters that are implemented in pandas but either missing (TypeError)\u001b[39;00m\n\u001b[1;32m 178\u001b[0m \u001b[38;5;66;03m# or not fully supported (NotImplementedError) in BigFrames.\u001b[39;00m\n\u001b[1;32m 179\u001b[0m \u001b[38;5;66;03m# Logging is currently supported only when we can access the bqclient through\u001b[39;00m\n\u001b[1;32m 180\u001b[0m \u001b[38;5;66;03m# _block.session.bqclient.\u001b[39;00m\n\u001b[1;32m 181\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(_call_stack) \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m1\u001b[39m:\n", + "File \u001b[0;32m~/src/github.com/googleapis/python-bigquery-dataframes-2/bigframes/pandas/io/api.py:226\u001b[0m, in \u001b[0;36mread_gbq\u001b[0;34m(query_or_table, index_col, columns, configuration, max_results, filters, use_cache, col_order, dry_run, allow_large_results)\u001b[0m\n\u001b[1;32m 212\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mread_gbq\u001b[39m(\n\u001b[1;32m 213\u001b[0m query_or_table: \u001b[38;5;28mstr\u001b[39m,\n\u001b[1;32m 214\u001b[0m \u001b[38;5;241m*\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 223\u001b[0m allow_large_results: Optional[\u001b[38;5;28mbool\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 224\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m bigframes\u001b[38;5;241m.\u001b[39mdataframe\u001b[38;5;241m.\u001b[39mDataFrame \u001b[38;5;241m|\u001b[39m pandas\u001b[38;5;241m.\u001b[39mSeries:\n\u001b[1;32m 225\u001b[0m _set_default_session_location_if_possible(query_or_table)\n\u001b[0;32m--> 226\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[43mglobal_session\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mwith_default_session\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 227\u001b[0m \u001b[43m \u001b[49m\u001b[43mbigframes\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msession\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mSession\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mread_gbq\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 228\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery_or_table\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 229\u001b[0m \u001b[43m \u001b[49m\u001b[43mindex_col\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mindex_col\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 230\u001b[0m \u001b[43m \u001b[49m\u001b[43mcolumns\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcolumns\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 231\u001b[0m \u001b[43m \u001b[49m\u001b[43mconfiguration\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconfiguration\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 232\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_results\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmax_results\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 233\u001b[0m \u001b[43m \u001b[49m\u001b[43mfilters\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfilters\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 234\u001b[0m \u001b[43m \u001b[49m\u001b[43muse_cache\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_cache\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 235\u001b[0m \u001b[43m \u001b[49m\u001b[43mcol_order\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcol_order\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 236\u001b[0m \u001b[43m \u001b[49m\u001b[43mdry_run\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdry_run\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 237\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_large_results\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mallow_large_results\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 238\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/src/github.com/googleapis/python-bigquery-dataframes-2/bigframes/core/global_session.py:114\u001b[0m, in \u001b[0;36mwith_default_session\u001b[0;34m(func_, *args, **kwargs)\u001b[0m\n\u001b[1;32m 113\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mwith_default_session\u001b[39m(func_: Callable[\u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m, _T], \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m _T:\n\u001b[0;32m--> 114\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc_\u001b[49m\u001b[43m(\u001b[49m\u001b[43mget_global_session\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/src/github.com/googleapis/python-bigquery-dataframes-2/bigframes/core/log_adapter.py:175\u001b[0m, in \u001b[0;36mmethod_logger..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 172\u001b[0m _call_stack\u001b[38;5;241m.\u001b[39mappend(full_method_name)\n\u001b[1;32m 174\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 175\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[43mmethod\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 176\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mNotImplementedError\u001b[39;00m, \u001b[38;5;167;01mTypeError\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m 177\u001b[0m \u001b[38;5;66;03m# Log method parameters that are implemented in pandas but either missing (TypeError)\u001b[39;00m\n\u001b[1;32m 178\u001b[0m \u001b[38;5;66;03m# or not fully supported (NotImplementedError) in BigFrames.\u001b[39;00m\n\u001b[1;32m 179\u001b[0m \u001b[38;5;66;03m# Logging is currently supported only when we can access the bqclient through\u001b[39;00m\n\u001b[1;32m 180\u001b[0m \u001b[38;5;66;03m# _block.session.bqclient.\u001b[39;00m\n\u001b[1;32m 181\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(_call_stack) \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m1\u001b[39m:\n", + "File \u001b[0;32m~/src/github.com/googleapis/python-bigquery-dataframes-2/bigframes/session/__init__.py:441\u001b[0m, in \u001b[0;36mSession.read_gbq\u001b[0;34m(self, query_or_table, index_col, columns, configuration, max_results, filters, use_cache, col_order, dry_run, allow_large_results)\u001b[0m\n\u001b[1;32m 438\u001b[0m allow_large_results \u001b[38;5;241m=\u001b[39m bigframes\u001b[38;5;241m.\u001b[39m_config\u001b[38;5;241m.\u001b[39moptions\u001b[38;5;241m.\u001b[39m_allow_large_results\n\u001b[1;32m 440\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m bf_io_bigquery\u001b[38;5;241m.\u001b[39mis_query(query_or_table):\n\u001b[0;32m--> 441\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_loader\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mread_gbq_query\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# type: ignore # for dry_run overload\u001b[39;49;00m\n\u001b[1;32m 442\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery_or_table\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 443\u001b[0m \u001b[43m \u001b[49m\u001b[43mindex_col\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mindex_col\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 444\u001b[0m \u001b[43m \u001b[49m\u001b[43mcolumns\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcolumns\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 445\u001b[0m \u001b[43m \u001b[49m\u001b[43mconfiguration\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconfiguration\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 446\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_results\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmax_results\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 447\u001b[0m \u001b[43m \u001b[49m\u001b[43muse_cache\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_cache\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 448\u001b[0m \u001b[43m \u001b[49m\u001b[43mfilters\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfilters\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 449\u001b[0m \u001b[43m \u001b[49m\u001b[43mdry_run\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdry_run\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 450\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_large_results\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mallow_large_results\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 451\u001b[0m \u001b[43m 
\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 452\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 453\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m configuration \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", + "File \u001b[0;32m~/src/github.com/googleapis/python-bigquery-dataframes-2/bigframes/session/loader.py:1020\u001b[0m, in \u001b[0;36mGbqDataLoader.read_gbq_query\u001b[0;34m(self, query, index_col, columns, configuration, max_results, use_cache, filters, dry_run, force_total_order, allow_large_results, callback)\u001b[0m\n\u001b[1;32m 1013\u001b[0m job_config \u001b[38;5;241m=\u001b[39m typing\u001b[38;5;241m.\u001b[39mcast(\n\u001b[1;32m 1014\u001b[0m bigquery\u001b[38;5;241m.\u001b[39mQueryJobConfig,\n\u001b[1;32m 1015\u001b[0m bigquery\u001b[38;5;241m.\u001b[39mQueryJobConfig\u001b[38;5;241m.\u001b[39mfrom_api_repr(configuration),\n\u001b[1;32m 1016\u001b[0m )\n\u001b[1;32m 1018\u001b[0m \u001b[38;5;66;03m# TODO(b/420984164): We may want to set a page_size here to limit\u001b[39;00m\n\u001b[1;32m 1019\u001b[0m \u001b[38;5;66;03m# the number of results in the first jobs.query response.\u001b[39;00m\n\u001b[0;32m-> 1020\u001b[0m rows \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_start_query_with_job_optional\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1021\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1022\u001b[0m \u001b[43m \u001b[49m\u001b[43mjob_config\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mjob_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1023\u001b[0m \u001b[43m \u001b[49m\u001b[43mcallback\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcallback\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1024\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1026\u001b[0m \u001b[38;5;66;03m# If there is a query job, fetch it so that we can get the\u001b[39;00m\n\u001b[1;32m 1027\u001b[0m \u001b[38;5;66;03m# statistics and destination table, if needed.\u001b[39;00m\n\u001b[1;32m 1028\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m rows\u001b[38;5;241m.\u001b[39mjob_id \u001b[38;5;129;01mand\u001b[39;00m rows\u001b[38;5;241m.\u001b[39mlocation \u001b[38;5;129;01mand\u001b[39;00m rows\u001b[38;5;241m.\u001b[39mproject:\n", + "File \u001b[0;32m~/src/github.com/googleapis/python-bigquery-dataframes-2/bigframes/session/loader.py:1179\u001b[0m, in \u001b[0;36mGbqDataLoader._start_query_with_job_optional\u001b[0;34m(self, sql, job_config, timeout, callback)\u001b[0m\n\u001b[1;32m 1173\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 1174\u001b[0m \u001b[38;5;124;03mStarts BigQuery query with job optional and waits for results.\u001b[39;00m\n\u001b[1;32m 1175\u001b[0m \n\u001b[1;32m 1176\u001b[0m \u001b[38;5;124;03mDo not execute dataframe through this API, instead use the executor.\u001b[39;00m\n\u001b[1;32m 1177\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 1178\u001b[0m job_config \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_prepare_job_config(job_config)\n\u001b[0;32m-> 1179\u001b[0m rows, _ \u001b[38;5;241m=\u001b[39m \u001b[43mbf_io_bigquery\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstart_query_with_client\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1180\u001b[0m \u001b[43m 
\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_bqclient\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1181\u001b[0m \u001b[43m \u001b[49m\u001b[43msql\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1182\u001b[0m \u001b[43m \u001b[49m\u001b[43mjob_config\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mjob_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1183\u001b[0m \u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtimeout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1184\u001b[0m \u001b[43m \u001b[49m\u001b[43mlocation\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 1185\u001b[0m \u001b[43m \u001b[49m\u001b[43mproject\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 1186\u001b[0m \u001b[43m \u001b[49m\u001b[43mmetrics\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 1187\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery_with_job\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 1188\u001b[0m \u001b[43m \u001b[49m\u001b[43mcallback\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcallback\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1189\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1190\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m rows\n", + "File \u001b[0;32m~/src/github.com/googleapis/python-bigquery-dataframes-2/bigframes/session/_io/bigquery/__init__.py:378\u001b[0m, in \u001b[0;36mstart_query_with_client\u001b[0;34m(bq_client, sql, job_config, location, project, timeout, metrics, query_with_job, job_retry, callback)\u001b[0m\n\u001b[1;32m 375\u001b[0m add_and_trim_labels(job_config)\n\u001b[1;32m 376\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m query_with_job:\n\u001b[1;32m 377\u001b[0m \u001b[38;5;66;03m# DONOTSUBMIT: we should create our own events for callback.\u001b[39;00m\n\u001b[0;32m--> 378\u001b[0m results_iterator \u001b[38;5;241m=\u001b[39m \u001b[43mbq_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_query_and_wait_bigframes\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 379\u001b[0m \u001b[43m \u001b[49m\u001b[43msql\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 380\u001b[0m \u001b[43m \u001b[49m\u001b[43mjob_config\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mjob_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 381\u001b[0m \u001b[43m \u001b[49m\u001b[43mlocation\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlocation\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 382\u001b[0m \u001b[43m \u001b[49m\u001b[43mproject\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mproject\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 383\u001b[0m \u001b[43m \u001b[49m\u001b[43mapi_timeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtimeout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 384\u001b[0m \u001b[43m \u001b[49m\u001b[43mjob_retry\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mjob_retry\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 385\u001b[0m \u001b[43m \u001b[49m\u001b[43mcallback\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mprogress_callback\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 386\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 387\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m metrics 
\u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 388\u001b[0m metrics\u001b[38;5;241m.\u001b[39mcount_job_stats(row_iterator\u001b[38;5;241m=\u001b[39mresults_iterator)\n", + "File \u001b[0;32m~/src/github.com/googleapis/python-bigquery-dataframes-2/venv/lib/python3.10/site-packages/google/cloud/bigquery/client.py:3707\u001b[0m, in \u001b[0;36mClient._query_and_wait_bigframes\u001b[0;34m(self, query, job_config, location, project, api_timeout, wait_timeout, retry, job_retry, page_size, max_results, callback)\u001b[0m\n\u001b[1;32m 3701\u001b[0m _verify_job_config_type(job_config, QueryJobConfig)\n\u001b[1;32m 3703\u001b[0m job_config \u001b[38;5;241m=\u001b[39m _job_helpers\u001b[38;5;241m.\u001b[39mjob_config_with_defaults(\n\u001b[1;32m 3704\u001b[0m job_config, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_default_query_job_config\n\u001b[1;32m 3705\u001b[0m )\n\u001b[0;32m-> 3707\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_job_helpers\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mquery_and_wait\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 3708\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3709\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3710\u001b[0m \u001b[43m \u001b[49m\u001b[43mjob_config\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mjob_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3711\u001b[0m \u001b[43m \u001b[49m\u001b[43mlocation\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlocation\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3712\u001b[0m \u001b[43m \u001b[49m\u001b[43mproject\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mproject\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3713\u001b[0m \u001b[43m \u001b[49m\u001b[43mapi_timeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mapi_timeout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3714\u001b[0m \u001b[43m \u001b[49m\u001b[43mwait_timeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mwait_timeout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3715\u001b[0m \u001b[43m \u001b[49m\u001b[43mretry\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mretry\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3716\u001b[0m \u001b[43m \u001b[49m\u001b[43mjob_retry\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mjob_retry\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3717\u001b[0m \u001b[43m \u001b[49m\u001b[43mpage_size\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpage_size\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3718\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_results\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmax_results\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3719\u001b[0m \u001b[43m \u001b[49m\u001b[43mcallback\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcallback\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3720\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/src/github.com/googleapis/python-bigquery-dataframes-2/venv/lib/python3.10/site-packages/google/cloud/bigquery/_job_helpers.py:627\u001b[0m, in \u001b[0;36mquery_and_wait\u001b[0;34m(client, query, job_config, location, project, api_timeout, wait_timeout, retry, job_retry, page_size, max_results, callback)\u001b[0m\n\u001b[1;32m 604\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m table\u001b[38;5;241m.\u001b[39mRowIterator(\n\u001b[1;32m 605\u001b[0m 
     "    client=client,\n",
     "    api_request=functools.partial(client._call_api, retry, timeout=api_timeout),\n",
     "(...)\n",
     "    ended=query_results.ended,\n",
     ")\n",
     "    626 if job_retry is not None:\n",
     "--> 627     return job_retry(do_query)()\n",
     "    628 else:\n",
     "    629     return do_query()\n",
     "\n",
     "File ~/src/github.com/googleapis/python-bigquery-dataframes-2/venv/lib/python3.10/site-packages/google/api_core/retry/retry_unary.py:294, in Retry.__call__.<locals>.retry_wrapped_func(*args, **kwargs)\n",
     "    290 target = functools.partial(func, *args, **kwargs)\n",
     "    291 sleep_generator = exponential_sleep_generator(\n",
     "    292     self._initial, self._maximum, multiplier=self._multiplier\n",
     "    293 )\n",
     "--> 294 return retry_target(\n",
     "    295     target,\n",
     "    296     self._predicate,\n",
     "    297     sleep_generator,\n",
     "    298     timeout=self._timeout,\n",
     "    299     on_error=on_error,\n",
     "    300 )\n",
     "\n",
     "File ~/src/github.com/googleapis/python-bigquery-dataframes-2/venv/lib/python3.10/site-packages/google/api_core/retry/retry_unary.py:156, in retry_target(target, predicate, sleep_generator, timeout, on_error, exception_factory, **kwargs)\n",
     "    152 # pylint: disable=broad-except\n",
     "    153 # This function explicitly must deal with broad exceptions.\n",
     "    154 except Exception as exc:\n",
     "    155     # defer to shared logic for handling errors\n",
     "--> 156     next_sleep = _retry_error_helper(\n",
     "    157         exc,\n",
     "    158         deadline,\n",
     "    159         sleep_iter,\n",
     "    160         error_list,\n",
     "    161         predicate,\n",
     "    162         on_error,\n",
     "    163         exception_factory,\n",
     "    164         timeout,\n",
     "    165     )\n",
     "    166     # if exception not raised, sleep before next attempt\n",
     "    167     time.sleep(next_sleep)\n",
     "\n",
     "File ~/src/github.com/googleapis/python-bigquery-dataframes-2/venv/lib/python3.10/site-packages/google/api_core/retry/retry_base.py:214, in _retry_error_helper(exc, deadline, sleep_iterator, error_list, predicate_fn, on_error_fn, exc_factory_fn, original_timeout)\n",
     "    208 if not predicate_fn(exc):\n",
     "    209     final_exc, source_exc = exc_factory_fn(\n",
     "    210         error_list,\n",
     "    211         RetryFailureReason.NON_RETRYABLE_ERROR,\n",
     "    212         original_timeout,\n",
     "    213     )\n",
     "--> 214     raise final_exc from source_exc\n",
     "    215 if on_error_fn is not None:\n",
     "    216     on_error_fn(exc)\n",
     "\n",
     "File ~/src/github.com/googleapis/python-bigquery-dataframes-2/venv/lib/python3.10/site-packages/google/api_core/retry/retry_unary.py:147, in retry_target(target, predicate, sleep_generator, timeout, on_error, exception_factory, **kwargs)\n",
     "    145 while True:\n",
     "    146     try:\n",
     "--> 147         result = target()\n",
     "    148         if inspect.isawaitable(result):\n",
     "    149             warnings.warn(_ASYNC_RETRY_WARNING)\n",
     "\n",
     "File ~/src/github.com/googleapis/python-bigquery-dataframes-2/venv/lib/python3.10/site-packages/google/cloud/bigquery/_job_helpers.py:533, in query_and_wait.<locals>.do_query()\n",
     "    530 span_attributes = {\"path\": path}\n",
     "    532 if \"dryRun\" not in request_body:\n",
     "--> 533     callback(\n",
     "    534         query_sent_factory(\n",
     "    535             query=query,\n",
     "    536             billing_project=project,\n",
     "    537             location=location,\n",
     "    538             job_id=None,\n",
     "    539             request_id=request_id,\n",
     "    540         )\n",
     "    541     )\n",
     "    543 # For easier testing, handle the retries ourselves.\n",
     "    544 if retry is not None:\n",
     "\n",
     "File ~/src/github.com/googleapis/python-bigquery-dataframes-2/bigframes/formatting_helpers.py:61, in create_progress_bar_callback.<locals>.outer_callback(event)\n",
     "    58 def outer_callback(event):\n",
     "    59     callback(event)\n",
     "    60     display.update_display(\n",
     "--> 61         display.HTML(get_query_job_loading_html(event)),\n",
     "    62         display_id=display_id,\n",
     "    63     )\n",
     "\n",
     "File ~/src/github.com/googleapis/python-bigquery-dataframes-2/bigframes/formatting_helpers.py:313, in get_query_job_loading_html(query_job)\n",
     "    305 def get_query_job_loading_html(query_job: bigquery.QueryJob):\n",
     "    306     \"\"\"Return progress bar html string\n",
     "    307     Args:\n",
     "    308         query_job (bigquery.QueryJob):\n",
     "(...)\n",
     "    311         Html string.\n",
     "    312     \"\"\"\n",
     "--> 313     return f\"\"\"Query job {query_job.job_id} is {query_job.state}. {get_bytes_processed_string(query_job.total_bytes_processed)}Open Job\"\"\"\n",
     "\n",
     "AttributeError: 'QuerySentEvent' object has no attribute 'state'"
    ]
   }
  ],
   "source": [
-    "df = bpd.read_gbq(\"bigquery-public-data.usa_names.usa_1910_2013\")\n",
+    "df = bpd.read_gbq(\n",
+    "    \"SELECT state, name, number, year FROM bigquery-public-data.usa_names.usa_1910_2013\"\n",
+    ")"
   ]
  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c447ca72-ca9a-48af-bc8f-caac8db4bd1a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
    "print(df)"
   ]
  },
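The traceback above pins down the failure mode: google-cloud-bigquery's `query_and_wait` now invokes the progress `callback` with typed events such as `QuerySentEvent`, while `get_query_job_loading_html` still assumes a `bigquery.QueryJob` with `state` and `total_bytes_processed` attributes. A minimal sketch of a type-tolerant callback, assuming only the attribute names visible in the traceback (`job_id`, `state`); `progress_callback` itself is hypothetical and not part of this change:

```python
def progress_callback(event) -> None:
    """Render a progress message from either a typed event or a QueryJob."""
    state = getattr(event, "state", None)
    if state is None:
        # Events emitted before a job exists (e.g. QuerySentEvent) carry
        # request metadata but no job state, so they need their own branch.
        print(f"Query progress: {type(event).__name__}")
    else:
        print(f"Query job {getattr(event, 'job_id', '?')} is {state}.")
```

Dispatching on the presence of `state` rather than on concrete event classes keeps the helper working even if later google-cloud-bigquery releases add more event types.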
\n", - "\n", - "
\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
titleviews
21911Google1414560
27669Google_Chrome962482
28394Google_Earth383566
29184Google_Maps205089
27251Google_Android99450
33900Google_search97665
31825Google_chrome78399
30204Google_Street_View71580
40798Image:Google_Chrome.png60746
35222Googleplex53848
\n", - "

10 rows × 2 columns

\n", - "[10 rows x 2 columns in total]" - ], - "text/plain": [ - " title views\n", - "21911 Google 1414560\n", - "27669 Google_Chrome 962482\n", - "28394 Google_Earth 383566\n", - "29184 Google_Maps 205089\n", - "27251 Google_Android 99450\n", - "33900 Google_search 97665\n", - "31825 Google_chrome 78399\n", - "30204 Google_Street_View 71580\n", - "40798 Image:Google_Chrome.png 60746\n", - "35222 Googleplex 53848\n", - "\n", - "[10 rows x 2 columns]" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# bq_df_sample[bq_df_sample.title.str.contains(r\"[Gg]oogle\")]\\\n", "# .groupby(['title'], as_index=False)['views'].sum(numeric_only=True)\\\n", @@ -660,7 +533,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 9, "metadata": { "id": "SvyXzkRl783u" }, @@ -686,7 +559,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 10, "metadata": { "id": "3QHQYlnoBLpt" }, @@ -712,7 +585,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 11, "metadata": { "id": "EDAaIwHpQCDZ" }, @@ -720,7 +593,7 @@ { "data": { "text/html": [ - "Load job 93903930-10b8-48b8-b41b-3da54917b281 is DONE. Open Job" + "Load job 1dea1612-cb7e-4227-b064-0587640f1169 is DONE. Open Job" ], "text/plain": [ "" @@ -747,7 +620,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 12, "metadata": { "id": "_gPD0Zn1Stdb" }, @@ -755,7 +628,7 @@ { "data": { "text/html": [ - "Query job 17f58b5c-88b2-4b26-8d0d-cc3d9a979a06 is DONE. 28.9 kB processed. Open Job" + "Query job 0b683cea-2689-482b-bfa2-c97813606fd7 is DONE. 28.9 kB processed. Open Job" ], "text/plain": [ "" @@ -796,53 +669,53 @@ " \n", " \n", " \n", - " 78\n", - " Chinstrap penguin (Pygoscelis antarctica)\n", + " 69\n", + " Adelie Penguin (Pygoscelis adeliae)\n", " Dream\n", - " 47.0\n", - " 17.3\n", - " 185\n", - " 3700\n", + " 37.6\n", + " 19.3\n", + " 181\n", + " 3300\n", " FEMALE\n", " \n", " \n", - " 130\n", + " 119\n", + " Chinstrap penguin (Pygoscelis antarctica)\n", + " Dream\n", + " 51.3\n", + " 18.2\n", + " 197\n", + " 3750\n", + " MALE\n", + " \n", + " \n", + " 204\n", " Adelie Penguin (Pygoscelis adeliae)\n", - " Biscoe\n", - " 40.5\n", - " 17.9\n", - " 187\n", - " 3200\n", - " FEMALE\n", + " Torgersen\n", + " 38.6\n", + " 21.2\n", + " 191\n", + " 3800\n", + " MALE\n", " \n", " \n", - " 84\n", + " 215\n", " Gentoo penguin (Pygoscelis papua)\n", " Biscoe\n", - " 49.1\n", - " 14.5\n", - " 212\n", - " 4625\n", + " 44.9\n", + " 13.3\n", + " 213\n", + " 5100\n", " FEMALE\n", " \n", " \n", - " 334\n", - " Adelie Penguin (Pygoscelis adeliae)\n", + " 239\n", + " Gentoo penguin (Pygoscelis papua)\n", " Biscoe\n", - " 38.2\n", - " 20.0\n", - " 190\n", - " 3900\n", - " MALE\n", - " \n", - " \n", - " 67\n", - " Chinstrap penguin (Pygoscelis antarctica)\n", - " Dream\n", - " 55.8\n", - " 19.8\n", - " 207\n", - " 4000\n", + " 53.4\n", + " 15.8\n", + " 219\n", + " 5500\n", " MALE\n", " \n", " \n", @@ -850,22 +723,22 @@ "" ], "text/plain": [ - " species island culmen_length_mm \\\n", - "78 Chinstrap penguin (Pygoscelis antarctica) Dream 47.0 \n", - "130 Adelie Penguin (Pygoscelis adeliae) Biscoe 40.5 \n", - "84 Gentoo penguin (Pygoscelis papua) Biscoe 49.1 \n", - "334 Adelie Penguin (Pygoscelis adeliae) Biscoe 38.2 \n", - "67 Chinstrap penguin (Pygoscelis antarctica) Dream 55.8 \n", + " species island culmen_length_mm \\\n", + "69 Adelie Penguin (Pygoscelis adeliae) Dream 37.6 \n", + "119 Chinstrap penguin (Pygoscelis 
+      "119  Chinstrap penguin (Pygoscelis antarctica)      Dream              51.3   \n",
+      "204        Adelie Penguin (Pygoscelis adeliae)  Torgersen              38.6   \n",
+      "215          Gentoo penguin (Pygoscelis papua)     Biscoe              44.9   \n",
+      "239          Gentoo penguin (Pygoscelis papua)     Biscoe              53.4   \n",
      "\n",
      "     culmen_depth_mm  flipper_length_mm  body_mass_g     sex  \n",
-      "78              17.3                185         3700  FEMALE  \n",
-      "130             17.9                187         3200  FEMALE  \n",
-      "84              14.5                212         4625  FEMALE  \n",
-      "334             20.0                190         3900    MALE  \n",
-      "67              19.8                207         4000    MALE  "
+      "69              19.3                181         3300  FEMALE  \n",
+      "119             18.2                197         3750    MALE  \n",
+      "204             21.2                191         3800    MALE  \n",
+      "215             13.3                213         5100  FEMALE  \n",
+      "239             15.8                219         5500    MALE  "
     ]
    },
-    "execution_count": 15,
+    "execution_count": 12,
    "metadata": {},
    "output_type": "execute_result"
   }
  ],
@@ -896,7 +769,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 16,
+   "execution_count": 13,
    "metadata": {
    "id": "ZSP7gt13QrQt"
    },
@@ -931,7 +804,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 17,
+   "execution_count": 14,
    "metadata": {
    "id": "oP1NIAmUBjop"
    },
@@ -939,7 +812,7 @@
    {
     "data": {
      "text/html": [
-      "Query job 55aa9cc4-29b6-4052-aae4-5499dc5f1168 is DONE. 28.9 kB processed. Open Job"
+      "Query job 0b2a65c5-3c83-4db0-bbe8-4b6c8a5cb24b is DONE. 57.9 kB processed. Open Job"
      ],
      "text/plain": [
       "<IPython.core.display.HTML object>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       "'bigframes-dev.birds.penguins'"
      ]
     },
-    "execution_count": 17,
+    "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
@@ -987,7 +860,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 18,
+   "execution_count": 15,
    "metadata": {
    "id": "IBuo-d6dWfsA"
    },
@@ -995,7 +868,7 @@
    {
     "data": {
      "text/html": [
-      "Query job 7b2ff811-1563-4ac4-9d21-69f87e8e85bc is DONE. 28.9 kB processed. Open Job"
+      "Query job c8e67c31-96dd-493f-bb5b-1c94957c7a12 is DONE. 28.9 kB processed. Open Job"
      ],
@@ -1036,53 +909,53 @@
      [... HTML table rows re-sampled; <tr>/<td> markup lost in extraction — the old and new rows appear in the text/plain diff below ...]
      "text/plain": [
-      "                                       species  island  culmen_length_mm  \\\n",
-      "12           Gentoo penguin (Pygoscelis papua)  Biscoe              42.7   \n",
-      "24           Gentoo penguin (Pygoscelis papua)  Biscoe              45.0   \n",
-      "62         Adelie Penguin (Pygoscelis adeliae)   Dream              38.8   \n",
-      "123  Chinstrap penguin (Pygoscelis antarctica)   Dream              42.5   \n",
-      "27         Adelie Penguin (Pygoscelis adeliae)   Dream              44.1   \n",
+      "                                       species     island  culmen_length_mm  \\\n",
+      "207  Chinstrap penguin (Pygoscelis antarctica)      Dream              42.5   \n",
+      "232        Adelie Penguin (Pygoscelis adeliae)  Torgersen              42.0   \n",
+      "241          Gentoo penguin (Pygoscelis papua)     Biscoe              49.1   \n",
+      "258          Gentoo penguin (Pygoscelis papua)     Biscoe              46.8   \n",
+      "59           Gentoo penguin (Pygoscelis papua)     Biscoe              49.9   \n",
      "\n",
      "     culmen_depth_mm  flipper_length_mm  body_mass_g     sex  \n",
-      "12              13.7                208         3950  FEMALE  \n",
-      "24              15.4                220         5050    MALE  \n",
-      "62              20.0                190         3950    MALE  \n",
-      "123             17.3                187         3350  FEMALE  \n",
-      "27              19.7                196         4400    MALE  "
+      "207             16.7                187         3350  FEMALE  \n",
+      "232             20.2                190         4250    <NA>  \n",
+      "241             15.0                228         5500    MALE  \n",
+      "258             14.3                215         4850  FEMALE  \n",
+      "59              16.1                213         5400    MALE  "
     ]
    },
-    "execution_count": 18,
+    "execution_count": 15,
    "metadata": {},
    "output_type": "execute_result"
   }
  ],
@@ -1147,7 +1020,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 19,
+   "execution_count": 16,
    "metadata": {
    "id": "6i6HkFJZa8na"
    },
@@ -1155,7 +1028,7 @@
    {
     "data": {
      "text/html": [
-      "Query job b396baed-6242-4478-9092-f5e86811b045 is DONE. 31.7 kB processed. Open Job"
+      "Query job 8966ef61-d2fc-40fb-92a4-419fb0613818 is DONE. 31.7 kB processed. Open Job"
      ],
@@ -1171,16 +1044,16 @@
      "279    3150\n",
      "34     3400\n",
      "96     3600\n",
-      "18     3800\n",
      "208    3950\n",
-      "310    3175\n",
+      "18     3800\n",
      "64     2850\n",
+      "310    3175\n",
      "118    3550\n",
      "2      3075\n",
      "Name: body_mass_g, dtype: Int64"
     ]
    },
-    "execution_count": 19,
+    "execution_count": 16,
    "metadata": {},
    "output_type": "execute_result"
   }
  ],
@@ -1200,7 +1073,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 20,
+   "execution_count": 17,
    "metadata": {
    "id": "YKwCW7Nsavap"
    },
@@ -1209,7 +1082,7 @@
    "name": "stdout",
    "output_type": "stream",
    "text": [
-    "average_body_mass: 4201.754385964913\n"
+    "average_body_mass: 4201.754385964911\n"
    ]
   }
  ],
@@ -1229,7 +1102,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 21,
+   "execution_count": 18,
    "metadata": {
    "id": "4PyKMR61-Mjy"
    },
@@ -1237,7 +1110,7 @@
    {
     "data": {
      "text/html": [
-      "Query job fef05ee2-9690-41a4-bd35-7cded77310f2 is DONE. 15.6 kB processed. Open Job"
+      "Query job ce7de21b-413c-4506-b700-f4bc6bacf100 is DONE. 15.6 kB processed. Open Job"
      ],
      "text/plain": [
       "<IPython.core.display.HTML object>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       [... unchanged three-row species listing; markup lost in extraction ...]
       "[3 rows x 1 columns]"
      ]
     },
-    "execution_count": 21,
+    "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
@@ -1333,7 +1206,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 22,
+   "execution_count": 19,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -1362,7 +1235,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 23,
+   "execution_count": 20,
    "metadata": {
    "id": "rSWTOG-vb2Fc"
    },
@@ -1370,7 +1243,19 @@
    {
     "data": {
      "text/html": [
-      "Query job c7b6c009-d2c4-4739-a6f8-5ef51e6b1851 is DONE. 0 Bytes processed. Open Job"
+      "Query job 6a3cedc2-01c1-41fd-b30f-b48e699c6ffb is DONE. 0 Bytes processed. Open Job"
      ],
      "text/plain": [
       "<IPython.core.display.HTML object>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
+   {
+    "data": {
+     "text/html": [
+      "Query job 48e16eba-16f0-48f7-9371-ff89c8e2f321 is DONE. 0 Bytes processed. Open Job"
Open Job" ], "text/plain": [ "" @@ -1401,7 +1286,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 22, "metadata": { "id": "6ejPXoyEQpWE" }, @@ -1410,8 +1295,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Cloud Function Name projects/bigframes-dev/locations/us-central1/functions/bigframes-sessiondf1983-1d02aa9bc80939ba72e7ff69e37e27c8\n", - "Remote Function Name bigframes-dev._f36a8f778c434a1ec421979eaa3bf562a8561e38.bigframes_sessiondf1983_1d02aa9bc80939ba72e7ff69e37e27c8\n" + "Cloud Function Name projects/bigframes-dev/locations/us-central1/functions/bigframes-session4dee1c-aa59468b9d6c757c1256e46c9f71ebe3\n", + "Remote Function Name bigframes-dev._63cfa399614a54153cc386c27d6c0c6fdb249f9e.bigframes_session4dee1c_aa59468b9d6c757c1256e46c9f71ebe3\n" ] } ], @@ -1433,7 +1318,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 23, "metadata": { "id": "NxSd9WZFcIji" }, @@ -1485,19 +1370,14 @@ " at_or_above_3500\n", " \n", " \n", - " 18\n", - " 3800\n", - " at_or_above_3500\n", - " \n", - " \n", " 208\n", " 3950\n", " at_or_above_3500\n", " \n", " \n", - " 310\n", - " 3175\n", - " below_3500\n", + " 18\n", + " 3800\n", + " at_or_above_3500\n", " \n", " \n", " 64\n", @@ -1505,6 +1385,11 @@ " below_3500\n", " \n", " \n", + " 310\n", + " 3175\n", + " below_3500\n", + " \n", + " \n", " 118\n", " 3550\n", " at_or_above_3500\n", @@ -1524,15 +1409,15 @@ "279 3150 below_3500\n", "34 3400 below_3500\n", "96 3600 at_or_above_3500\n", - "18 3800 at_or_above_3500\n", "208 3950 at_or_above_3500\n", - "310 3175 below_3500\n", + "18 3800 at_or_above_3500\n", "64 2850 below_3500\n", + "310 3175 below_3500\n", "118 3550 at_or_above_3500\n", "2 3075 below_3500" ] }, - "execution_count": 25, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } @@ -1571,7 +1456,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 24, "metadata": {}, "outputs": [], "source": [ @@ -1658,7 +1543,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.15" + "version": "3.10.16" } }, "nbformat": 4, diff --git a/setup.py b/setup.py index 2aef514749..abc760b691 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ "gcsfs >=2023.3.0, !=2025.5.0", "geopandas >=0.12.2", "google-auth >=2.15.0,<3.0", - "google-cloud-bigquery[bqstorage,pandas] >=3.31.0", + "google-cloud-bigquery[bqstorage,pandas] >=3.36.0", # 2.30 needed for arrow support. "google-cloud-bigquery-storage >= 2.30.0, < 3.0.0", "google-cloud-functions >=1.12.0", diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index 8df3a3a2c3..eceec07dc4 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -6,7 +6,7 @@ geopandas==0.12.2 google-auth==2.15.0 google-cloud-bigtable==2.24.0 google-cloud-pubsub==2.21.4 -google-cloud-bigquery==3.31.0 +google-cloud-bigquery==3.36.0 google-cloud-functions==1.12.0 google-cloud-bigquery-connection==1.12.0 google-cloud-iam==2.12.1