From 7f6481cd69238f9609b363fdad21466e9cc8d686 Mon Sep 17 00:00:00 2001
From: Trevor Bergeron
Date: Wed, 17 Dec 2025 18:49:44 +0000
Subject: [PATCH 1/2] feat: Use write api for automatic data uploads

---
 bigframes/session/bq_caching_executor.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigframes/session/bq_caching_executor.py b/bigframes/session/bq_caching_executor.py
index 736dbf7be1..dea6271bd9 100644
--- a/bigframes/session/bq_caching_executor.py
+++ b/bigframes/session/bq_caching_executor.py
@@ -594,7 +594,7 @@ def _upload_local_data(self, local_table: local_data.ManagedArrowTable):
         # Might be better as a queue and a worker thread
         with self._upload_lock:
             if local_table not in self.cache._uploaded_local_data:
-                uploaded = self.loader.load_data(
+                uploaded = self.loader.write_data(
                     local_table, bigframes.core.guid.generate_guid()
                 )
                 self.cache.cache_remote_replacement(local_table, uploaded)

From 40323d861dbfb5b6787a0abc14cdd9101505eb1f Mon Sep 17 00:00:00 2001
From: Trevor Bergeron
Date: Fri, 19 Dec 2025 20:38:38 +0000
Subject: [PATCH 2/2] use write api as default for all uploads

---
 bigframes/session/__init__.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/bigframes/session/__init__.py b/bigframes/session/__init__.py
index 3cb9d2bb68..c8ec877f2c 100644
--- a/bigframes/session/__init__.py
+++ b/bigframes/session/__init__.py
@@ -976,8 +976,7 @@ def read_pandas(
                 quota and your data cannot be embedded in SQL due to size or data
                 type limitations.
                 * "bigquery_write":
-                    [Preview] Use the BigQuery Storage Write API. This feature
-                    is in public preview.
+                    Use the BigQuery Storage Write API.

        Returns:
            An equivalent bigframes.pandas.(DataFrame/Series/Index) object
@@ -1026,7 +1025,7 @@ def _read_pandas(
         mem_usage = pandas_dataframe.memory_usage(deep=True).sum()
         if write_engine == "default":
             write_engine = (
-                "bigquery_load"
+                "bigquery_write"
                 if mem_usage > bigframes.constants.MAX_INLINE_BYTES
                 else "bigquery_inline"
             )