diff --git a/bigframes/session/__init__.py b/bigframes/session/__init__.py
index 3cb9d2bb68..c8ec877f2c 100644
--- a/bigframes/session/__init__.py
+++ b/bigframes/session/__init__.py
@@ -976,8 +976,7 @@ def read_pandas(
                     quota and your data cannot be embedded in SQL due to size or
                     data type limitations.
                 * "bigquery_write":
-                    [Preview] Use the BigQuery Storage Write API. This feature
-                    is in public preview.
+                    Use the BigQuery Storage Write API.
 
         Returns:
             An equivalent bigframes.pandas.(DataFrame/Series/Index) object
@@ -1026,7 +1025,7 @@ def _read_pandas(
         mem_usage = pandas_dataframe.memory_usage(deep=True).sum()
         if write_engine == "default":
             write_engine = (
-                "bigquery_load"
+                "bigquery_write"
                 if mem_usage > bigframes.constants.MAX_INLINE_BYTES
                 else "bigquery_inline"
             )
diff --git a/bigframes/session/bq_caching_executor.py b/bigframes/session/bq_caching_executor.py
index 736dbf7be1..dea6271bd9 100644
--- a/bigframes/session/bq_caching_executor.py
+++ b/bigframes/session/bq_caching_executor.py
@@ -594,7 +594,7 @@ def _upload_local_data(self, local_table: local_data.ManagedArrowTable):
         # Might be better as a queue and a worker thread
         with self._upload_lock:
             if local_table not in self.cache._uploaded_local_data:
-                uploaded = self.loader.load_data(
+                uploaded = self.loader.write_data(
                     local_table, bigframes.core.guid.generate_guid()
                 )
                 self.cache.cache_remote_replacement(local_table, uploaded)
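
For reviewers, a minimal usage sketch of what this changes. The engine names and `read_pandas` signature come from the diff above; the sample DataFrame and its size are hypothetical, and the exact `MAX_INLINE_BYTES` threshold is whatever `bigframes.constants` defines:

```python
import pandas as pd

import bigframes.pandas as bpd

# Hypothetical local data; anything over MAX_INLINE_BYTES triggers an upload.
df = pd.DataFrame({"id": range(1_000_000), "value": [0.5] * 1_000_000})

# With write_engine="default", local data too large to inline is now sent
# via the BigQuery Storage Write API ("bigquery_write") instead of a load
# job; small data is still inlined in SQL ("bigquery_inline").
bf_df = bpd.read_pandas(df)

# Callers who depend on load-job semantics can still opt in explicitly.
bf_df_load = bpd.read_pandas(df, write_engine="bigquery_load")
```

The same switch applies internally: `_upload_local_data` now routes cached local tables through the loader's Storage Write API path (`write_data`) rather than the load-job path (`load_data`).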