Skip to content

Commit 535a5fc

Browse files
committed
deprecate instead of remove
1 parent 2583a41 commit 535a5fc

File tree

2 files changed

+17
-5
lines changed

2 files changed

+17
-5
lines changed

duckdb/__init__.pyi

Lines changed: 3 additions & 2 deletions
```diff
@@ -415,7 +415,7 @@ class DuckDBPyRelation:
     def variance(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
     def list(self, column: str, groups: str = ..., window_spec: str = ..., projected_columns: str = ...) -> DuckDBPyRelation: ...
-    def arrow(self, batch_size: int = ...) -> pyarrow.lib.Table: ...
+    def arrow(self, batch_size: int = ...) -> pyarrow.lib.RecordBatchReader: ...
     def __arrow_c_stream__(self, requested_schema: Optional[object] = None) -> object: ...
     def create(self, table_name: str) -> None: ...
     def create_view(self, view_name: str, replace: bool = ...) -> DuckDBPyRelation: ...
@@ -448,12 +448,13 @@ class DuckDBPyRelation:
     def pl(self, rows_per_batch: int = ..., connection: DuckDBPyConnection = ...) -> polars.DataFrame: ...
     def query(self, virtual_table_name: str, sql_query: str) -> DuckDBPyRelation: ...
     def record_batch(self, batch_size: int = ...) -> pyarrow.lib.RecordBatchReader: ...
+    def fetch_record_batch(self, rows_per_batch: int = 1000000, *, connection: DuckDBPyConnection = ...) -> pyarrow.lib.RecordBatchReader: ...
     def select_types(self, types: List[Union[str, DuckDBPyType]]) -> DuckDBPyRelation: ...
     def select_dtypes(self, types: List[Union[str, DuckDBPyType]]) -> DuckDBPyRelation: ...
     def set_alias(self, alias: str) -> DuckDBPyRelation: ...
     def show(self, max_width: Optional[int] = None, max_rows: Optional[int] = None, max_col_width: Optional[int] = None, null_value: Optional[str] = None, render_mode: Optional[RenderMode] = None) -> None: ...
     def sql_query(self) -> str: ...
-    def to_arrow_table(self, batch_size: int = ...) -> pyarrow.lib.RecordBatchReader: ...
+    def to_arrow_table(self, batch_size: int = ...) -> pyarrow.lib.Table: ...
     def to_csv(
         self,
         file_name: str,
```

src/duckdb_py/pyrelation/initialize.cpp

Lines changed: 14 additions & 3 deletions
```diff
@@ -79,9 +79,20 @@ static void InitializeConsumers(py::class_<DuckDBPyRelation> &m) {
 	m.def("__arrow_c_stream__", &DuckDBPyRelation::ToArrowCapsule, capsule_docs,
 	      py::arg("requested_schema") = py::none());
 	m.def("fetch_record_batch", &DuckDBPyRelation::ToRecordBatch,
-	      "Execute and return an Arrow Record Batch Reader that yields all rows", py::arg("batch_size") = 1000000)
-	    .def("fetch_arrow_reader", &DuckDBPyRelation::ToRecordBatch,
-	         "Execute and return an Arrow Record Batch Reader that yields all rows", py::arg("batch_size") = 1000000);
+	      "Execute and return an Arrow Record Batch Reader that yields all rows", py::arg("rows_per_batch") = 1000000)
+	    .def("fetch_arrow_reader", &DuckDBPyRelation::ToRecordBatch,
+	         "Execute and return an Arrow Record Batch Reader that yields all rows", py::arg("rows_per_batch") = 1000000)
+	    .def("record_batch",
+	         [](pybind11::object &self, idx_t rows_per_batch) {
+		         auto warnings = pybind11::module::import("warnings");
+		         auto builtins = pybind11::module::import("builtins");
+		         warnings.attr("warn")(
+		             "record_batch() is deprecated, use fetch_record_batch() instead.",
+		             builtins.attr("DeprecationWarning"));
+		         return self.attr("fetch_record_batch")(rows_per_batch);
+	         },
+	         py::arg("rows_per_batch") = 1000000);
 }
 
 static void InitializeAggregates(py::class_<DuckDBPyRelation> &m) {
```

0 commit comments

Comments (0)