Skip to content

Commit 2583a41

Browse files
committed
Make arrow from relation return record batch reader
1 parent: cb2c9b4 · commit: 2583a41

File tree

3 files changed: +4 additions, −4 deletions

duckdb/__init__.pyi

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -453,7 +453,7 @@ class DuckDBPyRelation:
     def set_alias(self, alias: str) -> DuckDBPyRelation: ...
     def show(self, max_width: Optional[int] = None, max_rows: Optional[int] = None, max_col_width: Optional[int] = None, null_value: Optional[str] = None, render_mode: Optional[RenderMode] = None) -> None: ...
     def sql_query(self) -> str: ...
-    def to_arrow_table(self, batch_size: int = ...) -> pyarrow.lib.Table: ...
+    def to_arrow_table(self, batch_size: int = ...) -> pyarrow.lib.RecordBatchReader: ...
     def to_csv(
         self,
         file_name: str,

duckdb/experimental/spark/sql/dataframe.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ def toArrow(self) -> "pa.Table":
         age: [[2,5]]
         name: [["Alice","Bob"]]
         """
-        return self.relation.arrow()
+        return self.relation.to_arrow_table()
 
     def createOrReplaceTempView(self, name: str) -> None:
         """Creates or replaces a local temporary view with this :class:`DataFrame`.

src/duckdb_py/pyrelation/initialize.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ static void InitializeConsumers(py::class_<DuckDBPyRelation> &m) {
          py::arg("date_as_object") = false)
     .def("fetch_df_chunk", &DuckDBPyRelation::FetchDFChunk, "Execute and fetch a chunk of the rows",
          py::arg("vectors_per_chunk") = 1, py::kw_only(), py::arg("date_as_object") = false)
-    .def("arrow", &DuckDBPyRelation::ToArrowTable, "Execute and fetch all rows as an Arrow Table",
+    .def("arrow", &DuckDBPyRelation::ToRecordBatch, "Execute and return an Arrow Record Batch Reader that yields all rows",
          py::arg("batch_size") = 1000000)
     .def("fetch_arrow_table", &DuckDBPyRelation::ToArrowTable, "Execute and fetch all rows as an Arrow Table",
          py::arg("batch_size") = 1000000)
@@ -78,7 +78,7 @@ static void InitializeConsumers(py::class_<DuckDBPyRelation> &m) {
     )";
     m.def("__arrow_c_stream__", &DuckDBPyRelation::ToArrowCapsule, capsule_docs,
           py::arg("requested_schema") = py::none());
-    m.def("record_batch", &DuckDBPyRelation::ToRecordBatch,
+    m.def("fetch_record_batch", &DuckDBPyRelation::ToRecordBatch,
           "Execute and return an Arrow Record Batch Reader that yields all rows", py::arg("batch_size") = 1000000)
     .def("fetch_arrow_reader", &DuckDBPyRelation::ToRecordBatch,
           "Execute and return an Arrow Record Batch Reader that yields all rows", py::arg("batch_size") = 1000000);

0 commit comments

Comments (0)