Commit ec1bc7e

formatting

1 parent f02f895


awswrangler/s3/_write_deltalake.py

Lines changed: 3 additions & 4 deletions
@@ -136,7 +136,6 @@ def to_deltalake(
     )
 
 
-
 def _df_iter_to_record_batch_reader(
     df_iter: Iterable[pd.DataFrame],
     *,
@@ -174,14 +173,14 @@ def _df_iter_to_record_batch_reader(
 
     def batches() -> Iterator[pa.RecordBatch]:
         first_tbl: pa.Table = _df_to_table(first_df, schema, index, dtype)
-        for b in (first_tbl.to_batches(batch_size) if batch_size is not None else first_tbl.to_batches()):
+        for b in first_tbl.to_batches(batch_size) if batch_size is not None else first_tbl.to_batches():
             yield b
 
         for df in it:
             if df.empty:
                 continue
             tbl: pa.Table = _df_to_table(df, schema, index, dtype)
-            for b in (tbl.to_batches(batch_size) if batch_size is not None else tbl.to_batches()):
+            for b in tbl.to_batches(batch_size) if batch_size is not None else tbl.to_batches():
                 yield b
 
     reader = pa.RecordBatchReader.from_batches(schema, batches())
@@ -251,4 +250,4 @@ def to_deltalake_streaming(
         max_open_files=max_open_files,
         max_rows_per_file=max_rows_per_file,
         target_file_size=target_file_size,
-    )
+    )
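
For readers skimming the change: the two `-`/`+` pairs in the middle hunk only drop redundant parentheses around a conditional expression used as a for loop's iterable, which is valid Python with or without them; the other two hunks are whitespace-only. Below is a minimal, self-contained sketch of the pattern these lines implement, not awswrangler's actual code: `_df_to_table` and its dtype/index plumbing are replaced with a plain `pa.Table.from_pandas` call, and all incoming frames are assumed to share one schema.

    # Illustrative sketch only -- not awswrangler's implementation.
    from typing import Iterable, Iterator, Optional

    import pandas as pd
    import pyarrow as pa


    def df_iter_to_reader(
        df_iter: Iterable[pd.DataFrame],
        batch_size: Optional[int] = None,
    ) -> pa.RecordBatchReader:
        it = iter(df_iter)
        first_df = next(it)  # assumes at least one DataFrame is yielded
        first_tbl = pa.Table.from_pandas(first_df)
        schema = first_tbl.schema  # the first frame fixes the reader's schema

        def batches() -> Iterator[pa.RecordBatch]:
            # Pass a max batch size only when one was requested, as in the diff.
            for b in first_tbl.to_batches(batch_size) if batch_size is not None else first_tbl.to_batches():
                yield b
            for df in it:
                if df.empty:
                    continue
                tbl = pa.Table.from_pandas(df)
                for b in tbl.to_batches(batch_size) if batch_size is not None else tbl.to_batches():
                    yield b

        # The reader drains the generator lazily; downstream consumers
        # pull batches on demand instead of materializing every table up front.
        return pa.RecordBatchReader.from_batches(schema, batches())


    reader = df_iter_to_reader(
        (pd.DataFrame({"x": range(5)}) for _ in range(3)), batch_size=2
    )
    print(reader.read_all().num_rows)  # 15

The streaming shape is the point of the real function too: handing a `RecordBatchReader` to a writer lets it consume one batch at a time rather than holding the concatenated table in memory.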
