Parquet metadata persistence of DataFrame.attrs (#54346)
* added df.attrs metadata to pyarrow table for persistence

* hooks

* placed unit test in correct class

* update unit test

* changed to consistent use of json

* added whatsnew

* added guard to check if df.attrs exists

* updated whatsnew
SanjithChockan authored Aug 3, 2023
1 parent eaddc1d commit 152595c
Showing 3 changed files with 21 additions and 1 deletion.
2 changes: 1 addition & 1 deletion doc/source/whatsnew/v2.1.0.rst
@@ -212,8 +212,8 @@ Other enhancements
- Improved error message when :meth:`DataFrameGroupBy.agg` failed (:issue:`52930`)
- Many read/to_* functions, such as :meth:`DataFrame.to_pickle` and :func:`read_csv`, support forwarding compression arguments to lzma.LZMAFile (:issue:`52979`)
- Reductions :meth:`Series.argmax`, :meth:`Series.argmin`, :meth:`Series.idxmax`, :meth:`Series.idxmin`, :meth:`Index.argmax`, :meth:`Index.argmin`, :meth:`DataFrame.idxmax`, :meth:`DataFrame.idxmin` are now supported for object-dtype objects (:issue:`4279`, :issue:`18021`, :issue:`40685`, :issue:`43697`)
- :meth:`DataFrame.to_parquet` and :func:`read_parquet` will now write and read ``attrs`` respectively (:issue:`54346`)
- Performance improvement in :meth:`GroupBy.quantile` (:issue:`51722`)
-

.. ---------------------------------------------------------------------------
.. _whatsnew_210.notable_bug_fixes:
12 changes: 12 additions & 0 deletions pandas/io/parquet.py
@@ -2,6 +2,7 @@
from __future__ import annotations

import io
import json
import os
from typing import (
    TYPE_CHECKING,
@@ -184,6 +185,12 @@ def write(

        table = self.api.Table.from_pandas(df, **from_pandas_kwargs)

        if df.attrs:
            df_metadata = {"PANDAS_ATTRS": json.dumps(df.attrs)}
            existing_metadata = table.schema.metadata
            merged_metadata = {**existing_metadata, **df_metadata}
            table = table.replace_schema_metadata(merged_metadata)

        path_or_handle, handles, filesystem = _get_path_or_handle(
            path,
            filesystem,
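For context, the write-side hook can be exercised with pyarrow alone. This is a minimal sketch, not pandas API: demo_write_with_attrs is a hypothetical helper, and the `or {}` fallback is a defensive addition not present in the hook above (Table.from_pandas typically attaches pandas schema metadata already, so the hook merges directly).

# Hedged sketch of the write-side mechanism using pyarrow directly.
import json

import pandas as pd
import pyarrow as pa
import pyarrow.parquet as pq


def demo_write_with_attrs(df: pd.DataFrame, path: str) -> None:
    table = pa.Table.from_pandas(df)
    if df.attrs:
        # Schema metadata keys/values are bytes; pyarrow encodes str for us.
        existing = table.schema.metadata or {}
        merged = {**existing, "PANDAS_ATTRS": json.dumps(df.attrs)}
        table = table.replace_schema_metadata(merged)
    pq.write_table(table, path)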
@@ -268,6 +275,11 @@ def read(

            if manager == "array":
                result = result._as_manager("array", copy=False)

            if pa_table.schema.metadata:
                if b"PANDAS_ATTRS" in pa_table.schema.metadata:
                    df_metadata = pa_table.schema.metadata[b"PANDAS_ATTRS"]
                    result.attrs = json.loads(df_metadata)
            return result
        finally:
            if handles is not None:
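The reverse direction can be sketched the same way: schema metadata keys come back as bytes, so the lookup uses b"PANDAS_ATTRS". demo_read_attrs is an illustrative name, not part of the commit.

# Hedged sketch of the read-side mechanism using pyarrow directly.
import json

import pyarrow.parquet as pq


def demo_read_attrs(path: str) -> dict:
    table = pq.read_table(path)
    metadata = table.schema.metadata or {}
    raw = metadata.get(b"PANDAS_ATTRS")
    return json.loads(raw) if raw is not None else {}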
8 changes: 8 additions & 0 deletions pandas/tests/io/test_parquet.py
@@ -1098,6 +1098,14 @@ def test_empty_columns(self, pa):
        df = pd.DataFrame(index=pd.Index(["a", "b", "c"], name="custom name"))
        check_round_trip(df, pa)

    def test_df_attrs_persistence(self, tmp_path, pa):
        path = tmp_path / "test_df_metadata.p"
        df = pd.DataFrame(data={1: [1]})
        df.attrs = {"test_attribute": 1}
        df.to_parquet(path, engine=pa)
        new_df = read_parquet(path, engine=pa)
        assert new_df.attrs == df.attrs


class TestParquetFastParquet(Base):
    def test_basic(self, fp, df_full):
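End to end, the feature mirrors the test above. A usage sketch assuming pandas >= 2.1 with the pyarrow engine; the file name and attrs values are arbitrary examples. Note that attrs must be JSON-serializable, since the hook stores them via json.dumps.

import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3]})
df.attrs = {"source": "sensor-7", "version": 2}
df.to_parquet("with_attrs.parquet", engine="pyarrow")

restored = pd.read_parquet("with_attrs.parquet", engine="pyarrow")
assert restored.attrs == df.attrs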
