Skip to content

Commit

Permalink
Merge pull request #68 from bopen/36-add-ascending-node-time
Browse files Browse the repository at this point in the history
Add ascending node time
  • Loading branch information
alexamici authored Jan 10, 2022
2 parents 366bacb + 46fc55f commit 7fc1c99
Show file tree
Hide file tree
Showing 2 changed files with 56 additions and 4 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -302,11 +302,11 @@ Attributes: ...
constellation: sentinel-1
platform: sentinel-1b
... ...
xs:instrument_mode_swaths: ['IW1', 'IW2', 'IW3']
group: /IW1/VV
subgroups: ['gcp', 'orbit', 'attitude', 'dc_estimate', '...
Conventions: CF-1.8
history: created by xarray_sentinel-...
azimuth_anx_time: 2210.634453
burst_index: 8
```
Expand Down
58 changes: 55 additions & 3 deletions xarray_sentinel/sentinel1.py
Original file line number Diff line number Diff line change
Expand Up @@ -372,7 +372,50 @@ def open_pol_dataset(
return xr.Dataset(attrs=attrs, data_vars={"measurement": arr})


def crop_burst_dataset(pol_dataset: xr.Dataset, burst_index: int) -> xr.Dataset:
def find_bursts_index(
    pol_dataset: xr.Dataset,
    azimuth_anx_time: float,
    use_center: bool = False,
) -> int:
    """Return the index of the burst closest to the given azimuth ANX time.

    :param pol_dataset: single-swath/polarisation dataset; must carry the
        ``lines_per_burst`` and ``sat:anx_datetime`` attributes and expose an
        ``azimuth_time`` coordinate
    :param azimuth_anx_time: target time as seconds since the ascending node
        crossing
    :param use_center: if True match against the center line of each burst,
        otherwise against its first line
    """
    lines_per_burst = pol_dataset.attrs["lines_per_burst"]
    sat_anx_datetime = np.datetime64(pol_dataset.attrs["sat:anx_datetime"])
    azimuth_anx_time = pd.Timedelta(azimuth_anx_time, unit="s")
    # Reference line within each burst: the center line when use_center,
    # the first line otherwise. One slice covers both cases, so the
    # subtract/abs/argmin logic is not duplicated per branch.
    reference_line = lines_per_burst // 2 if use_center else 0
    burst_anx_times = (
        pol_dataset.azimuth_time[reference_line::lines_per_burst] - sat_anx_datetime
    )
    distance = abs(burst_anx_times - azimuth_anx_time)
    return distance.argmin().item()  # type: ignore


def crop_burst_dataset(
pol_dataset: xr.Dataset,
burst_index: T.Optional[int] = None,
azimuth_anx_time: T.Optional[float] = None,
use_center: bool = False,
) -> xr.Dataset:
if (burst_index is not None) and (azimuth_anx_time is not None):
raise ValueError(
"only one keyword between 'index' and 'azimuth_anx_time' must be defined"
)

if burst_index is None:
if azimuth_anx_time is not None:
burst_index = find_bursts_index(
pol_dataset, azimuth_anx_time, use_center=use_center
)
else:
raise ValueError(
"one keyword between 'index' and 'azimuth_anx_time' must be defined"
)

if burst_index < 0 or burst_index >= pol_dataset.attrs["number_of_bursts"]:
raise IndexError(f"{burst_index=} out of bounds")

Expand All @@ -382,6 +425,13 @@ def crop_burst_dataset(pol_dataset: xr.Dataset, burst_index: int) -> xr.Dataset:
lines_per_burst * burst_index, lines_per_burst * (burst_index + 1) - 1
)
)

sat_anx_datetime = pol_dataset.attrs["sat:anx_datetime"]
sat_anx_datetime = np.datetime64(sat_anx_datetime)
burst_azimuth_anx_times = ds.azimuth_time - sat_anx_datetime
ds.attrs["azimuth_anx_time"] = (
burst_azimuth_anx_times / np.timedelta64(1, "s")
).item(0)
ds = ds.swap_dims({"line": "azimuth_time", "pixel": "slant_range_time"})
ds.attrs["burst_index"] = burst_index

Expand Down Expand Up @@ -473,8 +523,6 @@ def open_dataset(
ancillary_data_paths[subswath][pol]["s1Level1ProductSchema"],
chunks=chunks,
)
if burst_index is not None:
ds = crop_burst_dataset(ds, burst_index=burst_index)
else:
subswath, pol, metadata = group.split("/", 2)
with fs.open(groups[group]) as file:
Expand All @@ -484,6 +532,10 @@ def open_dataset(
if len(subgroups):
product_attrs["subgroups"] = subgroups
ds.attrs.update(product_attrs) # type: ignore

if group.count("/") == 1 and burst_index is not None:
ds = crop_burst_dataset(ds, burst_index=burst_index)

conventions.update_attributes(ds, group=metadata)

return ds
Expand Down

0 comments on commit 7fc1c99

Please sign in to comment.