atl06_to_atl11.py

# ---
# jupyter:
#   jupytext:
#     cell_metadata_filter: title,-all
#     formats: ipynb,py:hydrogen
#     text_representation:
#       extension: .py
#       format_name: hydrogen
#       format_version: '1.3'
#     jupytext_version: 1.11.4
#   kernelspec:
#     display_name: deepicedrain
#     language: python
#     name: deepicedrain
# ---

# %% [markdown]
# # **ATL06 to ATL11**
#
# Converting the ICESat-2 ATL06 (Land Ice Height) product to ATL11 (Land Ice Height Changes).
# Also convert the ATL11 file format from HDF5 to [Zarr](https://zarr.readthedocs.io/).

# %%
import os
import glob
import itertools
import shutil
import subprocess
import sys

import dask
import dask.distributed
import h5py
import intake
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pyproj
import tqdm
import xarray as xr
import zarr

import deepicedrain

os.environ["HDF5_USE_FILE_LOCKING"] = "FALSE"

# %%
client = dask.distributed.Client(n_workers=8, threads_per_worker=1)
client

# %% [markdown]
# ## Download ATL11 from [NSIDC](https://doi.org/10.5067/ATLAS/ATL11.003) up to cycle 9

# %%
# Note, need to downgrade using `pip install fsspec==0.7.4 intake-xarray==0.3.2`
# Get list of official ATL11 files to download
catalog = intake.open_catalog("deepicedrain/atlas_catalog.yaml")
with open(file="ATL11_to_download.txt", mode="r") as f:
    urlpaths = f.readlines()
dates: set = {url.split("/")[-2] for url in urlpaths}
len(dates)
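
# %% [markdown]
# (Illustrative aside, not part of the original workflow: a quick sanity check of the
# date parsing above, using a made-up URL in the NSIDC data-pool style layout where the
# folder just before the filename is the acquisition date. The example URL below is
# hypothetical.)

# %%
_example_url = "https://n5eil01u.ecs.nsidc.org/ATLAS/ATL11.003/2019.03.29/ATL11_000110_0308_003_01.h5"
# The second-to-last path segment is the date folder that `url.split("/")[-2]` extracts
assert _example_url.split("/")[-2] == "2019.03.29"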

# %%
# Submit download jobs to Client
futures = []
for date in dates:
    # date = "2019.11.01"  # sorted(dates)[-1]
    source = catalog.icesat2atl11(date=date)
    future = client.submit(
        func=source.discover, key=f"download-{date}"
    )  # triggers download of the file(s), or loads from cache
    futures.append(future)
    # break

# source.urlpath

# %%
# Check download progress here, https://stackoverflow.com/a/37901797/6611055
responses = [
    f.result()
    for f in tqdm.tqdm(
        iterable=dask.distributed.as_completed(futures=futures), total=len(futures)
    )
]

# %%

# %% [markdown]
# ## Process ATL06 to ATL11 for cycle 9 or newer

# %%
# Create ATL06_to_ATL11 processing script, if not already present
if not os.path.exists("ATL06_to_ATL11_Antarctica.sh"):
    # Prepare string to write into ATL06_to_ATL11_Antarctica.sh bash script
    writelines = []

    # find last cycle for each reference ground track and each orbital segment
    iterable = itertools.product(range(1387, 0, -1), [10, 11, 12])
    for referencegroundtrack, orbitalsegment in tqdm.tqdm(
        iterable=iterable, total=1387 * 3
    ):
        rgt, ost = referencegroundtrack, orbitalsegment
        last_cycle_file: str = max(
            glob.glob(f"ATL06.00X/{rgt:04d}/ATL06*_*_{rgt:04d}??{ost:02d}_*.h5")
        )
        last_cycle: int = int(last_cycle_file[-14:-12])
        if last_cycle > 8:  # Only process those with Cycle 9 and newer locally
            writelines.append(
                f"ATL06_to_ATL11.py"
                f" {referencegroundtrack:04d} {orbitalsegment}"
                f" --cycles 03 {last_cycle:02d}"
                f" --Release 3"
                f" --directory 'ATL06.00X/{referencegroundtrack:04d}/'"
                f" --out_dir ATL11.003\n"
            )
            # Move the previously downloaded official NSIDC ATL11 file aside,
            # so the locally reprocessed output can take its place
            fname = f"ATL11_{referencegroundtrack:04d}{orbitalsegment}_0308_003_01.h5"
            if not os.path.exists(f"ATL11.003/official/{fname}"):
                try:
                    shutil.move(src=f"ATL11.003/{fname}", dst="ATL11.003/official")
                except FileNotFoundError:
                    pass
        # else:  # Just use official NSIDC version for Cycle 8 or older
        #     pass

    writelines.sort()  # sort writelines in place

    # Finally create the bash script
    with open(file="ATL06_to_ATL11_Antarctica.sh", mode="w") as f:
        f.writelines(writelines)

# %% [markdown]
# Now use [GNU parallel](https://www.gnu.org/software/parallel/parallel_tutorial.html) to run the script in parallel.
# Will take about 1 week to run on 64 cores.
#
# Reference:
#
# - O. Tange (2018): GNU Parallel 2018, Mar 2018, ISBN 9781387509881, DOI https://doi.org/10.5281/zenodo.1146014

# %%
# !head -n 2080 ATL06_to_ATL11_Antarctica.sh > ATL06_to_ATL11_Antarctica_1.sh
# !tail -n +2081 ATL06_to_ATL11_Antarctica.sh > ATL06_to_ATL11_Antarctica_2.sh

# %%
# !PYTHONPATH=`pwd` PYTHONWARNINGS="ignore" parallel -a ATL06_to_ATL11_Antarctica_1.sh --bar --resume-failed --results logdir --joblog log1 --jobs 60 --load 90% > /dev/null

# %%
# df_log = pd.read_csv(filepath_or_buffer="log", sep="\t")
# df_log.query(expr="Exitval > 0")

# %% [markdown]
# ## Convert from HDF5 to Zarr format
#
# For faster data access speeds!
# We'll collect the data for each Reference Ground Track,
# and store it in Zarr format,
# specifically a layout that xarray can read.
# See also https://xarray.pydata.org/en/v0.18.2/user-guide/io.html#zarr.
#
# Grouping hierarchy (illustrated in the inspection cell below):
#   - Reference Ground Track (1-1387)
#     - Orbital Segments (10, 11, 12)
#       - Laser Pairs (pt1, pt2, pt3)
#         - Attributes (longitude, latitude, h_corr, delta_time, etc)
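
# %% [markdown]
# (Illustrative aside, not part of the original workflow: one way to eyeball the group
# layout of a single ATL11 HDF5 file, assuming at least one `.h5` file already exists
# under `ATL11.003/`. The laser pair groups pt1/pt2/pt3 and subgroups such as `ref_surf`
# should show up here; the Reference Ground Track and Orbital Segment are encoded in the
# filename rather than inside the file.)

# %%
_sample_files = sorted(glob.glob("ATL11.003/*.h5"))
if _sample_files:
    with h5py.File(name=_sample_files[0], mode="r") as h5f:
        # Print every group (not dataset) name in the file, e.g. pt1, pt1/ref_surf, ...
        h5f.visititems(
            lambda name, obj: print(name) if isinstance(obj, h5py.Group) else None
        )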

# %%
max_cycles: int = max(int(f[-12:-10]) for f in glob.glob("ATL11.003/*.h5"))
print(f"{max_cycles} ICESat-2 cycles available")


# %%
@dask.delayed
def open_ATL11(atl11file: str, group: str) -> xr.Dataset:
    """
    Opens up an ATL11 file using xarray and does some light pre-processing:
    - Mask values using _FillValue ??
    - Convert attribute format from binary to str
    """
    ds: xr.Dataset = xr.open_dataset(
        filename_or_obj=atl11file, group=group, engine="h5netcdf", mask_and_scale=True
    )

    # Change xarray.Dataset attributes from binary to str type
    # fixes issue when saving to Zarr format later
    # TypeError: Object of type bytes is not JSON serializable
    for key, variable in ds.variables.items():
        assert isinstance(ds[key].DIMENSION_LABELS, np.ndarray)
        ds[key].attrs["DIMENSION_LABELS"] = (
            ds[key].attrs["DIMENSION_LABELS"].astype(str)
        )

    try:
        ds.attrs["ATL06_xover_field_list"] = ds.attrs["ATL06_xover_field_list"].astype(
            str
        )
    except KeyError:
        pass

    return ds

# %% [markdown]
# ### Light pre-processing
#
# - Reproject longitude/latitude to EPSG:3031 x/y (see the pyproj sketch after the function below)
# - Mask out low quality height data

# %%
@dask.delayed
def set_xy_and_mask(ds):
    # Calculate the EPSG:3031 x/y projection coordinates
    ds["x"], ds["y"] = deepicedrain.lonlat_to_xy(
        longitude=ds.longitude, latitude=ds.latitude
    )

    # Set x, y, x_atc and y_atc as coords of the xarray.Dataset instead of lon/lat
    ds: xr.Dataset = ds.set_coords(names=["x", "y", "x_atc", "y_atc"])
    ds: xr.Dataset = ds.reset_coords(names=["longitude", "latitude"])

    # Mask out low quality height data
    ds["h_corr"]: xr.DataArray = ds.h_corr.where(cond=ds.fit_quality == 0)

    return ds
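
# %% [markdown]
# (Illustrative aside, not part of the original workflow: a minimal pyproj-only sketch of
# what the reprojection step above amounts to, assuming `deepicedrain.lonlat_to_xy` is
# essentially a longitude/latitude (EPSG:4326) to Antarctic Polar Stereographic
# (EPSG:3031) transform. The coordinates below are made-up sample values.)

# %%
_transformer = pyproj.Transformer.from_crs("EPSG:4326", "EPSG:3031", always_xy=True)
# always_xy=True means arguments are (longitude, latitude) in degrees; output is metres
_x, _y = _transformer.transform(-70.0, -78.0)
print(_x, _y)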

# %%
# Consolidate together Antarctic orbital segments 10, 11, 12 into one file
# Also consolidate all three laser pairs pt1, pt2, pt3 into one file
atl11_dict = {}
for rgt in tqdm.trange(1387):
    atl11files: list = glob.glob(f"ATL11.003/ATL11_{rgt+1:04d}1?_????_00?_0?.h5")

    try:
        assert len(atl11files) == 3  # Should be 3 files for Orbital Segments 10,11,12
    except AssertionError:
        # Manually handle exceptional cases
        if len(atl11files) != 2:  # or rgt + 1 not in [1036]:
            raise ValueError(
                f"{rgt+1} only has {len(atl11files)} ATL11 files instead of 3"
            )

    if atl11files:
        pattern: dict = intake.source.utils.reverse_format(
            format_string="ATL11.003/ATL11_{referencegroundtrack:4}{orbitalsegment:2}_{cycles:4}_{version:3}_{revision:2}.h5",
            resolved_string=sorted(atl11files)[1],  # get the '11' one, not '10' or '12'
        )
        zarrfilepath: str = "ATL11.003z123/ATL11_{referencegroundtrack}1x_{cycles}_{version}_{revision}.zarr".format(
            **pattern
        )
        atl11_dict[zarrfilepath] = atl11files

# %%
# Get proper data encoding from a sample ATL11 file
atl11file: str = atl11files[0]
root_ds = open_ATL11(atl11file=atl11file, group="pt2").compute()
reference_surface_ds = open_ATL11(atl11file=atl11file, group="pt2/ref_surf").compute()
ds: xr.Dataset = xr.combine_by_coords(data_objects=[root_ds, reference_surface_ds])

# Convert variables to correct datatype
encoding: dict = {}
df: pd.DataFrame = pd.read_csv(
    "https://raw.githubusercontent.com/suzanne64/ATL11/master/ATL11/package_data/ATL11_output_attrs.csv"
)[["field", "datatype"]]
df = df.set_index("field")
for var in ds.variables:
    desired_dtype = str(df.datatype[var]).lower()
    if ds[var].dtype.name != desired_dtype:
        try:
            desired_dtype = desired_dtype.split(var)[1].strip()
        except IndexError:
            pass
    encoding[var] = {"dtype": desired_dtype}

# %%
# Gather up all the dask.delayed conversion tasks to store data into Zarr!
stores = []
for zarrfilepath, atl11files in tqdm.tqdm(iterable=atl11_dict.items()):
    zarr.open(store=zarrfilepath, mode="w")  # Make a new file/overwrite existing
    datasets = []
    for atl11file in atl11files:  # Orbital Segments: 10, 11, 12
        for pair in ("pt1", "pt2", "pt3"):  # Laser pairs: pt1, pt2, pt3
            # Attributes: longitude, latitude, h_corr, delta_time, etc
            root_ds = open_ATL11(atl11file=atl11file, group=pair)
            reference_surface_ds = open_ATL11(
                atl11file=atl11file, group=f"{pair}/ref_surf"
            )
            ds = dask.delayed(obj=xr.combine_by_coords)(
                data_objects=[root_ds, reference_surface_ds]
            )
            # Light pre-processing
            ds = set_xy_and_mask(ds=ds)
            # Add the Reference Ground Track number (parsed from the filename) as a
            # coordinate. Use int32 since RGT values go up to 1387, which overflows int8.
            _rgt_array = dask.delayed(obj=np.full)(
                shape=ds.ref_pt.shape,
                fill_value=int(atl11file.split("_")[1][:4]),
                dtype=np.int32,
            )
            ds = dask.delayed(obj=ds.assign_coords)(
                referencegroundtrack=("ref_pt", _rgt_array)
            )
            datasets.append(ds)

    dataset = dask.delayed(obj=xr.concat)(objs=datasets, dim="ref_pt")
    store_task = dataset.to_zarr(
        store=zarrfilepath, mode="w", encoding=encoding, consolidated=True
    )
    stores.append(store_task)
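
# %% [markdown]
# (Illustrative aside, not part of the original workflow: before submitting the whole
# batch to the dask cluster in the next cell, it can be worth computing just the first
# store task eagerly, to surface any encoding or masking errors early.)

# %%
# _ = dask.compute(stores[0])  # uncomment to test-drive a single HDF5 -> Zarr conversion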

# %%
# Do all the HDF5 to Zarr conversion! Should take about 1 hour to run
# Check conversion progress here, https://stackoverflow.com/a/37901797/6611055
futures = [client.compute(store_task) for store_task in stores]
for _ in tqdm.tqdm(
    iterable=dask.distributed.as_completed(futures=futures), total=len(stores)
):
    pass

# %%
ds = xr.open_dataset(zarrfilepath, engine="zarr", backend_kwargs={"consolidated": True})
ds.h_corr.__array__().shape

# %% [raw]
# # Note, this raw conversion below takes about 11 hours
# # because HDF5 files work on a single thread...
# for atl11file in tqdm.tqdm(iterable=sorted(glob.glob("ATL11.003/*.h5"))):
#     name = os.path.basename(p=os.path.splitext(p=atl11file)[0])
#     zarr.convenience.copy_all(
#         source=h5py.File(name=atl11file, mode="r"),
#         dest=zarr.open_group(store=f"ATL11.003z/{name}.zarr", mode="w"),
#         if_exists="skip",
#         without_attrs=True,
#     )