Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

atl11_play.py 11 KB

You have to be logged in to leave a comment. Sign In
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
  1. # ---
  2. # jupyter:
  3. # jupytext:
  4. # formats: ipynb,py:hydrogen
  5. # text_representation:
  6. # extension: .py
  7. # format_name: hydrogen
  8. # format_version: '1.3'
  9. # jupytext_version: 1.9.1
  10. # kernelspec:
  11. # display_name: deepicedrain
  12. # language: python
  13. # name: deepicedrain
  14. # ---
  15. # %% [markdown]
  16. # # **ATLAS/ICESat-2 Land Ice Height Changes ATL11 Exploratory Data Analysis**
  17. #
  18. # Adapted from https://github.com/suzanne64/ATL11/blob/master/intro_to_ATL11.ipynb
# %%
import glob

import dask
import dask.array
import dask.distributed
import datashader
import geopandas as gpd
import holoviews as hv
import hvplot.dask
import hvplot.pandas
import hvplot.xarray
import intake
import matplotlib.cm
import numpy as np
import pandas as pd
import pygmt
import shapely
import xarray as xr

import deepicedrain
# %%
# Spin up a local dask.distributed cluster with 32 single-threaded worker
# processes, used to parallelise the per-file Zarr loading further below.
# NOTE(review): only `import dask` / `import dask.array` appear in the import
# cell above — `dask.distributed` is a submodule that needs its own import;
# confirm it is not merely being pulled in indirectly by another import.
client = dask.distributed.Client(n_workers=32, threads_per_worker=1)
client  # displays cluster/dashboard info when run as a notebook cell
  40. # %% [markdown]
  41. # # Load Data from Zarr
  42. #
  43. # Let's start by getting our data and running some preprocessing steps:
  44. # - Load 1387 (reference ground tracks) ATL11/*.zarr files
  45. # - Convert coordinates from longitude/latitude to x/y
  46. # - Convert GPS delta_time to UTC time
  47. # - Mask out low quality height (h_corr) data
  48. # %%
  49. # Xarray open_dataset preprocessor to add fields based on input filename.
  50. # Adapted from the intake.open_netcdf._add_path_to_ds function.
  51. add_path_to_ds = lambda ds: ds.assign_coords(
  52. coords=intake.source.utils.reverse_format(
  53. format_string="ATL11.002z123/ATL11_{referencegroundtrack:04d}1x_{}_{}_{}.zarr",
  54. resolved_string=ds.encoding["source"],
  55. )
  56. )
  57. # Load dataset from all Zarr stores
  58. # Aligning chunks spatially along cycle_number (i.e. time)
  59. ds: xr.Dataset = xr.open_mfdataset(
  60. paths="ATL11.002z123/ATL11_*_002_01.zarr",
  61. chunks="auto",
  62. engine="zarr",
  63. combine="nested",
  64. concat_dim="ref_pt",
  65. parallel="True",
  66. preprocess=add_path_to_ds,
  67. backend_kwargs={"consolidated": True},
  68. )
  69. # ds = ds.unify_chunks().compute()
  70. # TODO use intake, wait for https://github.com/intake/intake-xarray/issues/70
  71. # source = intake.open_ndzarr(url="ATL11.002z123/ATL11_0*.zarr")
  72. # %% [markdown]
  73. # ## Convert geographic lon/lat to x/y
  74. #
  75. # To center our plot on the South Pole,
  76. # we'll reproject the original longitude/latitude coordinates
  77. # to the Antarctic Polar Stereographic (EPSG:3031) projection.
  78. # %%
  79. ds["x"], ds["y"] = deepicedrain.lonlat_to_xy(
  80. longitude=ds.longitude, latitude=ds.latitude
  81. )
  82. # %%
  83. # Also set x/y as coordinates in xarray.Dataset instead of longitude/latitude
  84. ds: xr.Dataset = ds.set_coords(names=["x", "y"])
  85. ds: xr.Dataset = ds.reset_coords(names=["longitude", "latitude"])
  86. # %% [markdown]
  87. # ## Convert delta_time to utc_time
  88. #
  89. # To get more human-readable datetimes,
  90. # we'll convert the delta_time attribute from the original GPS time format
  91. # (nanoseconds since the beginning of ICESat-2 starting epoch)
  92. # to Coordinated Universal Time (UTC).
  93. # The reference date for the ICESat-2 Epoch is 2018 January 1st according to
  94. # https://github.com/SmithB/pointCollection/blob/master/is2_calendar.py#L11-L15
  95. #
  96. # TODO: Account for [leap seconds](https://en.wikipedia.org/wiki/Leap_second)
  97. # in the future.
  98. # %%
  99. ds["utc_time"] = ds.delta_time.rename(new_name_or_name_dict="utc_time")
  100. # ds["utc_time"] = deepicedrain.deltatime_to_utctime(dataarray=ds.delta_time)
  101. # %% [markdown]
  102. # ## Mask out low quality height data
  103. #
  104. # Good quality data has value 0, not so good is > 0.
  105. # Look at the 'fit_quality' attribute in `ds`
  106. # for more information on what this quality flag means.
  107. #
  108. # We'll mask out values other than 0 with NaN using xarray's
  109. # [where](http://xarray.pydata.org/en/v0.15.1/indexing.html#masking-with-where).
  110. # %%
  111. ds["h_corr"] = ds.h_corr.where(cond=ds.fit_quality == 0)
  112. # %%
  113. # %% [markdown]
  114. # ## Subset to geographic region of interest (optional)
  115. #
  116. # Take a geographical subset and save to a NetCDF/Zarr format for distribution.
  117. # %%
  118. # Antarctic bounding box locations with EPSG:3031 coordinates
  119. regions = gpd.read_file(filename="deepicedrain/deepicedrain_regions.geojson")
  120. regions: gpd.GeoDataFrame = regions.set_index(keys="placename")
  121. # Subset to essential columns
  122. essential_columns: list = [
  123. "x",
  124. "y",
  125. "utc_time",
  126. "h_corr",
  127. "longitude",
  128. "latitude",
  129. "delta_time",
  130. "cycle_number",
  131. ]
  132. # %%
  133. # Do the actual computation to find data points within region of interest
  134. placename: str = "kamb" # Select Kamb Ice Stream region
  135. region: deepicedrain.Region = deepicedrain.Region.from_gdf(gdf=regions.loc[placename])
  136. ds_subset: xr.Dataset = region.subset(data=ds)
  137. ds_subset = ds_subset.unify_chunks()
  138. ds_subset = ds_subset.compute()
  139. # %%
  140. # Save to Zarr/NetCDF formats for distribution
  141. ds_subset.to_zarr(
  142. store=f"ATLXI/ds_subset_{placename}.zarr", mode="w", consolidated=True
  143. )
  144. ds_subset.to_netcdf(path=f"ATLXI/ds_subset_{placename}.nc", engine="h5netcdf")
  145. # %%
  146. # Look at Cycle Number 7 only for plotting
  147. points_subset = hv.Points(
  148. data=ds_subset.sel(cycle_number=7)[[*essential_columns]],
  149. label="Cycle_7",
  150. kdims=["x", "y"],
  151. vdims=["utc_time", "h_corr", "cycle_number", "referencegroundtrack"],
  152. datatype=["xarray"],
  153. )
  154. df_subset = points_subset.dframe()
  155. # %%
  156. # Plot our subset of points on an interactive map
  157. df_subset.hvplot.points(
  158. title=f"Elevation (metres) at Cycle 7",
  159. x="x",
  160. y="y",
  161. c="referencegroundtrack",
  162. cmap="Set3",
  163. # rasterize=True,
  164. hover=True,
  165. datashade=True,
  166. dynspread=True,
  167. )
  168. # %% [markdown]
  169. # # Pivot into a pandas/dask dataframe
  170. #
  171. # To make data analysis and plotting easier,
  172. # let's flatten our n-dimensional `xarray.Dataset`
  173. # to a 2-dimensiontal `pandas.DataFrame` table format.
  174. #
  175. # There are currently 8 cycles (as of July 2020),
  176. # and by selecting just one cycle at a time,
  177. # we can see what the height (`h_corr`)
  178. # of the ice is like at that time.
  179. # %% [markdown]
  180. # ## Looking at ICESat-2 Cycle 7
  181. # %%
  182. cycle_number: int = 7
  183. dss = ds.sel(cycle_number=cycle_number)[[*essential_columns]]
  184. print(dss)
  185. # %%
  186. points = hv.Points(
  187. data=dss,
  188. label=f"Cycle_{cycle_number}",
  189. kdims=["x", "y"],
  190. vdims=["utc_time", "h_corr", "cycle_number"],
  191. datatype=["xarray"],
  192. )
  193. # %%
  194. df = points.dframe() # convert to pandas.DataFrame, slow
  195. df = df.dropna() # drop empty rows
  196. print(len(df))
  197. df.head()
  198. # %% [markdown]
  199. # ### Plot a sample of the points over Antarctica
  200. #
  201. # Let's take a look at an interactive map
  202. # of the ICESat-2 ATL11 height for Cycle 6!
  203. # We'll plot a random sample (n=5 million)
  204. # of the points instead of the whole dataset,
  205. # it should give a good enough picture.
  206. # %%
  207. df.sample(n=5_000_000).hvplot.points(
  208. title=f"Elevation (metres) at Cycle {cycle_number}",
  209. x="x",
  210. y="y",
  211. c="h_corr",
  212. cmap="Blues",
  213. rasterize=True,
  214. hover=True,
  215. )
  216. # %%
  217. # %% [markdown]
  218. # # Calculate Elevation Change (dh) over ICESAT-2 cycles!!
  219. #
  220. # Let's take a look at the change in elevation over a year (4 ICESat-2 cycles).
  221. # From our loaded dataset (ds), we'll select Cycles 3 and 7,
  222. # and subtract the height (h_corr) between them to get a height difference (dh).
  223. # %%
  224. dh: xr.DataArray = deepicedrain.calculate_delta(
  225. dataset=ds, oldcyclenum=3, newcyclenum=7, variable="h_corr"
  226. )
  227. # %%
  228. # Persist data in memory
  229. dh = dh.persist()
  230. # %%
  231. delta_h: xr.Dataset = dh.dropna(dim="ref_pt").to_dataset(name="delta_height")
  232. print(delta_h)
  233. # %%
  234. df_dh: pd.DataFrame = delta_h.to_dataframe()
  235. print(df_dh.head())
  236. # %%
  237. # Save or Load delta height data
  238. # df_dh.to_parquet(f"ATLXI/df_dh_{placename}.parquet")
  239. # df_dh: pd.DataFrame = pd.read_parquet(f"ATLXI/df_dh_{placename}.parquet")
  240. # df_dh = df_dh.sample(n=1_000_000)
  241. # %% [markdown]
  242. # ## Plot elevation difference for a region
  243. #
  244. # Using [datashader](https://datashader.org) to make the plotting real fast,
  245. # it actually rasterizes the vector points into a raster grid,
  246. # since our eyes can't see millions of points that well anyway.
  247. # You can choose any region, but we'll focus on the Siple Coast Ice Streams.
  248. # Using [PyGMT](https://pygmt.org), we'll plot the Antarctic grounding line
  249. # as well as the ATL11 height changes overlaid with Subglacial Lake outlines
  250. # from [Smith et al., 2009](https://doi.org/10.3189/002214309789470879).
  251. # %%
  252. # Select region here, see also geodataframe of regions at top
  253. placename: str = "antarctica"
  254. region: deepicedrain.Region = deepicedrain.Region.from_gdf(gdf=regions.loc[placename])
  255. # %%
  256. # Find subglacial lakes (Smith et al., 2009) within region of interest
  257. subglacial_lakes_gdf = gpd.read_file(
  258. filename=r"Quantarctica3/Glaciology/Subglacial Lakes/SubglacialLakes_Smith.shp"
  259. )
  260. subglacial_lakes_gdf = subglacial_lakes_gdf.loc[
  261. subglacial_lakes_gdf.within(
  262. shapely.geometry.Polygon.from_bounds(*region.bounds(style="lbrt"))
  263. )
  264. ]
  265. subglacial_lakes_geom = [g for g in subglacial_lakes_gdf.geometry]
  266. subglacial_lakes = [
  267. np.dstack(g.exterior.coords.xy).squeeze().astype(np.float32)
  268. for g in subglacial_lakes_geom
  269. ]
  270. # %%
  271. # Datashade our height values (vector points) onto a grid (raster image)
  272. agg_grid: xr.DataArray = region.datashade(df=df_dh, z_dim="delta_height")
  273. print(agg_grid)
# %%
# Plot our map!
scale: int = region.scale
fig = pygmt.Figure()
# Optional MODIS satellite imagery basemap (currently disabled)
# fig.grdimage(
#     grid="Quantarctica3/SatelliteImagery/MODIS/MODIS_Mosaic.tif",
#     region=region,
#     projection=f"x{scale}",
#     I="+d",
# )
# Diverging colormap spanning -2 to +2 (metres of elevation change)
pygmt.makecpt(cmap="roma", series=[-2, 2])
# Plot the datashaded height-change raster grid
fig.grdimage(
    grid=agg_grid,
    region=region.bounds(),
    projection=f"x1:{scale}",  # linear Cartesian projection at 1:scale
    frame=["afg", f'WSne+t"ICESat-2 Ice Surface Change over {region.name}"'],
    Q=True,  # NOTE(review): presumably renders NaN nodes transparent — confirm against GMT grdimage -Q docs
)
# Overlay each subglacial lake outline on top of the raster
for subglacial_lake in subglacial_lakes:
    # L=True: presumably closes each polygon outline — confirm against GMT plot -L docs
    fig.plot(data=subglacial_lake, L=True, pen="thinnest")
fig.colorbar(
    position="JCR+e", frame=["af", 'x+l"Elevation Change from Cycle 3 to 7"', "y+lm"]
)
# Draw coastlines/shorelines (e.g. the Antarctic grounding line)
fig.coast(
    region=region.bounds(),
    projection=f"s0/-90/-71/1:{scale}",  # polar stereographic projection string
    area_thresh="+ag",
    resolution="i",
    shorelines="0.5p",
    # land="snow4",
    # water="snow3",
    V="q",  # NOTE(review): presumably quiet verbosity — confirm
)
fig.savefig(f"figures/plot_atl11_dh37_{placename}.png")
fig.show(width=600)
  309. # %%
  310. # %% [markdown]
  311. # #### Non-PyGMT plotting code on PyViz stack
  312. #
  313. # Meant to be a bit more interactive but slightly buggy,
  314. # need to sort out python dependency issues.
  315. # %%
  316. shade_grid = datashader.transfer_functions.shade(
  317. agg=agg_grid, cmap=matplotlib.cm.RdYlBu, how="linear", span=[-2, 2]
  318. )
  319. spread_grid = datashader.transfer_functions.dynspread(shade_grid)
  320. spread_grid
  321. # %%
  322. df_dh.hvplot.points(
  323. # title="Elevation Change (metres) from Cycle 5 to 6",
  324. x="x",
  325. y="y",
  326. c="delta_height",
  327. # cmap="RdYlBu",
  328. # aggregator=datashader.mean("delta_height"),
  329. rasterize=True,
  330. # responsive=True,
  331. # datashade=True,
  332. # dynamic=True,
  333. # dynspread=True,
  334. hover=True,
  335. height=400,
  336. symmetric=True,
  337. clim=(-20, 20),
  338. )
  339. # %%
  340. points = hv.Points(
  341. data=df_dh,
  342. kdims=["x", "y"],
  343. vdims=["delta_height"],
  344. # datatype=["xarray"],
  345. )
  346. # %%
  347. hv.operation.datashader.datashade(points)
  348. # %%
Tip!

Press p to see the previous file, or n to see the next file

Comments

Loading...