atlxi_xover.py
# -*- coding: utf-8 -*-
# ---
# jupyter:
#   jupytext:
#     formats: ipynb,py:hydrogen
#     text_representation:
#       extension: .py
#       format_name: hydrogen
#       format_version: '1.3'
#     jupytext_version: 1.11.3
#   kernelspec:
#     display_name: deepicedrain
#     language: python
#     name: deepicedrain
# ---
# %% [markdown]
# # **ICESat-2 Crossover Track Analysis**
#
# To increase the temporal resolution of our ice elevation change analysis
# (i.e. to resolve time periods shorter than the 91-day repeat cycle of
# ICESat-2), we can look at the locations where ICESat-2 tracks intersect
# and compare the height values measured there.
# Uses [pygmt.x2sys_cross](https://www.pygmt.org/v0.2.0/api/generated/pygmt.x2sys_cross.html).
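#
# At each crossover, the same ground location is sampled on two different dates
# (t_1 and t_2) with two measured heights (h_1 and h_2), so the height
# difference h_X = h_2 - h_1 over the elapsed time gives a local dh/dt
# estimate between satellite passes (see the crossover error cell below).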
#
# References:
# - Wessel, P. (2010). Tools for analyzing intersecting tracks: The x2sys package.
#   Computers & Geosciences, 36(3), 348–354. https://doi.org/10.1016/j.cageo.2009.05.009
# %%
import itertools
import os

import dask
import dask.distributed  # for the distributed Client and as_completed used below
import deepicedrain
import geopandas as gpd
import numpy as np
import pandas as pd
import pint
import pint_pandas
import pygmt
import shapely.geometry
import tqdm
import uncertainties
# %%
ureg = pint.UnitRegistry()
pint_pandas.PintType.ureg = ureg

tag: str = "X2SYS"
os.environ["X2SYS_HOME"] = os.path.abspath(tag)
client = dask.distributed.Client(
    n_workers=4, threads_per_worker=1, env={"X2SYS_HOME": os.environ["X2SYS_HOME"]}
)
client

# %%
min_date, max_date = ("2019-03-29", "2020-12-24")

# %%
# Initialize X2SYS database in the X2SYS/ICESAT2 folder
pygmt.x2sys_init(
    tag="ICESAT2",
    fmtfile=f"{tag}/ICESAT2/xyht",
    suffix="tsv",
    units=["de", "se"],  # distance in metres, speed in metres per second
    gap="d250e",  # distance gap up to 250 metres allowed
    force=True,
    verbose="q",
)
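# Note: the "xyht" format definition file referenced above is assumed to describe
# the column layout of the .tsv track files, i.e. x, y, height and time, matching
# the x, y, h_corr, utc_time columns submitted to x2sys_cross further below.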
# %% [markdown]
# # Select a subglacial lake to examine

# %%
# Save or load dhdt data from Parquet file
placename: str = "whillans_upstream"  # "slessor_downstream"
df_dhdt: pd.DataFrame = pd.read_parquet(f"ATLXI/df_dhdt_{placename.lower()}.parquet")

# %%
# Choose one Antarctic active subglacial lake polygon with EPSG:3031 coordinates
lake_name: str = "Whillans IX"
lake_catalog = deepicedrain.catalog.subglacial_lakes()
lake_ids, transect_id = (
    pd.json_normalize(lake_catalog.metadata["lakedict"])
    .query("lakename == @lake_name")[["ids", "transect"]]
    .iloc[0]
)
lake = (
    lake_catalog.read()
    .loc[lake_ids]
    .dissolve(by=np.zeros(shape=len(lake_ids), dtype="int64"), as_index=False)
    .squeeze()
)

region = deepicedrain.Region.from_gdf(gdf=lake, name=lake_name)
draining: bool = lake.inner_dhdt < 0
print(lake)
lake.geometry

# %%
# Subset data to lake of interest
placename: str = region.name.lower().replace(" ", "_")
df_lake: pd.DataFrame = region.subset(data=df_dhdt)  # bbox subset
gdf_lake = gpd.GeoDataFrame(
    df_lake, geometry=gpd.points_from_xy(x=df_lake.x, y=df_lake.y, crs=3031)
)
df_lake: pd.DataFrame = df_lake.loc[gdf_lake.within(lake.geometry)]  # polygon subset
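# The subset above is done in two passes: a cheap rectangular bounding box filter
# first, followed by the exact (but slower) point-in-polygon test on the already
# reduced set of points.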
# %%
# Run crossover analysis on all tracks
track_dict: dict = deepicedrain.split_tracks(df=df_lake)
rgts, tracks = track_dict.keys(), track_dict.values()
# Parallelized paired crossover analysis
futures: list = []
for rgt1, rgt2 in itertools.combinations(rgts, r=2):
    # skip if same referencegroundtrack but different laser pair
    # as they are parallel and won't cross
    if rgt1[:4] == rgt2[:4]:
        continue
    track1 = track_dict[rgt1][["x", "y", "h_corr", "utc_time"]]
    track2 = track_dict[rgt2][["x", "y", "h_corr", "utc_time"]]
    shape1 = shapely.geometry.LineString(coordinates=track1[["x", "y"]].to_numpy())
    shape2 = shapely.geometry.LineString(coordinates=track2[["x", "y"]].to_numpy())
    if not shape1.intersects(shape2):
        continue
    future = client.submit(
        key=f"{rgt1}x{rgt2}",
        func=pygmt.x2sys_cross,
        tracks=[track1, track2],
        tag="ICESAT2",
        # region=[-460000, -400000, -560000, -500000],
        interpolation="l",  # linear interpolation
        coe="e",  # external crossovers
        trackvalues=True,  # Get track 1 height (h_1) and track 2 height (h_2)
        # trackvalues=False,  # Get crossover error (h_X) and mean height value (h_M)
        # outfile="xover_236_562.tsv"
    )
    futures.append(future)
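# Each successful x2sys_cross call returns a table of crossover points; the columns
# used downstream are x, y (intersection location), t_1, t_2 (observation times on
# the two tracks) and h_1, h_2 (the interpolated track heights at the intersection).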
# %%
crossovers: dict = {}
for f in tqdm.tqdm(
    iterable=dask.distributed.as_completed(futures=futures), total=len(futures)
):
    if f.status != "error":  # skip those track pairs which don't intersect
        crossovers[f.key] = f.result().dropna().reset_index(drop=True)

df_cross: pd.DataFrame = pd.concat(objs=crossovers, names=["track1_track2", "id"])
df: pd.DataFrame = df_cross.reset_index(level="track1_track2").reset_index(drop=True)
# Report on how many unique crossover intersections there were
# df.plot.scatter(x="x", y="y")  # quick plot of our crossover points
print(
    f"{len(df.groupby(by=['x', 'y']))} crossover intersection point locations found "
    f"with {len(df)} crossover height-time pairs "
    f"over {len(tracks)} tracks"
)
# %%
# Calculate crossover error
df["h_X"]: pd.Series = df.h_2 - df.h_1  # crossover error (i.e. height difference)
df["t_D"]: pd.Series = df.t_2 - df.t_1  # elapsed time in ns (i.e. time difference)
ns_in_yr: float = 365.25 * 24 * 60 * 60 * 1_000_000_000  # nanoseconds in a year
df["dhdt"]: pd.Series = df.h_X / (df.t_D.astype(np.int64) / ns_in_yr)
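# For a sense of scale (illustrative numbers only): a crossover height difference
# of +0.5 m between passes ~45 days apart corresponds to a dhdt of roughly
# 0.5 m / (45 / 365.25 yr) ≈ +4 m/yr.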
# %%
# Get some summary statistics of our crossover errors
sumstats: pd.DataFrame = df[["h_X", "t_D", "dhdt"]].describe()
# Find location with highest absolute crossover error, and most sudden height change
max_h_X: pd.Series = df.iloc[np.nanargmax(df.h_X.abs())]  # highest crossover error
max_dhdt: pd.Series = df.iloc[df.dhdt.argmax()]  # most sudden change in height

# %% [markdown]
# ### 2D Map view of crossover points
#
# Bird's eye view of the crossover points
# overlaid on top of the ICESat-2 tracks.

# %%
# 2D plot of crossover locations
var: str = "h_X"
fig = pygmt.Figure()
# Setup basemap
plotregion = pygmt.info(table=df[["x", "y"]], spacing=1000)
pygmt.makecpt(cmap="batlow", series=[sumstats[var]["25%"], sumstats[var]["75%"]])
# Map frame in metre units
fig.basemap(frame="f", region=plotregion, projection="X8c")
# Plot actual track points in green
for track in tracks:
    tracklabel = f"{track.iloc[0].referencegroundtrack} {track.iloc[0].pairtrack}"
    fig.plot(
        x=track.x,
        y=track.y,
        pen="thinnest,green,.",
        style=f'qN+1:+l"{tracklabel}"+f3p,Helvetica,darkgreen',
    )
# Plot crossover point locations
fig.plot(x=df.x, y=df.y, color=df.h_X, cmap=True, style="c0.1c", pen="thinnest")
# Plot lake boundary in blue
lakex, lakey = lake.geometry.exterior.coords.xy
fig.plot(x=lakex, y=lakey, pen="thin,blue,-.")
# Map frame in kilometre units
fig.basemap(
    frame=[
        f'WSne+t"Crossover points at {region.name}"',
        'xaf+l"Polar Stereographic X (km)"',
        'yaf+l"Polar Stereographic Y (km)"',
    ],
    region=plotregion / 1000,
    projection="X8c",
)
fig.colorbar(position="JMR+e", frame=['x+l"Crossover Error"', "y+lm"])
fig.savefig(f"figures/{placename}/crossover_area_{placename}_{min_date}_{max_date}.png")
fig.show()
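# Note on the two fig.basemap calls above: the data are plotted in metre units
# (EPSG:3031 coordinates), so the first call draws a bare frame in metres, while
# the second re-draws the frame with the region divided by 1000 purely so that
# the axis annotations read in kilometres.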
# %% [markdown]
# ### Plot Crossover Elevation time-series
#
# Plot elevation change over time at:
#
# 1. One single crossover point location
# 2. Many crossover locations over an area

# %%
# Tidy up dataframe first using pd.wide_to_long
# I.e. convert 't_1', 't_2', 'h_1', 'h_2' columns into just 't' and 'h'.
df_th: pd.DataFrame = deepicedrain.wide_to_long(
    df=df.loc[:, ["track1_track2", "x", "y", "t_1", "t_2", "h_1", "h_2"]],
    stubnames=["t", "h"],
    j="track",
)
df_th: pd.DataFrame = df_th.drop_duplicates(ignore_index=True)
df_th: pd.DataFrame = df_th.sort_values(by="t").reset_index(drop=True)
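# After the reshape, each crossover row (x, y, t_1, h_1, t_2, h_2) becomes two rows
# that share the same x, y and track1_track2 label:
#   track=1 -> (t=t_1, h=h_1)
#   track=2 -> (t=t_2, h=h_2)
# i.e. one (time, height) observation per track per crossover point.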
# %%
# Plot at single location with **maximum** absolute crossover height error (max_h_X)
df_max = df_th.query(expr="x == @max_h_X.x & y == @max_h_X.y")
track1, track2 = df_max.track1_track2.iloc[0].split("x")
print(f"{max_h_X.h_X:.2f} metres height change at {max_h_X.x}, {max_h_X.y}")
plotregion = np.array(
    [df_max.t.min(), df_max.t.max(), *pygmt.info(table=df_max[["h"]], spacing=2.5)[:2]]
)
plotregion += np.array([-pd.Timedelta(2, unit="W"), +pd.Timedelta(2, unit="W"), 0, 0])

fig = pygmt.Figure()
with pygmt.config(
    FONT_ANNOT_PRIMARY="9p", FORMAT_TIME_PRIMARY_MAP="abbreviated", FORMAT_DATE_MAP="o"
):
    fig.basemap(
        projection="X12c/8c",
        region=plotregion,
        frame=[
            f'WSne+t"Max elevation change over time at {region.name}"',
            "pxa1Of1o+lDate",  # primary time axis, 1 mOnth annotation and minor axis
            "sx1Y",  # secondary time axis, 1 Year intervals
            'yaf+l"Elevation at crossover (m)"',
        ],
    )
fig.text(
    text=f"Track {track1} and {track2} crossover",
    position="TC",
    offset="jTC0c/0.2c",
    verbose="q",
)
# Plot data points
fig.plot(x=df_max.t, y=df_max.h, style="c0.15c", color="darkblue", pen="thin")
# Plot dashed line connecting points
fig.plot(x=df_max.t, y=df_max.h, pen="faint,blue,-")
fig.savefig(
    f"figures/{placename}/crossover_point_{placename}_{track1}_{track2}_{min_date}_{max_date}.png"
)
fig.show()
# %%
# Plot all crossover height points over time over the lake area
fig = deepicedrain.plot_crossovers(df=df_th, regionname=region.name)
fig.savefig(f"figures/{placename}/crossover_many_{placename}_{min_date}_{max_date}.png")
fig.show()

# %%
# Calculate height anomaly at crossover point as
# height at t=n minus height at t=0 (first observation date at crossover point)
anomfunc = lambda h: h - h.iloc[0]  # lambda h: h - h.mean()
df_th["h_anom"] = df_th.groupby(by="track1_track2").h.transform(func=anomfunc)
# Calculate ice volume displacement (dvol) in unit metres^3
# and rolling mean height anomaly (h_roll) in unit metres
surface_area: pint.Quantity = lake.geometry.area * ureg.metre ** 2
ice_dvol: pd.Series = deepicedrain.ice_volume_over_time(
    df_elev=df_th.astype(dtype={"h_anom": "pint[metre]"}),
    surface_area=surface_area,
    time_col="t",
    outfile=f"figures/{placename}/ice_dvol_dt_{placename}.txt",
)
df_th["h_roll"]: pd.Series = uncertainties.unumpy.nominal_values(
    arr=ice_dvol.pint.magnitude / surface_area.magnitude
)
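# Since ice_dvol is in cubic metres and surface_area is in square metres, dividing
# one by the other recovers a rolling mean height anomaly in metres, and
# uncertainties.unumpy.nominal_values keeps just the nominal value while discarding
# the uncertainty component.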
# %%
# Plot all crossover height point anomalies over time over the lake area
fig = deepicedrain.plot_crossovers(
    df=df_th,
    regionname=region.name,
    elev_var="h_anom",
    outline_points=f"figures/{placename}/{placename}.gmt",
)
fig.plot(x=df_th.t, y=df_th.h_roll, pen="thick,-")  # plot rolling mean height anomaly
fig.savefig(
    f"figures/{placename}/crossover_anomaly_{placename}_{min_date}_{max_date}.png"
)
fig.show()

# %%

# %% [markdown]
# ## Combined ice volume displacement plot
#
# Showing how subglacial water cascades down a drainage basin!

# %%
fig = pygmt.Figure()
fig.basemap(
    region="2019-02-28/2020-09-30/-0.3/0.5",
    frame=["wSnE", "xaf", 'yaf+l"Ice Volume Displacement (km@+3@+)"'],
)
pygmt.makecpt(cmap="davosS", color_model="+c", series=(-2, 4, 0.5))
for i, (_placename, linestyle) in enumerate(
    iterable=zip(
        ["whillans_ix", "subglacial_lake_whillans", "lake_12", "whillans_7"],
        ["", ".-", "-", "..-"],
    )
):
    fig.plot(
        data=f"figures/{_placename}/ice_dvol_dt_{_placename}.txt",
        cmap=True,
        pen=f"thick,{linestyle}",
        zvalue=i,
        label=_placename,
        columns="0,3",  # time column (0), ice_dvol column (3)
    )
fig.text(
    position="TL",
    offset="j0.2c",
    text="Whillans Ice Stream Central Catchment active subglacial lakes",
)
fig.legend(position="jML+jML+o0.2c", box="+gwhite")
fig.savefig("figures/cascade_whillans_ice_stream.png")
fig.show()

# %%