#!/usr/bin/env python
"""
Project: Parallel.Stacker
Date: 6/12/18 10:28 AM
Author: Demian D. Gomez

Daily repetitivity analysis (DRA) of the GAMIT solutions of a project:
consecutive daily polyhedrons are aligned and the per-station day-to-day
repetitivities (WRMS in N, E, U) are computed, plotted and saved to JSON.
"""

import argparse
import os
from datetime import datetime
import json
# deps
import numpy as np
from tqdm import tqdm
import matplotlib
# select a non-interactive backend when there is no display available
if not os.environ.get('DISPLAY', None):
    matplotlib.use('Agg')
import matplotlib.pyplot as plt
import io
import base64
import numpy
# app
from pgamit import dbConnection
from pgamit import pyETM
from pgamit import pyDate
from pgamit import pyJobServer
from pgamit import pyOptions
from pgamit.Utils import process_date, file_write, json_converter
from pgamit.pyStack import Polyhedron, np_array_vertices
from pgamit.pyDate import Date

stn_stats = []
wrms_n = []
wrms_e = []
wrms_u = []
project = 'default'

LIMIT = 2.5
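
# Example command-line invocation (illustrative; 'my_project' is a placeholder,
# the options correspond to the argparse definitions in main() below):
#
#   python DRA.py my_project -d 2020_001 2020_365 -hist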


def sql_select(project, fields, date2):
    return '''SELECT %s from gamit_soln
    WHERE "Project" = \'%s\' AND "Year" = %i AND "DOY" = %i
    ORDER BY "NetworkCode", "StationCode"''' % (
        fields, project, date2.year, date2.doy)
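
# For illustration, sql_select('my_project', '"X", "Y", "Z"',
# Date(year=2020, doy=1)) produces (placeholder project name, whitespace
# simplified):
#
#   SELECT "X", "Y", "Z" from gamit_soln
#   WHERE "Project" = 'my_project' AND "Year" = 2020 AND "DOY" = 1
#   ORDER BY "NetworkCode", "StationCode"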


def compute_dra(ts, NetworkCode, StationCode,
                pdates, project, histogram=False):
    try:
        # load from the db
        cnn = dbConnection.Cnn('gnss_data.cfg')

        # to pass the filename back to the callback_handler
        filename = project + '_dra/' + NetworkCode + '.' + StationCode

        if ts.size:
            if ts.shape[0] > 2:
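                # build the day-to-day differences: first differences of the
                # consecutive X, Y, Z positions (numpy.diff) paired with the
                # year / DOY / fractional-year columns of the later epoch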
                dts = numpy.append(numpy.diff(ts[:, 0:3], axis=0),
                                   ts[1:, -3:], axis=1)

                dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode,
                                         StationCode, project)

                etm = pyETM.DailyRep(cnn, NetworkCode, StationCode,
                                     False, False, dra_ts)

                figfile = ''
                hisfile = ''

                if etm.A is not None:
                    figfile = etm.plot(fileio=io.BytesIO(),
                                       plot_missing=False, t_win=pdates)

                    if histogram:
                        hisfile = etm.plot_hist(fileio=io.BytesIO())

                    # save the wrms
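                    # the tuple is consumed positionally by callback_handler:
                    # [0:3] WRMS N, E, U in mm, [3] figure and [4] histogram
                    # (base64-encoded, written to <filename>.png files),
                    # [5] output file name, [6:8] network / station codes,
                    # [8:11] latitude, longitude, height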
                    return (etm.factor[0] * 1000, etm.factor[1] * 1000,
                            etm.factor[2] * 1000, figfile, hisfile,
                            filename, NetworkCode,
                            StationCode, etm.soln.lat[0],
                            etm.soln.lon[0], etm.soln.height[0])
                else:
                    return (None, None, None, figfile, hisfile,
                            filename, NetworkCode,
                            StationCode, etm.soln.lat[0],
                            etm.soln.lon[0], etm.soln.height[0])

    except Exception as e:
        raise Exception('While working on %s.%s' % (NetworkCode,
                                                    StationCode) + '\n') from e


class DRA(list):
    def __init__(self, cnn, project, start_date, end_date, verbose=False):

        super(DRA, self).__init__()

        self.project = project
        self.cnn = cnn
        self.transformations = []
        self.stats = {}
        self.verbose = verbose

        if end_date is None:
            end_date = Date(datetime=datetime.now())

        print(' >> Loading GAMIT solutions for project %s...' % project)

        gamit_vertices = self.cnn.query_float(
            'SELECT "NetworkCode" || \'.\' || "StationCode", "X", "Y", "Z",'
            ' "Year", "DOY", "FYear" '
            'FROM gamit_soln WHERE "Project" = \'%s\' AND ("Year", "DOY")'
            ' BETWEEN (%i, %i) AND (%i, %i) '
            'ORDER BY "NetworkCode", "StationCode"' % (
                project, start_date.year, start_date.doy,
                end_date.year, end_date.doy))

        self.gamit_vertices = np_array_vertices(gamit_vertices)

        dates = self.cnn.query_float(
            'SELECT "Year", "DOY" FROM gamit_soln WHERE "Project" = \'%s\' '
            'AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
            'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"'
            % (project, start_date.year, start_date.doy,
               end_date.year, end_date.doy))

        self.dates = [Date(year=int(d[0]), doy=int(d[1])) for d in dates]

        self.stations = self.cnn.query_float(
            'SELECT "NetworkCode", "StationCode" FROM gamit_soln '
            'WHERE "Project" = \'%s\' AND ("Year", "DOY") '
            'BETWEEN (%i, %i) AND (%i, %i) '
            'GROUP BY "NetworkCode", "StationCode" '
            'ORDER BY "NetworkCode", "StationCode"'
            % (project, start_date.year, start_date.doy,
               end_date.year, end_date.doy), as_dict=True)

        i = 0
        for d in tqdm(self.dates, ncols=160,
                      desc=' >> Initializing the stack polyhedrons'):
            self.append(Polyhedron(self.gamit_vertices, project, d))

            if i < len(self.dates) - 1:
                if d != self.dates[i + 1] - 1:
                    for dd in [Date(mjd=md) for md in list(range(
                            d.mjd + 1, self.dates[i + 1].mjd))]:
                        tqdm.write(' -- Missing DOY detected: %s'
                                   % dd.yyyyddd())
            i += 1

    def stack_dra(self):

        for j in tqdm(list(range(len(self) - 1)),
                      desc=' >> Daily repetitivity analysis progress',
                      ncols=160):

            # should only attempt to align a polyhedron that is unaligned
            # do not set the polyhedron as aligned
            # unless we are in the max iteration step
            self[j + 1].align(self[j], scale=False, verbose=self.verbose)

            # write info to the screen
            tqdm.write(' -- %s (%04i) %2i it wrms: %4.1f T %6.1f %6.1f %6.1f '
                       'R (%6.1f %6.1f %6.1f)*1e-9 D-W: %5.3f IQR: %4.1f' %
                       (self[j + 1].date.yyyyddd(),
                        self[j + 1].stations_used,
                        self[j + 1].iterations,
                        self[j + 1].wrms * 1000,
                        self[j + 1].helmert[3] * 1000,
                        self[j + 1].helmert[4] * 1000,
                        self[j + 1].helmert[5] * 1000,
                        self[j + 1].helmert[0],
                        self[j + 1].helmert[1],
                        self[j + 1].helmert[2],
                        self[j + 1].down_frac,
                        self[j + 1].iqr * 1000))

        self.transformations.append([poly.info() for poly in self[1:]])

    def get_station(self, NetworkCode, StationCode):
        """
        Obtains the time series for a given station

        :param NetworkCode: network code of the station
        :param StationCode: station code of the station
        :return: a numpy array with the time series [x, y, z, yr, doy, fyear]
        """
        stnstr = NetworkCode + '.' + StationCode

        ts = []

        # each element of self is one day's Polyhedron; its 'vertices' member
        # is a structured array with (at least) the fields 'stn', 'x', 'y',
        # 'z', 'yr', 'dd', 'fy' accessed below
        for poly in self:
            p = poly.vertices[poly.vertices['stn'] == stnstr]
            if p.size:
                ts.append([p['x'][0],
                           p['y'][0],
                           p['z'][0],
                           p['yr'][0],
                           p['dd'][0],
                           p['fy'][0]])

        return np.array(ts)

    def to_json(self, json_file):
        # print(repr(self.transformations))
        file_write(json_file,
                   json.dumps({'transformations': self.transformations,
                               'stats': self.stats},
                              indent=4, sort_keys=False,
                              default=json_converter))
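

# Programmatic use of the class (an illustrative sketch mirroring the steps
# in main() below; 'my_project' is a placeholder):
#
#   cnn = dbConnection.Cnn('gnss_data.cfg')
#   dra = DRA(cnn, 'my_project', Date(year=2020, doy=1),
#             Date(year=2020, doy=365))
#   dra.stack_dra()
#   dra.to_json('my_project_dra.json')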


def callback_handler(job):

    global stn_stats, wrms_n, wrms_e, wrms_u

    if job.exception:
        tqdm.write(
            ' -- Fatal error on node %s message from node follows -> \n%s' % (
                job.ip_addr, job.exception))
    elif job.result is not None:
        if job.result[0] is not None:
            # pass information about the station's stats to save in the json
            stn_stats.append({'NetworkCode': job.result[6],
                              'StationCode': job.result[7],
                              'lat': job.result[8],
                              'lon': job.result[9],
                              'height': job.result[10],
                              'wrms_n': job.result[0],
                              'wrms_e': job.result[1],
                              'wrms_u': job.result[2]})
            wrms_n.append(job.result[0])
            wrms_e.append(job.result[1])
            wrms_u.append(job.result[2])

            # save the figures, if any
            if job.result[3]:
                with open('%s.png' % job.result[5], "wb") as fh:
                    fh.write(base64.b64decode(job.result[3]))

            if job.result[4]:
                with open('%s_hist.png' % job.result[5], "wb") as fh:
                    fh.write(base64.b64decode(job.result[4]))
        else:
            tqdm.write(' -- Station %s.%s did not produce valid statistics' % (
                job.result[6], job.result[7]))


def main():

    global stn_stats, wrms_n, wrms_e, wrms_u, project

    parser = argparse.ArgumentParser(
        description='GNSS daily repetitivities analysis (DRA)')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
                        help='''Specify the project name used to process
                        the GAMIT solutions in Parallel.GAMIT.''')

    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
                        help='''Date range filter. Can be specified in
                        yyyy/mm/dd yyyy_doy wwww-d format''')

    parser.add_argument('-w', '--plot_window', nargs='+', metavar='date',
                        help='''Date window range to plot. Can be specified
                        in yyyy/mm/dd yyyy_doy wwww-d format''')

    parser.add_argument('-hist', '--histogram', action='store_true',
                        help="Plot a histogram of the daily repetitivities")

    parser.add_argument('-v', '--verbose', action='store_true',
                        help='''Provide additional information during the
                        alignment process (for debugging purposes)''')

    parser.add_argument('-np', '--noparallel', action='store_true',
                        help="Execute command without parallelization.")

    args = parser.parse_args()

    cnn = dbConnection.Cnn("gnss_data.cfg")

    project = args.project[0]

    dates = [pyDate.Date(year=1980, doy=1),
             pyDate.Date(year=2100, doy=1)]

    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions

    JobServer = pyJobServer.JobServer(
        Config, run_parallel=not args.noparallel)  # type: pyJobServer.JobServer

    try:
        dates = process_date(args.date_filter)
    except ValueError as e:
        parser.error(str(e))

    pdates = None
    if args.plot_window is not None:
        if len(args.plot_window) == 1:
            try:
                pdates = process_date(args.plot_window,
                                      missing_input=None,
                                      allow_days=False)
                pdates = (pdates[0].fyear,)
            except ValueError:
                # an integer value
                pdates = float(args.plot_window[0])
        else:
            pdates = process_date(args.plot_window)
            pdates = (pdates[0].fyear,
                      pdates[1].fyear)

    # create folder for plots
    path_plot = project + '_dra'

    if not os.path.isdir(path_plot):
        os.makedirs(path_plot)

    ########################################
    # load polyhedrons
    # create the DRA object
    dra = DRA(cnn, args.project[0], dates[0], dates[1], args.verbose)

    dra.stack_dra()

    tqdm.write(''' >> Daily repetitivity analysis done. DOYs with wrms > 8 mm
 are shown below:''')

    for i, d in enumerate(dra):
        if d.wrms is not None:
            if d.wrms > 0.008:
                tqdm.write(''' -- %s (%04i) %2i it wrms: %4.1f
 D-W: %5.3f IQR: %4.1f''' %
                           (d.date.yyyyddd(),
                            d.stations_used,
                            d.iterations,
                            d.wrms * 1000,
                            d.down_frac,
                            d.iqr * 1000))

    qbar = tqdm(total=len(dra.stations), desc=' >> Computing DRAs',
                ncols=160, disable=None)

    modules = ('pgamit.pyETM', 'pgamit.dbConnection',
               'traceback', 'io', 'numpy')

    JobServer.create_cluster(compute_dra, progress_bar=qbar,
                             callback=callback_handler,
                             modules=modules)

    # plot each DRA
    for stn in dra.stations:
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        ts = dra.get_station(NetworkCode, StationCode)
        JobServer.submit(ts, NetworkCode, StationCode, pdates,
                         project, args.histogram)
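
    # each submitted job runs compute_dra on a node; results are collected by
    # callback_handler, which fills the module-level stn_stats / wrms_* lists
    # and writes the returned figures to disk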
    JobServer.wait()

    qbar.close()

    JobServer.close_cluster()

    # add the station stats to the json output
    dra.stats = stn_stats
    dra.to_json(project + '_dra.json')

    wrms_n = np.array(wrms_n)
    wrms_e = np.array(wrms_e)
    wrms_u = np.array(wrms_u)

    # plot the WRMS of the DRA stack and number of stations
    f, axis = plt.subplots(nrows=3, ncols=2, figsize=(15, 10))  # type: plt.subplots
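
    # figure layout: the left column shows per-day time series of the stack
    # (WRMS, station count, downweighted fraction); the right column shows
    # histograms of the per-station N, E, U daily repetitivities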
    # WRMS
    ax = axis[0][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['wrms'] * 1000 for t in dra.transformations[0]], 'ob',
            markersize=2)
    ax.set_ylabel('WRMS [mm]')
    ax.grid(True)
    ax.set_ylim(0, 10)

    # station count
    ax = axis[1][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['stations_used'] for t in dra.transformations[0]], 'ob',
            markersize=2)
    ax.set_ylabel('Station count')
    ax.grid(True)

    # d-w fraction
    ax = axis[2][0]
    ax.plot([t['fyear'] for t in dra.transformations[0]],
            [t['downweighted_fraction'] for t in dra.transformations[0]],
            'ob', markersize=2)
    ax.set_ylabel('DW fraction')
    ax.grid(True)

    ax = axis[0][1]
    ax.hist(wrms_n[wrms_n <= 8], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 8)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit N [mm]')
    ax.set_title('Daily repetitivities NEU')

    ax = axis[1][1]
    ax.hist(wrms_e[wrms_e <= 8], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 8)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit E [mm]')

    ax = axis[2][1]
    ax.hist(wrms_u[wrms_u <= 10], 40, alpha=0.75, facecolor='blue')
    ax.grid(True)
    ax.set_xlim(0, 10)
    ax.set_ylabel('# stations')
    ax.set_xlabel('WRMS misfit U [mm]')

    f.suptitle('Daily repetitivity analysis for project %s\n'
               'Solutions with WRMS > 10 mm are not shown' % project,
               fontsize=12, family='monospace')

    plt.savefig(project + '_dra.png')
    plt.close()


if __name__ == '__main__':
    main()