if not os.environ.get('DISPLAY', None):
    matplotlib.use('Agg')
import matplotlib.pyplot as plt
-import traceback
import io
import base64
import numpy
def sql_select(project, fields, date2):
    return '''SELECT %s from gamit_soln
    WHERE "Project" = \'%s\' AND "Year" = %i AND "DOY" = %i
-    ORDER BY "NetworkCode", "StationCode"''' % (fields, project, date2.year, date2.doy)
+    ORDER BY "NetworkCode", "StationCode"''' % (
+        fields, project, date2.year, date2.doy)
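For reference, a minimal sketch of the query this helper renders; the project
name, field list, and date below are made up, and date2 is assumed to expose
year and doy attributes the way pyDate.Date does:

    sql_select('igs-test', '"X", "Y", "Z"', pyDate.Date(year=2023, doy=45))
    # -> SELECT "X", "Y", "Z" from gamit_soln
    #    WHERE "Project" = 'igs-test' AND "Year" = 2023 AND "DOY" = 45
    #    ORDER BY "NetworkCode", "StationCode"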


-def compute_dra(ts, NetworkCode, StationCode, pdates, project, histogram=False):
+def compute_dra(ts, NetworkCode, StationCode,
+                pdates, project, histogram=False):
    try:
        # load from the db
        cnn = dbConnection.Cnn('gnss_data.cfg')
@@ -57,27 +58,37 @@ def compute_dra(ts, NetworkCode, StationCode, pdates, project, histogram=False):
        if ts.size:

            if ts.shape[0] > 2:
-                dts = numpy.append(numpy.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)
+                dts = numpy.append(numpy.diff(ts[:, 0:3], axis=0),
+                                   ts[1:, -3:], axis=1)

-                dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode, project)
+                dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode,
+                                         StationCode, project)

-                etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False, False, dra_ts)
+                etm = pyETM.DailyRep(cnn, NetworkCode, StationCode,
+                                     False, False, dra_ts)

                figfile = ''
                hisfile = ''

                if etm.A is not None:
-                    figfile = etm.plot(fileio=io.BytesIO(), plot_missing=False, t_win=pdates)
+                    figfile = etm.plot(fileio=io.BytesIO(),
+                                       plot_missing=False, t_win=pdates)
                    if histogram:
                        hisfile = etm.plot_hist(fileio=io.BytesIO())
                    # save the wrms
-                    return etm.factor[0] * 1000, etm.factor[1] * 1000, etm.factor[2] * 1000, figfile, hisfile, \
-                        filename, NetworkCode, StationCode, etm.soln.lat[0], etm.soln.lon[0], etm.soln.height[0]
+                    return (etm.factor[0] * 1000, etm.factor[1] * 1000,
+                            etm.factor[2] * 1000, figfile, hisfile,
+                            filename, NetworkCode,
+                            StationCode, etm.soln.lat[0],
+                            etm.soln.lon[0], etm.soln.height[0])
                else:
-                    return None, None, None, figfile, hisfile, filename, NetworkCode, StationCode, etm.soln.lat[0], \
-                        etm.soln.lon[0], etm.soln.height[0]
+                    return (None, None, None, figfile, hisfile,
+                            filename, NetworkCode,
+                            StationCode, etm.soln.lat[0],
+                            etm.soln.lon[0], etm.soln.height[0])

    except Exception as e:
-        raise Exception('While working on %s.%s' % (NetworkCode, StationCode) + '\n') from e
+        raise Exception('While working on %s.%s' % (NetworkCode,
+                                                    StationCode) + '\n') from e
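To make the first-difference step above concrete, here is a small sketch; the
column layout [X, Y, Z, Year, DOY, FYear] is an assumption inferred from the
gamit_soln query in DRA.__init__ below, and the numbers are invented:

    ts = numpy.array([[100.000, 200.000, 300.000, 2023, 1, 2023.0014],
                      [100.002, 200.001, 299.999, 2023, 2, 2023.0041]])
    dts = numpy.append(numpy.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)
    # dts ~= [[0.002, 0.001, -0.001, 2023, 2, 2023.0041]]
    # i.e. the day-to-day coordinate change tagged with the epoch of the later
    # day, which pyETM.DailyRep presumably fits to produce the per-component
    # repetitivity (the etm.factor values scaled to mm above).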


class DRA(list):
@@ -86,55 +97,67 @@ def __init__(self, cnn, project, start_date, end_date, verbose=False):

        super(DRA, self).__init__()

-        self.project         = project
-        self.cnn             = cnn
+        self.project = project
+        self.cnn = cnn
        self.transformations = []
-        self.stats           = {}
-        self.verbose         = verbose
+        self.stats = {}
+        self.verbose = verbose

        if end_date is None:
            end_date = Date(datetime=datetime.now())

        print(' >> Loading GAMIT solutions for project %s...' % project)

        gamit_vertices = self.cnn.query_float(
-            'SELECT "NetworkCode" || \'.\' || "StationCode", "X", "Y", "Z", "Year", "DOY", "FYear" '
-            'FROM gamit_soln WHERE "Project" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
-            'ORDER BY "NetworkCode", "StationCode"' % (project, start_date.year, start_date.doy,
-                                                       end_date.year, end_date.doy))
+            'SELECT "NetworkCode" || \'.\' || "StationCode", "X", "Y", "Z",'
+            ' "Year", "DOY", "FYear" '
+            'FROM gamit_soln WHERE "Project" = \'%s\' AND ("Year", "DOY")'
+            ' BETWEEN (%i, %i) AND (%i, %i) '
+            'ORDER BY "NetworkCode", "StationCode"' % (
+                project, start_date.year, start_date.doy,
+                end_date.year, end_date.doy))

        self.gamit_vertices = np_array_vertices(gamit_vertices)

-        dates = self.cnn.query_float('SELECT "Year", "DOY" FROM gamit_soln WHERE "Project" = \'%s\' '
-                                     'AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
-                                     'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"'
-                                     % (project, start_date.year, start_date.doy, end_date.year, end_date.doy))
+        dates = self.cnn.query_float(
+            'SELECT "Year", "DOY" FROM gamit_soln WHERE "Project" = \'%s\' '
+            'AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
+            'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"'
+            % (project, start_date.year, start_date.doy,
+               end_date.year, end_date.doy))

        self.dates = [Date(year=int(d[0]), doy=int(d[1])) for d in dates]

-        self.stations = self.cnn.query_float('SELECT "NetworkCode", "StationCode" FROM gamit_soln '
-                                             'WHERE "Project" = \'%s\' AND ("Year", "DOY") '
-                                             'BETWEEN (%i, %i) AND (%i, %i) '
-                                             'GROUP BY "NetworkCode", "StationCode" '
-                                             'ORDER BY "NetworkCode", "StationCode"'
-                                             % (project, start_date.year, start_date.doy,
-                                                end_date.year, end_date.doy), as_dict=True)
+        self.stations = self.cnn.query_float(
+            'SELECT "NetworkCode", "StationCode" FROM gamit_soln '
+            'WHERE "Project" = \'%s\' AND ("Year", "DOY") '
+            'BETWEEN (%i, %i) AND (%i, %i) '
+            'GROUP BY "NetworkCode", "StationCode" '
+            'ORDER BY "NetworkCode", "StationCode"'
+            % (project, start_date.year, start_date.doy,
+               end_date.year, end_date.doy), as_dict=True)

        i = 0
-        for d in tqdm(self.dates, ncols=160, desc=' >> Initializing the stack polyhedrons'):
+        for d in tqdm(self.dates, ncols=160,
+                      desc=' >> Initializing the stack polyhedrons'):
            self.append(Polyhedron(self.gamit_vertices, project, d))
            if i < len(self.dates) - 1:
                if d != self.dates[i + 1] - 1:
-                    for dd in [Date(mjd=md) for md in list(range(d.mjd + 1, self.dates[i + 1].mjd))]:
-                        tqdm.write(' -- Missing DOY detected: %s' % dd.yyyyddd())
+                    for dd in [Date(mjd=md) for md in list(range(
+                            d.mjd + 1, self.dates[i + 1].mjd))]:
+                        tqdm.write(' -- Missing DOY detected: %s'
+                                   % dd.yyyyddd())
            i += 1

    def stack_dra(self):

-        for j in tqdm(list(range(len(self) - 1)), desc=' >> Daily repetitivity analysis progress', ncols=160):
+        for j in tqdm(list(range(len(self) - 1)),
+                      desc=' >> Daily repetitivity analysis progress',
+                      ncols=160):

            # should only attempt to align a polyhedron that is unaligned
-            # do not set the polyhedron as aligned unless we are in the max iteration step
+            # do not set the polyhedron as aligned
+            # unless we are in the max iteration step
            self[j + 1].align(self[j], scale=False, verbose=self.verbose)
            # write info to the screen
            tqdm.write(' -- %s (%04i) %2i it wrms: %4.1f T %6.1f %6.1f %6.1f '
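A rough driver sketch for this class, matching the __init__ signature shown in
the hunk header above; the project name, dates, and output file name are
placeholders, and the exact call sequence used by main() is largely outside
this diff:

    cnn = dbConnection.Cnn('gnss_data.cfg')
    dra = DRA(cnn, 'igs-test', Date(year=2023, doy=1), Date(year=2023, doy=31))
    dra.stack_dra()                   # align each day's polyhedron to the previous one
    dra.to_json('igs-test_dra.json')  # dump the per-day transformations and stats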
@@ -182,7 +205,8 @@ def get_station(self, NetworkCode, StationCode):
    def to_json(self, json_file):
        # print(repr(self.transformations))
        file_write(json_file,
-                   json.dumps({'transformations': self.transformations, 'stats': self.stats},
+                   json.dumps({'transformations': self.transformations,
+                               'stats': self.stats},
                              indent=4, sort_keys=False, default=json_converter
                              ))

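For orientation, the dump written here has roughly the following shape; the
keys inside each transformation record ('fyear', 'wrms', 'stations_used',
'downweighted_fraction') are inferred from the plotting code near the end of
main() and are probably not the complete set:

    {
        "transformations": [[{"fyear": ..., "wrms": ..., "stations_used": ...,
                              "downweighted_fraction": ...}, ...], ...],
        "stats": {...}
    }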
@@ -192,7 +216,9 @@ def callback_handler(job):
    global stn_stats, wrms_n, wrms_e, wrms_u

    if job.exception:
-        tqdm.write(' -- Fatal error on node %s message from node follows -> \n%s' % (job.ip_addr, job.exception))
+        tqdm.write(
+            ' -- Fatal error on node %s message from node follows -> \n%s' % (
+                job.ip_addr, job.exception))
    elif job.result is not None:
        if job.result[0] is not None:
            # pass information about the station's stats to save in the json
@@ -218,29 +244,36 @@ def callback_handler(job):
                with open('%s_hist.png' % job.result[5], "wb") as fh:
                    fh.write(base64.b64decode(job.result[4]))
        else:
-            tqdm.write(' -- Station %s.%s did not produce valid statistics' % (job.result[6], job.result[7]))
+            tqdm.write(' -- Station %s.%s did not produce valid statistics' % (
+                job.result[6], job.result[7]))
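As a reading aid for the job.result indices used in this handler: the tuple
layout follows compute_dra's return statements earlier in the diff; calling
the first three values the N, E and U wrms is an assumption based on the
wrms_n/wrms_e/wrms_u globals:

    # job.result layout (from compute_dra's return value):
    #   [0:3]   etm.factor * 1000 -- presumably wrms N, E, U in mm
    #   [3]     figfile  -- ETM plot, base64-encoded, or '' if etm.A is None
    #   [4]     hisfile  -- histogram plot, base64-encoded, or ''
    #   [5]     filename -- prefix for the PNG files written here
    #   [6:8]   NetworkCode, StationCode
    #   [8:11]  etm.soln.lat[0], etm.soln.lon[0], etm.soln.height[0]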


def main():

    global stn_stats, wrms_n, wrms_e, wrms_u, project

-    parser = argparse.ArgumentParser(description='GNSS daily repetitivities analysis (DRA)')
+    parser = argparse.ArgumentParser(
+        description='GNSS daily repetitivities analysis (DRA)')

    parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
-                        help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
+                        help='''Specify the project name used to process
+                        the GAMIT solutions in Parallel.GAMIT.''')

    parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
-                        help='Date range filter. Can be specified in yyyy/mm/dd yyyy_doy wwww-d format')
+                        help='''Date range filter. Can be specified in
+                        yyyy/mm/dd yyyy_doy wwww-d format''')

    parser.add_argument('-w', '--plot_window', nargs='+', metavar='date',
-                        help='Date window range to plot. Can be specified in yyyy/mm/dd yyyy_doy wwww-d format')
+                        help='''Date window range to plot. Can be specified
+                        in yyyy/mm/dd yyyy_doy wwww-d format''')
    parser.add_argument('-hist', '--histogram', action='store_true',
                        help="Plot a histogram of the daily repetitivities")
    parser.add_argument('-v', '--verbose', action='store_true',
-                        help="Provide additional information during the alignment process (for debugging purposes)")
+                        help='''Provide additional information during the
+                        alignment process (for debugging purposes)''')

-    parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")
+    parser.add_argument('-np', '--noparallel', action='store_true',
+                        help="Execute command without parallelization.")

    args = parser.parse_args()

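A hedged invocation sketch; the script name and project are placeholders, but
the flags and date formats follow the argparse definitions above:

    python pyDRA.py igs-test -d 2023_001 2023_365 -w 2023_100 2023_200 -hist -v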
@@ -251,8 +284,11 @@ def main():
    dates = [pyDate.Date(year=1980, doy=1),
             pyDate.Date(year=2100, doy=1)]

-    Config = pyOptions.ReadOptions("gnss_data.cfg")  # type: pyOptions.ReadOptions
-    JobServer = pyJobServer.JobServer(Config, run_parallel=not args.noparallel)  # type: pyJobServer.JobServer
+    Config = pyOptions.ReadOptions("gnss_data.cfg")
+    # type: pyOptions.ReadOptions
+    JobServer = pyJobServer.JobServer(
+        Config, run_parallel=not args.noparallel)
+    # type: pyJobServer.JobServer

    try:
        dates = process_date(args.date_filter)
@@ -263,7 +299,9 @@ def main():
    if args.plot_window is not None:
        if len(args.plot_window) == 1:
            try:
-                pdates = process_date(args.plot_window, missing_input=None, allow_days=False)
+                pdates = process_date(args.plot_window,
+                                      missing_input=None,
+                                      allow_days=False)
                pdates = (pdates[0].fyear,)
            except ValueError:
                # an integer value
@@ -285,32 +323,37 @@ def main():

    dra.stack_dra()

-    missing_doys = []
-
-    tqdm.write(' >> Daily repetitivity analysis done. DOYs with wrms > 8 mm are shown below:')
+    tqdm.write(''' >> Daily repetitivity analysis done. DOYs with wrms > 8 mm
+               are shown below:''')
    for i, d in enumerate(dra):
        if d.wrms is not None:
            if d.wrms > 0.008:
-                tqdm.write(' -- %s (%04i) %2i it wrms: %4.1f D-W: %5.3f IQR: %4.1f' %
+                tqdm.write(''' -- %s (%04i) %2i it wrms: %4.1f
+                           D-W: %5.3f IQR: %4.1f''' %
                           (d.date.yyyyddd(),
                            d.stations_used,
                            d.iterations,
                            d.wrms * 1000,
                            d.down_frac,
                            d.iqr * 1000))

-    qbar = tqdm(total=len(dra.stations), desc=' >> Computing DRAs', ncols=160, disable=None)
+    qbar = tqdm(total=len(dra.stations), desc=' >> Computing DRAs',
+                ncols=160, disable=None)

-    modules = ('pgamit.pyETM', 'pgamit.dbConnection', 'traceback', 'io', 'numpy')
-    JobServer.create_cluster(compute_dra, progress_bar=qbar, callback=callback_handler, modules=modules)
+    modules = ('pgamit.pyETM', 'pgamit.dbConnection',
+               'traceback', 'io', 'numpy')
+    JobServer.create_cluster(compute_dra, progress_bar=qbar,
+                             callback=callback_handler,
+                             modules=modules)

    # plot each DRA
    for stn in dra.stations:
        NetworkCode = stn['NetworkCode']
        StationCode = stn['StationCode']

        ts = dra.get_station(NetworkCode, StationCode)
-        JobServer.submit(ts, NetworkCode, StationCode, pdates, project, args.histogram)
+        JobServer.submit(ts, NetworkCode, StationCode, pdates,
+                         project, args.histogram)

    JobServer.wait()
    qbar.close()
@@ -325,27 +368,31 @@ def main():
    wrms_u = np.array(wrms_u)

    # plot the WRM of the DRA stack and number of stations
-    f, axis = plt.subplots(nrows=3, ncols=2, figsize=(15, 10))  # type: plt.subplots
+    # type: plt.subplots
+    f, axis = plt.subplots(nrows=3, ncols=2, figsize=(15, 10))

    # WRMS
    ax = axis[0][0]
-    ax.plot([t['fyear'] for t in dra.transformations[0]],
-            [t['wrms'] * 1000 for t in dra.transformations[0]], 'ob', markersize=2)
+    ax.plot([t['fyear'] for t in dra.transformations[0]],
+            [t['wrms'] * 1000 for t in dra.transformations[0]], 'ob',
+            markersize=2)
    ax.set_ylabel('WRMS [mm]')
    ax.grid(True)
    ax.set_ylim(0, 10)

    # station count
    ax = axis[1][0]
-    ax.plot([t['fyear'] for t in dra.transformations[0]],
-            [t['stations_used'] for t in dra.transformations[0]], 'ob', markersize=2)
+    ax.plot([t['fyear'] for t in dra.transformations[0]],
+            [t['stations_used'] for t in dra.transformations[0]], 'ob',
+            markersize=2)
    ax.set_ylabel('Station count')
    ax.grid(True)

    # d-w fraction
    ax = axis[2][0]
-    ax.plot([t['fyear'] for t in dra.transformations[0]],
-            [t['downweighted_fraction'] for t in dra.transformations[0]], 'ob', markersize=2)
+    ax.plot([t['fyear'] for t in dra.transformations[0]],
+            [t['downweighted_fraction'] for t in dra.transformations[0]],
+            'ob', markersize=2)
    ax.set_ylabel('DW fraction')
    ax.grid(True)