Skip to content

pep8 changes at file com/DRA.py #122

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Oct 17, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
173 changes: 110 additions & 63 deletions com/DRA.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@
if not os.environ.get('DISPLAY', None):
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import traceback
import io
import base64
import numpy
Expand All @@ -44,10 +43,12 @@
def sql_select(project, fields, date2):
return '''SELECT %s from gamit_soln
WHERE "Project" = \'%s\' AND "Year" = %i AND "DOY" = %i
ORDER BY "NetworkCode", "StationCode"''' % (fields, project, date2.year, date2.doy)
ORDER BY "NetworkCode", "StationCode"''' % (
fields, project, date2.year, date2.doy)


def compute_dra(ts, NetworkCode, StationCode, pdates, project, histogram=False):
def compute_dra(ts, NetworkCode, StationCode,
pdates, project, histogram=False):
try:
# load from the db
cnn = dbConnection.Cnn('gnss_data.cfg')
Expand All @@ -57,27 +58,37 @@ def compute_dra(ts, NetworkCode, StationCode, pdates, project, histogram=False):
if ts.size:

if ts.shape[0] > 2:
dts = numpy.append(numpy.diff(ts[:, 0:3], axis=0), ts[1:, -3:], axis=1)
dts = numpy.append(numpy.diff(ts[:, 0:3], axis=0),
ts[1:, -3:], axis=1)

dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode, StationCode, project)
dra_ts = pyETM.GamitSoln(cnn, dts, NetworkCode,
StationCode, project)

etm = pyETM.DailyRep(cnn, NetworkCode, StationCode, False, False, dra_ts)
etm = pyETM.DailyRep(cnn, NetworkCode, StationCode,
False, False, dra_ts)

figfile = ''
hisfile = ''

if etm.A is not None:
figfile = etm.plot(fileio=io.BytesIO(), plot_missing=False, t_win=pdates)
figfile = etm.plot(fileio=io.BytesIO(),
plot_missing=False, t_win=pdates)
if histogram:
hisfile = etm.plot_hist(fileio=io.BytesIO())
# save the wrms
return etm.factor[0] * 1000, etm.factor[1] * 1000, etm.factor[2] * 1000, figfile, hisfile, \
filename, NetworkCode, StationCode, etm.soln.lat[0], etm.soln.lon[0], etm.soln.height[0]
return (etm.factor[0] * 1000, etm.factor[1] * 1000,
etm.factor[2] * 1000, figfile, hisfile,
filename, NetworkCode,
StationCode, etm.soln.lat[0],
etm.soln.lon[0], etm.soln.height[0])
else:
return None, None, None, figfile, hisfile, filename, NetworkCode, StationCode, etm.soln.lat[0], \
etm.soln.lon[0], etm.soln.height[0]
return (None, None, None, figfile, hisfile,
filename, NetworkCode,
StationCode, etm.soln.lat[0],
etm.soln.lon[0], etm.soln.height[0])
except Exception as e:
raise Exception('While working on %s.%s' % (NetworkCode, StationCode) + '\n') from e
raise Exception('While working on %s.%s' % (NetworkCode,
StationCode) + '\n') from e


class DRA(list):
Expand All @@ -86,55 +97,67 @@ def __init__(self, cnn, project, start_date, end_date, verbose=False):

super(DRA, self).__init__()

self.project = project
self.cnn = cnn
self.project = project
self.cnn = cnn
self.transformations = []
self.stats = {}
self.verbose = verbose
self.stats = {}
self.verbose = verbose

if end_date is None:
end_date = Date(datetime=datetime.now())

print(' >> Loading GAMIT solutions for project %s...' % project)

gamit_vertices = self.cnn.query_float(
'SELECT "NetworkCode" || \'.\' || "StationCode", "X", "Y", "Z", "Year", "DOY", "FYear" '
'FROM gamit_soln WHERE "Project" = \'%s\' AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
'ORDER BY "NetworkCode", "StationCode"' % (project, start_date.year, start_date.doy,
end_date.year, end_date.doy))
'SELECT "NetworkCode" || \'.\' || "StationCode", "X", "Y", "Z",'
' "Year", "DOY", "FYear" '
'FROM gamit_soln WHERE "Project" = \'%s\' AND ("Year", "DOY")'
' BETWEEN (%i, %i) AND (%i, %i) '
'ORDER BY "NetworkCode", "StationCode"' % (
project, start_date.year, start_date.doy,
end_date.year, end_date.doy))

self.gamit_vertices = np_array_vertices(gamit_vertices)

dates = self.cnn.query_float('SELECT "Year", "DOY" FROM gamit_soln WHERE "Project" = \'%s\' '
'AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"'
% (project, start_date.year, start_date.doy, end_date.year, end_date.doy))
dates = self.cnn.query_float(
'SELECT "Year", "DOY" FROM gamit_soln WHERE "Project" = \'%s\' '
'AND ("Year", "DOY") BETWEEN (%i, %i) AND (%i, %i) '
'GROUP BY "Year", "DOY" ORDER BY "Year", "DOY"'
% (project, start_date.year, start_date.doy,
end_date.year, end_date.doy))

self.dates = [Date(year=int(d[0]), doy=int(d[1])) for d in dates]

self.stations = self.cnn.query_float('SELECT "NetworkCode", "StationCode" FROM gamit_soln '
'WHERE "Project" = \'%s\' AND ("Year", "DOY") '
'BETWEEN (%i, %i) AND (%i, %i) '
'GROUP BY "NetworkCode", "StationCode" '
'ORDER BY "NetworkCode", "StationCode"'
% (project, start_date.year, start_date.doy,
end_date.year, end_date.doy), as_dict=True)
self.stations = self.cnn.query_float(
'SELECT "NetworkCode", "StationCode" FROM gamit_soln '
'WHERE "Project" = \'%s\' AND ("Year", "DOY") '
'BETWEEN (%i, %i) AND (%i, %i) '
'GROUP BY "NetworkCode", "StationCode" '
'ORDER BY "NetworkCode", "StationCode"'
% (project, start_date.year, start_date.doy,
end_date.year, end_date.doy), as_dict=True)

i = 0
for d in tqdm(self.dates, ncols=160, desc=' >> Initializing the stack polyhedrons'):
for d in tqdm(self.dates, ncols=160,
desc=' >> Initializing the stack polyhedrons'):
self.append(Polyhedron(self.gamit_vertices, project, d))
if i < len(self.dates) - 1:
if d != self.dates[i + 1] - 1:
for dd in [Date(mjd=md) for md in list(range(d.mjd + 1, self.dates[i + 1].mjd))]:
tqdm.write(' -- Missing DOY detected: %s' % dd.yyyyddd())
for dd in [Date(mjd=md) for md in list(range(
d.mjd + 1, self.dates[i + 1].mjd))]:
tqdm.write(' -- Missing DOY detected: %s'
% dd.yyyyddd())
i += 1

def stack_dra(self):

for j in tqdm(list(range(len(self) - 1)), desc=' >> Daily repetitivity analysis progress', ncols=160):
for j in tqdm(list(range(len(self) - 1)),
desc=' >> Daily repetitivity analysis progress',
ncols=160):

# should only attempt to align a polyhedron that is unaligned
# do not set the polyhedron as aligned unless we are in the max iteration step
# do not set the polyhedron as aligned
# unless we are in the max iteration step
self[j + 1].align(self[j], scale=False, verbose=self.verbose)
# write info to the screen
tqdm.write(' -- %s (%04i) %2i it wrms: %4.1f T %6.1f %6.1f %6.1f '
Expand Down Expand Up @@ -182,7 +205,8 @@ def get_station(self, NetworkCode, StationCode):
def to_json(self, json_file):
# print(repr(self.transformations))
file_write(json_file,
json.dumps({'transformations': self.transformations, 'stats': self.stats},
json.dumps({'transformations': self.transformations,
'stats': self.stats},
indent=4, sort_keys=False, default=json_converter
))

Expand All @@ -192,7 +216,9 @@ def callback_handler(job):
global stn_stats, wrms_n, wrms_e, wrms_u

if job.exception:
tqdm.write(' -- Fatal error on node %s message from node follows -> \n%s' % (job.ip_addr, job.exception))
tqdm.write(
' -- Fatal error on node %s message from node follows -> \n%s' % (
job.ip_addr, job.exception))
elif job.result is not None:
if job.result[0] is not None:
# pass information about the station's stats to save in the json
Expand All @@ -218,29 +244,36 @@ def callback_handler(job):
with open('%s_hist.png' % job.result[5], "wb") as fh:
fh.write(base64.b64decode(job.result[4]))
else:
tqdm.write(' -- Station %s.%s did not produce valid statistics' % (job.result[6], job.result[7]))
tqdm.write(' -- Station %s.%s did not produce valid statistics' % (
job.result[6], job.result[7]))


def main():

global stn_stats, wrms_n, wrms_e, wrms_u, project

parser = argparse.ArgumentParser(description='GNSS daily repetitivities analysis (DRA)')
parser = argparse.ArgumentParser(
description='GNSS daily repetitivities analysis (DRA)')

parser.add_argument('project', type=str, nargs=1, metavar='{project name}',
help="Specify the project name used to process the GAMIT solutions in Parallel.GAMIT.")
help='''Specify the project name used to process
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I also prefer the triple quotes for handling text blocks. This LGTM, tests pass — thanks for the PR

the GAMIT solutions in Parallel.GAMIT.''')

parser.add_argument('-d', '--date_filter', nargs='+', metavar='date',
help='Date range filter. Can be specified in yyyy/mm/dd yyyy_doy wwww-d format')
help='''Date range filter. Can be specified in
yyyy/mm/dd yyyy_doy wwww-d format''')

parser.add_argument('-w', '--plot_window', nargs='+', metavar='date',
help='Date window range to plot. Can be specified in yyyy/mm/dd yyyy_doy wwww-d format')
help='''Date window range to plot. Can be specified
in yyyy/mm/dd yyyy_doy wwww-d format''')
parser.add_argument('-hist', '--histogram', action='store_true',
help="Plot a histogram of the daily repetitivities")
parser.add_argument('-v', '--verbose', action='store_true',
help="Provide additional information during the alignment process (for debugging purposes)")
help='''Provide additional information during the
alignment process (for debugging purposes)''')

parser.add_argument('-np', '--noparallel', action='store_true', help="Execute command without parallelization.")
parser.add_argument('-np', '--noparallel', action='store_true',
help="Execute command without parallelization.")

args = parser.parse_args()

Expand All @@ -251,8 +284,11 @@ def main():
dates = [pyDate.Date(year=1980, doy=1),
pyDate.Date(year=2100, doy=1)]

Config = pyOptions.ReadOptions("gnss_data.cfg") # type: pyOptions.ReadOptions
JobServer = pyJobServer.JobServer(Config, run_parallel=not args.noparallel) # type: pyJobServer.JobServer
Config = pyOptions.ReadOptions("gnss_data.cfg")
# type: pyOptions.ReadOptions
JobServer = pyJobServer.JobServer(
Config, run_parallel=not args.noparallel)
# type: pyJobServer.JobServer

try:
dates = process_date(args.date_filter)
Expand All @@ -263,7 +299,9 @@ def main():
if args.plot_window is not None:
if len(args.plot_window) == 1:
try:
pdates = process_date(args.plot_window, missing_input=None, allow_days=False)
pdates = process_date(args.plot_window,
missing_input=None,
allow_days=False)
pdates = (pdates[0].fyear,)
except ValueError:
# an integer value
Expand All @@ -285,32 +323,37 @@ def main():

dra.stack_dra()

missing_doys = []

tqdm.write(' >> Daily repetitivity analysis done. DOYs with wrms > 8 mm are shown below:')
tqdm.write(''' >> Daily repetitivity analysis done. DOYs with wrms > 8 mm
are shown below:''')
for i, d in enumerate(dra):
if d.wrms is not None:
if d.wrms > 0.008:
tqdm.write(' -- %s (%04i) %2i it wrms: %4.1f D-W: %5.3f IQR: %4.1f' %
tqdm.write(''' -- %s (%04i) %2i it wrms: %4.1f
D-W: %5.3f IQR: %4.1f''' %
(d.date.yyyyddd(),
d.stations_used,
d.iterations,
d.wrms * 1000,
d.down_frac,
d.iqr * 1000))

qbar = tqdm(total=len(dra.stations), desc=' >> Computing DRAs', ncols=160, disable=None)
qbar = tqdm(total=len(dra.stations), desc=' >> Computing DRAs',
ncols=160, disable=None)

modules = ('pgamit.pyETM', 'pgamit.dbConnection', 'traceback', 'io', 'numpy')
JobServer.create_cluster(compute_dra, progress_bar=qbar, callback=callback_handler, modules=modules)
modules = ('pgamit.pyETM', 'pgamit.dbConnection',
'traceback', 'io', 'numpy')
JobServer.create_cluster(compute_dra, progress_bar=qbar,
callback=callback_handler,
modules=modules)

# plot each DRA
for stn in dra.stations:
NetworkCode = stn['NetworkCode']
StationCode = stn['StationCode']

ts = dra.get_station(NetworkCode, StationCode)
JobServer.submit(ts, NetworkCode, StationCode, pdates, project, args.histogram)
JobServer.submit(ts, NetworkCode, StationCode, pdates,
project, args.histogram)

JobServer.wait()
qbar.close()
Expand All @@ -325,27 +368,31 @@ def main():
wrms_u = np.array(wrms_u)

# plot the WRM of the DRA stack and number of stations
f, axis = plt.subplots(nrows=3, ncols=2, figsize=(15, 10)) # type: plt.subplots
# type: plt.subplots
f, axis = plt.subplots(nrows=3, ncols=2, figsize=(15, 10))

# WRMS
ax = axis[0][0]
ax.plot([t['fyear'] for t in dra.transformations[0]],
[t['wrms'] * 1000 for t in dra.transformations[0]], 'ob', markersize=2)
ax.plot([t['fyear'] for t in dra.transformations[0]],
[t['wrms'] * 1000 for t in dra.transformations[0]], 'ob',
markersize=2)
ax.set_ylabel('WRMS [mm]')
ax.grid(True)
ax.set_ylim(0, 10)

# station count
ax = axis[1][0]
ax.plot([t['fyear'] for t in dra.transformations[0]],
[t['stations_used'] for t in dra.transformations[0]], 'ob', markersize=2)
ax.plot([t['fyear'] for t in dra.transformations[0]],
[t['stations_used'] for t in dra.transformations[0]], 'ob',
markersize=2)
ax.set_ylabel('Station count')
ax.grid(True)

# d-w fraction
ax = axis[2][0]
ax.plot([t['fyear'] for t in dra.transformations[0]],
[t['downweighted_fraction'] for t in dra.transformations[0]], 'ob', markersize=2)
ax.plot([t['fyear'] for t in dra.transformations[0]],
[t['downweighted_fraction'] for t in dra.transformations[0]],
'ob', markersize=2)
ax.set_ylabel('DW fraction')
ax.grid(True)

Expand Down