1#!/usr/bin/env python3
2
3"""Timeseries tool.
4
5Allows retrieval of timeseries tables.
6
7Or display of timeseries table configuration.
8
9Types of output:
10
11- Table of results from querying one or more tables, merged,
12 any number of sources or fields
13- List of tables matching spec
14- List of fields matching spec
15- Full info of table(s)
16- Full info of field(s)
17
18I.e.:
19
20> charts3 ts --list-tables [--sys][--table-name %MHSS%]
21> charts3 ts --show-table --table-name MHS_NEDT
22> micmicschart ts --field-description %y% --sid m0[1,2] --start yesterday
23> [--stats [min,max]][--region orb]
24
25"""
26
27from datetime import timedelta
28import logging
29import sys
30from collections import defaultdict
31from collections import Counter
32from operator import itemgetter
33
34from chart.common.path import Path
35from chart.common.args import ArgumentParser
36from chart.common.prettyprint import Table
37from chart.db.model.table import TableInfo
38from chart.db.model.table import TableType
39from chart.products.sf00 import sf
40from chart.db.model.table import find_param_by_name
41from chart.common.xml import xml_filename
42from chart.common.xml import datetime_to_xml
43from chart.project import settings
44from chart.db.connection import db_connect
45from chart.db import ts
46from chart.common.traits import name_of_thing
47from chart.common.traits import to_term
48from chart.db.db_tool import keyboard_confirm
49from chart.db.model.exceptions import NoDataRange
50from chart.project import SID
51from chart.plots.sampling import sampling_from_name
52from chart.db.model.field import RowcountFieldInfo
53from chart.db.func import SensingTime
54from chart.db.model.table import TableStorage
55
# Special fake field names: when requested as --field values these are
# substituted with computed columns rather than read from the database.
META_FIELD_DELTA = '%delta'  # sensing time delta between consecutive rows
META_FIELD_LAT = '%lat'  # latitude geolocated from the row's sensing time
META_FIELD_LON = '%lon'  # longitude geolocated from the row's sensing time

# Root logger; handler configuration is presumably done by the framework
# entry point — TODO confirm.
logger = logging.getLogger()

# Default rows to show from --find-interesting-params function
DEFAULT_INTERESTING_RANKS = 50
65
66
def list_params(table_info, scalar):
    """Print name, datatype and dimension count of each field in `table_info`.

    If `scalar` is True, only fields with no array dimensions are shown.
    One field per line is written to stdout.
    """
    for field in table_info.fields.values():
        # Skip array fields when only scalars were requested
        if scalar and field.dimensions != 0:
            continue

        print(field.name, field.datatype.value, field.dimensions)
72
73
def find_interesting_params(table_info, sid, start, stop, ranks=20, target=sys.stdout):
    """Find parameters with many distinct values over a timerange.

    Each parameter's score is calculated as:

        score = ((values / 2) + deltas - (count / 3)) * cal_boost

    where:
    - values -> number of unique values over the time range
    - deltas -> number of unique deltas between consecutive values
    - count -> total number of values over the time range
    - cal_boost -> 1 if the parameter has a calibration function, 0.3 otherwise

    Only scalar integer and floating point values are considered.
    The top `ranks` parameters, best first, are written to `target`.

    Raises ValueError if `table_info` is not a JSONB-storage table.
    """
    if table_info.storage is not TableStorage.JSONB:
        raise ValueError('This tool only works with JSONB tables')

    class Interestingness:
        """Accumulated statistics for a single parameter."""

        def __init__(self, param: str):
            self.param = param
            self.count = 0          # total occurrences seen
            self.last_value = None  # previous value, for delta computation
            self.values = set()     # distinct values seen
            self.deltas = set()     # distinct deltas between consecutive values
            self.score = None       # computed after the scan completes
            self.calibration = None  # calibration name, if the field declares one
            self.choices = None     # choice (enumeration) name, if any

    # parameter name against Interestingness
    interests = {}

    # count of packets read so far
    cc = 0
    # report progress every this many packets
    cc_report = 10000
    # iterate through payloads
    for _timestamp, params in ts.select(sid=sid,
                                        sensing_start=start,
                                        sensing_stop=stop,
                                        table=table_info.storage_table,
                                        fields=('SENSING_TIME', 'PAYLOAD')):
        # iterate through parameters
        for k, v in params.items():
            # only scalar numeric values are considered
            if not isinstance(v, (int, float)):
                continue

            if k not in interests:
                interests[k] = Interestingness(k)
                if k in table_info.fields:
                    interests[k].calibration = table_info.fields[k].calibration_name
                    interests[k].choices = table_info.fields[k].choice_name

            interest = interests[k]
            interest.count += 1
            interest.values.add(v)
            # `is not None` rather than truthiness, otherwise a previous
            # value of 0 would never contribute a delta
            if interest.last_value is not None:
                interest.deltas.add(v - interest.last_value)

            interest.last_value = v

        cc += 1
        if cc % cc_report == 0:
            logger.info('Read {cc} packets'.format(cc=cc))

    # score every parameter; parameters without calibration are dampened
    results = list(interests.values())
    for v in results:
        v.score = ((len(v.values) / 2) +
                   len(v.deltas) -
                   v.count / 3) * (0.3 if v.calibration is None else 1)

    from operator import attrgetter
    results.sort(key=attrgetter('score'), reverse=True)

    for r in results[:ranks]:
        target.write('Param {p} score {score} (distinct values {values} deltas {deltas} count {cc} cal {cal} choice {cho})\n'.format(
            p=r.param, cc=r.count, score=r.score, values=len(r.values), deltas=len(r.deltas), cal=r.calibration, cho=r.choices))
186
187
188# def purge(sids, start, stop, tables, include_stats):
189# def purge(sids, start, stop, tables):
190# """Delete data in range, a day at a time, for a number of SIDs and tables.
191
192# This is useful for deleting from big timeseries tables in Oracle, which helpfully
193# refuses to perform large DELETE operations because of undo tablespaces or rollbacks or
194# suchlike."""
195# print('This command will perform a daily delete for delete all data for {sid} in '
196# '{tables} from {strt} to {stop}'.format(
197# sid=sids, tables=', '.join(t.name for t in tables), strt=start, stop=stop))
198# if not keyboard_confirm('Continue?'):
199# return
200
201# BLOCK = timedelta(days=1)
202# for sid in sids:
203# for table in tables:
204# acc = start
205# while acc < stop:
206# logging.info('Purge SID {sid} table {table} day {strt}'.format(
207# sid=sid, table=table.name, strt=acc))
208# ts.delete(sid=sid,
209# table=table,
210# sensing_start=acc,
211# sensing_stop=acc + BLOCK,
212# commit=True)
213# acc += BLOCK
214
215
216# def show_sizes(): # tablename):
217# """Show space used by all segments."""
218# db_conn = db_connect()
219# # t = Table(headings=('owner','segment name','segment type','tablespace name', 'bytes',
220# # 'partition name'))
221# from chart.common.csvfile import CSV
222# csv = CSV(Path('sizes.csv'),
223# 'w',
224# headings=(
225# 'owner', 'segment name', 'segment type', 'tablespace name', 'bytes',
226# 'partition name'))
227# for r in db_conn.query('select owner,segment_name,segment_type,tablespace_name,bytes,'
228# 'partition_name from dba_Segments'):
229# # t.append(r)
230# csv.writerow(r)
231
232# # t.write()
233
234
235# def show_table_size(tablename, level):
236# """Show space used by `tablename`.
237
238# Levels:
239
240# 0: Total bytes only.
241# 1: Per SCID show bytes and min/max sensing time and days range.
242# 2: also show frequency, total rows and predicted total rows.
243# 3: show actual count of distinct days and rate per day per scid and total rate per day.
244
245# """
246
247# def scid_from_tablespace(tbl):
248# """Given tablespace name extract the SCID."""
249# # MSG_OPE_MSG3_PRIM
250# _, _, scid, _ = tbl.split('_')
251# # proj, env, scid, ogsid = tbl.split('_')
252# return scid
253
254# db_conn = db_connect()
255# table = TableInfo(tablename)
256
257# logging.debug('DBA segments')
258# t = Table(headings=('SCID', 'Tablespace', 'Segment size (b)'))
259# for size, tblspace in db_conn.query("select bytes, tablespace_name "
260# "from dba_segments "
261# "where segment_type='TABLE SUBPARTITION' "
262# "AND SEGMENT_NAME=:tbl "
263# "AND owner='MSG_OPE'",
264# tbl=table.name):
265# t.append((scid_from_tablespace(tblspace), tblspace, size))
266
267# t.write()
268
269# if level is None:
270# return
271
272# logging.debug('Rows')
273# # disable 'unused imports' here otherwise the jenkins core pylint check fails
274# from chart.sids.sid_msg import OGSID # pylint: disable=F0401
275# from chart.sids.sid_eps import SID_EPS # pylint: disable=F0401
276# t = Table(headings=('SCID', 'OGSID', 'Min', 'Max'))
277# for sid in SID_EPS.all():
278# print(sid)
279# for ogsid in OGSID:
280# # one of the many wonderful features of Oracle 11 is that retrieving
281# # min() and max() of an indexed field in two separate statements is orders of magnitude
282# # faster than retrieving both in single statement.
283# # Another handy feature is that avoiding bind variables speeds it up even more.
284# mintime = db_conn.query(
285# "SELECT min(SENSING_TIME) FROM {tbl} "
286# "WHERE scid='{scid}' AND ogsid='{ogsid}'".format(
287# tbl=table.name, scid=sid.scid, ogsid=ogsid.name)).fetchone()[0]
288# maxtime = db_conn.query(
289# "SELECT max(SENSING_TIME) FROM {tbl} "
290# "WHERE scid='{scid}' AND ogsid='{ogsid}'".format(
291# tbl=table.name, scid=sid.scid, ogsid=ogsid.name)).fetchone()[0]
292# # maxtime = db_conn.query(
293# # 'SELECT max(SENSING_TIME) FROM {tbl} WHERE scid=:scid AND ogsid=:ogsid'.format(
294# # tbl=table.name),
295# # scid=sid.scid, ogsid=ogsid.name).fetchone()[0]
296
297# if mintime is not None:
298# mintime = mintime.date()
299
300# if maxtime is not None:
301# maxtime = maxtime.date()
302
303# t.append((sid.scid, ogsid.name, mintime, maxtime))
304
305# t.write()
306
307
308# def decode_tables_fields(args_tables, args_fields):
309# """Take `tables` and `fields` (from args object, both a list of strings)
310# and return a set of TableInfo and a list of FieldInfo to be displayed.
311# """
312# # decode args.fields
313# fields = []
314# if args_fields is None or len(args_fields) == 0:
315# # show all fields if the user didn't specify any
316# args_fields = ['SENSING_TIME', '*']
317
318# # split fields by commas if the user didn't use spaces like argumentparser expects
319# for f in args_fields:
320# if ',' in f:
321# fields.extend(f.split(','))
322
323# else:
324# fields.append(f)
325
326# fields = [f.strip() for f in fields]
327
328# # expand any wildcards in the specified fields
329# exp_fields = []
330# tables = set()
331
332# if args_tables is not None:
333# tables.add(args_tables)
334
335# for f in fields:
336# if '(' in f:
337# # just pass through any weird looking fields
338# exp_fields.append(f)
339
340# if f == META_FIELD_DELTA:
341# exp_fields.append('SENSING_TIME - LAG(SENSING_TIME) OVER (ORDER BY SENSING_TIME)')
342
343# elif f == META_FIELD_LAT:
344# exp_fields.append("'{meta}'".format(meta=META_FIELD_LAT))
345
346# elif f == META_FIELD_LON:
347# exp_fields.append("'{meta}'".format(meta=META_FIELD_LON))
348
349# else:
350# if f.upper() in ('SENSING_TIME', 'GEN_TIME', 'SCID', 'CFID', 'OGSID'):
351# exp_fields.append(f.upper())
352
353# else:
354# logging.info('Searching for table to use with field {f}'.format(f=f))
355# found = False
356# for field_info in find_param_by_name(f, table=args_tables, multiple=True):
357# tables.add(field_info.table)
358# exp_fields.append(field_info)
359# found = True
360
361# if not found:
362# raise ValueError('Unknown field {f}'.format(f=f))
363
364# return tables, exp_fields
365
366
def show_ts(table,
            fields,
            sid,
            sensing_start,
            sensing_stop,
            sensing_time,
            calibrated,
            stat,
            region,
            kv,
            table_format,
            limit):
    """Dump values from a timeseries table.

    `fields` is a list of field specifications; the quoted meta fields
    '%lat' / '%lon' are replaced with geolocated positions computed from
    each row's SENSING_TIME.

    Output mode is selected by the flags:
    - `kv`: rotated table, one row per field
    - `table_format`: formatted table, one row per sample
    - otherwise: plain comma-separated lines (only this mode honours
      `limit` and the lat/lon meta fields)

    NOTE(review): `geo` and `time_pos` are only bound when a meta field was
    requested, and assume 'SENSING_TIME' is present in `fields` — confirm
    callers guarantee this.
    """
    if "'{meta}'".format(meta=META_FIELD_LAT) in fields or\
       "'{meta}'".format(meta=META_FIELD_LON) in fields:

        # deferred import — geolocation is only needed for meta fields
        from chart.products.fdf.geoloc import Geoloc
        geo = Geoloc(sid, sensing_start, sensing_stop)
        time_pos = fields.index('SENSING_TIME')

    cur = ts.select(table=table,
                    fields=fields,
                    sid=sid,
                    sensing_start=sensing_start,
                    sensing_stop=sensing_stop,
                    sensing_time=sensing_time,
                    calibrated=calibrated,
                    stat=stat,
                    region=region)

    if kv:
        # rotate the result set: field names become the first column
        rows = cur.fetchall()
        t = Table() # headings=['Field'] + [r[0] for r in rows])
        for row in zip(*([[name_of_thing(f) for f in fields]] + list(rows))):
            t.append(row)

        t.write()

    elif table_format:
        t = Table(headings=(f.name for f in fields))
        for row in cur:
            t.append(row)

        t.write()

    else:
        # plain CSV-style output; header line first
        print(', '.join(name_of_thing(f) for f in fields))
        cc = 0
        for row in cur:
            # substitute the '%lat' / '%lon' placeholder columns with the
            # geolocated position at this row's sensing time
            if META_FIELD_LAT in row or META_FIELD_LON in row:
                row = list(row)
                pos = geo.lat_lon(row[time_pos])
                if META_FIELD_LAT in row:
                    row[row.index(META_FIELD_LAT)] = pos[0]

                if META_FIELD_LON in row:
                    row[row.index(META_FIELD_LON)] = pos[1]

            print(', '.join(to_term(i) for i in row))
            cc += 1
            if limit and cc >= limit:
                break
429
430
def list_tables(tables, target=sys.stdout):
    """Write the names of `tables` to `target` as a single-column table."""
    listing = Table(headings=('Name', ))
    for table_info in tables:
        listing.append((table_info.name, ))

    listing.write(target)
438
439
440# def test_all_sys():
441 # """List all system table names."""
442 # for x in TableInfo.all_sys():
443 # print(x.name)
444
445
def test_show_table(table_name):
    """Display a basic summary of `table_name` followed by its field names."""
    table_info = TableInfo(table_name)
    t = Table(column_split=' : ')
    t.append(('Name', table_info.name))
    t.append(('Type', table_info.table_type))
    t.append(('Has cal', table_info.has_cal))
    t.append(('Storage', table_info.storage))
    t.append(('Derived views', ', '.join(str(d) for d in table_info.derived_views)))
    # previously shown twice (as 'Split stats' and 'Stats storage'); show once
    t.append(('Stats storage', table_info.stats_storage.name))
    # t.append(('Browse URL', table_info.browse_url))
    t.write()

    print('\n')

    t = Table(title='Fields', headings=('Name',))
    for name in table_info.fields:
        t.append((name,))

    t.write()
467
468
def display_field(field_info):
    """Print a detailed summary of a single field to stdout.

    Shows name, table, description, unit, datatype and limits, plus
    raw-packet decoding info for RAW tables and calibration details
    where defined.
    """
    t = Table(column_split=' : ')
    t.append(('Name', field_info.name))
    t.append(('Table', field_info.table.name))
    t.append(('Description', field_info.description))
    t.append(('Unit', field_info.unit))
    t.append(('Data type', field_info.datatype.value))
    t.append(('Default limits', field_info.limits()))
    # raw tables additionally carry bit-level decoding information
    if field_info.table.table_type is TableType.RAW:
        assy = field_info.table.source_assembly_id
        if assy in sf.assemblies:
            assy_info = sf.assemblies[assy]
            t.append(('Assembly ID', '{id} ({name}, {len} bytes)'.format(
                id=assy,
                name=assy_info['name'],
                len=(assy_info['snack-length']))))
        t.append(('Position', field_info.position))
        t.append(('Shift', field_info.shift))
        t.append(('Mask', field_info.mask))

    # not every field declares a raw data range; absence is not an error
    try:
        raw_range = field_info.raw_data_range
        t.append(('Raw data range', raw_range))

    except NoDataRange:
        pass

    cals = field_info.cal

    # NOTE(review): only used by the commented-out code below; kept for
    # when that reporting is revived
    def print_sids(sids):
        """Helper to show `sids` in a nice format."""
        return ', '.join(['all' if sid is None else str(sid) for sid in sids])

    if field_info.calibration_name is not None:
        t.append(('Calibration name', field_info.calibration_name))

    if len(cals) == 0:
        t.append(('Calibration', 'none'))

    else:
        t.append(('Calibration files',
                  ' '.join(str(
                      xml_filename(e).name) for e in field_info.table.cals_elems.values())))
        # for sids, cal in cals.items():
        # t.append(('Calibration type ({sids})'.format(sids=print_sids(sids)),
        # cal.type))

        # one row per SID with a distinct calibration
        for sid in cals.sids():
            cal = cals.get_cal(sid)
            t.append((('Calibration type ({sids})'.format(sids=sid)),
                      cal.calibration_type))

    # for sids, cal in cals.iteritems():
    # t.append(('Cal data range ({sids})'.format(
    # sids=print_sids(sids)),
    # field_info.cal_data_range(sids[0].expand()[0])))

    # if cal.type == 'linear':
    # for raw, eng in zip(*cal.linear_pairs):
    # t.append(('', '{raw}: {cal}'.format(raw=float_to_str(raw, SF_LINEAR_RAW),
    # cal=float_to_str(eng, SF_LINEAR_CAL))))

    # avoid weird circular dependency
    # from chart.db.manage.manage import get_engine
    # t.append(('Cal func', sql_cal(get_engine(), field_info)))

    cal_limits = field_info.limits()
    if cal_limits is not None:
        # for sid, limits in cal_limits.items():
        # t.append(('Limits ({sid})'.format(sid=sid), limits))
        t.append(('Limits', cal_limits))

    t.write()
543
544
545# def calibrate_file(filename, field_info):
546# """Read raw values one per line from `filename`, calibrate for `field_info`,
547# and write the calibrated values to stdout.
548# """
549
550# cal = field_info.cal_sid(None)
551
552# for line in open(filename, 'r'):
553# raw = int(line, 0)
554
555# # print 'raw {raw} cal {cal}'.format(raw=raw, cal=cal.calibrate_value(raw))
556# print(cal.calibrate_value(raw))
557
558
559# def show_data(parser, args):
560# """Print values retrieved from db."""
561
562# if args.sid is None:
563# parser.error('No --sid specified')
564
565# if args.orbit is not None:
566# if args.start is not None or args.stop is not None:
567# parser.error('Cannot specify --start or --stop if --orbit is used')
568
569# args.start, args.stop = args.sid.orbit.get_orbit_times(args.orbit)
570
571# if args.count:
572# if args.table is None:
573# parser.error('No --table specified')
574
575# print(ts.count(table=args.table,
576# sid=args.sid,
577# sensing_start=args.start,
578# sensing_stop=args.stop))
579# parser.exit()
580
581# elif args.rowcount:
582# cur = ts.select_stats(table=args.table,
583# fields=['SENSING_TIME', 'ORBIT', 'ROWCOUNT'],
584# sid=args.sid,
585# sensing_start=args.start,
586# sensing_stop=args.stop,
587# types=['min'])['min']
588# for row in cur:
589# print(row)
590
591# parser.exit()
592
593# tables, fields = decode_tables_fields(args.table, args.field)
594
595# if len(tables) == 0:
596# parser.error('Cannot find table to retrieve requested fields from')
597
598# if len(tables) > 1:
599# parser.error('This tool cannot retrieve from multiple tables ({t})'.format(
600# t=', '.join(t.name for t in tables)))
601
602# if args.time is not None:
603# # user has requested data for a single timestamp
604# from chart.common.xml import xml_to_datetime
605# sensing_time = xml_to_datetime(args.time)
606# args.kv = True
607
608# else:
609# sensing_time = None
610
611# # if args.vali:
612# # args.sid.cfid = CFID.VALI
613
614# show_ts(tables.pop(), fields, args.sid, args.start, args.stop, sensing_time,
615# calibrated=not args.raw, stat=args.stat, region=args.region, kv=args.kv,
616# table_format=args.format,
617# limit=args.limit)
618
619
620# def show_list(title, things, description, csv, table):
621# """Display a list of named things, optionally with description, as either
622# a simple list, a CSV file or a table."""
623# if csv and table:
624# raise ValueError('Cannot specify both table and CSV output')
625
626# if csv:
627# from chart.common.csvfile import CSV
628# if description:
629# # h = CSV(headings=(title, 'Description'))
630# h = CSV(Path('test.csv'), 'w')
631# for thing in things:
632# h.writerow((thing.name, thing.description))
633# # from datetime import datetime
634# # h.writerow((datetime.utcnow(), 'hello , good'))
635
636# else:
637# csv = CSV(headings=(title,))
638# for thing in things:
639# csv.writerow((thing.name,))
640
641# elif table:
642# raise NotImplementedError()
643
644# else:
645# if description:
646# for thing in things:
647# print('{n}, {d}'.format(n=thing.name, d=thing.description))
648
649# else:
650# for thing in things:
651# print(thing.name,)
652
653
654# def show_definition(parser, args):
655# """Print a table definition."""
656# if args.all_ts:
657# test_all_ts(assembly_id=args.assembly, with_stats=args.has_stats)
658# parser.exit()
659
660# if args.all_sys:
661# test_all_sys()
662# parser.exit()
663
664# if args.all_raw:
665# show_list('Raw table', TableInfo.all_raw(), args.description, args.csv, args.pretty_print)
666# parser.exit()
667
668# if args.all_derived:
669# for x in TableInfo.all_ts():
670# if x.table_type is TableType.DERIVED:
671# print(x.name)
672
673# parser.exit()
674
675# if args.all_views:
676# for x in TableInfo.all_ts():
677# if x.table_type is TableType.VIEW:
678# print(x.name)
679
680# parser.exit()
681
682# if args.all_stats:
683# for i in TableInfo.all_ts():
684# if args.shadow:
685# for j in i.stats_tables_with_shadow():
686# print(j['tablename'])
687
688# else:
689# for j in i.stats_tables():
690# print(j['tablename'])
691
692# parser.exit()
693
694# if args.field:
695# if args.table is not None:
696# args.table = args.table.upper()
697
698# if len(args.field) > 1:
699# parser.error('Only 1 field definition at a time')
700
701# field_info = find_param_by_name(args.field[0].upper(),
702# args.table,
703# True,
704# True,
705# False)
706
707# if field_info is None:
708# parser.error('Cannot find field {f}'.format(f=args.field))
709
710# if args.calibrate is not None:
711# if args.sid is None:
712# parser.error('--sid not specified')
713
714# cal = field_info.cal_sid(args.sid)
715# print(cal.calibrate_value(args.calibrate))
716# parser.exit()
717
718# # if args.calibrate_file is None:
719# display_field(field_info)
720# parser.exit()
721
722# if args.table:
723# test_show_table(args.table)
724# parser.exit()
725
726# # if args.calibrate_file:
727# # if args.field is None:
728# # parser.error('Must specify --field to use')
729
730# # field_info = find_param_by_name(args.field,
731# # None, # table
732# # True, # search_ts
733# # False, # search_sys
734# # False) # multiple
735
736# # calibrate_file(args.calibrate_file, field_info)
737
738# if args.all_size:
739# show_sizes()
740# parser.exit()
741
742# if args.size is not None:
743# show_table_size(args.size, args.verbose)
744# parser.exit()
745
746# parser.error('No actions specified')
747
748
def show_ts_plain(sid, table, field, start, stop):
    """Print SENSING_TIME plus a single field from a timeseries table.

    One comma-separated line per row is written to stdout.
    """
    # use our CSV module instead for neatness
    rows = ts.select(sid=sid,
                     table=table,
                     fields=('SENSING_TIME', field, ),
                     sensing_start=start,
                     sensing_stop=stop)
    for timestamp, value in rows:
        print(datetime_to_xml(timestamp), ', ', value)
755
756
def show_ts_stat(sid, table, field, start, stop, region, stat):
    """Print SENSING_TIME, rowcount and one field's statistic per region.

    One comma-separated line per row is written to stdout.
    """
    # use our CSV module instead for neatness
    field_list = (SensingTime, RowcountFieldInfo(field=field), field, )
    for row in ts.select(sid=sid,
                         table=table,
                         fields=field_list,
                         sensing_start=start,
                         sensing_stop=stop,
                         region=region,
                         stat=stat):
        parts = (datetime_to_xml(row[0]), str(row[1]), str(row[2]))
        print(', '.join(parts))
763
764
def main():
    """Entry point.

    Parses command line arguments and dispatches to one of the listing,
    scanning or retrieval functions. Calls `parser.error` (exiting with a
    message) when no recognised action or a required option is missing.
    """
    parser = ArgumentParser()
    parser.add_argument('--db',
                        metavar='CONN',
                        help='Use database connection CONN')
    parser.add_argument('--table', '--tablename', '--table-name', '-t',
                        # type=ArgumentParser.table,
                        help='Timeseries table name to query for')
    parser.add_argument('--sid', '-s',
                        type=ArgumentParser.sid,
                        nargs='+',
                        help='Source ID to query for')
    parser.add_argument('--start', '--start-time',
                        type=ArgumentParser.start_time,
                        help='Start of time range')
    parser.add_argument('--stop', '--stop-time',
                        type=ArgumentParser.stop_time,
                        help='Stop of time range')
    parser.add_argument('--field', '--fieldname', '--field-name', '-f',
                        nargs='+',
                        help='Display fields')
    parser.add_argument('--raw',
                        action='store_true',
                        help='Display uncalibrated values OR display raw tables')
    parser.add_argument('--stat',
                        help='Stat[s] to retrieve')
    parser.add_argument('--region',
                        help='Stats region to use if not default')
    parser.add_argument('--sys',
                        action='store_true',
                        help='Show all sys table names')
    parser.add_argument('--derived',
                        action='store_true',
                        help='Show all derived table and view names')
    parser.add_argument('--has-stats',
                        action='store_true',
                        help='Filter for only tables with stats')
    parser.add_argument('--assembly',
                        type=int,
                        metavar='SFID',
                        help='Show tables with source-sf00 SFID')
    parser.add_argument('--find-interesting-params', '--find-interesting-parameters',
                        action='store_true',
                        help='Scan a table for parameters which show a lot of variation')
    parser.add_argument('--ranks',
                        type=int,
                        default=DEFAULT_INTERESTING_RANKS,
                        help='If showing interesting parameters set the number of display')
    parser.add_argument('--list-tables',
                        action='store_true',
                        help='List tables matching criteria (default is all timeseries)')
    parser.add_argument('--list-params',
                        action='store_true',
                        help='List all matching parameters')
    parser.add_argument('--static-type',
                        action='store_true',
                        help='Only include parameters with static (i.e. non-deduced) data')
    parser.add_argument('--deduced',
                        action='store_true',
                        help='Only include parameters with dynamic (deduced) datatype')
    parser.add_argument('--scalar',
                        action='store_true',
                        help='Only include parameters which are no arrays')
    args = parser.parse_args()

    # Let the user switch database, for retrieval functions
    if args.db:
        settings.set_db_name(args.db)

    if args.list_tables:
        list_tables(TableInfo.all())
        parser.exit()

    if args.find_interesting_params:
        if args.table is None:
            parser.error('Pass one --table option')

        if args.sid is None or len(args.sid) != 1:
            parser.error('No --sid selected')

        find_interesting_params(TableInfo(args.table),
                                args.sid[0],
                                args.start,
                                args.stop,
                                args.ranks)
        parser.exit()

    # Timeseries retrieval
    if args.start or args.stop:
        # guard before indexing: previously crashed with TypeError if the
        # user gave a time range without --sid or --field
        if not args.sid:
            parser.error('No --sid specified')

        sid = args.sid[0]
        if sid is None:
            parser.error('SID {s} not found'.format(s=args.sid[0]))

        if not args.field:
            parser.error('No --field specified')

        field_info = find_param_by_name(args.field[0])
        if field_info is None:
            parser.error('Field {f} not found'.format(f=args.field[0]))

        if args.region and args.stat:
            region = sampling_from_name(args.region)
            show_ts_stat(sid=sid, table=field_info.table, field=field_info,
                         start=args.start, stop=args.stop,
                         region=region, stat=args.stat)

        else:
            show_ts_plain(sid=sid, table=field_info.table, field=field_info,
                          start=args.start, stop=args.stop)

        parser.exit()

    # Quick hack to display single fields.
    # If the user passes a wildcard or multiple fields we should spot this and give
    # the option to show a summary table or to list each full field definition
    if args.field:
        field_info = find_param_by_name(args.field[0],
                                        sid=args.sid[0] if args.sid else None)
        display_field(field_info)
        parser.exit()

    if args.list_params:
        if args.table is None:
            parser.error('No --table specified')

        list_params(TableInfo(args.table), args.scalar)
        parser.exit()

    # previously called show_definition() which is commented out above and
    # would have raised NameError; report the missing action instead
    parser.error('No actions specified')
966
967
# script entry point when run directly (not imported)
if __name__ == '__main__':
    main()