#!/usr/bin/env python3

"""Handle inserting, updating and retrieving files from the report archive."""

import os
import zipfile
import logging
from datetime import datetime
import shutil
import tempfile
from urllib.parse import urlparse

from django.template import Template
from django.template import Context

from chart.common.ssh import ssh_open
from chart.common.path import Path
from chart.db.connection import db_connect
from chart.db.func import DONT_CHANGE
from chart.project import settings
from chart.common.util import ensure_dir_exists
from chart.reports.report_template import ReportTemplate
from chart.backend.activity import Activity
from chart.common.traits import name_of_thing
from chart.common.traits import is_listlike
from chart.project import SID
from chart.reports.manifest import Manifest
import chart.alg.settings
from chart.db.settings import DatabaseEngine

db_conn = db_connect('REPORTS')

logger = logging.getLogger()

# fields to retrieve to count number of rows
COUNT = ('count(*)',)

# For report templates that don't specify a <prefix> (used for archive and PDF names)
# or a <pdf-filename>, we use this macro for naming.
# There should also be the option of a theme-level default.
DEFAULT_PDF_FILENAME = '{{sat_acronym}}_{{year}}_WK{{week}}_{{activity}}.pdf'
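# With illustrative values ('MSG4', 2015, week 6, 'ADCS_TRENDING_REPORT') the default
# macro would render as 'MSG4_2015_WK06_ADCS_TRENDING_REPORT.pdf'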

# Allow report archives to be specified in the format ssh://connection/path
SCHEME_SSH = 'ssh'


class NoSuchReport(Exception):
    """Cannot find report by ID or by searching."""

    def __init__(self, report):
        super(NoSuchReport, self).__init__()
        self.report = report

    def __str__(self):
        return 'Cannot find report {report}'.format(report=self.report)


class MissingReport(Exception):
    """A report is listed in the REPORTS table but the archive file does not exist."""

    def __init__(self, path):
        super(MissingReport, self).__init__()
        self.path = path

    def __str__(self):
        return 'Missing archive file {path}'.format(
            path=self.path.relative_to(settings.ARCHIVE_DIR))


def get_report_name(activity, sid, sensing_start):
    """Return the name only of a report archive file (no directory, no file extension)."""
    return '{activity}_{sid}_{date}'.format(
        activity=name_of_thing(activity),
        sid=SID.get_reportname_part(sid),
        date=sensing_start.strftime('%Y%m%d'))


def get_report_relpath(activity=None, sid=None, sensing_start=None, report_name=None):
    """Compute relative path to report archive.

    Given some report attributes, compute the relative path (dir and filename) inside
    settings.ARCHIVE_DIR where the report will be stored.
    Inputs must be either `report_name`, or all of `activity`, `sid` and `sensing_start`."""
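    # Example result (illustrative layout):
    #   ADCS_TRENDING_REPORT/M02/2015/ADCS_TRENDING_REPORT_M02_20150202.zip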
    # sid_part = SID.get_reportname_part(sid)
    # logger.debug('sid ' + str(sid))
    # logger.debug('building path from sid_path ' + str(sid_part) + ' activity ' +
    #              activity.name + ' sensing_start ' + str(sensing_start))
    if report_name is not None:
        parts = decode_report_name(report_name)
        activity = parts['activity']
        sid = parts['sid']
        sensing_start = parts['sensing_start']

    return Path(activity.name,
                SID.get_reportname_part(sid),
                str(sensing_start.year),
                get_report_name(activity, sid, sensing_start) + '.zip')


def get_report_abspath(report_name):
    """For a given report name return a full absolute path to the archive file."""
    # e.g. report_name of ADCS_TRENDING_REPORT_M02_20150202
    return settings.ARCHIVE_DIR.joinpath(get_report_relpath(report_name=report_name))


def create_report(activity, sid, sensing_start, sensing_stop, export=None, gen_time=None):
    """Create a new report and write a row into the `reports` table.

    Return a ZipFile object ready to insert files into.

    If `export` is None the report is placed as normal inside settings.ARCHIVE_DIR.
    Otherwise `export` gives a directory to store the new archive in
    (this is used by the report_ingester tool if it needs to scp the archive somewhere else).

    If a similar report already exists then we:

    - Update the reports table entry for the new report.
    - Delete any existing change records.
    - Delete the existing .zip file and return a fresh one.
    """

    # compute filename with no directory part
    # filename = '{activity}_{sid}_{date}.zip'.format(
    #     activity=activity,
    #     sid=sid,
    #     date=sensing_start.strftime('%Y%m%d'))

    # directory and filename, relative to archive dir
    # rel_filename = Path(activity, sid, str(sensing_start.year), filename)

    # filename = rel_filename.name

    # Update REPORTS table entry
    update_reports_db(activity, sid, sensing_start, sensing_stop, gen_time)
    logger.info('Created report {activity} {sid} {start} {stop}'.format(
        activity=activity.name,
        sid=sid,
        start=sensing_start,
        stop=sensing_stop))

    rel_path = get_report_relpath(activity, sid, sensing_start)
    if export is None:
        # store the archive in the settings archive location
        abs_path = settings.ARCHIVE_DIR.child(rel_path)
        ensure_dir_exists(abs_path.parent, umask=0o02)  # group writeable

    else:
        # otherwise we put it somewhere else
        abs_path = export.child(rel_path.name)

    logger.info('Creating archive {filename}'.format(filename=abs_path))
    # will re-create as an empty archive if `abs_path` already exists
    res = zipfile.ZipFile(str(abs_path),
                          mode='w',
                          compression=zipfile.ZIP_DEFLATED)

    if export is not None:
        try:
            os.chmod(str(abs_path), 0o664)  # group writeable
        except OSError:
            # this might happen if someone has manually moved archives around
            logger.error('Ignoring chmod error')

    return res


def update_reports_db(activity, sid, sensing_start, sensing_stop, gen_time=None, only_create=False):
    """Create or update REPORTS table entry for a report.

    If `only_create` is set we will create missing entries but not update existing ones."""
    # see if the database already has an entry like this one

    if settings.DATABASE_PROJECT_ID is not None:
        project_where = ' AND project={project}'.format(project=settings.DATABASE_PROJECT_ID)
        project_columns = ',project'
        project_values = ',{pid}'.format(pid=settings.DATABASE_PROJECT_ID)

    else:
        project_where = ''
        project_columns = ''
        project_values = ''

    existing = db_conn.query(
        'SELECT count(*) '
        'FROM reports '
        'WHERE activity=:activity '
        'AND {sid} '
        'AND sensing_start=:sensing_start{project}'.format(
            sid=SID.sql_sys_where('REPORTS', sid),
            project=project_where),
        activity=activity.name,
        sensing_start=sensing_start).fetchone()[0]

    rel_path = get_report_relpath(activity, sid, sensing_start)

    if existing == 0:
        # no existing entry, so create a fresh one
        sid_fields, sid_binds = SID.sql_sys_insert('REPORTS')
        db_conn.query(
            'INSERT INTO reports '
            '  (activity, sensing_start, sensing_stop, gen_time, archive_filename{fields}'
            '{project_columns}) '
            'VALUES '
            '  (:activity, :sensing_start, :sensing_stop, :gen_time, '
            ' :archive_filename{binds}{project_values})'.format(
                fields=sid_fields,
                project_columns=project_columns,
                binds=sid_binds,
                project_values=project_values),
            activity=activity.name,
            sensing_start=sensing_start,
            sensing_stop=sensing_stop,
            gen_time=gen_time if gen_time else datetime.utcnow(),
            archive_filename=str(rel_path),
            **SID.bind_sys_insert('REPORTS', sid))

    elif not only_create:
        # update the existing entry
        logger.debug('Updating existing report {id}'.format(id=rel_path))
        db_conn.query(
            'UPDATE reports '
            'SET gen_time=:gen_time '
            'WHERE activity=:activity '
            'AND {sid} '
            'AND sensing_start=:sensing_start{project}'.format(
                sid=SID.sql_sys_where('REPORTS', sid),
                project=project_where),
            activity=activity.name,
            sensing_start=sensing_start,
            gen_time=gen_time if gen_time else datetime.utcnow())

    db_conn.commit()


# @cached
def retrieve_archive(report_name):
    """Return a zipfile object containing `report_name`, either from a local file or over ssh."""

    if isinstance(settings.ARCHIVE_DIR, str):
        # should cache here
        match = urlparse(settings.ARCHIVE_DIR)
        if match.scheme == SCHEME_SSH:
            res = ssh_open(match.netloc,
                           '{base}/{rel}'.format(
                               base=match.path,
                               rel=str(get_report_relpath(report_name=report_name))))
            archive = zipfile.ZipFile(res)

        else:
            raise ValueError('Unknown scheme {s}'.format(s=match.scheme))

    else:
        # convert the single archive filename to a full path
        # this could be optimised quite easily
        zipfile_path = settings.ARCHIVE_DIR.joinpath(
            get_report_relpath(report_name=report_name))

        if not zipfile_path.exists():
            logger.error('Cannot find {p}'.format(p=zipfile_path))
            raise MissingReport(zipfile_path)

        mtime = zipfile_path.stat().st_mtime
        if report_name == retrieve_archive.cache_name and\
           retrieve_archive.cache_mtime == mtime:
            return retrieve_archive.cache_zipfile

        archive = zipfile.ZipFile(str(zipfile_path))

        retrieve_archive.cache_name = report_name
        retrieve_archive.cache_mtime = mtime
        retrieve_archive.cache_zipfile = archive

    return archive

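# Single-entry cache used by retrieve_archive above, keyed on the report name and
# the archive file's modification time (only applied to local archives, not ssh)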
retrieve_archive.cache_name = None
retrieve_archive.cache_mtime = None
retrieve_archive.cache_zipfile = None


def retrieve_file(report_name,
                  filename,
                  revision=None):
    """Read `filename` as a blob from `report_name`.

    If `revision` is not specified, the current version of the file is returned."""
    archive = retrieve_archive(report_name)
    # logger.debug('retrieve file from {a}'.format(a=settings.ARCHIVE_DIR))
    if revision == 0 or revision is None:
        try:
            return archive.read(str(filename))
            # throw a test error to simulate the weird TCE zipfile "Bad number" error
            # raise zipfile.BadZipfile('This is a test message')
        # except KeyError:
            # raise IOError('Cannot find archive item {filename}'.format(filename=filename))

        except zipfile.BadZipfile as bzf:
            raise zipfile.BadZipfile('{e} opening {m} from {f}'.format(
                e=bzf, m=filename, f=settings.ARCHIVE_DIR.joinpath(
                    get_report_relpath(report_name=report_name))))

    else:
        return archive.read('{filename}.{rev}'.format(filename=filename,
                                                      rev=revision))


def update_file(report_name, filename, username, content):
    """Add a new revision of `filename` (usually report.html), consisting of `content`
    (unicode), to the archive for `report_name`.

    The new content becomes report.html and the previous content is given an increasing
    index number (report.html.0, report.html.1, etc.)."""
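    # Usage sketch (illustrative values):
    #   update_file('ADCS_TRENDING_REPORT_M02_20150202', 'report.html', 'jbloggs', new_html)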

    # open zip file with report contents
    zipfile_relpath = get_report_relpath(report_name=report_name)
    zipfile_path = settings.ARCHIVE_DIR.joinpath(zipfile_relpath)
    orig = zipfile.ZipFile(str(zipfile_path), 'a')

    # find out what the current maximum revision is
    current_revision = -1
    # assume zero revisions if the archive has no manifest; normally the count is
    # taken from the manifest below
    revision_count = 0
    for item in orig.infolist():
        if item.filename.startswith(str(filename)) and item.filename != str(filename):
            current_revision += 1

    # copy all content from original zip file to a new one (except report.html)
    temp = tempfile.NamedTemporaryFile()
    # by default zip files are not compressed
    with zipfile.ZipFile(temp.name, 'w', zipfile.ZIP_DEFLATED) as zipwrite:
        # zip files cannot be changed so we make a new one
        # with all the original content plus new file
        for item in orig.infolist():
            if item.filename != str(filename):
                data = orig.read(item.filename)
                if item.filename == str(chart.alg.settings.MANIFEST_FILENAME):
                    # modify manifest with new revision info
                    manifest = Manifest(buff=data)
                    manifest.add_revision(username)
                    data = manifest.tostring()

                    # get the revision count to later update the reports table
                    revision_count = manifest.revision_count()

                zipwrite.writestr(item.filename, data)

        # we must rename the old 'report.html' to its new version number
        new_filename = '{file}.{rev}'.format(file=filename, rev=current_revision + 1)
        content_old = orig.read(str(filename))
        zipwrite.writestr(new_filename, content_old)

        # also copy the latest report modification
        zipwrite.writestr(str(filename), content.encode('UTF-8'))

    # copy zip file to the original report name
    shutil.copyfile(temp.name, str(zipfile_path))

    # now update the report revisions table
    parts = decode_report_name(report_name)

    # update
    db_conn.query(
        'UPDATE reports '
        'SET last_revision_time=:now, revision_count=:count '
        'WHERE activity=:activity '
        'AND {sid} '
        'AND sensing_start=:sensing_start'.format(
            sid=SID.sql_sys_where('REPORTS', parts['sid'])),
        activity=parts['activity'].name,
        sensing_start=parts['sensing_start'],
        now=datetime.utcnow(), count=revision_count)

    db_conn.commit()


def update_manifest(report_name, content):
    """Replace manifest with `content`.

    `content` can be the actual rendered buffer or a Manifest object.
    """
    # Accept a Manifest object instead of a buffer
    if isinstance(content, Manifest):
        content = content.tostring()

    # open original zip file with report contents
    zipfile_relpath = get_report_relpath(report_name=report_name)
    zipfile_path = settings.ARCHIVE_DIR.joinpath(zipfile_relpath)
    orig = zipfile.ZipFile(str(zipfile_path), 'a')

    # we copy everything from the original file into the new one,
    # apart from the file to be replaced
    temp = tempfile.NamedTemporaryFile()

    with zipfile.ZipFile(temp.name, 'w') as zipwrite:
        for item in orig.infolist():
            if item.filename == str(chart.alg.settings.MANIFEST_FILENAME):
                data = content

            else:
                data = orig.read(item.filename)

            zipwrite.writestr(item.filename, data)

    # Replace the original with the new archive
    shutil.copyfile(temp.name, str(zipfile_path))


def make_pdf_filename(activity_or_template, sid, start_time):
    """Generate a PDF filename for a report with the given settings.

    If the report template has a <pdf-filename> section, this is used.
    Otherwise we use something like 'MA_2012_WK37_IASI_WEEKLY_REPORT.pdf'.
    """

    if isinstance(activity_or_template, str):
        activity = Activity(activity_or_template)
        template = ReportTemplate(activity.template)
        activity_name = activity.name

    elif isinstance(activity_or_template, Activity):
        template = ReportTemplate(activity_or_template.template)
        activity_name = activity_or_template.name

    elif isinstance(activity_or_template, ReportTemplate):
        template = activity_or_template
        activity_name = template.name

    else:
        raise TypeError('activity_or_template must be a str, Activity or ReportTemplate')

    # allow the template to specify a <pdf-filename> pattern
    if template.pdf_filename is not None:
        pdf_macro = template.pdf_filename

    # if no specific PDF filename pattern, re-use the generic <prefix>
    elif template.prefix is not None:
        pdf_macro = template.prefix + '.pdf'

    # otherwise make up a default value
    else:
        pdf_macro = DEFAULT_PDF_FILENAME

    year, week = start_time.isocalendar()[0:2]

    return Template(pdf_macro).render(Context({
        'sat_acronym': getattr(sid, 'name', ''),
        'year': year,
        'syear': year % 100,
        'week': '{week:02}'.format(week=week),
        'activity': activity_name}))


def find_reports(fields,
                 sid=None,
                 start_time_eq=None,
                 start_time_ge=None,
                 start_time_lt=None,
                 activity=None,
                 archive_filename=None,
                 ordering='SENSING_START',
                 limit=None):
    """Return a cursor which yields report db entries.

    Args:
        `fields` (list of str): DB fields to retrieve
        `start_time_eq` (datetime): Search for reports where sensing_start equals `start_time_eq`
        `start_time_ge` (datetime): Search for reports where sensing_start is greater than or
            equal to `start_time_ge`
        `start_time_lt` (datetime): Search for reports where sensing_start is less than
            `start_time_lt`
        `activity` (str): Search for reports created by a certain activity.

    Fields can include: activity, scid, sensing_start, sensing_stop, archive_filename,
    gen_time.
    """

    clauses = []
    bindvars = {}

    if sid is not None:
        clauses.append(SID.sql_sys_where('REPORTS', sid))

    if start_time_eq is not None:
        clauses.append('sensing_start=:start_eq')
        bindvars['start_eq'] = start_time_eq

    if start_time_ge is not None:
        clauses.append('sensing_start>=:start_ge')
        bindvars['start_ge'] = start_time_ge

    if start_time_lt is not None:
        clauses.append('sensing_start<:stop_lt')
        bindvars['stop_lt'] = start_time_lt

    if archive_filename is not None:
        clauses.append('archive_filename like :name')
        bindvars['name'] = '%{name}'.format(name=archive_filename)

    if activity is not None:
        # It would be nicer to accept unix-style wildcards here but Oracle-style ('%')
        # is simpler. Don't remove the Oracle wildcard option as users rely on it,
        # and '%' is easier to type in bash since a '*' would need escaping.
        if isinstance(activity, Activity):
            clauses.append('activity LIKE :activity')
            bindvars['activity'] = activity.name

        else:
            if '%' in activity:
                clauses.append('activity LIKE :activity')

            else:
                Activity(activity)
                clauses.append('activity=:activity')

            bindvars['activity'] = name_of_thing(activity).upper()

    if ordering is not None:
        if not is_listlike(ordering):
            ordering = (ordering, )

        order_by = ' ORDER BY {orders}'.format(orders=','.join(ordering))

    else:
        if limit is None:
            order_by = ''  # ?

        else:
            order_by = ' ORDER BY sensing_start'

    if len(clauses) == 0:
        clauses = ('1=1', )

    if limit is None:
        return db_conn.query('SELECT {fields} FROM REPORTS WHERE {where} {order}'.format(
            fields=','.join(fields),
            where=' AND '.join(clauses),
            order=order_by), **bindvars)

    if db_conn.engine is DatabaseEngine.ORACLE:
        bindvars['limit'] = limit
        # oracle doesn't have 'LIMIT'
        # mysql doesn't have ROW_NUMBER
        # postgres accepts both
        return db_conn.query('SELECT {fields} FROM ('
                             'SELECT {fields},ROW_NUMBER() OVER ({order}) rn '
                             'FROM REPORTS '
                             'WHERE {where}{order}) q '
                             'WHERE rn<=:limit'.format(
                                 fields=','.join(fields),
                                 where=' AND '.join(clauses),
                                 order=order_by),
                             **bindvars)

    else:
        return db_conn.query('SELECT {fields} '
                             'FROM REPORTS '
                             'WHERE {where}{order} LIMIT :limit'.format(
                                 fields=','.join(fields),
                                 where=' AND '.join(clauses),
                                 order=order_by),
                             **bindvars)


def delete_report(report_name=None, archive_filename=None, commit=True):
    """Delete archive file and entry from REPORTS table."""
    # deleting by report_name is not implemented yet; callers must pass archive_filename
    assert report_name is None
    db_conn.query('DELETE FROM REPORTS WHERE ARCHIVE_FILENAME=:archive_filename',
                  archive_filename=archive_filename)
    if commit:
        db_conn.commit()


def decode_report_name(report_name):
    """Decode the given report archive name into a dictionary.

    Returns a dict with sid, activity, unit, week and sensing_start.

    Args:
        `report_name` (str): name of the report archive, as returned by get_report_name()
    """
    details = report_name.split('_')
    sid_fragment = details[-2]
    activity = '_'.join(details[0:-2])
    unit = details[0]
    sdate = details[-1]
    year = int(sdate[:4])
    month = int(sdate[4:6])
    day = int(sdate[6:8])
    date = datetime(year, month, day)
    _, week, _ = date.isocalendar()

    details = {'sid': SID.from_reportname_part(sid_fragment),
               'activity': Activity(activity),
               'unit': unit,
               'week': week,
               'sensing_start': date}

    return details


def get_report_url(activity, sid, sensing_start, external=False):
    """Compute the URL which will be used to find a report.

    Uses the webserver identified by settings.CHART_WEB.
    Does not use Django reverse because we may be running on a different system
    from the one the links should point to."""
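    # e.g. '<CHART_WEB>/reports/report/ADCS_TRENDING_REPORT_M02_20150202'
    # (the base URL comes from settings.CHART_WEB or settings.CHART_EXTERNAL_WEB)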
    # TBD: get rid of this string, at least move to a module constant
    if external:
        base = settings.CHART_EXTERNAL_WEB

    else:
        base = settings.CHART_WEB

    return '{base}/reports/report/{name}'.format(
        base=base,
        name=get_report_name(activity, sid, sensing_start))


def update_report(
        sid=None,
        start_time_eq=None,
        activity=None,
        archive_filename=None,
        set_rev_count=DONT_CHANGE,
        set_last_revision_time=DONT_CHANGE,
        set_last_publish_time=DONT_CHANGE,
        commit=True):
    """Update an entry in REPORTS table.

    Args:
        `archive_filename` (str): Filter for filename field
        `set_rev_count` (int): number of revisions in report
        `set_last_revision_time` (datetime): Latest revision time
        `set_last_publish_time` (datetime): Latest publish time

    Returns:
        Number of rows changed.
    """
    clauses = []
    updates = []
    bindvars = {}

    # search clauses
    if archive_filename is not None:
        clauses.append('ARCHIVE_FILENAME=:archive_filename')
        bindvars['archive_filename'] = archive_filename

    if sid is not None:
        clauses.append(SID.sql_sys_where('REPORTS', sid))

    if start_time_eq is not None:
        clauses.append('sensing_start=:start_eq')
        bindvars['start_eq'] = start_time_eq

    if activity is not None:
        clauses.append('activity=:activity')
        bindvars['activity'] = activity.name

    # change clauses
    if set_rev_count is not DONT_CHANGE:
        updates.append('REVISION_COUNT=:set_rev_count')
        bindvars['set_rev_count'] = set_rev_count

    if set_last_revision_time is not DONT_CHANGE:
        updates.append('LAST_REVISION_TIME=:set_last_revision_time')
        bindvars['set_last_revision_time'] = set_last_revision_time

    if set_last_publish_time is not DONT_CHANGE:
        updates.append('LAST_PUBLISH_TIME=:set_last_publish_time')
        bindvars['set_last_publish_time'] = set_last_publish_time

    # make actual update query
    cur = db_conn.query('UPDATE REPORTS SET {updates} WHERE {where}'.format(
        updates=','.join(updates), where=' AND '.join(clauses)),
                        **bindvars)

    if commit:
        db_conn.commit()

    return cur.rowcount


def add_report(activity,
               sid,
               sensing_start,
               sensing_stop,
               archive_filename,
               gen_time=None,
               username=None,
               last_revision_time=None,
               revision_count=None,
               last_publish_time=None,
               commit=True):
    """Create a new entry in REPORTS table.

    Args:
        `activity` (str):
        `sid` (SID):
        `sensing_start` (datetime):
        `sensing_stop` (datetime):
        `archive_filename` (str):
        `gen_time` (datetime):
        `username` (str):
        `last_revision_time` (datetime):
        `revision_count` (int):
        `last_publish_time` (datetime):
    """

    # we create an entry in the database for these archives
    db_conn.query('INSERT INTO reports (ACTIVITY, SCID, SENSING_START, SENSING_STOP, '
                  'ARCHIVE_FILENAME, GEN_TIME, USERNAME, LAST_REVISION_TIME, '
                  'REVISION_COUNT, LAST_PUBLISH_TIME) VALUES (:activity, :scid, '
                  ':sensing_start, :sensing_stop, :archive_filename, :gen_time, '
                  ':username, :last_revision_time, :revision_count, :last_publish_time)',
                  activity=activity,
                  scid=sid.scid,
                  sensing_start=sensing_start,
                  sensing_stop=sensing_stop,
                  archive_filename=archive_filename,
                  gen_time=gen_time,
                  username=username,
                  last_revision_time=last_revision_time,
                  revision_count=revision_count,
                  last_publish_time=last_publish_time)

    if commit:
        db_conn.commit()


def count_reports(sid=None,
                  start_time_eq=None,
                  start_time_ge=None,
                  start_time_lt=None,
                  activity=None,
                  archive_filename=None):
    """Return number of reports matching spec."""
    return find_reports(fields=COUNT,
                        sid=sid,
                        start_time_eq=start_time_eq,
                        start_time_ge=start_time_ge,
                        start_time_lt=start_time_lt,
                        activity=activity,
                        archive_filename=archive_filename,
                        ordering=None).fetchone()[0]