#!/usr/bin/env python3

"""CHART web site system jobs page."""

import os
import re
import html
import json
import logging
from io import StringIO
import mimetypes
from datetime import datetime, timedelta

from django.shortcuts import render
from django.http import HttpResponse
from django.urls import reverse
import pygments
import pygments.util
import pygments.lexers
import pygments.formatters

from chart.project import settings
import chart.alg.settings
from chart.db.connection import db_connect
from chart.common.prettyprint import Table
from chart.common.texttime import texttime_to_datetime
from chart.backend.activity import Activity
from chart.common.prettyprint import show_timedelta
from chart.common.prettyprint import show_time
from chart.schemas.schema_tool import document_rnc_name
from chart.common.xml import load_xml
from chart.common.xml import XMLElement
from chart.events.event import Event
from chart.backend.jobs import find_jobs
from chart.backend.processes import find_single_process
from chart.backend.result import Result
from chart.db.func import Count
from chart.db.func import Sum
from chart.backend.activity import CallingConvention
from chart.project import SID
from chart.common.resource import Resource
from chart.common.xml import XMLSyntaxError
from chart.common.path import Path
from chart.backend.job import JobStatus

ELEM_EVENT = 'event'

logger = logging.getLogger()

db_conn = db_connect('JOBS')

# Text to show instead of log file analysis results if they're not available in the database
LFA_FAILED = 'unknown'

# Text to show if data is not present in the database
MISSING_VALUE = 'n/a'

CATEGORY = 'SCHEDULER'

# clip Oracle queries to a maximum number of jobs to stop them taking too long to execute
TABLE_MAX_LENGTH = 100000

# Prefix to base URL to display individual jobs
# (django.reverse isn't easy to use in a single page app)
URL_FRAGMENT_SINGLE_JOB = '?info={jobid}'
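# e.g. URL_FRAGMENT_SINGLE_JOB.format(jobid=1234) yields '?info=1234', which
# link_to_job() below appends to the job viewer index URL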

# Text to display in SID column for jobs with no SID
NO_SID_TEXT = ''


def summary(request):
    """Compute a summary table showing all jobs for all SIDs over a whole number of days."""
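    # Expected GET parameters: 'start' and 'stop' (text times) and
    # 'timetype' ('sensing' or 'gen')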
    if len(request.GET['start']) == 0:
        return HttpResponse()

    start_time = texttime_to_datetime(request.GET['start'])
    # it's nicer if a search for 2012-01-01 to 2012-01-02 includes
    # jobs that occur on the 2nd
    stop_time = texttime_to_datetime(request.GET['stop']) + timedelta(days=1)

    time_type = 'sensing_start'
    # browser value should be either "sensing" or "gen"
    if request.GET['timetype'] == 'gen':
        time_type = 'gen_time'

    # For jobs that have both start time and stop time, we filter on stop time as that is closest
    # to execution time.
    # For jobs with only start time, filter on that.
    # For jobs with neither start nor stop sensing time (convention: none), filter on gen_time
    # instead.
    # In some cases it would be useful to let the user filter on generation time instead,
    # which all jobs have, but this can give counter-intuitive results.

    data = []

    fields = ['ACTIVITY',
              'STATUS',
              Count()]
    if settings.DATABASE_JOBS_TABLE_LFA:
        fields.extend([Sum('LOG_CRITICAL_COUNT'),
                       Sum('LOG_ERROR_COUNT'),
                       Sum('LOG_WARNING_COUNT'),
                       Sum('LOG_INFO_COUNT')])
    else:
        fields.extend(['null', 'null', 'null', 'null'])

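    # Aggregate query: one row per distinct (activity, status, SID) combination,
    # with a job count and summed log file analysis figures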
    for row in find_jobs(
            fields=fields + SID.sql_sys_select('JOBS'),
            category=CATEGORY,
            group_by=['ACTIVITY', 'STATUS'] + SID.sql_sys_select('JOBS'),
            order_by=['ACTIVITY', 'STATUS'] + SID.sql_sys_select('JOBS'),
            clauses=['{timetype}>=:start_time'.format(timetype=time_type),
                     '{timetype}<=:stop_time'.format(timetype=time_type)],
            bindvars={'start_time': start_time, 'stop_time': stop_time}):
        activity = row[0]
        status = row[1]
        count = row[2]
        criticals = row[3]
        errors = row[4]
        warnings = row[5]
        infos = row[6]
        sid = SID.from_sys_select('JOBS', row[7:])

        status = html_status(status)

        if criticals is None and errors is None and warnings is None and infos is None:
            # This could mean either that it's a PENDING row, or that the data is
            # missing from the table
            lfa = ''

        else:
            total_errors = criticals + errors if isinstance(criticals, int) and \
                isinstance(errors, int) else None
            lfa = (
                '<span class=\'{error_class}\'>{errors}</span>/'
                '<span class=\'{warning_class}\'>{warnings}</span>/'
                '<span class=\'{info_class}\'>{infos}</span>').format(
                    error_class='error' if total_errors is not None and total_errors > 0 else '',
                    errors=total_errors if total_errors is not None else MISSING_VALUE,
                    warning_class='warning' if warnings is not None and warnings > 0 else '',
                    warnings=warnings,
                    info_class='info' if infos is not None and infos > 0 else '',
                    infos=infos)

        # We could make the table look nicer by suppressing the activity name when
        # it repeats, but that doesn't work when client-side sorting is enabled.
        data.append((activity,
                     str(sid) if sid is not None else NO_SID_TEXT,
                     status,
                     count,
                     lfa))

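    # The response uses the legacy DataTables 1.x option names (aaData,
    # aoColumns and friends) expected by the client-side table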
    return HttpResponse(json.dumps({
        'aaData': data,
        'bPaginate': False,  # disable pagination
        # 'bAutoWidth': False,
        # 'bSort': False,  # disable column sorting
        # 'aaSorting': [[1, 'desc']],
        'bInfo': False,  # disable the table information summary line
        'bFilter': False,  # disable search box
        'bDestroy': True,  # allow table to be recreated with fresh data
        'aoColumns': [
            {'sTitle': 'Activity'},
            {'sTitle': 'SID'},
            {'sTitle': 'Status'},
            {'sTitle': 'Count', 'sClass': 'center'},
            {'sTitle': 'Total error/warn/info'},
        ]}),
                        content_type='application/json')


def html_status(status):
    """Show a job status value in an appropriate colour.

    Replace empty status with PENDING."""

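    # e.g. html_status(None) -> "<span class='PENDING'>PENDING</span>"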
    if status is None:
        status = 'PENDING'

    return '<span class=\'{status}\'>{status}</span>'.format(status=status)


def joblist(request):
    """Compute a jobs list table.

    Format depends on the activity calling convention: filename, scid+timerange, timerange."""
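    # Expected GET parameters: 'activity', 'scid', 'status', 'filter' (name
    # wildcard), 'start'/'stop' (text times), 'timetype' ('sensing' or 'gen'),
    # plus the DataTables paging fields 'sEcho', 'iDisplayStart' and
    # 'iDisplayLength'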
    activity = request.GET['activity']
    if len(activity) == 0:
        activity = None

    scid = request.GET['scid']
    if len(scid) == 0:
        sid = None

    else:
        sid = SID(scid)

    status_str = request.GET['status']
    if len(status_str) == 0:
        status = None

    else:
        status = JobStatus[status_str]

    logger.debug('Status filter %s parsed as %s', status_str, status)

    wildcard = request.GET['filter']
    if request.GET['start'] == '':
        start_time = None
        stop_time = None

    else:
        start_time = texttime_to_datetime(request.GET['start'])
        # it's nicer if a search for 2012-01-01 to 2012-01-02 includes
        # jobs that occur on the 2nd
        stop_time = texttime_to_datetime(request.GET['stop']) + timedelta(days=1)

    # determine if the user wants to search on job sensing time start or job generation time
    # (execution time not supported because that would mean a join against the processes table,
    #  and is probably the least useful anyway)
    if request.GET['timetype'] == 'sensing':
        timefield = 'sensing_start'

    else:
        timefield = 'gen_time'

    echo = request.GET['sEcho']
    display_start = int(request.GET['iDisplayStart'])
    display_length = int(request.GET['iDisplayLength'])
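    # Each branch below fetches up to TABLE_MAX_LENGTH matching rows and slices
    # out the requested page in Python rather than in SQL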

    convention = Activity(activity).convention

    if convention is CallingConvention.FILENAME:
        data = []
        prev_time = None
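        # prev_time holds the previous row's sensing start so the table can show
        # the time gap between consecutive files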
        kwargs = {}
        clauses = []
        bindvars = {}
        if start_time is not None:
            clauses.append('{field}>=:start_time'.format(field=timefield))
            bindvars['start_time'] = start_time

        if stop_time is not None:
            clauses.append('{field}<:stop_time'.format(field=timefield))
            bindvars['stop_time'] = stop_time

        if wildcard != '':
            kwargs['filename_like'] = wildcard

        fields = ['ID',
                  'STATUS',
                  'DIRNAME',
                  'FILENAME',
                  'SENSING_START']
        if settings.DATABASE_JOBS_TABLE_LFA:
            fields.extend(['LOG_CRITICAL_COUNT',
                           'LOG_ERROR_COUNT',
                           'LOG_WARNING_COUNT',
                           'LOG_INFO_COUNT'])

        else:
            fields.extend(['null', 'null', 'null', 'null'])

        for (jobid,
             status,
             dirname,
             filename,
             sensing_start,
             crits,
             errors,
             warns,
             infos) in find_jobs(fields=fields,
                                 category=CATEGORY,
                                 status=status,
                                 activity=activity,
                                 clauses=clauses,
                                 bindvars=bindvars,
                                 limit=TABLE_MAX_LENGTH,
                                 order_by='FILENAME',
                                 **kwargs):
            # Compute log file analysis
            if crits is None or errors is None or warns is None or infos is None:
                lfa = LFA_FAILED

            else:
                lfa = ('<span class=\'{error_class}\'>{errors}</span>/'
                       '<span class=\'{warning_class}\'>{warnings}</span>/'
                       '<span class=\'{info_class}\'>{infos}</span>').format(
                           error_class='error' if crits + errors > 0 else '',
                           errors=crits+errors,
                           warning_class='warning' if warns > 0 else '',
                           warnings=warns,
                           info_class='info' if infos > 0 else '',
                           infos=infos)

            display_status = html_status(status)
            # The "missing file" entry shouldn't be computed here because it would test
            # whether the file is missing right now, not at the time of running.
            # A better solution might be to add a "FAILURE_CODE" column to the JOBS table
            # to show the reason for failure.
            # if dirname is not None and filename is not None:
                # if not Path(dirname).joinpath(filename).exists():
                    # display_status += ' (missing&nbsp;file)'

            data.append((jobid,
                         display_status,
                         filename,
                         '' if prev_time is None else show_timedelta(sensing_start - prev_time),
                         lfa))
            prev_time = sensing_start

        paginated_data = data[display_start:display_start + display_length]

        return HttpResponse(json.dumps({
            'iTotalRecords': len(data),
            'iTotalDisplayRecords': len(data),
            'sEcho': echo,
            'aaData': paginated_data}),
                            content_type='application/json')

    elif convention is CallingConvention.SID_TIMERANGE or \
            convention is CallingConvention.SID_TIMERANGE_TABLE:
        kwargs = {}

        if sid is not None:
            kwargs['sid'] = sid

        if start_time is not None:
            # timefield is "sensing_start" or "gen_time"
            kwargs['{field}_ge'.format(field=timefield)] = start_time

        if stop_time is not None:
            kwargs['{field}_lt'.format(field=timefield)] = stop_time

        if wildcard != '':
            kwargs['tablename_like'] = wildcard

        data = []

        fields = ['ID',
                  'STATUS',
                  'SENSING_START',
                  'TABLENAME']
        if settings.DATABASE_JOBS_TABLE_LFA:
            fields.extend(['LOG_CRITICAL_COUNT',
                           'LOG_ERROR_COUNT',
                           'LOG_WARNING_COUNT',
                           'LOG_INFO_COUNT'])

        else:
            fields.extend(['null', 'null', 'null', 'null'])

        for row in find_jobs(
                fields + SID.sql_sys_select('JOBS'),
                category=CATEGORY,
                status=status,
                activity=activity,
                limit=TABLE_MAX_LENGTH,
                order_by='SENSING_START',
                **kwargs):
            jobid = row[0]
            status = row[1]
            sensing_start = row[2]
            tablename = row[3]
            # Compute log file analysis
            if row[4] is None or row[5] is None or row[6] is None or row[7] is None:
                lfa = LFA_FAILED

            else:
                lfa = '{error}/{warn}/{info}'.format(
                    error=row[4]+row[5], warn=row[6], info=row[7])

            sid = SID.from_sys_select('JOBS', row[8:])

            data.append((jobid,
                         html_status(status),
                         str(sid),
                         show_time(sensing_start),
                         tablename,
                         lfa))

        paginated_data = data[display_start:display_start + display_length]

        return HttpResponse(json.dumps({
            'iTotalRecords': len(data),
            'iTotalDisplayRecords': len(data),
            'sEcho': echo,
            'aaData': paginated_data}),
                            content_type='application/json')

    elif convention is CallingConvention.TIMERANGE:
        kwargs = {}
        if start_time is not None:
            # timefield is "sensing_start" or "gen_time"
            kwargs['{field}_ge'.format(field=timefield)] = start_time

        if stop_time is not None:
            kwargs['{field}_lt'.format(field=timefield)] = stop_time

        if status is not None:
            # a PENDING filter means selecting jobs with no status set
            if status_str == 'PENDING':
                kwargs['status'] = None

            else:
                kwargs['status'] = status

        data = []
        for jobid, status, sensing_start in find_jobs(
                fields=('ID', 'STATUS', 'SENSING_START'),
                category=CATEGORY,
                activity=activity,
                limit=TABLE_MAX_LENGTH,
                order_by='SENSING_START',
                **kwargs):
            # The '' is the missing LFA
            data.append((jobid,
                         html_status(status),
                         show_time(sensing_start),
                         ''))

        paginated_data = data[display_start:display_start + display_length]

        return HttpResponse(json.dumps({
            'iTotalRecords': len(data),
            'iTotalDisplayRecords': len(data),
            'sEcho': echo,
            'aaData': paginated_data}),
                            content_type='application/json')

    elif convention is CallingConvention.NONE:
        # (ADCS_MSG_RETRIEVE, PURGE) filter by gen_time not sensing_start
        kwargs = {}
        if start_time is not None:
            kwargs['gen_time_ge'] = start_time

        if stop_time is not None:
            kwargs['gen_time_lt'] = stop_time

        data = []
        for jobid, status, gen_time in find_jobs(
                fields=('ID', 'STATUS', 'GEN_TIME'),
                category=CATEGORY,
                status=status,
                activity=activity,
                limit=TABLE_MAX_LENGTH,
                order_by='GEN_TIME',
                **kwargs):
            # The '' is the missing LFA
            data.append((jobid,
                         html_status(status),
                         show_time(gen_time),
                         ''))

        paginated_data = data[display_start:display_start + display_length]

        return HttpResponse(json.dumps({
            'iTotalRecords': len(data),
            'iTotalDisplayRecords': len(data),
            'sEcho': echo,
            'aaData': paginated_data}),
                            content_type='application/json')

    else:
        return HttpResponse('Unknown activity convention: {c}'.format(c=convention))


def index(request):  # (unused argument) pylint: disable=W0613
    """Show a summary of jobs attempted by the CHART backend over the last 24 hours."""
    activities = [{'name': a.name,
                   'convention': a.convention.value} for a in Activity.all()]
    sids = ['SYS'] + [str(s) for s in SID.all()]

    return render(request,
                  'jobviewer/jobs.html',
                  {'activities': activities,
                   'scids': sids})


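# e.g. link_to_job(42) -> "<a href='/jobs/?info=42'>42</a>" (illustrative only;
# the actual URL prefix comes from reverse('jobviewer:index'))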
def link_to_job(job_id):
    """Compute a direct link to a job in the job viewer individual job tab."""
    return '<a href=\'{url}{suffix}\'>{job_id}</a>'.format(
        url=reverse('jobviewer:index'),
        suffix=URL_FRAGMENT_SINGLE_JOB.format(jobid=job_id),
        job_id=job_id)


def link_to_activity(activity: str) -> str:
    """Make a nice HTML fragment linking to an Activity info page."""
    # We don't validate if it's a valid activity name
    return '<a href=\'{url}\' target=\'_blank\'>{name}</a>'.format(
        url=reverse('backend:activities/single',
                    kwargs=dict(activityname=activity)),
        name=activity)


def job(request):
    """Display info on a single job."""
    job_id = int(request.GET['job_id'])

    result = StringIO()

    # job summary section
    t = Table(headings=('Property', 'Value'),
              cssclass='table-striped table-bordered')

    # Try to retrieve job details including log file analysis
    job_fields = ['ID',
                  'CATEGORY',
                  'STATUS',
                  'ACTIVITY',
                  'SENSING_START',
                  'SENSING_STOP',
                  'PROCESS_ID',
                  'DIRNAME',
                  'FILENAME']
    if settings.DATABASE_JOBS_TABLE_LFA:
        job_fields.extend(['LOG_CRITICAL_COUNT',
                           'LOG_ERROR_COUNT',
                           'LOG_WARNING_COUNT',
                           'LOG_INFO_COUNT'])
    else:
        job_fields.extend(['\'' + LFA_FAILED + '\''] * 4)

    if settings.DATABASE_JOBS_TABLE_PARENT:
        job_fields.append('PARENT')

    else:
        job_fields.append('\'' + LFA_FAILED + '\'')

    row = find_jobs(fields=job_fields,
                    job_id=job_id,
                    limit=1).fetchone()

    if row is None:
        return HttpResponse('Job not found', content_type='text/html')

    activity = None
    for field, value in zip(job_fields, row):
        if field == 'ID':
            label = 'Job ID'

        elif field == 'CATEGORY':
            label = 'Category'

        elif field == 'STATUS':
            label = 'Status'
            status = value
            value = html_status(value)

        elif field == 'ACTIVITY':
            label = 'Activity'
            activity = Activity(value)
            value = link_to_activity(value)

        elif field == 'SENSING_START':
            sensing_start = value
            label = 'Start'
            if value is None:
                value = MISSING_VALUE

        elif field == 'SENSING_STOP':
            label = 'Stop'
            if value is None:
                value = MISSING_VALUE

            else:
                value = '{stop} ({duration})'.format(
                    stop=value, duration=value-sensing_start)

        elif field == 'PROCESS_ID':
            label = 'Process ID'
            process_id = value

        elif field == 'DIRNAME':
            dirname = value
            continue

        elif field == 'FILENAME':
            label = 'Filename'
            if value is None:
                value = MISSING_VALUE

            else:
                fullname = Path(dirname).joinpath(value)
                value = str(fullname)
                if fullname.exists():
                    value += '<span class=\'COMPLETED\'> (exists)</span>'

                else:
                    value += '<span class=\'FAILED\'> (missing)</span>'

        elif field.startswith('LOG_'):
            label = 'Log ({level})'.format(level=field[4:].replace('_', ' ').lower())

        elif field == 'PARENT':
            label = 'Parent job'
            if value is None:
                value = MISSING_VALUE

            else:
                value = link_to_job(value)

        else:
            # constant placeholder fields selected when the optional LFA or
            # parent columns are not configured
            continue

        # italics for the key names, normal text for values
        t.append(({'em': True, 'text': label}, value))

    if settings.DATABASE_JOBS_TABLE_PARENT:
        derived = find_jobs(fields=('ID', 'ACTIVITY'),
                            category=CATEGORY,
                            parent=job_id,
                            order_by='ID')
        t.append(({'em': True, 'text': 'Derived jobs'},
                  ' '.join('{job} ({activity})'.format(
                      job=link_to_job(j), activity=a) for j, a in derived)))

    result.write('<h2>Job summary</h2>')
    t.write_html(result)

    # process summary section
    if status is None:
        # if the job is PENDING there is nothing else to report on
        return HttpResponse(result.getvalue(), content_type='text/html')

    proc_fields = ('WORKER', 'PID', 'EXECUTE_START', 'EXECUTE_STOP', 'STATUS', 'WORKING_DIR')
    proc_row = find_single_process(fields=proc_fields,
                                   process_id=process_id)
    if proc_row is None:
        result.write('No process entry found')
        return HttpResponse(result.getvalue(), content_type='text/html')

    result.write('<h2>Process summary</h2>')
    t = Table(headings=('Property', 'Value'),
              cssclass='table-striped table-bordered')
    for field, value in zip(proc_fields, proc_row):
        if field == 'WORKER':
            label = 'Worker'

        elif field == 'PID':
            label = 'Process ID'

        elif field == 'EXECUTE_START':
            label = 'Execution start'
            execute_start = value

        elif field == 'EXECUTE_STOP':
            label = 'Execution stop'
            if value is not None:
                value = '{stop} ({duration})'.format(
                    stop=value, duration=value-execute_start)

            else:
                # in progress
                value = 'running'

        elif field == 'STATUS':
            label = 'Status'
            value = html_status(value)

        elif field == 'WORKING_DIR':
            label = 'Working dir'
            work_dir = Resource(value)
            # allow reading of working dir over ssh
            if settings.WORK_DIR_SERVER is not None:
                work_dir.relocate(settings.WORK_DIR_SERVER)

        t.append(({'em': True, 'text': label}, value))

    t.write_html(result)

    # Events listing
    events_filename = work_dir.joinpath(chart.alg.settings.EVENTS_FILENAME)
    if events_filename.exists():  # (no is_file member) pylint: disable=E1101
        result.write('<h2>Events raised</h2>')
        t = Table(headings=('Event class', 'Start time', 'Duration', 'Properties'),
                  cssclass='table-striped table-bordered')
        for event_elem in XMLElement(filename=events_filename).findall(ELEM_EVENT):
            event = Event.build_from_xml(event_elem)
            url = reverse('events:index') + '#' + event.event_classname
            t.append(('<a href=\'{url}\' target=\'_blank\'>{name}</a>'.format(
                url=url,
                name=event.event_classname),
                      event.start_time,
                      event.duration(),
                      ', '.join('{k}={v}'.format(k=k, v=v)
                                for k, v in event.instance_properties.items())))

        t.write_html(result)
        result.write('<p>This list may include events from other jobs that were handled by the '
                     'same process.</p>')

    # Tables written
    result_filename = work_dir.joinpath(chart.alg.settings.RESULT_FILENAME)
    if result_filename.exists():  # (no is_file member) pylint: disable=E1101
        result_file = Result(result_filename, 'r', activity=activity)
        # the only type of single results file left is for reports, which
        # don't write to output tables
        tables_table = None
        for result_job in result_file.read_jobs():
            if result_job['id'] == job_id:
                if tables_table is None:
                    tables_table = Table(headings=('Table', 'Start', 'Stop'),
                                         cssclass='table-striped table-bordered')

                for t in result_job.tables:
                    tables_table.append(('<a href=\'{url}\' target=\'_blank\'>{name}</a>'.format(
                        url=t['table'].browse_url,
                        name=t['table'].name),
                                         t['sensing_start'],
                                         t['sensing_stop']))

        if tables_table is not None:
            result.write('<h2>Tables written</h2>')
            tables_table.write_html(result)

    else:
        result.write('<br><p>Warning: No result file found.</p>')

    # Report generated
    manifest_filename = work_dir.joinpath(chart.alg.settings.MANIFEST_FILENAME)
    if manifest_filename.exists():
        manifest_elem = load_xml(manifest_filename)
        del manifest_elem  # just checked if it initialised
        result.write('<h2>Report generated</h2>')
        result.write('Details not shown here')

    # working directory contents
    result.write('<h2>Working directory</h2>')

    files = Table(headings=('Filename', 'Type', 'Size (bytes)', 'Timestamp'),
                  cssclass='table-striped table-bordered')
    if work_dir.exists():
        for filename in sorted(work_dir.iterdir()):
            stat = filename.stat()
            ext = filename.suffix
            if ext == '.xml':
                try:
                    rnc_name = document_rnc_name(load_xml(filename))

                except XMLSyntaxError:
                    files.append((
                        filename.path.name,
                        ext,
                        stat.st_size,
                        show_time(datetime.utcfromtimestamp(stat.st_mtime))))
                    continue

                if rnc_name is not None:
                    schema = os.path.splitext(rnc_name)[0]
                    filetype = 'XML (<a href=\'{url}\' target=\'_blank\'>{schema}</a>)'.format(
                        url=reverse('schemas:single',
                                    kwargs=dict(filename=schema + '.html')),
                        schema=schema)

                else:
                    filetype = 'XML'

            elif ext == '.log':
                filetype = 'logfile'

            else:
                filetype = ext[1:]

            filename_url = reverse('jobviewer:fileviewer', kwargs=dict(
                filename=str(filename.path)))
            files.append(('<a href=\'{url}\' target=\'_blank\'>{filename}</a>'.format(
                url=filename_url,
                filename=filename.path.name),
                          filetype,
                          stat.st_size,
                          show_time(datetime.utcfromtimestamp(stat.st_mtime))))

        files.write_html(result)

    else:
        result.write('<p>Work directory not found</p>')

    # worker process log
    result.write('<h2>Worker process</h2>')

    if sensing_start is None:
        result.write('<p>Disabled for jobs with no sensing start</p>')

    elif settings.ROTATING_LOG_FILE:
        t = Table(headings=('Worker log file',),
                  cssclass='table-striped table-bordered')

        # try to guess what the worker log file would be, based on our own log file.
        # This only really works with rotating log files.
        basedir = settings.ROTATING_LOG_FILE

        filename = basedir.child('worker.log.{t}'.format(t=sensing_start.strftime('%Y-%m-%d')))
        t.append(('<a href=\'{url}\' target=\'_blank\'>{filename}</a>'.format(
            url=reverse('jobviewer:fileviewer',
                        kwargs=dict(filename=filename)),
            filename=os.path.basename(filename)),))
        t.write_html(result)
    return HttpResponse(result.getvalue(), content_type='text/html')


def fileviewer(request, filename):  # (unused argument) pylint: disable=W0613
    """Colour code a log file for display.

    For any other file types, pass them through pygments for display.
    Note, a neater way might be to modify pygments to handle our log files.
    """
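    # Log lines are assumed to look like '2020-01-01 12:00:00 INFO message':
    # a 19 character timestamp, a severity word, then the message text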
    spl = re.compile('(.{19}) ([^ ]+) (.*)')

    resource = Resource(filename)
    # allow reading of content
    if settings.WORK_DIR_SERVER is not None:
        resource.relocate(settings.WORK_DIR_SERVER)

    # This commonly returns (None, None) for trace.dump and ('text/plain', None) for log.txt
    mime = mimetypes.guess_type(filename)
    if mime[0] is not None and 'image' in mime[0]:
        content = resource.open('rb').read()

    else:
        content = resource.open().read()

    if filename.endswith('log.txt') or os.path.basename(filename).startswith('worker.log'):
        res = StringIO()
        res.write("""<html>
  <head>
    <style>
      body {
        font-family: monospace;
      }

      span.date {
        color: #9932cc;
      }

      span.DEBUG {
        color: #b8860b;
      }

      span.INFO {
        color: #0000ff;
      }

      span.WARNING {
        color: #ff4500;
      }

      span.ERROR {
        color: #ff0000;
      }

      span.CRITICAL {
        color: #ff0000;
      }
    </style>
  </head>
  <body>
""")

        for line in content.split('\n'):
            line = html.escape(line)
            groups = spl.match(line)
            if groups is not None:
                res.write('<span class="date">{date}</span> '
                          '<span class="{severity}">{severity}</span> {message}<br>\n'.format(
                              date=groups.group(1),
                              severity=groups.group(2),
                              message=groups.group(3)))
            else:
                res.write(line + '<br>')

        return HttpResponse(res.getvalue(), content_type='text/html')

    # Handle image types
    if mime[0] is not None and 'image' in mime[0]:
        return HttpResponse(content, content_type=mime[0])

    try:
        lexer = pygments.lexers.get_lexer_for_filename(filename)
    except pygments.util.ClassNotFound:
        # we cannot colourise this file so just send it plain
        return HttpResponse(content, content_type='text/plain')

    return HttpResponse(
        pygments.highlight(
            content,
            lexer,
            pygments.formatters.HtmlFormatter(full=True)),
        content_type='text/html')