#!/usr/bin/env python3

"""Administrative functions for CHART-core for use with the invoke tool.
Make sure ssh aliases are configured to use the deployment functions."""

import time
import stat
import io
from pathlib import Path
import re

from invoke import task

# postgres high level control program
DEFAULT_PG_CTL = 'pg_ctl'

# postgresql command line client tool
DEFAULT_PSQL = 'psql'

# postgres low level control program
DEFAULT_POSTGRES = 'postgres'

# Normal port to operate postgres servers on
DEFAULT_POSTGRESQL_PORT = 5432

# Default size of postgres shared memory cache
DEFAULT_PG_CACHE = '1G'

# NEWLINE = '\n'
# def log(*values):
#     """Write a status message."""
#     sys.stdout.write(' '.join(values) + NEWLINE)


@task
def activate(_):
    """Make an `activate` script for the framework."""
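    # Usage note: the generated script only takes effect when sourced into the
    # current shell, e.g. "source ./activate" or ". ./activate".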
    here = Path(__file__).parent
    activate_file = Path('activate')
    with activate_file.open('w') as h:
        h.write("""#!/bin/bash
# echo "Activating {here}"
export PYTHONPATH={here}:${{PYTHONPATH}}
""".format(here=here))
    activate_file.chmod(activate_file.stat().st_mode | stat.S_IEXEC)
    print('Wrote {activate}'.format(activate=activate_file))


def shell(cmd):
    """Run cmd (a string or list of strings) and return stdout as a string.

    Don't use this in normal CHART code; see chart/common/shell.py for better wrappers."""
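    # Illustrative example only (the git command and its output are made up):
    #   shell('git rev-parse --abbrev-ref HEAD')  ->  'main'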
    import subprocess
    if isinstance(cmd, str):
        cmd = cmd.split()

    return subprocess.run(cmd, capture_output=True).stdout.decode('latin1').rstrip()


@task
def versions(c):
    """Create a VERSIONS file with framework version information.

    This is used for tagging docker images and read by parts of the web site and info tool.
    Sets attributes:
      BRANCH: last committed git branch
      TAG: current tag, only if we are on a tag - deliberately does not search back to previous tags
      LAST_COMMIT_DATE: timestamp of last commit
      LAST_COMMIT_HASH: hash of last commit
      CLEAN: "Yes" if there are no uncommitted changes (untracked files are ok), otherwise "No"
      VERSION: version number, e.g. "v2.4", only if the branch name is "release/[version]"
    """
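    # For illustration, the resulting file looks something like this (values made up):
    #   BRANCH=release/v2.4
    #   TAG=v2.4
    #   LAST_COMMIT_DATE=2024-01-01 12:00:00 +0000
    #   LAST_COMMIT_HASH=0123456789abcdef0123456789abcdef01234567
    #   CLEAN=Yes
    #   VERSION=v2.4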
    here = Path(__file__).parent
    versions_filename = Path('VERSIONS')
    branch = shell('git rev-parse --abbrev-ref HEAD')
    tag = shell('git describe --tags --exact-match')
    last_commit_hash = shell('git rev-parse HEAD')
    last_commit_date = shell('git --no-pager log -1 --format=%ai')
    clean = shell('git diff --stat') == ''
    if branch.startswith('release/'):
        version = branch[len('release/'):]

    else:
        version = ''

    with versions_filename.open('w') as h:
        h.write("""BRANCH={branch}
TAG={tag}
LAST_COMMIT_DATE={last_commit_date}
LAST_COMMIT_HASH={last_commit_hash}
CLEAN={clean}
VERSION={version}
""".format(branch=branch,
           tag=tag,
           last_commit_date=last_commit_date,
           last_commit_hash=last_commit_hash,
           clean='Yes' if clean else 'No',
           version=version))
    print('Wrote {filename}'.format(filename=versions_filename))


@task
def build(c, force_all=False, force_thirdparty=False, force_schemas=False):
    """Build components needed for the website but not kept in version control."""
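    # Illustrative invocations: "invoke build" to build whatever is missing, or
    # "invoke build --force-all" to rebuild everything (flag names follow invoke's
    # usual underscore-to-dash conversion).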
    # Unpack third party web code if not present or if --force-thirdparty passed
    build_thirdparty = force_thirdparty
    third_party_dir = Path('chart', 'web', 'static', '3rdparty')
    if not third_party_dir.exists():
        build_thirdparty = True

    if force_all:
        force_thirdparty = True
        force_schemas = True

    if build_thirdparty or force_thirdparty:
        c.run('chart/web/3rdparty.packages/unpack.sh')

    if force_schemas:
        with c.cd('chart/schemas'):
            c.run('./build.sh')  # replace with trang or whatever
            # local('cp *.xsd *.rnc *.rng /var/www/html/chart/schemas')

    else:
        from chart.schemas.schema_tool import convert_all_schemas
        convert_all_schemas()


@task
def lint(c,
         python=True,
         python_pylint=True,
         python_pycodestyle=True,
         python_pydocstyle=True,
         python_mypy=True,
         css=True,
         css_csslint=True,
         js=True,
         js_jslint=True,
         # html htmllint
         # prose vale proselint
         # shell bashline
         # db postgreslint
         # django djangolint
         # sql?
         # rpm rpmlint
         # rst restructuredtextlint
         # xml validity schema compliance check rnc
         ):
    """Static analysis of code for correctness and style."""
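    # Typically run as plain "invoke lint" to execute every enabled linter; the boolean
    # task arguments above can be used to switch individual linters off.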
    if python:
        if python_pylint:
            c.run('pylint chart')

        if python_pycodestyle:
            c.run('pycodestyle --repeat chart | grep -v chart/web/static')

        if python_pydocstyle:
            c.run('pydocstyle chart | grep -v chart/web/static')

        if python_mypy:
            c.run('mypy --ignore-missing-imports chart')

    if css:
        if css_csslint:
            c.run('find chart -name \'*.css\' -type f | grep -v 3rdparty | grep -v includes | '
                  'PYTHONPATH=$PWD CHART_SETTINGS_MODULE=chart.tests.settings xargs chart/tools/check.py')

    if js:
        if js_jslint:
            c.run('find chart -name \'*.js\' -type f | grep -v 3rdparty | grep -v includes | xargs jslint '
                  '--report xml')


@task(help={'sloc': 'Run sloccount over all code, writing the results to sloccount.sc'})
def doc(c, sloc=True):
    """Build auto-generated documentation files."""
    # This can probably be removed as, if we ever use auto-generated docs, it'll be the
    # projects that generate them.
    # There is some scope for autodocumenting the core though, especially just code documentation
    # using sphinx or similar, and reports from sloccount and lint, and system tables
    # and core widgets and algorithms
    if sloc:
        c.run('PYTHONPATH=$PWD:$PWD/chart CHART_SETTINGS_MODULE=tests.settings dist/cloc/cloc.py '
              'dist chart')


def update_conf(path, settings):
    """Modify an .ini-style configuration file, inserting values from settings.

    Values are only inserted if an existing commented or non-commented version is found."""
    print('Modifying {p}'.format(p=path))
    bak = Path(str(path) + '.bak')
    path.rename(bak)
    # whitespace and comment symbols, then key = value, then whitespace, comment
    matcher = re.compile(r'^[ #\t]*(?P<key>[a-z_]+)\s*=\s*(?P<value>\S+)\s*(#(?P<comment>.*))?$')
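    # Illustrative example (not taken from a real file): a line such as
    #   #shared_buffers = 128MB
    # matches with key='shared_buffers' and value='128MB', and is rewritten to
    #   shared_buffers = 1GB
    # when settings contains {'shared_buffers': '1GB'}.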
    handle = path.open('w')
    for line in bak.open():
        # print('LINE ', line)
        match = matcher.match(line)
        if match and match.group('key') in settings:
            # print('match', 'key', match.group('key'), 'value', match.group('value'),
            #       'comment', match.group('comment'))
            if match.group('comment'):
                newline = '{key} = {value} # {comment}\n'.format(
                    key=match.group('key'),
                    value=settings[match.group('key')],
                    comment=match.group('comment'))
            else:
                newline = '{key} = {value}\n'.format(
                    key=match.group('key'),
                    value=settings[match.group('key')])

            print('changing', line.rstrip())
            print('to', newline.rstrip())
            handle.write(newline)

        else:
            handle.write(line)


def print_bytes(b):
    """Format a byte count, using a 'G' suffix for exact multiples of a gibibyte."""
    # print('input', b, 'type', type(b))
    G = 1024 * 1024 * 1024
    # print('int', int(b), 'mod', int(b) % G, 'div', b // G, 'type', type(b // G))
    if int(b) % G == 0:
        return '{b}G'.format(b=int(b) // G)
        # return '{b}G'.format(b=b//G)

    else:
        return str(b)

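# For illustration: print_bytes(2 * 1024**3) returns '2G', while a value that is not an
# exact multiple of a gibibyte, e.g. print_bytes(1536), comes back unchanged as '1536'.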

@task(help={'make_cluster': 'Location for cluster',
            'pg_ctl': 'Location of pg_ctl tool',
            'psql': 'Location of psql tool',
            'postgres': 'Location of postgres tool',
            'log': 'Location of log directory (defaults to log/ inside the cluster directory)',
            'cache-size': 'Amount of memory to assign to the main cache, e.g. "10G" (default 1G)',
            'shared-buffers': 'Amount of memory to assign to shared buffers (default: 1/4 of cache size)',
            })
def db(c,
       make_cluster=None,
       postgres=DEFAULT_POSTGRES,
       pg_ctl=DEFAULT_PG_CTL,
       psql=DEFAULT_PSQL,
       port=DEFAULT_POSTGRESQL_PORT,
       log=None,
       cache_size=DEFAULT_PG_CACHE,
       shared_buffers=None):
    """Create a postgres cluster."""
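    # Illustrative invocation (the path and sizes are made up):
    #   invoke db --make-cluster=/srv/chart/cluster --cache-size=4G --port=5433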
    def run_psql(database, command):
        """Helper to call psql."""
        print('Executing', command)
        c.run('{psql} -h localhost -p {port} {database} -c "{command}"'.format(
            psql=psql,
            port=port,
            database=database,
            command=command))

    if make_cluster:
        cluster_dir = Path(make_cluster)
        log_dir = cluster_dir.joinpath('log') if log is None else Path(log)
        if cache_size.endswith('G'):
            cache_bytes = int(cache_size[:-1]) * 1024 * 1024 * 1024

        else:
            raise ValueError('Could not decode cache size {c}'.format(c=cache_size))

        if shared_buffers is None:
            shared_bytes = cache_bytes // 4

        else:
            if shared_buffers.endswith('G'):
                shared_bytes = int(shared_buffers[:-1]) * 1024 * 1024 * 1024

            else:
                raise ValueError('Could not decode shared buffers size {c}'.format(c=shared_buffers))

        print("""Creating cluster {cluster}
Log {log_dir}
Cache size {cache}
Shared buffers size {shared}""".format(
            cluster=cluster_dir,
            log_dir=log_dir,
            cache=print_bytes(cache_bytes),
            shared=print_bytes(shared_bytes)))

        # Build the cluster
        c.run('{pg_ctl} initdb -D {cluster}'.format(pg_ctl=pg_ctl, cluster=cluster_dir))

        # Modify main database configuration file
        piddir = cluster_dir.joinpath('pid')
        piddir.mkdir()
        update_conf(cluster_dir.joinpath('postgresql.conf'), {
            'log_directory': '\'{log}\''.format(log=log_dir),
            'log_rotation_age': '1d',
            'unix_socket_directories': '\'{d}\''.format(d=piddir),
            'shared_buffers': '{s}B'.format(s=print_bytes(shared_bytes)),  # typically 1/4 of the physical RAM available to the database
            'work_mem': '32MB',  # recommended 64-128MB for a small number of complex queries, 1-8MB for many simple queries
            'effective_cache_size': '{s}B'.format(s=print_bytes(cache_bytes)),  # typically 1/2 of the physical RAM available to the database
            'wal_level': 'minimal',  # may boost performance; the downside is that WAL archiving and streaming replication become unavailable
            'max_wal_senders': 0,  # needed for wal_level=minimal to work
            # 'max_parallel_workers'
            'fsync': 'off',  # switch off for a risky performance boost
            })
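        # The applied settings can be checked once the server is running, e.g. (illustrative):
        #   psql -h localhost -p <port> postgres -c "SHOW shared_buffers"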

        # Start the daemon in the background
        stdout = io.StringIO()
        # use postgres not pg_ctl here because the stdout capture didn't work with pg_ctl
        c.run('{postgres} -D {cluster}'.format(postgres=postgres, cluster=cluster_dir),
              asynchronous=True, pty=True, out_stream=stdout)
        print('Starting server')
        # Wait for the subprocess to fully initialise
        while True:
            time.sleep(1)
            value = stdout.getvalue()
            # print(value)
            if 'database system is ready to accept connections' in value:
                break

        # If the script halts at this point, terminate and run
        #   postgres -D <cluster>
        # to see the proper error message
        print('Backgrounded server started')

        # Create a dba user
<<<hidden due to potential security issue>>>

        # Lock down the cluster
        run_psql('postgres', 'REVOKE ALL PRIVILEGES ON DATABASE postgres FROM public')
        run_psql('postgres', 'REVOKE ALL PRIVILEGES ON DATABASE template1 FROM public')
        run_psql('postgres', 'REVOKE ALL PRIVILEGES ON DATABASE template0 FROM public')
        run_psql('postgres', 'DROP SCHEMA public')
        run_psql('template1', 'DROP SCHEMA public')

        # Make a semi-privileged user who can make new users and databases, but has no rights
        # to interfere with other users and databases
<<<hidden due to potential security issue>>>
        run_psql('postgres', 'GRANT CONNECT ON DATABASE postgres TO creator')

        # Halt the cluster now so the user has to manually start it
        c.run('{pg_ctl} stop -D {cluster}'.format(pg_ctl=pg_ctl, cluster=cluster_dir))

        print("""Database cluster created in {dir}.
<<<hidden due to potential security issue>>>
Run "{pg_ctl} -D {dir} start/stop/status" to use the new cluster.""".format(
            dir=cluster_dir, pg_ctl=pg_ctl))


@task(help={'environment': 'Run Python environment tests',
            'core': 'Run core application tests',
            'doctest': 'Include doctests',
            'project': 'Also run tests that require a project',
            'xunit': 'Output xunit/junit2 style XML result file',
            'html': 'Generate HTML report',
            'verbose': 'Name each individual test'})
def test(c,
         environment=True,
         core=True,
         doctest=False,
         project=None,
         xunit=None,
         html=None,
         verbose=False):
    """Run automated tests."""
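    # Illustrative invocation (file names are made up):
    #   invoke test --xunit=results.xml --html=report.html --verbose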
    params = []
    if xunit is not None:
        params.append('--junit-xml={f}'.format(f=xunit))

    if html is not None:
        params.append('--html={f} --self-contained-html'.format(f=html))

    if verbose:
        params.append('-v')

    if environment:
        params.append('tests/environment')

    if core:
        params.append('tests/application')

    if project:
        params.append('--project={project}'.format(project=project))

    if doctest and core:
        params.append(('--doctest-modules chart --ignore-glob=chart/web '
                       '--ignore=chart/settings.py'))

    c.run('pytest ' + ' '.join(params), pty=True)