collect.py
executable file · 145 lines (122 loc) · 5.04 KB
#!/usr/bin/env python3
import os
import sys
import time
import logging
import argparse
from pathlib import Path
from datetime import datetime
from logging import handlers
from configparser import ConfigParser

from lib import *

log = logging.getLogger("harness")

JOB_TYPE_MAP = {
    'gridftp': GridFTP,
    'perfsonar': perfSONAR,
    'iperf3': iperf3,
    'default': Job
}


class TestingDaemon:
    def __init__(self, conf):
        self.conf = conf
        self.log_file = conf['log_file']
        self.hostlist = conf['hostlist']
        self.outdir = conf['outdir']
        self.verbose = conf['verbose']
        self.quiet = conf['quiet']
        self.nic = conf['nic']
        self.archive = conf['archive_url']
        self.jobs = list()

        # Setup logging
        form = '[%(asctime)s] [%(threadName)s] %(levelname)s: %(msg)s'
        level = logging.DEBUG if self.verbose else logging.INFO
        level = logging.CRITICAL if self.quiet else level
        if self.log_file == "stdout":
            fh = logging.StreamHandler(sys.stdout)
        else:
            # overwrite any existing file
            fh = logging.FileHandler(self.log_file, mode='w')
            # or, if you prefer to keep all logs, rotate them instead:
            #fh = logging.handlers.RotatingFileHandler(
            #    self.log_file, maxBytes=(1024*1024*8), backupCount=7)
        fh.setFormatter(logging.Formatter(form))
        log.addHandler(fh)
        log.setLevel(level)

        # Setup output directory
        try:
            Path(self.outdir).mkdir(exist_ok=True)
        except Exception as e:
            log.error(f"Could not create result output directory \"{self.outdir}\": {e}")
            exit(1)

    def _setup(self):
        jfile = self.conf['job_file']
        parser = ConfigParser(allow_no_value=True)
        try:
            jobs = parser.read(jfile)
            sections = parser.sections()
        except Exception as e:
            log.error(f"Could not get job definition config file: {jfile}: {e}")
            return
        log.info(f"Found {len(sections)} job definitions")
        log.debug(f"Job list {sections}")

        for s in sections:
            try:
                typ = parser[s]['type'].lower()
                jclass = JOB_TYPE_MAP.get(typ, Job)
                job = jclass(s, parser[s], self.outdir,
                             self.hostlist, self.nic,
                             self.archive)
                self.jobs.append(job)
            except Exception as e:
                log.error(f"Could not create job from config \"{s}\": {e}")
                continue

    def start(self):
        self._setup()
        if not self.jobs:
            return
        log.info("Starting jobs [{}]".format(len(self.jobs)))
        for job in self.jobs:
            if job.enabled:
                job.run()
            else:
                log.info(f"Skipping disabled job \"{job.name}\"")


def _read_config(fpath):
    if not fpath:
        return {}
    parser = ConfigParser(allow_no_value=True)
    try:
        parser.read(fpath)
    except Exception as e:
        raise AttributeError(f"Could not read harness config file: {fpath}: {e}")
    # Flatten every section of the harness config into one dict of overrides
    # for the conf built in main() (assumes keys use the same names).
    return {k: v for s in parser.sections() for k, v in parser.items(s)}
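
# Illustrative harness config file for -c/--config (an assumption, not from the
# source: section names are arbitrary and keys mirror the conf dict in main()):
#
#   [harness]
#   archive_url = amqp://user:pass@archive.example.org
#   outdir = results
#   hostlist = hosts.txt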


def main():
    parser = argparse.ArgumentParser(description='Network performance testing harness')
    parser.add_argument('-a', '--archive', default=None, type=str,
                        help='The complete URL of a RabbitMQ host (for ELK collection)')
    parser.add_argument('-m', '--mesh', default=None, type=str, help="URL of a pS MeshConfig (instead of an MA URL)")
    parser.add_argument('-p', '--prometheus', action='store_true', help='Enable the Prometheus collector')
    parser.add_argument('-l', '--log', default="stdout", help="Path to log file")
    parser.add_argument('-c', '--config', default=None, type=str, help="Path to harness configuration file")
    parser.add_argument('-i', '--interface', default=None, type=str, help="Collect info on this NIC to store in the jobmeta file")
    parser.add_argument('-j', '--jobs', default="jobs.ini", type=str, help="Path to job configuration file")
    parser.add_argument('-H', '--hostlist', default=None, type=str,
                        help="Path to a file containing a list of hosts to test against")
    parser.add_argument('-o', '--outdir', default=datetime.now().isoformat(),
                        type=str, help="Output directory for writing results")
    parser.add_argument('-v', '--verbose', action='store_true', help='Produce verbose output from the app')
    parser.add_argument('-q', '--quiet', action='store_true', help='Quiet mode, no logging output')
    args = parser.parse_args()

    conf = {'job_file': args.jobs,
            'archive_url': args.archive,
            'mesh_url': args.mesh,
            'log_file': args.log,
            'outdir': args.outdir,
            'nic': args.interface,
            'hostlist': args.hostlist}
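    # Precedence as written: conf starts from the CLI values above, the optional
    # harness config file (-c) is merged on top, and then any argparse value that
    # is not None is merged again under its argparse attribute name
    # (e.g. 'outdir', 'hostlist', 'verbose', 'quiet').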
    conf.update(**_read_config(args.config))
    conf.update(**{k: v for k, v in args.__dict__.items() if v is not None})

    app = TestingDaemon(conf)
    app.start()

    log.info("All Tests Complete. Exiting.")


if __name__ == "__main__":
    main()
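
# Example invocation (paths and values shown are illustrative):
#   ./collect.py -j jobs.ini -H hosts.txt -o results -l harness.log -v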