dockerisiert
parent
cc3a50efb0
commit
08bd45b7af
|
|
@ -0,0 +1,6 @@
|
||||||
|
# Image for the response-time measurement service.
FROM python:3.6-alpine3.7

# COPY is preferred over ADD for plain local files (ADD's tar/URL handling
# is not needed here). Copies src/ contents and requirements.txt into /app/.
COPY ["src", "requirements.txt", "/app/"]

WORKDIR /app

# --no-cache-dir keeps the image small by not persisting pip's download cache.
RUN pip install -r requirements.txt --no-cache-dir
|
||||||
|
|
||||||
|
|
@ -0,0 +1,26 @@
|
||||||
|
# docker-compose stack: InfluxDB (storage) + Grafana (dashboards) +
# the measurement container that probes the configured URLs.
# NOTE(review): indentation reconstructed — the original paste was flattened;
# verify nesting against the committed compose file.
version: "2"

services:
  influxdb:
    image: influxdb:1.5-alpine
    # command: influxd -config /etc/influxdb/influxdb.conf
    # Port intentionally not published; only reachable on the compose network.
    # ports:
    #   - "8086:8086"
    volumes:
      - ./data/influx:/var/lib/influxdb/
      # - "./influxdb.conf:/etc/influxdb/influxdb.con:ro"

  grafana:
    image: grafana/grafana:5.0.4
    ports:
      - "3000:3000"
    volumes:
      # - "./grafana_etc/:/etc/grafana/"
      - "./data/grafana/:/var/lib/grafana/"
    # SECURITY NOTE(review): admin credentials are committed in plaintext —
    # consider moving them to an env_file or Docker secret.
    environment:
      - "GF_SECURITY_ADMIN_USER=root"
      - "GF_SECURITY_ADMIN_PASSWORD=clkl"

  measurement:
    image: docker.clkl.de/uni/wiai/rz/measurement:0.1
    build: .
    command: python measure.py -o /app/log/results.csv -e /app/log/exceptions.log "http://vc.uni-bamberg.de" "https://vc.uni-bamberg.de" "https://fn2stud.zuv.uni-bamberg.de/FN2AUTH/FN2AuthServlet?op=Login" "https://vc.uni-bamberg.de/moodle/"
    volumes:
      - ./log:/app/log
|
||||||
|
|
@ -8,9 +8,27 @@ from datetime import datetime
|
||||||
import requests
|
import requests
|
||||||
import schedule
|
import schedule
|
||||||
|
|
||||||
|
INFLUX_URL = "http://influxdb:8086/write?db=mydb"
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Characters with special meaning in InfluxDB line-protocol tag values;
# each must be backslash-escaped before being written.
INFLUX_CHARS = (",", "=", " ")

# Translation table built once at import time: special char -> "\" + char.
_INFLUX_ESCAPE_TABLE = str.maketrans({c: "\\" + c for c in INFLUX_CHARS})


def influx_escape(string):
    """Return *string* escaped for use as an InfluxDB line-protocol tag value.

    Prefixes every comma, equals sign and space with a backslash in a single
    C-level pass (str.translate) instead of one str.replace pass per character.
    """
    return string.translate(_INFLUX_ESCAPE_TABLE)
|
||||||
|
|
||||||
|
def send_data(date, status, time, url):
    """POST one response-time measurement to InfluxDB (INFLUX_URL).

    Args:
        date: datetime of the measurement; converted to a nanosecond
            epoch timestamp as the line-protocol point time.
        status: HTTP status code or error marker, stored as a tag.
        time: response time in seconds, stored as the field value.
        url: measured URL, stored as a tag.
    """
    data = "response_time,url={url},status={status} value={time} {timestamp}".format(
        url=influx_escape(url),
        # Tag values must be escaped too: callers pass strings like "-1"
        # today, and any future marker containing a space/comma/equals
        # would silently corrupt the line protocol otherwise.
        status=influx_escape(str(status)),
        time=time,
        # InfluxDB expects timestamps in nanoseconds since the epoch.
        timestamp=int(date.timestamp() * 1e9),
    )
    r = requests.post(INFLUX_URL, data=data)
    # Logger.warn() is a deprecated alias; use warning() with lazy %-args.
    log.warning("%s %s", r, r.text)
|
||||||
|
|
||||||
class Measurement(threading.Thread):
|
class Measurement(threading.Thread):
|
||||||
def __init__(self, url, log, start_date):
|
def __init__(self, url, log, start_date):
|
||||||
threading.Thread.__init__(self)
|
threading.Thread.__init__(self)
|
||||||
|
|
@ -23,24 +41,39 @@ class Measurement(threading.Thread):
|
||||||
log = self.log
|
log = self.log
|
||||||
try:
|
try:
|
||||||
response = requests.get(url, allow_redirects=False, timeout=256)
|
response = requests.get(url, allow_redirects=False, timeout=256)
|
||||||
log.info("{date},{status},{time},{url}".format(
|
csv = "{date},{status},{time},{url}".format(
|
||||||
date=self.start_date,
|
date=self.start_date,
|
||||||
status=response.status_code,
|
status=response.status_code,
|
||||||
time=response.elapsed.total_seconds(),
|
time=response.elapsed.total_seconds(),
|
||||||
url=url
|
url=url
|
||||||
))
|
)
|
||||||
|
datalog.info(csv)
|
||||||
|
print("send data")
|
||||||
|
send_data(
|
||||||
|
date=self.start_date,
|
||||||
|
status=response.status_code,
|
||||||
|
time=response.elapsed.total_seconds(),
|
||||||
|
url=url
|
||||||
|
)
|
||||||
except requests.exceptions.ConnectionError as e:
|
except requests.exceptions.ConnectionError as e:
|
||||||
log.info("{date},{status},{time},{url}".format(
|
csv = "{date},{status},{time},{url}".format(
|
||||||
date=self.start_date,
|
date=self.start_date,
|
||||||
status="TIMEOUT > 256",
|
status="TIMEOUT > 256",
|
||||||
time="-1",
|
time="-1",
|
||||||
url=url
|
url=url
|
||||||
))
|
)
|
||||||
log.exception(e)
|
datalog.info(csv)
|
||||||
|
send_data(
|
||||||
|
date=self.start_date,
|
||||||
|
status="-1",
|
||||||
|
time=256,
|
||||||
|
url=url
|
||||||
|
)
|
||||||
|
exlog.exception(e)
|
||||||
|
|
||||||
def run_measurements(urls):
    """Start one Measurement thread per URL in *urls*.

    Bug fix: the loop previously iterated the module-level ``args.urls``
    instead of the ``urls`` parameter, leaving the parameter dead and
    coupling the function to the CLI parsing in ``__main__``.
    """
    for url in urls:
        Measurement(url, log, datetime.now()).start()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|
@ -48,6 +81,7 @@ if __name__ == "__main__":
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description="Measure HTTP/HTTPS response time")
|
parser = argparse.ArgumentParser(description="Measure HTTP/HTTPS response time")
|
||||||
parser.add_argument("--output", "-o", default="results.csv", help="Output file")
|
parser.add_argument("--output", "-o", default="results.csv", help="Output file")
|
||||||
|
parser.add_argument("--exceptions", "-e", default="exceptions.log", help="Exception log file")
|
||||||
parser.add_argument("--interval", "-i", default=30, help="Interval", type=int)
|
parser.add_argument("--interval", "-i", default=30, help="Interval", type=int)
|
||||||
parser.add_argument("urls", nargs="+", help="URLs to measure")
|
parser.add_argument("urls", nargs="+", help="URLs to measure")
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
@ -55,7 +89,15 @@ if __name__ == "__main__":
|
||||||
filehandler = logging.FileHandler(args.output)
|
filehandler = logging.FileHandler(args.output)
|
||||||
file_formatter = logging.Formatter("%(message)s")
|
file_formatter = logging.Formatter("%(message)s")
|
||||||
filehandler.setFormatter(file_formatter)
|
filehandler.setFormatter(file_formatter)
|
||||||
log.addHandler(filehandler)
|
datalog = logging.getLogger("data")
|
||||||
|
datalog.addHandler(filehandler)
|
||||||
|
|
||||||
|
|
||||||
|
filehandler2 = logging.FileHandler(args.exceptions)
|
||||||
|
file_formatter2 = logging.Formatter("%(asctime)s %(levelname)s %(name)s:%(message)s")
|
||||||
|
filehandler2.setFormatter(file_formatter2)
|
||||||
|
exlog = logging.getLogger("exceptions")
|
||||||
|
exlog.addHandler(filehandler2)
|
||||||
|
|
||||||
schedule.every(args.interval).seconds.do(lambda: run_measurements(args.urls))
|
schedule.every(args.interval).seconds.do(lambda: run_measurements(args.urls))
|
||||||
|
|
||||||
Loading…
Reference in New Issue