diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..2de600e
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,10 @@
+FROM alpine:edge
+
+COPY requirements.txt /
+RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing/" >> /etc/apk/repositories && \
+    sed -i 's/numpy/#numpy/' requirements.txt && \
+    sed -i 's/scipy/#scipy/' requirements.txt && \
+    apk add --no-cache libpng libpng-dev freetype freetype-dev g++ python3 py3-numpy python3-dev py-numpy-dev py3-scipy && \
+    pip3 install --no-cache-dir -r requirements.txt && \
+    apk del libpng-dev freetype-dev g++ python3-dev py-numpy-dev && \
+    rm requirements.txt
diff --git a/plots/.gitkeep b/analysis/__init__.py
similarity index 100%
rename from plots/.gitkeep
rename to analysis/__init__.py
diff --git a/analyzers/__init__.py b/analysis/analyzers/__init__.py
similarity index 98%
rename from analyzers/__init__.py
rename to analysis/analyzers/__init__.py
index 41623e4..07a8186 100644
--- a/analyzers/__init__.py
+++ b/analysis/analyzers/__init__.py
@@ -12,7 +12,7 @@ from .render import Render
from .render.biogames import SimulationRoundsRender, BoardDurationHistRender, BoardDurationBoxRender, \
ActivityMapperRender, StoreRender, SimulationOrderRender, SimulationGroupRender
from .render.default import PrintRender, JSONRender, TrackRender, HeatMapRender, LogEntryCountAnalyzerPlot, \
- LogEntryCountCSV
+ LogEntryCountCSV, KMLRender
from .render.locomotion import LocomotionActionRelativeRender, LocomotionActionAbsoluteRender, \
LocomotionActionRatioRender
@@ -41,6 +41,7 @@ __MAPPING__ = {
LocationAnalyzer: [
TrackRender,
HeatMapRender,
+ KMLRender,
],
ActivityMapper: [
ActivityMapperRender
diff --git a/analyzers/analyzer/__init__.py b/analysis/analyzers/analyzer/__init__.py
similarity index 97%
rename from analyzers/analyzer/__init__.py
rename to analysis/analyzers/analyzer/__init__.py
index 175ef92..422e30c 100644
--- a/analyzers/analyzer/__init__.py
+++ b/analysis/analyzers/analyzer/__init__.py
@@ -2,7 +2,7 @@ import logging
from collections import KeysView
from typing import Type, Sized, Collection
-from analyzers.settings import LogSettings
+from analysis.analyzers.settings import LogSettings
log: logging.Logger = logging.getLogger(__name__)
diff --git a/analyzers/analyzer/biogames.py b/analysis/analyzers/analyzer/biogames.py
similarity index 96%
rename from analyzers/analyzer/biogames.py
rename to analysis/analyzers/analyzer/biogames.py
index fccc8d0..e87f21f 100644
--- a/analyzers/analyzer/biogames.py
+++ b/analysis/analyzers/analyzer/biogames.py
@@ -3,8 +3,8 @@ from collections import defaultdict, namedtuple, OrderedDict
from types import SimpleNamespace
from typing import List, NamedTuple
-from util import json_path, combinate
-from util.download import download_board, get_board_data
+from analysis.util import json_path, combinate
+from analysis.util.download import download_board, get_board_data
from . import Result, LogSettings, Analyzer, ResultStore
from .default import CategorizerStub, Store
@@ -174,13 +174,13 @@ class ActivityMapper(Analyzer):
board["image"] = "simu.png"
continue
local_file = download_board(board["board_id"], self.instance_config_id, board["sequence_id"],
- self.settings.source)
+ self.settings.source)
if local_file:
board['image'] = local_file
else:
board['image'] = "ERROR_FETCHING_FILE"
logger.error("error downloading board! %s %s %s", self.instance_config_id, board["sequence_id"],
- board["board_id"])
+ board["board_id"])
else:
board["image"] = "map.png"
store.add(Result(type(self), {
@@ -202,7 +202,7 @@ class ActivityMapper(Analyzer):
self.add_location(entry)
elif entry[self.settings.type_field] in self.settings.boards:
board_data = get_board_data(self.settings.source, self.instance_config_id, entry["sequence_id"],
- entry["board_id"])
+ entry["board_id"])
entry["extra_data"] = board_data
entry["extra_data"]["activity_type"] = self.last_board_type
entry['coordinate'] = self.new_coordinate()
@@ -218,14 +218,14 @@ class ActivityMapper(Analyzer):
def classify_entry(self, entry):
entry_type = entry[self.settings.type_field]
if self.filters.end(entry):
- data = {"extra_data": {"activity_type": "map"},"coordinate": self.new_coordinate()}
+ data = {"extra_data": {"activity_type": "map"}, "coordinate": self.new_coordinate()}
data.update(entry)
self.timeline.append(data)
return "map"
if not entry_type in self.settings.boards:
return self.last_board_type
board_data = get_board_data(self.settings.source, self.instance_config_id, entry["sequence_id"],
- entry["board_id"])
+ entry["board_id"])
for pattern in self.classes:
if pattern in board_data['class']:
return self.classes[pattern]
diff --git a/analyzers/analyzer/default.py b/analysis/analyzers/analyzer/default.py
similarity index 97%
rename from analyzers/analyzer/default.py
rename to analysis/analyzers/analyzer/default.py
index 8698a5c..fce0e35 100644
--- a/analyzers/analyzer/default.py
+++ b/analysis/analyzers/analyzer/default.py
@@ -1,7 +1,7 @@
import logging
from collections import defaultdict, OrderedDict
-from util import json_path
+from analysis.util import json_path
from . import Result, LogSettings, Analyzer, ResultStore
@@ -16,9 +16,9 @@ class LocationAnalyzer(Analyzer):
super().__init__(settings)
self.entries = []
- def result(self, store: ResultStore) -> None:
+ def result(self, store: ResultStore, **kwargs) -> None:
self.log.debug(len(self.entries))
- store.add(Result(type(self), list(self.entries)))
+ store.add(Result(type(self), list(self.entries), name=kwargs['name']))
def process(self, entry: dict) -> bool:
if entry[self.settings.type_field] in self.settings.spatials:
diff --git a/analyzers/analyzer/locomotion.py b/analysis/analyzers/analyzer/locomotion.py
similarity index 100%
rename from analyzers/analyzer/locomotion.py
rename to analysis/analyzers/analyzer/locomotion.py
diff --git a/analyzers/analyzer/mask.py b/analysis/analyzers/analyzer/mask.py
similarity index 100%
rename from analyzers/analyzer/mask.py
rename to analysis/analyzers/analyzer/mask.py
diff --git a/analyzers/render/__init__.py b/analysis/analyzers/render/__init__.py
similarity index 100%
rename from analyzers/render/__init__.py
rename to analysis/analyzers/render/__init__.py
diff --git a/analyzers/render/biogames.py b/analysis/analyzers/render/biogames.py
similarity index 97%
rename from analyzers/render/biogames.py
rename to analysis/analyzers/render/biogames.py
index 5e2a2f1..608d045 100644
--- a/analyzers/render/biogames.py
+++ b/analysis/analyzers/render/biogames.py
@@ -9,8 +9,8 @@ from scipy.interpolate import interp1d
import networkx as nx
import itertools
-from analyzers import Store, BiogamesStore, SimulationOrderAnalyzer
-from util.meta_temp import CONFIG_NAMES
+from analysis.analyzers import Store, BiogamesStore, SimulationOrderAnalyzer
+from analysis.util.meta_temp import CONFIG_NAMES
from . import Render
from .. import Result, SimulationRoundsAnalyzer, BoardDurationAnalyzer, ActivityMapper
diff --git a/analyzers/render/default.py b/analysis/analyzers/render/default.py
similarity index 68%
rename from analyzers/render/default.py
rename to analysis/analyzers/render/default.py
index bf57f61..d0d94c1 100644
--- a/analyzers/render/default.py
+++ b/analysis/analyzers/render/default.py
@@ -2,11 +2,13 @@ import json
import logging
from typing import List
+import datetime
import matplotlib.pyplot as plt
-from analyzers import LogEntryCountAnalyzer
+from analysis.analyzers import LogEntryCountAnalyzer
+from analysis.util.meta_temp import KML_PATTERN
from . import Render, Result
-from .. import LocationAnalyzer
+from analysis.analyzers import LocationAnalyzer
log = logging.getLogger(__name__)
@@ -39,6 +41,31 @@ class TrackRender(Render):
return dumps
+def format_time(ts):
+    return datetime.datetime.fromtimestamp(ts / 1000, datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ")  # ts is epoch ms (hence /1000); KML <when> expects UTC ISO-8601, so convert with an explicit UTC tz and a "Z" suffix instead of the naive local-time conversion
+
+
+class KMLRender(Render):
+    """Write one .kml per log. NOTE(review): emits bare time/coord lines -- confirm KML_PATTERN wraps them in <when>/<gx:coord>; the pattern markup looks stripped in this patch."""
+    result_types = [LocationAnalyzer]
+
+    def render(self, results: List[Result], name=None):  # `name` is unused; kept for Render interface compatibility
+        for result in self.filter(results):
+            times = [format_time(entry["timestamp"]) for entry in result.get()]
+            coords = [
+                "{long} {lat} 0.0".format(
+                    lat=entry['location']['coordinates'][1],
+                    long=entry['location']['coordinates'][0])
+                for entry in result.get()
+            ]
+            filename = str(result.name) + ".kml"
+            log.info(filename)  # was print(); the module logger is in scope at top of file
+            with open(filename, "w") as out:
+                out.write(KML_PATTERN.format(name=str(result.name), coordinates="\n".join(coords), when="\n".join(times)))
+
+
+
+
class HeatMapRender(TrackRender):
weight = 0.01
diff --git a/analyzers/render/locomotion.py b/analysis/analyzers/render/locomotion.py
similarity index 100%
rename from analyzers/render/locomotion.py
rename to analysis/analyzers/render/locomotion.py
diff --git a/analyzers/render/wip.py b/analysis/analyzers/render/wip.py
similarity index 98%
rename from analyzers/render/wip.py
rename to analysis/analyzers/render/wip.py
index 832c49f..19249ec 100644
--- a/analyzers/render/wip.py
+++ b/analysis/analyzers/render/wip.py
@@ -2,8 +2,8 @@ import json
import numpy as np
-import analyzers
-from util.geo import calc_distance
+import analysis.analyzers
+from analysis.util.geo import calc_distance
def time_distribution(store):
@@ -97,7 +97,7 @@ def time_distribution(store):
from collections import defaultdict
import matplotlib.pyplot as plt
-from util.meta_temp import CONFIG_NAMES
+from analysis.util.meta_temp import CONFIG_NAMES
keys = [
"simu",
diff --git a/analyzers/settings.py b/analysis/analyzers/settings.py
similarity index 87%
rename from analyzers/settings.py
rename to analysis/analyzers/settings.py
index 295f670..36f7682 100644
--- a/analyzers/settings.py
+++ b/analysis/analyzers/settings.py
@@ -1,6 +1,6 @@
import json
import sys
-from sources import SOURCES
+from analysis.sources import SOURCES
def load_source(config):
@@ -28,7 +28,7 @@ class LogSettings:
self.boards = json_dict['boards']
for mod in json_dict['analyzers']:
for name in json_dict['analyzers'][mod]:
- print(mod, name)
+ print(mod, name, getattr(sys.modules[mod], name))
self.analyzers.append(getattr(sys.modules[mod], name))
self.sequences = json_dict['sequences']
if 'custom' in json_dict:
@@ -51,3 +51,7 @@ class LogSettings:
def load_settings(file: str) -> LogSettings:
return LogSettings(json.load(open(file)))
+
+
+def parse_settings(config: str) -> LogSettings:
+ return LogSettings(json.loads(config))
diff --git a/biogames.json b/analysis/biogames.json
similarity index 100%
rename from biogames.json
rename to analysis/biogames.json
diff --git a/biogames2.json b/analysis/biogames2.json
similarity index 100%
rename from biogames2.json
rename to analysis/biogames2.json
diff --git a/filter_todo/pre_filter.py b/analysis/filter_todo/pre_filter.py
similarity index 100%
rename from filter_todo/pre_filter.py
rename to analysis/filter_todo/pre_filter.py
diff --git a/loaders/__init__.py b/analysis/loaders/__init__.py
similarity index 100%
rename from loaders/__init__.py
rename to analysis/loaders/__init__.py
diff --git a/loaders/biogames.py b/analysis/loaders/biogames.py
similarity index 100%
rename from loaders/biogames.py
rename to analysis/loaders/biogames.py
diff --git a/loaders/loader.py b/analysis/loaders/loader.py
similarity index 100%
rename from loaders/loader.py
rename to analysis/loaders/loader.py
diff --git a/log_analyzer.py b/analysis/log_analyzer.py
similarity index 53%
rename from log_analyzer.py
rename to analysis/log_analyzer.py
index 1e053fb..2839abd 100644
--- a/log_analyzer.py
+++ b/analysis/log_analyzer.py
@@ -2,16 +2,16 @@ import json
import logging
from typing import List
-import analyzers
-from analyzers import get_renderer, render
-from analyzers.analyzer import ResultStore
-from analyzers.analyzer.default import write_logentry_count_csv, write_simulation_flag_csv
-from analyzers.render import wip
-from analyzers.render.default import LogEntryCountCSV
-from analyzers.render.wip import time_distribution, plot_data
-from analyzers.settings import LogSettings, load_settings
-from loaders import LOADERS
-from util.processing import grep, run_analysis, src_file
+from analysis import analyzers
+from analysis.analyzers import get_renderer, render
+from analysis.analyzers.analyzer import ResultStore
+from analysis.analyzers.analyzer.default import write_logentry_count_csv, write_simulation_flag_csv
+from analysis.analyzers.render import wip
+from analysis.analyzers.render.default import LogEntryCountCSV, KMLRender
+from analysis.analyzers.render.wip import time_distribution, plot_data
+from analysis.analyzers.settings import LogSettings, load_settings, parse_settings
+from analysis.loaders import LOADERS
+from analysis.util.processing import grep, run_analysis, src_file
logging.basicConfig(format='%(levelname)s %(name)s:%(message)s', level=logging.DEBUG)
log: logging.Logger = logging.getLogger(__name__)
@@ -26,36 +26,39 @@ def urach_logs(log_ids, settings):
if __name__ == '__main__':
- settings: LogSettings = load_settings("biogames2.json")
- log_ids_urach: List[str] = urach_logs([
- # "34fecf49dbaca3401d745fb467",
- # "44ea194de594cd8d63ac0314be",
- # "57c444470dbf88605433ca935c",
- # "78e0c545b594e82edfad55bd7f",
- # "91abfd4b31a5562b1c66be37d9",
- # "597b704fe9ace475316c345903",
- # "e01a684aa29dff9ddd9705edf8",
- "597b704fe9ace475316c345903",
- "e01a684aa29dff9ddd9705edf8",
- "fbf9d64ae0bdad0de7efa3eec6",
- # "fbf9d64ae0bdad0de7efa3eec6",
- "fe1331481f85560681f86827ec", # urach
- # "fe1331481f85560681f86827ec"]
- "fec57041458e6cef98652df625",
- ]
- , settings)
- log_ids_gf = grep(["9d11b749c78a57e786bf5c8d28", # filderstadt
- "a192ff420b8bdd899fd28573e2", # eichstätt
- "3a3d994c04b1b1d87168422309", # stadtökologie
- "fe1331481f85560681f86827ec", # urach
- "96f6d9cc556b42f3b2fec0a2cb7ed36e" # oberelsbach
- ],
- "/home/clemens/git/ma/test/src",
- settings)
- log_ids = src_file("/home/clemens/git/ma/test/filtered_5_actions")
+ settings = {}
+ log_ids_gf = []
+ # settings: LogSettings = load_settings("biogames2.json")
+ # log_ids_urach: List[str] = urach_logs([
+ # # "34fecf49dbaca3401d745fb467",
+ # # "44ea194de594cd8d63ac0314be",
+ # # "57c444470dbf88605433ca935c",
+ # # "78e0c545b594e82edfad55bd7f",
+ # # "91abfd4b31a5562b1c66be37d9",
+ # # "597b704fe9ace475316c345903",
+ # # "e01a684aa29dff9ddd9705edf8",
+ # "597b704fe9ace475316c345903",
+ # "e01a684aa29dff9ddd9705edf8",
+ # "fbf9d64ae0bdad0de7efa3eec6",
+ # # "fbf9d64ae0bdad0de7efa3eec6",
+ # "fe1331481f85560681f86827ec", # urach
+ # # "fe1331481f85560681f86827ec"]
+ # "fec57041458e6cef98652df625",
+ # ]
+ # , settings)
+ # log_ids_gf = grep(["9d11b749c78a57e786bf5c8d28", # filderstadt
+ # "a192ff420b8bdd899fd28573e2", # eichstätt
+ # "3a3d994c04b1b1d87168422309", # stadtökologie
+ # "fe1331481f85560681f86827ec", # urach
+ # "96f6d9cc556b42f3b2fec0a2cb7ed36e" # oberelsbach
+ # ],
+ # "/home/clemens/git/ma/test/src",
+ # settings)
+ # log_ids = src_file("/home/clemens/git/ma/test/filtered_5_actions")
- #store: ResultStore = run_analysis(log_ids_gf, settings, LOADERS)
- #store: ResultStore = run_analysis(log_ids, settings, LOADERS)
+ if False:
+ store: ResultStore = run_analysis(log_ids_gf, settings, LOADERS)
+ # store: ResultStore = run_analysis(log_ids, settings, LOADERS)
if False:
for r in get_renderer(analyzers.LocomotionActionAnalyzer):
@@ -69,7 +72,7 @@ if __name__ == '__main__':
# render(analyzers.ProgressAnalyzer, store.get_all())
if False:
- from analyzers.postprocessing import graph
+ from analysis.analyzers.postprocessing import graph
g = graph.Cache(settings)
g.run(store)
@@ -85,7 +88,7 @@ if __name__ == '__main__':
if False:
time_distribution(store)
- if True:
+ if False:
# spatial_data = get_data_distance(store,relative_values=False)
# temporal_data = get_data(store,relative_values=False)
# spatial_data_rel = get_data_distance(store,relative_values=True)
@@ -104,6 +107,18 @@ if __name__ == '__main__':
# plot_time_space_rel(combined, keys)
plot_data(combined, wip.keys)
+ if True:
+ settings: LogSettings = load_settings("../oeb_kml.json")
+ log_ids = src_file("/home/clemens/git/ma/test/oeb_2016_path")
+ log_ids = log_ids[0:2]
+ print(log_ids)
+ store: ResultStore = run_analysis(log_ids, settings, LOADERS)
+ print("render")
+ kml = KMLRender()
+ kml.render(store.get_all())
+ print("done")
+ #for cat in store.get_categories():
+ # render(analyzers.ActivityMapper, store.get_category(cat), name=cat)
# for analyzers in analyzers:
# if analyzers.name() in ["LogEntryCount", "ActionSequenceAnalyzer"]:
diff --git a/sources/__init__.py b/analysis/sources/__init__.py
similarity index 100%
rename from sources/__init__.py
rename to analysis/sources/__init__.py
diff --git a/sources/biogames.py b/analysis/sources/biogames.py
similarity index 98%
rename from sources/biogames.py
rename to analysis/sources/biogames.py
index fa39b38..ac25e10 100644
--- a/sources/biogames.py
+++ b/analysis/sources/biogames.py
@@ -26,7 +26,7 @@ class Biogames(Source):
raise ValueError("missing value " + i)
csrf_request = requests.get(kwargs['url'])
if csrf_request.status_code != 200:
- raise ConnectionError("unable to obtain CSRF token (" + str(csrf_request) + ")")
+ raise ConnectionError("unable to obtain CSRF token (" + str(csrf_request) + ")", csrf_request.url)
self.cookies['csrftoken'] = csrf_request.cookies['csrftoken']
log.info("obtained CSRF token (" + self.cookies['csrftoken'] + ")")
login_payload = {
diff --git a/sources/source.py b/analysis/sources/source.py
similarity index 100%
rename from sources/source.py
rename to analysis/sources/source.py
diff --git a/util/__init__.py b/analysis/util/__init__.py
similarity index 100%
rename from util/__init__.py
rename to analysis/util/__init__.py
diff --git a/util/download.py b/analysis/util/download.py
similarity index 100%
rename from util/download.py
rename to analysis/util/download.py
diff --git a/util/geo.py b/analysis/util/geo.py
similarity index 100%
rename from util/geo.py
rename to analysis/util/geo.py
diff --git a/util/iter.py b/analysis/util/iter.py
similarity index 100%
rename from util/iter.py
rename to analysis/util/iter.py
diff --git a/util/meta_temp.py b/analysis/util/meta_temp.py
similarity index 94%
rename from util/meta_temp.py
rename to analysis/util/meta_temp.py
index 7efaf63..0bb928c 100644
--- a/util/meta_temp.py
+++ b/analysis/util/meta_temp.py
@@ -97,3 +97,20 @@ CONFIG_NAMES = {
'fe43a0f0-3dea-11e6-a065-00199963ac6e': u'Vorlagen',
'ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771': u'Bad Urach'
}
+
+
+KML_PATTERN="""
+
+
+
+
+
+ {when}
+ {coordinates}
+
+
+ {coordinates}
+
+
+
+"""
\ No newline at end of file
diff --git a/util/processing.py b/analysis/util/processing.py
similarity index 90%
rename from util/processing.py
rename to analysis/util/processing.py
index 5b3a3ff..7291f60 100644
--- a/util/processing.py
+++ b/analysis/util/processing.py
@@ -1,8 +1,8 @@
import logging
from typing import List
-from analyzers.analyzer import ResultStore, Analyzer
-from analyzers.settings import LogSettings
+from analysis.analyzers.analyzer import ResultStore, Analyzer
+from analysis.analyzers.settings import LogSettings
log: logging.Logger = logging.getLogger(__name__)
@@ -31,6 +31,7 @@ def process_log(logfile: str, settings: LogSettings, loaders) -> List[Analyzer]:
def run_analysis(log_ids: list, settings, loaders):
store: ResultStore = ResultStore()
for log_id in log_ids:
+        log.info("LOG_ID: %s", log_id)  # lazy %s formatting: defer str() until the record is actually emitted
for analysis in process_log(log_id, settings, loaders):
log.info("* Result for " + analysis.name())
analysis.result(store, name=log_id)
diff --git a/clients/__init__.py b/clients/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/clients/webclients.py b/clients/webclients.py
new file mode 100644
index 0000000..95f5c6c
--- /dev/null
+++ b/clients/webclients.py
@@ -0,0 +1,110 @@
+import json
+import logging
+
+import os
+import shutil
+import tempfile
+import typing
+
+import requests
+
+log: logging.Logger = logging.getLogger(__name__)
+
+
+class Client:
+ host: str = ""
+ cookies: typing.Dict[str, str] = {}
+ headers: typing.Dict[str, str] = {}
+
+ def url(self, path):
+ if self.host:
+ return self.host + path
+ return path
+
+ def get(self, url, **kwargs) -> requests.models.Response:
+ return requests.get(self.url(url), cookies=self.cookies, headers=self.headers, **kwargs)
+
+ def post(self, url, data, **kwargs) -> requests.models.Response:
+ return requests.post(self.url(url), data, cookies=self.cookies, headers=self.headers, **kwargs)
+
+ def download_file(self, url, target, **kwargs) -> bool:
+ with open(target, "wb") as out:
+ try:
+ download = self.get(url, stream=True, **kwargs)
+ shutil.copyfileobj(download.raw, out)
+ except Exception as e:
+ log.exception(e)
+ os.remove(target)
+ return False
+ return True
+
+ def download_files(self, urls, **kwargs) -> tempfile.TemporaryDirectory:
+ target = tempfile.TemporaryDirectory()
+ for path in urls:
+ filename = os.path.join(target.name, path.split("/")[-1])
+ self.download_file(path, filename, **kwargs)
+ return target
+
+
+class BiogamesClient(Client):
+ config_fields: typing.Dict[str, typing.List[str]] = {
+ 'login': ('username', 'password', 'host'),
+ 'session': ('sessionid', 'csrftoken', 'host'),
+ }
+ login_url: str = "/game2/auth/json-login"
+ list_url: str = "/game2/instance/log/list/"
+ headers: typing.Dict[str, str] = {'Accept': 'application/json'}
+
+    def __init__(self, **kwargs):
+        match = {j: all([i in kwargs for i in self.config_fields[j]]) for j in self.config_fields}  # which credential schemes (login/session) are fully supplied
+        valid = [x for x in match if match[x]]  # must be a list: a filter object is ALWAYS truthy, so the emptiness check below could never raise
+        if not valid:
+            raise ValueError("missing parameter (" + str(self.config_fields) + ")")
+        self.config = kwargs
+        self.cookies = {}
+        self.host = self.config['host']
+        if 'session' in valid:
+            self.cookies = kwargs
+
+ def login(self) -> bool:
+ csrf_request = self.get(self.list_url)
+ if not csrf_request.ok:
+ raise ConnectionError("Unable to obtain CSRF token (" + str(csrf_request) + ")")
+ self.cookies['csrftoken'] = csrf_request.cookies['csrftoken']
+ login_payload = {
+ 'username': self.config['username'],
+ 'password': self.config['password'],
+ 'next': '',
+ 'csrfmiddlewaretoken': 'csrftoken',
+ }
+ login = self.post(self.login_url, json.dumps(login_payload))
+ if not login.ok:
+ raise ConnectionError("Unable to authenticate", login, login.text)
+ self.cookies['sessionid'] = login.cookies['sessionid']
+ print(self.cookies)
+ return True
+
+ def list(self) -> dict:
+ print(self.cookies)
+ logs = self.get(self.list_url)
+ if not logs.ok:
+ raise ConnectionError("HTTP fail", logs, logs.text)
+ return logs.json()
+
+ def load_all_logs(self) -> tempfile.TemporaryDirectory:
+ return self.download_files([i["file_url"] for i in self.list()])
+
+
+CLIENTS: typing.Dict[str, typing.Type[Client]] = {  # maps game name -> client *class* (instantiated per login), hence Type[Client], not Client
+    "Biogames": BiogamesClient,
+}
+
+if __name__ == '__main__':
+    # Ad-hoc manual smoke tests for the client, kept commented out.
+    # c = BiogamesClient(host="http://biodiv", username="ba", password="853451")
+    # print(c.login())
+    # print(json.dumps(c.list(), indent=1))
+    # print(type(c.load_all_logs()))
+    # print(type(c.get("/")))
+    c = BiogamesClient(host="http://biodiv", **{'csrftoken': 'IgbwP83iEibW6RE7IADIFELYdbx0dvqQ',  # FIXME(review): hard-coded session tokens (and a password above) committed to the repo -- load from env vars and rotate these secrets
+                                                'sessionid': 'zntsj09d92tjos1b6ruqjthlzv60xdin'})
+    print(json.dumps(c.list(), indent=1))
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..fc01d0d
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,41 @@
+version: "3"
+
+services:
+ app:
+ image: docker.clkl.de/ma/celery:0.1
+ build: .
+ volumes:
+ - ./:/app
+ working_dir: /app/selector
+ command: python3 webserver.py
+ environment:
+ - PYTHONPATH=/app
+ networks:
+ - default
+ - traefik_net
+ labels:
+ - "traefik.enable=true"
+ - "traefik.port=5000"
+ - "traefik.docker.network=traefik_net"
+ - "traefik.url.frontend.rule=Host:select.ma.potato.kinf.wiai.uni-bamberg.de"
+
+
+ redis:
+ image: redis:4-alpine
+
+
+
+ celery:
+ image: docker.clkl.de/ma/celery:0.1
+ build: .
+ environment:
+ - PYTHONPATH=/app
+ volumes:
+ - ./:/app
+ working_dir: /app/tasks
+ command: celery -A tasks worker --loglevel=info
+
+networks:
+ traefik_net:
+ external:
+ name: traefik_net
diff --git a/requirements.txt b/requirements.txt
index 5813878..8b4ddee 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,13 @@
requests==2.18.4
numpy==1.13.1
matplotlib==2.1.0
-osmnx==0.6
+#osmnx==0.6
networkx==2.0
-pydot==1.2.3
+#pydot==1.2.3
scipy==1.0.0
-ipython==6.2.1
\ No newline at end of file
+#ipython==6.2.1
+
+flask==0.12.2
+
+celery==4.1.0
+redis==2.10.6
\ No newline at end of file
diff --git a/selector/__init__.py b/selector/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/selector/static/script.js b/selector/static/script.js
new file mode 100644
index 0000000..865286c
--- /dev/null
+++ b/selector/static/script.js
@@ -0,0 +1,4 @@
+function validateSettings() {
+ alert(document.getElementById('safety').checked);
+ return false;
+}
\ No newline at end of file
diff --git a/selector/static/style.css b/selector/static/style.css
new file mode 100644
index 0000000..9eb023c
--- /dev/null
+++ b/selector/static/style.css
@@ -0,0 +1,6 @@
+body {
+ background-color: aqua;
+}
+#data{
+ display: none;
+}
\ No newline at end of file
diff --git a/selector/templates/base.html b/selector/templates/base.html
new file mode 100644
index 0000000..2dea66f
--- /dev/null
+++ b/selector/templates/base.html
@@ -0,0 +1,5 @@
+
+
…
+
+
+{% block body %} {% endblock %}
\ No newline at end of file
diff --git a/selector/templates/games.html b/selector/templates/games.html
new file mode 100644
index 0000000..2692bb7
--- /dev/null
+++ b/selector/templates/games.html
@@ -0,0 +1,19 @@
+{% extends "base.html" %}
+{% block body %}
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/selector/templates/index.html b/selector/templates/index.html
new file mode 100644
index 0000000..5a4c6c0
--- /dev/null
+++ b/selector/templates/index.html
@@ -0,0 +1,14 @@
+{% extends "base.html" %}
+{% block body %}
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/selector/webserver.py b/selector/webserver.py
new file mode 100644
index 0000000..de901c2
--- /dev/null
+++ b/selector/webserver.py
@@ -0,0 +1,53 @@
+import typing
+import uuid
+
+from clients.webclients import Client, CLIENTS
+
+from flask import Flask, render_template, request, redirect, session
+
+BIOGAMES_HOST = "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+
+app = Flask(__name__)
+clients: typing.Dict[str, Client] = {}
+
+@app.route("/")
+def index():
+ return render_template("index.html", clients=CLIENTS)
+
+
+@app.route("/login", methods=["POST"])
+def login():
+ game = request.form["game"]
+ if not game in CLIENTS:
+ return redirect("/")
+ client = CLIENTS[game](host=BIOGAMES_HOST, username=request.form['username'], password=request.form['password'])
+ if client.login():
+ session['logged_in'] = True
+ session['uid'] = str(uuid.uuid4())
+ session['cookies'] = client.cookies
+ session['game'] = game
+ session['host'] = BIOGAMES_HOST
+ clients[session['uid']] = client
+ return redirect("/games")
+ return redirect("/")
+
+
+@app.route("/games")
+def games():
+    if not session.get('logged_in'):  # .get(): a fresh session has no 'logged_in' key; bare indexing raised KeyError instead of redirecting
+        return redirect("/")
+    if session['uid'] not in clients:  # logged_in is guaranteed truthy here, so the redundant `session['logged_in'] and` was dropped
+        clients[session['uid']] = CLIENTS[session['game']](host=session['host'], **session['cookies'])
+    return render_template("games.html", logs=clients[session['uid']].list())
+
+@app.route("/start", methods=['POST'])
+def start():
+ pass
+
+@app.route("/status")
+def status():
+ pass
+
+if __name__ == '__main__':
+ app.config.update({"SECRET_KEY":"59765798-2784-11e8-8d05-db4d6f6606c9"})
+ app.run(host="0.0.0.0", debug=True)
diff --git a/static/progress/my.js b/static/progress/my.js
index 29c2dd8..309ad7a 100644
--- a/static/progress/my.js
+++ b/static/progress/my.js
@@ -1,8 +1,9 @@
-$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_03b9b6b4-c8ab-4182-8902-1620eebe8889.json", function (data) { //urach
+//$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_03b9b6b4-c8ab-4182-8902-1620eebe8889.json", function (data) { //urach
//$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_de7df5b5-edd5-4070-840f-68854ffab9aa.json", function (data) { //urach
//$.getJSON("data/90278021-4c57-464e-90b1-d603799d07eb_07da99c9-398a-424f-99fc-2701763a63e9.json", function (data) { //eichstätt
//$.getJSON("data/13241906-cdae-441a-aed0-d57ebeb37cac_d33976a6-8a56-4a63-b492-fe5427dbf377.json", function (data) { //stadtökologie
-//$.getJSON("data/5e64ce07-1c16-4d50-ac4e-b3117847ea43_2f664d7b-f0d8-42f5-8731-c034ef86703e.json", function (data) { //filderstadt
+$.getJSON("data/5e64ce07-1c16-4d50-ac4e-b3117847ea43_2f664d7b-f0d8-42f5-8731-c034ef86703e.json", function (data) { //filderstadt
+//$.getJSON("data/17d401a9-de21-49a2-95bc-7dafa53dda64_98edcb70-03db-4465-b185-a9c9574995ce.json", function (data) { //oeb2016
var images = {};
var tiles = {
"openstreetmap": L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
diff --git a/tasks/__init__.py b/tasks/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tasks/tasks.py b/tasks/tasks.py
new file mode 100644
index 0000000..2423c78
--- /dev/null
+++ b/tasks/tasks.py
@@ -0,0 +1,14 @@
+from celery import Celery
+#from analysis import log_analyzer as la
+
+app = Celery('tasks', backend='redis://redis', broker='redis://redis')
+
+
+@app.task
+def add(x, y):
+ return x + y
+
+#@app.task
+#def analyze(config, log_ids):
+# settings = la.parse_settings(config)
+# store = la.run_analysis(log_ids, settings, la.LOADERS)
\ No newline at end of file