add oeb-new config

master
Clemens Klug 2019-10-07 17:20:23 +02:00
parent 29c04f2fdc
commit 86adc973a2
10 changed files with 400 additions and 36 deletions


@ -3,7 +3,7 @@ from typing import List
from .analyzer import Analyzer, Result
from .analyzer.biogames import BoardDurationAnalyzer, SimulationRoundsAnalyzer, ActivationSequenceAnalyzer, \
    BiogamesCategorizer, ActivityMapper, BiogamesStore, InstanceConfig, SimulationOrderAnalyzer, SimulationCategorizer, \
-    SimulationFlagsAnalyzer, GameField_InstanceCategorizer
+    SimulationFlagsAnalyzer, GameField_InstanceCategorizer, BiogamesTasks, BiogamesDuration, GameFieldInstanceGroup
from .analyzer.default import LogEntryCountAnalyzer, LocationAnalyzer, LogEntrySequenceAnalyzer, ActionSequenceAnalyzer, \
    CategorizerStub, Store, ProgressAnalyzer, SimpleCategorizer, WhitelistAnalyzer
from .analyzer.locomotion import LocomotionActionAnalyzer, CacheSequenceAnalyzer
@ -15,7 +15,7 @@ from .render.default import PrintRender, JSONRender, TrackRender, HeatMapRender,
    LogEntryCountCSV, KMLRender, GeoJSON
from .render.locomotion import LocomotionActionRelativeRender, LocomotionActionAbsoluteRender, \
    LocomotionActionRatioRender
-from .render.wip import get_all_data, plot_time_space_rel
+#from .render.wip import get_all_data, plot_time_space_rel

__FALLBACK__ = PrintRender
__MAPPING__ = {


@ -150,6 +150,31 @@ class GameField_InstanceCategorizer(CategorizerStub): # TODO: refactor
        return False


class GameFieldInstanceGroup(Analyzer):
    __name__ = "BiogamesGamefieldInstanceGroupAnalizer"

    def __init__(self, settings: LogSettings):
        super().__init__(settings)
        self.metadata = None

    def process(self, entry: dict) -> bool:
        if not self.metadata:
            if entry[self.settings.type_field] in self.settings.custom['instance_start']:
                try:
                    self.metadata = {"instance_config_id": json_path(entry, self.settings.custom['instance_config_id']),
                                     "instance_id": entry[self.settings.custom['instance_id']],
                                     "timestamp": str(entry["timestamp"]),
                                     "player_group_name": entry['player_group_name']
                                     }
                except KeyError as e:
                    print(entry)
                    raise e
        return False

    def result(self, store: ResultStore, **kwargs) -> None:
        store.add(Result(type(self), self.metadata))


class ActivityMapper(Analyzer):
    __name__ = "ActivityMapper"
    classes = {
@ -287,6 +312,94 @@ class ActivityMapper(Analyzer):
        return {"type": "Feature", "geometry": {"type": "LineString", "coordinates": []}, "properties": {'start_timestamp': timestamp, 'coordTimes': []}}
class BiogamesDuration(Analyzer):
    __name__ = "BiogamesDuration"

    def __init__(self, settings: LogSettings) -> None:
        super().__init__(settings)
        self.first = None
        self.last = None
        self.sequences = defaultdict(list)
        self.filters = SimpleNamespace()
        self.filters.start = lambda entry: combinate(self.settings.custom["sequences2"]["start"], entry)
        self.filters.end = lambda entry: combinate(self.settings.custom["sequences2"]["end"], entry)
        self.sequence = None
        self.sequence_start = None
        self.cache = "None"

    def process(self, entry: dict) -> bool:
        if not self.first:
            self.first = entry['timestamp']
        self.last = entry['timestamp']
        if not self.sequence and self.filters.start(entry):
            self.sequence = entry['sequence_id']
            self.sequence_start = entry['timestamp']
        elif self.sequence and self.filters.end(entry):
            self.sequences[f"{self.cache}+{self.sequence}"].append((self.sequence_start, entry['timestamp']))
            self.sequences[f"only+{self.sequence}"].append((self.sequence_start, entry['timestamp']))
            self.sequence = None
            self.sequence_start = 0
            self.cache = "None"
        if entry['@class'] in self.settings.sequences['start']:
            if entry['cache']:
                self.cache = entry['cache']['@id']
            else:
                self.cache = "None"
        return False

    def result(self, store: ResultStore, name=None) -> None:
        results = {"start": self.first, "end": self.last, "duration": self.last - self.first}
        for sid in self.sequences:
            seq = self.sequences[sid]
            #print([end-start for start,end in seq])
            results[f"sequence_{sid}_duration"] = sum([end - start for start, end in seq])
        store.add(Result(type(self), results))
class BiogamesTasks(Analyzer):
    __name__ = "BiogamesTasks"
    DATA_CLASSES = ("de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion",)
    BOARD_CLASSES = ("de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry",)

    def __init__(self, settings: LogSettings) -> None:
        super().__init__(settings)
        self.settings: LogSettings = settings
        self.tasks = {}
        self.last_board = None
        self.instance_config_id: str = None

    def process(self, entry: dict) -> bool:
        if self.instance_config_id is None:
            if entry[self.settings.type_field] in self.settings.custom['instance_start']:
                self.instance_config_id = json_path(entry, self.settings.custom['instance_config_id'])
        if self.is_task(entry) and self.last_board:
            entry['__duration'] = entry['timestamp'] - self.last_board['timestamp']
            self.tasks[self.ids()] = entry
        if self.is_board(entry):
            self.last_board = entry
        return False

    def result(self, store: ResultStore, name=None) -> None:
        results = {}
        for ids in self.tasks:
            task = self.tasks[ids]
            for action in task['selected_actions']:
                if self.is_dollar_action(action):
                    results[ids] = {"duration": task['__duration'], "result": action['increment']}
        store.add(Result(type(self), results))

    def ids(self):
        return f"{self.instance_config_id}_{self.last_board['sequence_id']}_{self.last_board['board_id']}"

    def is_task(self, entry) -> bool:
        return entry['@class'] in self.DATA_CLASSES

    def is_board(self, entry) -> bool:
        return entry['@class'] in self.BOARD_CLASSES

    def is_dollar_action(self, action):
        return action['@class'] in ("de.findevielfalt.games.game2.instance.action.IncrementDiversityDollarAction",)


class BiogamesStore(Store):
    __name__ = "BiogamesStore"
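BiogamesDuration keys each measured span by the active cache id plus the sequence id, and OEBRender.replace() later splits that key back apart. A minimal sketch of the bookkeeping, with illustrative ids and epoch-millisecond timestamps that are not taken from a real log:

from collections import defaultdict

sequences = defaultdict(list)
cache = "043ab9fe-64e8-4e76-8bf6-8cc9db35eba1"     # a cache id, see CACHE_NAMES below
sequence = "13127209-103c-4aed-9cce-b8a2cd9f7663"  # a sequence id, see SEQUENCE_NAMES below
start, end = 1570458000000, 1570458120000          # illustrative epoch-ms timestamps

# the same bookkeeping BiogamesDuration.process()/result() performs
sequences[f"{cache}+{sequence}"].append((start, end))
sequences[f"only+{sequence}"].append((start, end))
results = {f"sequence_{sid}_duration": sum(e - s for s, e in seq)
           for sid, seq in sequences.items()}
# both entries report 120000 ms (2 minutes) for this single span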


@ -1,6 +1,10 @@
import logging
from typing import List

from analysis.analyzers.analyzer import ResultStore
from .. import Result

logger = logging.getLogger(__name__)


class Render:
    result_types = []
@ -8,6 +12,11 @@ class Render:
    def render(self, results: List[Result], name=None) -> [str]:
        raise NotImplementedError()

    def render_store(self, store: ResultStore, name=None) -> str:
        logging.getLogger(__name__).warning("using default implementation!")
        for category in store.get_categories():
            self.render(store.get_category(category), name=name)

    def filter(self, results: List[Result]):
        if len(self.result_types) == 0:
            return results


@ -1,4 +1,5 @@
import json
import tempfile
from collections import defaultdict
from typing import List, Tuple
@ -9,8 +10,13 @@ from scipy.interpolate import interp1d
import networkx as nx
import itertools
-from analysis.analyzers import Store, BiogamesStore, SimulationOrderAnalyzer
-from analysis.util.meta_temp import CONFIG_NAMES
+from analysis.analyzers import Store, BiogamesStore, SimulationOrderAnalyzer, LocationAnalyzer, BiogamesDuration, \
+    BiogamesTasks, GameFieldInstanceGroup
+from analysis.analyzers.analyzer import ResultStore
+from analysis.analyzers.render.default import GeoJSON
+from analysis.util.geo import calc_distance, calc_distance_simplified
+from analysis.util.meta_temp import CONFIG_NAMES, TASK_NAMES, CACHE_NAMES, SEQUENCE_NAMES
+from analysis.util.output import flat_dict_to_csv, pretty_ts
from . import Render
from .. import Result, SimulationRoundsAnalyzer, BoardDurationAnalyzer, ActivityMapper
@ -201,3 +207,60 @@ class SimulationGroupRender(Render):
                    name=name)

    result_types = [SimulationOrderAnalyzer]
class OEBRender(Render):
    result_types = [LocationAnalyzer, BiogamesTasks, BiogamesDuration, GameFieldInstanceGroup]
    timestamp_fields = ("timestamp", "start", "end")

    def render(self, results: List[Result], name=None) -> [str]:
        data = {}
        for r in self.filter(results):
            if r.analysis() is LocationAnalyzer:
                geojson = GeoJSON()
                json = geojson.make_geojson(r.get())
                data[f"{r.analysis().__name__}__distance"] = calc_distance(json, "features.0.geometry.coordinates", load=False)
                data[f"{r.analysis().__name__}__distance_simplified"] = calc_distance_simplified(json, "features.0.geometry.coordinates", load=False)
            else:
                for i in r.get():
                    a = r.analysis().__name__
                    value = r.get()[i]
                    if i in self.timestamp_fields:
                        value = pretty_ts(value)
                    key = f"{a}__{i}"
                    key = self.replace(key, i)
                    if type(value) is dict:
                        for j in value:
                            data[key + "__" + j] = value[j]
                    else:
                        data[key] = value
        return data

    def render_store(self, store: ResultStore, name=None) -> str:
        data = []
        for category in store.get_categories():
            data.append(self.render(store.get_category(category)))
        #import json
        #print(json.dumps(data, indent=1))
        csv = flat_dict_to_csv(data)
        #print(csv)
        if name:
            filename = str(name) + ".csv"
        else:
            filename = "/tmp/biogames" + ".csv"
        try:
            with open(filename, "w") as out:
                out.write(csv)
        except PermissionError as e:
            raise PermissionError(e, filename)
        return filename

    def replace(self, key, i):
        if i in TASK_NAMES:
            key = f"{TASK_NAMES[i]} ({key})"
        if "sequence_" in i:
            sid = i.split("_")[1]
            cache, seq = sid.split("+")
            cache = CACHE_NAMES.get(cache, cache)
            seq = SEQUENCE_NAMES.get(seq, seq)
            key = f"{cache}->{seq} {sid} duration"
        return key
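OEBRender.replace() turns the raw ids back into readable CSV headers using the TASK_NAMES, CACHE_NAMES and SEQUENCE_NAMES tables added further down in this commit. A rough sketch of that renaming, using one cache and one sequence entry from those tables:

CACHE_NAMES = {"043ab9fe-64e8-4e76-8bf6-8cc9db35eba1": "1a_Infozentrum"}
SEQUENCE_NAMES = {"13127209-103c-4aed-9cce-b8a2cd9f7663": "E"}

i = "sequence_043ab9fe-64e8-4e76-8bf6-8cc9db35eba1+13127209-103c-4aed-9cce-b8a2cd9f7663_duration"
sid = i.split("_")[1]            # "<cache id>+<sequence id>"
cache, seq = sid.split("+")
key = f"{CACHE_NAMES.get(cache, cache)}->{SEQUENCE_NAMES.get(seq, seq)} {sid} duration"
# key now starts with "1a_Infozentrum->E ..."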


@ -1,3 +1,4 @@
import copy
import json
import logging
from typing import List
@ -90,22 +91,27 @@ class GeoJSON(SpatialRender, Render):
        ]
    }

    def make_geojson(self, src):
        coordinates = []
        times = []
        for location in src:
            #print(location)
            coordinates.append(location["location"]["coordinates"])
            times.append(location["timestamp"])
        template = copy.deepcopy(self.template)
        template["features"][0]["properties"] = {"times": times}
        template["features"][0]["geometry"]["coordinates"] = coordinates
        return template
    def render(self, results: List[Result], name=None) -> [str]:
        files = []
        for result in self.filter(results):
-           coordinates = []
-           times = []
-           for location in result.get():
-               #print(location)
-               coordinates.append(location["location"]["coordinates"])
-               times.append(location["timestamp"])
            if name:
                filename = str(name) + ".geojson"
            else:
                filename = str(result.name) + ".geojson"
+           json = self.make_geojson(result.get())
            with open(filename, "w") as out:
-               self.template["features"][0]["properties"] = {"times": times}
-               self.template["features"][0]["geometry"]["coordinates"] = coordinates
                json.dump(self.template, out, indent=1)
            files.append(filename)
        return files
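make_geojson() turns the LocationAnalyzer entries (a GeoJSON point under "location" plus an epoch-ms "timestamp") into a fresh copy of the FeatureCollection template instead of mutating self.template. The template itself is not part of this diff, so the shape below is an assumption based on how its coordinates and properties are accessed here and in OEBRender:

locations = [
    {"location": {"coordinates": [10.88, 49.90]}, "timestamp": 1570458000000},
    {"location": {"coordinates": [10.89, 49.91]}, "timestamp": 1570458060000},
]
# GeoJSON().make_geojson(locations) should then return roughly:
expected = {
    "type": "FeatureCollection",
    "features": [{
        "type": "Feature",
        "properties": {"times": [1570458000000, 1570458060000]},
        "geometry": {"type": "LineString",
                     "coordinates": [[10.88, 49.90], [10.89, 49.91]]},
    }],
}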


@ -1,7 +1,7 @@
import json
import pyproj
-from shapely.geometry import LineString
+from shapely.geometry import LineString, mapping
from shapely.ops import transform
from functools import partial
@ -16,10 +16,18 @@ def distance(track):
    return transform(project, track).length


-def json_to_track(geojson, path):
-    return LineString(json_path(json.loads(geojson), path))
+def json_to_track(geojson, path, load=True):
+    if load:
+        geojson = json.loads(geojson)
+    return LineString(json_path(geojson, path))


-def calc_distance(geojson: str, path="coordinates"):
-    track = json_to_track(geojson, path)
+def calc_distance(geojson: str, path="coordinates", load=True):
+    track = json_to_track(geojson, path, load)
    return distance(track)


def calc_distance_simplified(geojson, path="coordinates", load=True):
    track = json_to_track(geojson, path, load)
    simplified = track.simplify(0.0002, preserve_topology=True)
    return distance(simplified)
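The new load flag lets callers hand over an already-parsed dict instead of a JSON string, which is how OEBRender calls these helpers with the dotted "features.0.geometry.coordinates" path. A usage sketch with illustrative coordinates; the unit of the returned length depends on the projection set up in distance(), and json_path is assumed to accept numeric path segments, as OEBRender already relies on:

from analysis.util.geo import calc_distance, calc_distance_simplified

track_geojson = {
    "features": [{"geometry": {"type": "LineString",
                               "coordinates": [[10.88, 49.90], [10.89, 49.91], [10.90, 49.90]]}}]
}
length = calc_distance(track_geojson, "features.0.geometry.coordinates", load=False)
length_simplified = calc_distance_simplified(track_geojson, "features.0.geometry.coordinates", load=False)
# simplify(0.0002) drops small GPS jitter, so length_simplified <= length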


@ -130,4 +130,75 @@ GEOJSON_PATTERN = """{
}
"""# TODO: fix me

GEOJSON_COORDINATES = "[{lon},{lat}]"

TASK_NAMES = {
"16fc3117-61db-4f50-b84f-81de6310206f_13127209-103c-4aed-9cce-b8a2cd9f7663_32e93082-1aa7-11e5-9827-74d43509b03a": "Lebensraum",
"16fc3117-61db-4f50-b84f-81de6310206f_13127209-103c-4aed-9cce-b8a2cd9f7663_3a27040f-1a9c-11e5-9827-74d43509b03a": "Simulation",
"16fc3117-61db-4f50-b84f-81de6310206f_2119b80e-74d3-4f3b-b0e0-e6a1f2c1d075_096093b0-d1ca-49f3-8d51-f32fa8874db5": "Biosphärenreservat",
"16fc3117-61db-4f50-b84f-81de6310206f_2119b80e-74d3-4f3b-b0e0-e6a1f2c1d075_b10951a5-1a8a-11e5-b1a2-74d43509b03a": "Simulation",
"16fc3117-61db-4f50-b84f-81de6310206f_41abfe17-aef3-41ee-b1e5-eedc8208680f_255d9c6d-1aa0-11e5-9827-74d43509b03a": "Simulation",
"16fc3117-61db-4f50-b84f-81de6310206f_41abfe17-aef3-41ee-b1e5-eedc8208680f_e0d2dee8-1a9f-11e5-9827-74d43509b03a": "Fellbestimmung",
"16fc3117-61db-4f50-b84f-81de6310206f_b9571a6b-c537-4a92-8618-2d73415dec87_10c3329f-7a88-4aa4-9567-14c811d2a6bc": "Lockstock-Code",
"16fc3117-61db-4f50-b84f-81de6310206f_b9571a6b-c537-4a92-8618-2d73415dec87_5732fe6c-1a9e-11e5-9827-74d43509b03a": "Lockstock-Nachweis",
"16fc3117-61db-4f50-b84f-81de6310206f_b9571a6b-c537-4a92-8618-2d73415dec87_e4bbaf4c-1a9d-11e5-9827-74d43509b03a": "Simulation",
"16fc3117-61db-4f50-b84f-81de6310206f_e08ffe7c-b24d-4fcd-9355-8a459a2c07b7_597c651a-1a8c-11e5-b1a2-74d43509b03a": "Simulation",
"16fc3117-61db-4f50-b84f-81de6310206f_e08ffe7c-b24d-4fcd-9355-8a459a2c07b7_da49b7be-bc13-11e4-a0de-6364e0bfe983": "Holzbedarf",
"17d401a9-de21-49a2-95bc-7dafa53dda64_027dcc39-d642-4900-91c4-abbd9c317cb8_610e91d9-0a1c-4a38-9399-deb0ff8dcb05": "Fellbestimmung",
"17d401a9-de21-49a2-95bc-7dafa53dda64_027dcc39-d642-4900-91c4-abbd9c317cb8_6a03c92d-9e23-4c67-9e76-6a5e28224371": "Simulation",
"17d401a9-de21-49a2-95bc-7dafa53dda64_25a3a482-a119-4db4-8c4e-235ea9d8dab7_90a01be2-dc8a-4733-b302-de5554969453": "Simulation",
"17d401a9-de21-49a2-95bc-7dafa53dda64_25a3a482-a119-4db4-8c4e-235ea9d8dab7_914778bc-f7e9-4327-a78b-71b6fa8762b1": "Biosphärenreservat",
"17d401a9-de21-49a2-95bc-7dafa53dda64_7a8ff4c4-7976-45e0-8ef5-cb386d536cb3_3ae4452e-ed67-4687-849d-e9341fca2900": "Simulation",
"17d401a9-de21-49a2-95bc-7dafa53dda64_7a8ff4c4-7976-45e0-8ef5-cb386d536cb3_f6f5c087-487c-43d8-9409-648a59684a09": "Lebensraum",
"17d401a9-de21-49a2-95bc-7dafa53dda64_97b86d4e-4724-4431-9c94-d2f57696fe2e_26e6558e-8069-45a1-961d-ab1ec9c5aa83": "Holzbedarf",
"17d401a9-de21-49a2-95bc-7dafa53dda64_97b86d4e-4724-4431-9c94-d2f57696fe2e_970ff4e0-16bd-4380-8e69-91a324a59523": "Simulation",
"17d401a9-de21-49a2-95bc-7dafa53dda64_a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05_3366d6a3-684f-423a-bd7f-5c0107d4b972": "Simulation",
"17d401a9-de21-49a2-95bc-7dafa53dda64_a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05_a7188b81-e25b-456d-9742-5f11adb7d461": "Lockstock-Nachweis",
"17d401a9-de21-49a2-95bc-7dafa53dda64_a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05_d29537b9-de0b-42c2-b3da-27a3dbc57988": "Lockstock-Code",
}
CACHE_NAMES = {
"043ab9fe-64e8-4e76-8bf6-8cc9db35eba1": "1a_Infozentrum",
"37f326ed-9732-44b5-9ba7-e666d31cc4e7": "2a_Holzlager",
"bf96eee0-4c92-43d8-aced-a95e4eedae9f": "2b_Holzstapel",
"a5723715-7ba7-4431-9d0b-c91c351a3ccc": "3a_Lebensraum_Hecke",
"dd68ba57-a081-46be-9a76-e49cd5209383": "3b_Lebensraum_Hecke",
"bb21628e-d039-4c16-9fe1-68de7f448fa4": "4a_Lockstock_finden",
"8291c397-b3a9-4564-9365-bd660ab1abcc": "4b_Lockstock_finden",
"e92d8175-a65f-40de-ae76-3cbde55dfd4d": "5a_Straße",
"30451de3-2d5d-44c7-84b2-2abddbc8adcc": "5b_Straße",
"22fcc44c-64d4-4f84-ad05-8107542a04d2": "6a_Jaegerstand",
"1740e151-cd75-45c0-a06e-d724b9d69729": "6a_Jaegerstand",
"6d97d48a-7ac1-4e3a-b797-c2b4aa681a10": "5a_Straße",
"98e60f51-c4d5-4833-bc3b-2820e1bdd09d": "4b_Lockstock_finden",
"61d6dc12-11b5-4a9c-b0d8-7a38a29d772a": "5b_Straße",
"f4762feb-addb-4e82-b923-78f8c7b6aff9": "2b_Holzstapel",
"25b2cc3b-f8fd-4a21-9350-d175d837f6b6": "3a_Lebensraum_Hecke",
"5ba5046f-c956-4c21-aea5-a0a6055ed7e4": "1a_Infozentrum",
"fb60b94b-3f82-4ba9-98ac-f52105bd26f1": "2a_Holzlager",
"12b9584a-14b4-40c6-aa13-9fb11062e917": "4a_Lockstock_finden",
"19908306-8c70-4861-bec8-49e849e94722": "3b_Lebensraum_Hecke",
"None": "initial",
"only": "",
}
SEQUENCE_NAMES = {
"89b769f8-2c98-4f55-b741-1dfa022c3286": "1_Videoerklaerung",
"286cab41-6a81-4dfe-9bef-e86923ca8c97": "A_Einleitung",
"2119b80e-74d3-4f3b-b0e0-e6a1f2c1d075": "B",
"25a3a482-a119-4db4-8c4e-235ea9d8dab7": "B",
"97b86d4e-4724-4431-9c94-d2f57696fe2e": "C_mit_Dilemma",
"e08ffe7c-b24d-4fcd-9355-8a459a2c07b7": "C_mit_Dilemma",
"5f644fb4-5cc7-43a2-afb4-191dce80c875": "D_Dilemmasequenz",
"847ab5ff-7c98-4cdc-bc9e-bb619a0a98bb": "D_Dilemmasequenz",
"13127209-103c-4aed-9cce-b8a2cd9f7663": "E",
"7a8ff4c4-7976-45e0-8ef5-cb386d536cb3": "E",
"a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05": "F",
"b9571a6b-c537-4a92-8618-2d73415dec87": "F",
"027dcc39-d642-4900-91c4-abbd9c317cb8": "G",
"41abfe17-aef3-41ee-b1e5-eedc8208680f": "G",
"be59a20a-69ce-471b-8f70-76ce200e32c9": "H_Abschlusserzaehlung",
"d4073563-da42-4ad2-9a9b-20ef29da6309": "H_Abschlusserzaehlung",
"54e03082-1a6b-11e5-aa26-00199963ac6e": "seq_score",
"95d82cd3-5bda-465a-8757-7179cdafe590": "seq_score",
}
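The TASK_NAMES keys are exactly what BiogamesTasks.ids() produces: instance_config_id, sequence_id and board_id joined with underscores. A small check against the first entry above:

from analysis.util.meta_temp import TASK_NAMES

instance_config_id = "16fc3117-61db-4f50-b84f-81de6310206f"
last_board = {"sequence_id": "13127209-103c-4aed-9cce-b8a2cd9f7663",
              "board_id": "32e93082-1aa7-11e5-9827-74d43509b03a"}

# same key format as BiogamesTasks.ids()
key = f"{instance_config_id}_{last_board['sequence_id']}_{last_board['board_id']}"
assert TASK_NAMES[key] == "Lebensraum"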

analysis/util/output.py (new file, +15 lines)

@ -0,0 +1,15 @@
from datetime import datetime as dt

def flat_dict_to_csv(data):
    keys = set()
    for i in data:
        keys = keys.union(set(i.keys()))
    keys = sorted(keys)
    out = ",".join(keys)
    for i in data:
        out += "\n" + ",".join([str(i.get(j, "")) for j in keys])
    return out

def pretty_ts(timestamp, fmt="%Y-%m-%d %H:%M:%S"):
    d = dt.fromtimestamp(int(timestamp) / 1000.0)
    return d.strftime(fmt)
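Both helpers are small enough to show end to end; this is how OEBRender.render_store() turns the per-instance dicts into CSV (values are illustrative):

from analysis.util.output import flat_dict_to_csv, pretty_ts

rows = [
    {"BiogamesDuration__duration": 5400000, "BiogamesDuration__start": "2019-10-07 17:20:23"},
    {"BiogamesDuration__duration": 4800000},
]
print(flat_dict_to_csv(rows))
# BiogamesDuration__duration,BiogamesDuration__start
# 5400000,2019-10-07 17:20:23
# 4800000,

print(pretty_ts(1570461623000))
# "2019-10-07 17:20:23" on a UTC+2 machine; fromtimestamp() uses local time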


@ -155,10 +155,81 @@ KML_geo = """{
]
}"""
OEB = """{
  "logFormat": "zip",
  "entryType": "@class",
  "spatials": [
    "de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
  ],
  "actions": [
    "...QuestionAnswerEvent",
    "...SimuAnswerEvent"
  ],
  "boards": [
    "de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
  ],
  "analyzers": {
    "analysis.analyzers": [
      "BiogamesCategorizer",
      "LocationAnalyzer",
      "BiogamesDuration",
      "BiogamesTasks",
      "GameFieldInstanceGroup"
    ]
  },
  "sequences": {
    "start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
    "end": {
      "@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
      "action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
    }
  },
  "custom": {
    "simulation_rounds": [
      "de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
    ],
    "simu_data": [
      "de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
    ],
    "instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
    "instance_id": "instance_id",
    "instance_config_id": "config.@id",
    "sequences2": {
      "id_field": "sequence_id",
      "start": {
        "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
        "action": "START"
      },
      "end": {
        "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
        "action": "PAUSE"
      }
    },
    "coordinates": "location.coordinates",
    "metadata": {
      "timestamp": "timestamp",
      "gamefield": "instance_id",
      "user": "player_group_name"
    }
  },
  "source": {
    "type": "Biogames",
    "username": "ba",
    "password": "853451",
    "host": "https://biogames.kinf.wiai.uni-bamberg.de"
  },
  "render": [
    "OEBRender"
  ]
}
"""
CONFIGS = {
    "Biogames": {
        "ActivityMapper": ACTIVITY,
-       "KML": KML
+       "KML": KML,
+       "DauerEntfernungPunkteZeit": OEB,
    },
    "Geogames": {
        "KML": KML_geo,
@ -167,13 +238,14 @@ CONFIGS = {
URLS = {
    "KML": "/",
    "DauerEntfernungPunkteZeit": "/",
    "ActivityMapper": "#",
}

HOSTS = {
    #"Biogames": "http://biogames.potato.kinf.wiai.uni-bamberg.de",
    #"Biogames": "http://www.biodiv2go.de",
-   "Biogames": "http://biodiv2govm.kinf.wiai.uni-bamberg.de/",
+   "Biogames": "http://biogames.kinf.wiai.uni-bamberg.de/",
    "Geogames": "http://log_data/",
}
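BiogamesDuration feeds the "sequences2" start/end blocks of the OEB config above into a combinate() helper that is not part of this diff. Presumably it checks that every key/value pair of the filter matches the log entry, with dotted keys such as "action.@class" resolved inside the entry; the sketch below encodes that assumption and is named combinate_sketch because the real helper may differ:

def combinate_sketch(filter_spec: dict, entry: dict) -> bool:
    """Assumed behaviour: every filter key (dotted paths allowed) must equal the entry value."""
    for dotted_key, expected in filter_spec.items():
        value = entry
        for part in dotted_key.split("."):
            if not isinstance(value, dict) or part not in value:
                return False
            value = value[part]
        if value != expected:
            return False
    return True

start_filter = {"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
                "action": "START"}
entry = {"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
         "action": "START",
         "sequence_id": "13127209-103c-4aed-9cce-b8a2cd9f7663"}
assert combinate_sketch(start_filter, entry)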


@ -10,15 +10,17 @@ import time
from celery import Celery

from analysis import log_analyzer as la
from analysis.analyzers import KMLRender, ActivityMapperRender
from analysis.analyzers.render.biogames import OEBRender
from clients.webclients import CLIENTS

FLASK_DB = 1
REDIS_HOST = "redis"
-DATA_PATH = "/app/data/results/"
+DATA_PATH = "/data/results/"

RENDERERS = { # TODO
    "KMLRender": KMLRender,
    "ActivityMapper": ActivityMapperRender,
    "OEBRender": OEBRender
}

app = Celery('tasks', backend='redis://redis', broker='redis://redis')
@ -55,20 +57,25 @@ def analyze(config, log_ids, **kwargs):
    settings = la.parse_settings(config)
    store = la.run_analysis([p.path for p in os.scandir(tmpdir.name)], settings, la.LOADERS)
    os.mkdir(os.path.join(DATA_PATH, uid))
-   for category in store.get_categories():
-       data = store.get_category(category)
-       render = RENDERERS[settings.render[0]]() # TODO
-       print(category, type(category))
-       files = render.render(data, name=category[1])
-       log.error(files)
-       for file in files:
-           try:
-               head, tail = os.path.split(file)
-               target = os.path.join(DATA_PATH, uid, tail)
-               shutil.move(file, target)
-               results.append(target)
-           except FileNotFoundError as e:
-               log.exception(e)
+   render = RENDERERS[settings.render[0]]() # TODO
+   files = []
+   if settings.render[0] == "OEBRender":
+       files.append(render.render_store(store))
+   else:
+       for category in store.get_categories():
+           data = store.get_category(category)
+           print(category, type(category))
+           files.append(render.render(data, name=category[1]))
+   log.error(files)
+   for file in files:
+       try:
+           head, tail = os.path.split(file)
+           target = os.path.join(DATA_PATH, uid, tail)
+           log.error(target)
+           log.error(shutil.move(file, target))
+           results.append(target)
+       except FileNotFoundError as e:
+           log.exception(e)
    tmpdir.cleanup()
    update_status(kwargs['username'], kwargs['name'], ('done', 'FINISHED'), results=results)