add ActivityMapper and first web-impression

parent 70f72e6162
commit 0d66cc7c88
@@ -2,13 +2,14 @@ from typing import List
 from .analyzer import Analyzer, Result
 from .analyzer.biogames import BoardDurationAnalyzer, SimulationRoundsAnalyzer, ActivationSequenceAnalyzer, \
-    BiogamesCategorizer
+    BiogamesCategorizer, ActivityMapper
 from .analyzer.default import LogEntryCountAnalyzer, LocationAnalyzer, LogEntrySequenceAnalyzer, ActionSequenceAnalyzer, \
-    CategorizerStub
+    CategorizerStub, Store
 from .analyzer.locomotion import LocomotionActionAnalyzer, CacheSequenceAnalyzer
 from .analyzer.mask import MaskSpatials
 from .render import Render
-from .render.biogames import SimulationRoundsRender, BoardDurationHistRender, BoardDurationBoxRender
+from .render.biogames import SimulationRoundsRender, BoardDurationHistRender, BoardDurationBoxRender, \
+    ActivityMapperRender
 from .render.default import PrintRender, JSONRender, TrackRender, HeatMapRender
 from .render.locomotion import LocomotionActionRelativeRender, LocomotionActionAbsoluteRender, \
     LocomotionActionRatioRender
@@ -37,6 +38,9 @@ __MAPPING__ = {
         TrackRender,
         HeatMapRender,
     ],
+    ActivityMapper: [
+        ActivityMapperRender
+    ]
 }
 
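Note: __MAPPING__ pairs each Analyzer with the Render classes that can draw its results, so the new ActivityMapper entry is what makes ActivityMapperRender reachable. A minimal sketch of the lookup this dict supports (the name get_renderer appears in the run script further down; its body here is an assumption, not the real implementation):

    # hypothetical lookup over __MAPPING__; the real get_renderer is not shown in this diff
    def get_renderer(analyzer_cls: type) -> list:
        return __MAPPING__.get(analyzer_cls, [])  # e.g. ActivityMapper -> [ActivityMapperRender]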
@@ -1,6 +1,11 @@
 import logging
-from collections import defaultdict
+from collections import defaultdict, namedtuple
+from types import SimpleNamespace
+from typing import List, NamedTuple
 
+import os
+
+from util import json_path, combinate
 from . import Result, LogSettings, Analyzer, ResultStore
 from .default import CategorizerStub
 
@@ -87,6 +92,7 @@ class ActivationSequenceAnalyzer(Analyzer):
 
 
 class BiogamesCategorizer(CategorizerStub):
+    __name__ = "BiogamesCategorizer"
     def __init__(self, settings: LogSettings):
         super().__init__(settings)
 
@@ -95,3 +101,76 @@ class BiogamesCategorizer(CategorizerStub):
         if entry[self.settings.type_field] in self.settings.custom['instance_start']:
             self.key = entry[self.settings.custom['instance_id']]
         return False
+
+
+class ActivityMapper(Analyzer):
+    __name__ = "ActivityMapper"
+    def __init__(self, settings: LogSettings) -> None:
+        super().__init__(settings)
+        # State must be assigned before the annotated self.store line below:
+        # PEP 526 evaluates the List[self.State] annotation at runtime.
+        self.State: NamedTuple = namedtuple("State", ["sequence", "events", "track", "timestamp"])
+        self.store: List[self.State] = []
+        self.instance_config_id: str = None
+        self.filters = SimpleNamespace()
+        self.filters.start = lambda entry: combinate(self.settings.custom["sequences2"]["start"], entry)
+        self.filters.end = lambda entry: combinate(self.settings.custom["sequences2"]["end"], entry)
+
+    def result(self, store: ResultStore) -> None:
+        for active_segment in self.store:  # active_segment.sequence is a sequence id, or None (map active)
+            seq_data_url = "{host}/game2/editor/config/{config_id}/sequence/{sequence_id}/".format(
+                host=self.settings.custom["host"],
+                config_id=self.instance_config_id,
+                sequence_id=active_segment.sequence,
+            )
+            seq_data = self.settings.source._get(seq_data_url).json()
+            # TODO: use sequence names
+            for event in active_segment.events:
+                if event[self.settings.type_field] in self.settings.boards:
+                    local_file = "static/progress/images/{config_id}/{sequence_id}/{board_id}".format(
+                        config_id=self.instance_config_id,
+                        sequence_id=active_segment.sequence,
+                        board_id=event["board_id"])
+                    event["image"] = local_file[16:]  # strip the leading "static/progress/"
+                    if os.path.exists(local_file):
+                        continue
+                    url = "{host}/game2/editor/config/{config_id}/sequence/{sequence_id}/board/{board_id}/".format(
+                        host=self.settings.custom["host"],
+                        config_id=self.instance_config_id,
+                        sequence_id=active_segment.sequence,
+                        board_id=event["board_id"]
+                    )
+                    board = self.settings.source._get(url)
+                    if not board.ok:
+                        raise ConnectionError()
+                    data = board.json()
+                    preview_url = json_path(data, "preview_url.medium")
+                    logger.debug(preview_url)
+                    os.makedirs(local_file[:-len(event["board_id"])], exist_ok=True)
+                    self.settings.source.download_file(self.settings.custom['host'] + preview_url, local_file)
+        store.add(Result(type(self), {"instance": self.instance_config_id, "store": [x._asdict() for x in self.store]}))
+
+    def process(self, entry: dict) -> bool:
+        if self.instance_config_id is None:
+            if entry[self.settings.type_field] in self.settings.custom['instance_start']:
+                self.instance_config_id = json_path(entry, self.settings.custom['instance_config_id'])
+        if self.filters.start(entry):
+            self.store.append(
+                self.State(
+                    sequence=json_path(entry, json_path(self.settings.custom, "sequences2.id_field")),
+                    events=[],
+                    track=[],
+                    timestamp=entry['timestamp']))
+        elif self.filters.end(entry) or not self.store:
+            self.store.append(self.State(sequence=None, events=[], track=[], timestamp=entry['timestamp']))
+
+        if entry[self.settings.type_field] in self.settings.spatials:
+            self.store[-1].track.append(
+                {
+                    'timestamp': entry['timestamp'],
+                    'coordinates': json_path(entry, "location.coordinates"),
+                    'accuracy': entry['accuracy']
+                }
+            )
+        else:
+            self.store[-1].events.append(entry)
+        return False
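For orientation: process() slices the log into segments. A new State opens whenever the configured sequences2 start filter matches; a sequence=None segment (meaning "only the map is active") opens on the end filter or an empty store. Spatial entries are appended to the open segment's track, everything else to its events. A toy illustration of that bucketing with invented entries (field names are examples, not the real log schema):

    from collections import namedtuple

    State = namedtuple("State", ["sequence", "events", "track", "timestamp"])
    segments = [State(sequence="a1b2", events=[], track=[], timestamp=0)]

    spatial = {"timestamp": 5, "accuracy": 4.0,
               "location": {"coordinates": [11.58, 48.14]}}
    # spatial entry -> track of the open (= last) segment, mirroring process()
    segments[-1].track.append({"timestamp": spatial["timestamp"],
                               "coordinates": spatial["location"]["coordinates"],
                               "accuracy": spatial["accuracy"]})
    # any other entry -> events of the open segment
    segments[-1].events.append({"timestamp": 6, "board_id": "b7"})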
@@ -93,3 +93,21 @@ class CategorizerStub(Analyzer):
     def __init__(self, settings: LogSettings):
         super().__init__(settings)
         self.key = "default"
+
+
+class Store(Analyzer):
+    """
+    Store the entire log
+    """
+    __name__ = "Store"
+
+    def result(self, store: ResultStore) -> None:
+        store.add(Result(type(self), list(self.store)))
+
+    def process(self, entry: dict) -> bool:
+        self.store.append(entry)
+        return False
+
+    def __init__(self, settings: LogSettings):
+        super().__init__(settings)
+        self.store: list = []
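Store is the identity analyzer: it keeps every entry it sees and republishes the whole log as one Result, so generic renderers can consume raw entries. Every process() in this commit returns False, which suggests False means "entry not consumed, keep offering it to other analyzers". A hedged sketch of the driver loop this implies (the real loop lives in the run script, outside this diff):

    # assumption: each entry is offered to every analyzer in turn; a True
    # return would mark the entry as consumed (none of these analyzers do)
    def feed(analyzers: list, entries) -> None:
        for entry in entries:
            for analyzer in analyzers:
                if analyzer.process(entry):
                    break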
@@ -4,7 +4,7 @@ from typing import List, Tuple
 import matplotlib.pyplot as plt
 
 from . import Render
-from .. import Result, SimulationRoundsAnalyzer, BoardDurationAnalyzer
+from .. import Result, SimulationRoundsAnalyzer, BoardDurationAnalyzer, ActivityMapper
 
 
 def plot(src_data: List[Tuple[str, List[int]]]):
@@ -30,8 +30,8 @@ class SimulationRoundsRender(Render):
 
     result_types = [SimulationRoundsAnalyzer]
 
-class BoardDurationHistRender(Render):
 
+class BoardDurationHistRender(Render):
     result_types = [BoardDurationAnalyzer]
 
     def render(self, results: List[Result]):
@@ -61,4 +61,11 @@ class BoardDurationBoxRender(Render):
         data[board['id']].append(duration)
     data_tuples = [(key, data[key]) for key in sorted(data)]
     data_tuples = sorted(data_tuples, key=lambda x: sum(x[1]))
-    plot(data_tuples)
+    plot(data_tuples)
+
+
+class ActivityMapperRender(Render):
+    result_types = [ActivityMapper]
+
+    def render(self, results: List[Result]):
+        pass
@@ -27,7 +27,7 @@ class TrackRender(Render):
         for result in self.filter(results):
             if len(result.get()) > 0:
                 data.append(
-                    [[entry['location']['coordinates'][1], entry['location']['coordinates'][0]] for entry in
+                    [[entry['location']['coordinates'][1], entry['location']['coordinates'][0]] for entry in  # TODO: configurable
                      result.get()])
         dumps = json.dumps(data)
         with open("track_data.js", "w") as out:
@@ -1,5 +1,13 @@
 import json
 import sys
+from sources import SOURCES
+
+
+def load_source(config):
+    if config["type"] in SOURCES:
+        source = SOURCES[config["type"]]()
+        source.connect(**config)
+        return source
 
 
 class LogSettings:
@@ -25,6 +33,8 @@ class LogSettings:
         self.sequences = json_dict['sequences']
         if 'custom' in json_dict:
             self.custom = json_dict['custom']
+        if "source" in json_dict:
+            self.source = load_source(json_dict['source'])
 
     def __repr__(self):
         return str({
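load_source() dispatches on config["type"] through the SOURCES registry and hands the whole config dict to connect(); note that it implicitly returns None for unknown types. A usage sketch, with values mirroring the sample config further down:

    config = {
        "type": "Biogames",
        "url": "http://0.0.0.0:5000/game2/instance/log/list/",
        "login_url": "http://localhost:5000/game2/auth/json-login",
        "username": "dev",
        "password": "dev",
    }
    # source = load_source(config)  # -> a connected Biogames instance (or None)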
@@ -14,11 +14,12 @@
     "analyzers": {
         "analyzers": [
             "BiogamesCategorizer",
-            "LocomotionActionAnalyzer",
-            "LogEntryCountAnalyzer"
+            "ActivityMapper"
         ]
     },
     "disabled_analyzers": [
         "LocomotionActionAnalyzer",
         "LogEntryCountAnalyzer",
         "LocationAnalyzer",
         "LogEntryCountAnalyzer",
         "LogEntrySequenceAnalyzer",
@@ -44,6 +45,26 @@
             "de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
         ],
         "instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
-        "instance_id": "instance_id"
+        "instance_id": "instance_id",
+        "instance_config_id": "config.@id",
+        "sequences2": {
+            "id_field": "sequence_id",
+            "start": {
+                "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+                "action": "START"
+            },
+            "end": {
+                "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+                "action": "PAUSE"
+            }
+        },
+        "host": "http://0.0.0.0:5000"
+    },
+    "source": {
+        "type": "Biogames",
+        "url": "http://0.0.0.0:5000/game2/instance/log/list/",
+        "login_url": "http://localhost:5000/game2/auth/json-login",
+        "username": "dev",
+        "password": "dev"
+    }
 }
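Two of the new custom keys are dotted json_path expressions: instance_config_id ("config.@id") is resolved against the LogEntryStartInstance entry, and sequences2.id_field names the field carrying the sequence id. A sketch of the dotted-path resolution (the entry shape is an invented example; see util.py at the end of this diff for json_path itself):

    entry = {"@class": "...LogEntryStartInstance", "config": {"@id": "c42"}}
    # json_path(entry, "config.@id") -> "c42"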
load.py
@@ -1,51 +0,0 @@
-import json
-import sqlite3
-import tempfile
-import zipfile
-from json import loads as json_loads
-
-
-class Loader:
-    def load(self, file: str):
-        raise NotImplementedError()
-
-    def get_entry(self) -> object:
-        raise NotImplementedError()
-
-
-class JSONLoader(Loader):
-    data = None
-
-    def load(self, file: str):
-        self.data = json.load(open(file))
-
-    def get_entry(self) -> dict:
-        for entry in self.data:
-            yield entry
-
-
-class SQLiteLoader(Loader):
-    conn = None
-
-    def load(self, file: str):
-        self.conn = sqlite3.connect(file)
-
-    def get_entry(self) -> dict:
-        cursor = self.conn.cursor()
-        cursor.execute("SELECT * FROM log_entry")
-        for seq, timestamp, json in cursor.fetchall():
-            yield json_loads(json)
-
-
-class ZipSQLiteLoader(SQLiteLoader):
-    def load(self, file: str):
-        with zipfile.ZipFile(file, "r") as zipped_log, tempfile.TemporaryDirectory() as tmp:
-            zipped_log.extract("instance_log.sqlite", path=tmp)
-            super(ZipSQLiteLoader, self).load("{dir}/instance_log.sqlite".format(dir=tmp))
-
-
-LOADERS = {
-    "json": JSONLoader,
-    "sqlite": SQLiteLoader,
-    "zip": ZipSQLiteLoader
-}
@@ -0,0 +1,8 @@
+from .biogames import SQLiteLoader, ZipSQLiteLoader
+from .loader import JSONLoader
+
+LOADERS = {
+    "json": JSONLoader,
+    "sqlite": SQLiteLoader,
+    "zip": ZipSQLiteLoader
+}
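The registry keyed by file-type string survives the split of load.py into a loaders package, so callers presumably select a loader by file extension. A hedged sketch of that dispatch (open_log is an invented helper; the real call site in the run script is not shown in this diff):

    def open_log(path: str):
        loader = LOADERS[path.rsplit(".", 1)[-1]]()  # e.g. "zip" -> ZipSQLiteLoader
        loader.load(path)
        return loader.get_entry()  # generator of dict log entries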
@@ -0,0 +1,29 @@
+import os
+import sqlite3
+import tempfile
+import zipfile
+from json import loads as json_loads
+
+from .loader import Loader
+
+DB_FILE = "instance_log.sqlite"
+
+
+class SQLiteLoader(Loader):
+    conn = None
+
+    def load(self, file: str):
+        self.conn = sqlite3.connect(file)
+
+    def get_entry(self) -> dict:
+        cursor = self.conn.cursor()
+        cursor.execute("SELECT * FROM log_entry")
+        for seq, timestamp, json in cursor.fetchall():
+            yield json_loads(json)
+
+
+class ZipSQLiteLoader(SQLiteLoader):
+    def load(self, file: str):
+        with zipfile.ZipFile(file, "r") as zipped_log, tempfile.TemporaryDirectory() as tmp:
+            zipped_log.extract(DB_FILE, path=tmp)
+            super(ZipSQLiteLoader, self).load(os.path.join(tmp, DB_FILE))
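One caveat worth noting about ZipSQLiteLoader (an observation, not something this commit changes): the TemporaryDirectory is removed when load() returns, so the already-open sqlite3 connection keeps reading an unlinked file. That works on POSIX but would fail on Windows. A sketch of a variant that pins the directory's lifetime to the loader instead:

    # hypothetical variant: keep the temp dir alive as long as the loader
    class PinnedZipSQLiteLoader(ZipSQLiteLoader):
        def load(self, file: str):
            self._tmp = tempfile.TemporaryDirectory()  # released with the loader
            with zipfile.ZipFile(file, "r") as zipped_log:
                zipped_log.extract(DB_FILE, path=self._tmp.name)
            SQLiteLoader.load(self, os.path.join(self._tmp.name, DB_FILE))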
@@ -0,0 +1,20 @@
+import json
+
+
+class Loader:
+    def load(self, file: str):
+        raise NotImplementedError()
+
+    def get_entry(self) -> object:
+        raise NotImplementedError()
+
+
+class JSONLoader(Loader):
+    data = None
+
+    def load(self, file: str):
+        self.data = json.load(open(file))
+
+    def get_entry(self) -> dict:
+        for entry in self.data:
+            yield entry
@@ -6,7 +6,7 @@ import analyzers
 from analyzers import get_renderer, Analyzer, render
 from analyzers.analyzer import ResultStore
 from analyzers.settings import LogSettings, load_settings
-from load import LOADERS
+from loaders import LOADERS
 
 logging.basicConfig(format='%(levelname)s %(name)s:%(message)s', level=logging.DEBUG)
 log: logging.Logger = logging.getLogger(__name__)
@@ -40,7 +40,7 @@ if __name__ == '__main__':
         "e32b16998440475b994ab46d481d3e0c",
     ]
     log_ids: List[str] = [
-        "34fecf49dbaca3401d745fb467",
+        #"34fecf49dbaca3401d745fb467",
         # "44ea194de594cd8d63ac0314be",
         # "57c444470dbf88605433ca935c",
         # "78e0c545b594e82edfad55bd7f",
@@ -49,7 +49,8 @@ if __name__ == '__main__':
         # "e01a684aa29dff9ddd9705edf8",
         # "fbf9d64ae0bdad0de7efa3eec6",
         # "fe1331481f85560681f86827ec",
-        "fec57041458e6cef98652df625", ]
+        "fe1331481f85560681f86827ec"]
+        #"fec57041458e6cef98652df625", ]
     store: ResultStore = ResultStore()
     for log_id in log_ids:
         for analysis in process_log(log_id, settings):
@@ -1,2 +1,3 @@
 requests
 numpy
+matplotlib
@@ -0,0 +1,5 @@
+from .biogames import Biogames
+
+SOURCES = {
+    "Biogames": Biogames,
+}
@@ -0,0 +1,79 @@
+import json
+import logging
+import typing
+from tempfile import TemporaryDirectory
+
+import os
+
+from sources.source import Source
+
+import shutil
+import requests
+
+log: logging.Logger = logging.getLogger(__name__)
+
+
+class Biogames(Source):
+    def __init__(self):
+        self.headers: typing.Dict[str, str] = {'Accept': 'application/json'}
+        self.cookies: typing.Dict[str, str] = {}
+        self.id2link: typing.Dict[str, str] = {}
+
+    def connect(self, **kwargs):
+        for i in ['username', 'password', 'url', 'login_url']:
+            if not i in kwargs:
+                raise ValueError("missing value " + i)
+        csrf_request = requests.get(kwargs['url'])
+        if csrf_request.status_code != 200:
+            raise ConnectionError("unable to obtain CSRF token (" + str(csrf_request) + ")")
+        self.cookies['csrftoken'] = csrf_request.cookies['csrftoken']
+        log.info("obtained CSRF token (" + self.cookies['csrftoken'] + ")")
+        login_payload = {
+            'username': kwargs['username'],
+            'password': kwargs['password'],
+            'next': '',
+            'csrfmiddlewaretoken': 'csrftoken'
+        }
+        login = requests.post(kwargs['login_url'], data=json.dumps(login_payload), cookies=self.cookies)
+        if login.status_code != 200:
+            raise ConnectionError("Unable to authenticate!", login, login.text)
+        self.cookies['sessionid'] = login.cookies['sessionid']
+        log.info("obtained sessionid (" + self.cookies['sessionid'] + ")")
+        self.url = kwargs['url']
+        log.info("stored url (" + self.url + ")")
+
+    def list(self):
+        logs_query = requests.get(self.url, cookies=self.cookies, headers=self.headers)
+        log.info(logs_query.status_code)
+        logs = logs_query.json()
+        log.info(len(logs))
+        for i in logs:
+            self.id2link[i["id"]] = i["link"]  # TODO
+        return logs
+
+    def get(self, ids: typing.Collection):
+        dir = TemporaryDirectory()
+        files = []
+        for i in ids:
+            url = self.id2link[i]
+            filename = os.path.join(dir.name, url.split("/")[-1])
+            file = self.download_file(url, filename)
+            if file:
+                files.append(file)
+        return dir
+
+    def download_file(self, url, filename):
+        with open(filename, "wb") as out:
+            try:
+                download = requests.get(url, cookies=self.cookies, stream=True)
+                shutil.copyfileobj(download.raw, out)
+                return filename
+            except Exception as e:
+                log.exception(e)
+                os.remove(filename)
+
+    def close(self):
+        pass
+
+    def _get(self, url):
+        return requests.get(url, cookies=self.cookies, headers=self.headers, stream=True)
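Biogames talks to the Django backend in three steps: connect() fetches a csrftoken, posts JSON credentials for a sessionid, and remembers the list URL; list() caches the id-to-link mapping; get() streams the selected logs into a TemporaryDirectory. A hedged usage sketch (credentials and endpoints are the dev values from the sample config above):

    src = Biogames()
    # src.connect(username="dev", password="dev",
    #             url="http://0.0.0.0:5000/game2/instance/log/list/",
    #             login_url="http://localhost:5000/game2/auth/json-login")
    # logs = src.list()                # [{"id": ..., "link": ...}, ...]
    # tmp = src.get([logs[0]["id"]])   # TemporaryDirectory holding the files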
@@ -0,0 +1,15 @@
+import typing
+
+
+class Source:
+    def connect(self, **kwargs):
+        raise NotImplementedError
+
+    def list(self):
+        raise NotImplementedError
+
+    def get(self, ids: typing.Collection):
+        raise NotImplementedError
+
+    def close(self):
+        raise NotImplementedError
@@ -0,0 +1,15 @@
+<script
+        src="https://code.jquery.com/jquery-3.2.1.min.js"
+        integrity="sha256-hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4="
+        crossorigin="anonymous"></script>
+<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"
+      integrity="sha512-M2wvCLH6DSRazYeZRIm1JnYyh22purTM+FDB5CsyxtQJYeKq83arPe5wgbNmcFXGqiSH2XR8dT/fJISVA1r/zQ=="
+      crossorigin=""/>
+<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"
+        integrity="sha512-lInM/apFSqyy1o6s89K4iQUKg6ppXEgsVxT35HbzUupEVRh2Eu9Wdl4tHj7dZO0s1uvplcYGmt3498TtHq+log=="
+        crossorigin=""></script>
+
+<script src="my.js"></script>
+<style>
+    .map { width: 512px; height: 256px; }
+</style>
@@ -0,0 +1,70 @@
+$.getJSON("tmp3.json", function (data) {
+    var list = $("<ul />");
+    var maps = {};
+    $.each(data, function (key, value) {
+        //key: instance_id, value: AnalysisResult
+        //value.result.instance: InstanceConfig_id
+        // console.log(key, value[0].result.store[0].timestamp);
+        $.each(value[0].result.store, function (index, entry) {
+            //console.log(entry);
+            var time = new Date(entry.timestamp);
+            var item = $("<li>", {html: entry.sequence + " @ " + time.toLocaleDateString() + " " + time.toLocaleTimeString()});
+            var container = $("<p />");
+            if (entry.track.length > 0) {
+                var mapName = "map" + index;
+                //console.log(mapName, entry.track.length);
+                var mapContainer = $("<div />", {id: mapName, class: "map"});
+                var track = [];
+                $.each(entry.track, function (i, elem) {
+                    track.push([elem.coordinates[1], elem.coordinates[0]]);
+                });
+                maps[mapName] = track;
+
+                /* mapContainer.ready(function () {
+                    var map = L.map(mapName, {maxZoom: 22});
+                    L.control.scale().addTo(map);
+                    var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
+                        attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
+                    }).addTo(map);
+                    var track = [];
+                    $.each(entry.track, function (i, elem) {
+                        track.push([elem.coordinates[1], elem.coordinates[0]]);
+                    });
+                    var layer = L.polyline(track, {color: "green"});
+                    console.log(track);
+                    L.control.layers(null, [layer]).addTo(map);
+                });*/
+
+                mapContainer.appendTo(container);
+            }
+            $.each(entry.events, function (i, event) {
+                if ("image" in event) {
+                    $("<img />", {src: event.image, height: 200}).appendTo(container);
+                }
+            });
+            container.appendTo(item);
+            item.appendTo(list);
+        });
+    });
+    list.appendTo("body");
+    /*});
+
+    $(window).on("load", function () {*/
+    // setTimeout(function () {
+
+    //console.log(maps);
+    $.each(maps, function (mapName, track) {
+        //console.log("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAa");
+        var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
+            attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
+        });
+        var map = L.map(mapName, {layers: [tiles]});
+        L.control.scale().addTo(map);
+        // console.log(mapName, track);
+        var layer = L.polyline(track, {color: "green"}).addTo(map);
+        map.fitBounds(layer.getBounds());
+        //console.log(layer)
+        //L.control.layers({"osm":tiles}, {layer]).addTo(map);
+    });
+    // }, 2000);
+});
@@ -12,7 +12,7 @@ def json_path(obj: dict, key: str):
         return json_path(obj[child_key[0]], ".".join(child_key[1:]))
 
 
-def combinate(settings: dict, entry: dict) -> bool:
+def combinate(settings: dict, entry: dict) -> bool:  # TODO: better name...
     """combine all settings {<key>: <expected>} with entry using AND"""
     result = True
    for key, value in settings.items():
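combinate() is the AND-combinator the sequences2 filters rely on: every key in settings must be present in the entry with exactly the expected value; extra entry fields are ignored. A worked example (entry values are illustrative):

    start = {"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
             "action": "START"}
    entry = dict(start, sequence_id="a1b2", timestamp=0)
    # combinate(start, entry) -> True; with entry["action"] = "PAUSE" -> False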