activity mapper track display basic function

activity_mapper
agp8x 2017-11-14 19:32:10 +01:00
parent 4401757bef
commit e94fd665ad
7 changed files with 367 additions and 219 deletions

View File

@@ -140,6 +140,16 @@ class ActivityMapper(Analyzer):
         "sequence.question.": "question",
         "error": "error"
     }
+    colors = {
+        "simu": "blue",
+        "question": "orange",
+        "image": "green",
+        "audio": "red",
+        "video": "purple",
+        "other": "brown",
+        "map": "violet",
+        "error": "grey"
+    }

     def __init__(self, settings: LogSettings) -> None:
         super().__init__(settings)
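
The new colors table pairs each activity type that classify_entry can return with a display colour; result (next hunk) ships it to the client, where styleTrack in my.js uses it to colour track segments. A minimal lookup sketch, with a hypothetical timeline entry:

    entry = {"extra_data": {"activity_type": "question"}}  # hypothetical entry
    color = ActivityMapper.colors.get(entry["extra_data"]["activity_type"], "grey")
    # -> "orange"; falling back to "grey" for unknown types is an assumption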
@@ -157,29 +167,28 @@ class ActivityMapper(Analyzer):
         self.State: NamedTuple = namedtuple("State", ["sequence", "events", "track", "timestamp"])

-    def result_old(self, store: ResultStore) -> None:
-        instance_config_id = self.instance_config_id
-        for active_segment in self.store:  # active_segment → sequence or None (None → map active)
-            seq_data_url = "/game2/editor/config/{config_id}/sequence/{sequence_id}/".format(
-                config_id=instance_config_id,
-                sequence_id=active_segment.sequence,
-            )
-            source = self.settings.source
-            seq_data = source._get(seq_data_url).json()
-            # TODO: use sequence names
-            logger.warning(seq_data)
-            for event in active_segment.events:
-                if event[self.settings.type_field] in self.settings.boards:
-                    sequence_id = active_segment.sequence
-                    board_id = event["board_id"]
-                    local_file = download_board(board_id, instance_config_id, sequence_id, source)
-                    if local_file is not None:
-                        event["image"] = local_file[16:]
-        store.add(Result(type(self), {"instance": instance_config_id, "store": [x._asdict() for x in self.store]}))
-
-    def result(self, store: ResultStore) -> None:
-        store.add(Result(type(self), {"instance": self.instance_config_id, "track": self.tracks, "boards": self.timeline}))
+    def result(self, store: ResultStore, **kwargs) -> None:
+        for board in self.timeline:
+            if board[self.settings.type_field] in self.settings.boards:
+                if board["extra_data"]["activity_type"] == "simu":
+                    board["image"] = "simu.png"
+                    continue
+                local_file = download_board(board["board_id"], self.instance_config_id, board["sequence_id"],
+                                            self.settings.source)
+                if local_file:
+                    board['image'] = local_file
+                else:
+                    board['image'] = "ERROR_FETCHING_FILE"
+                    logger.error("error downloading board! %s %s %s", self.instance_config_id, board["sequence_id"],
+                                 board["board_id"])
+            else:
+                board["image"] = "map.png"
+        store.add(Result(type(self), {
+            "instance": self.instance_config_id,
+            "track": self.tracks,
+            "boards": self.timeline,
+            "colors": self.colors,
+        }))

     def process(self, entry: dict) -> bool:
         if self.track is None:
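
The rewritten result emits one self-contained payload per instance: every timeline board gets an image (a downloaded board file, simu.png, map.png, or an error marker), and the colour table travels with the data. A sketch of the payload shape the web view consumes, with illustrative values (the exact track and board fields are produced by process and add_track):

    payload = {
        "instance": "<instance_config_id>",
        "track": [{"type": "LineString",
                   "coordinates": [[13.40, 52.52], [13.41, 52.53]],
                   "properties": {"start_timestamp": 0, "end_timestamp": 60000,
                                  "activity_type": "question"}}],
        "boards": [{"timestamp": 1000, "image": "map.png", "coordinate": None,
                    "extra_data": {"activity_type": "map"}}],
        "colors": ActivityMapper.colors,
    }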
@@ -195,6 +204,7 @@ class ActivityMapper(Analyzer):
             board_data = get_board_data(self.settings.source, self.instance_config_id, entry["sequence_id"],
                                         entry["board_id"])
             entry["extra_data"] = board_data
+            entry["extra_data"]["activity_type"] = self.classify_entry(entry)
             entry['coordinate'] = self.new_coordinate()
             self.timeline.append(entry)
             return False
@@ -202,12 +212,15 @@ class ActivityMapper(Analyzer):
     def update_board_type(self, entry):
         type = self.classify_entry(entry)
         if not type == self.last_board_type:
-            self.add_track(activity_type=self.last_board_type,end_timestamp=entry['timestamp'])
+            self.add_track(activity_type=self.last_board_type, end_timestamp=entry['timestamp'])
             self.last_board_type = type

     def classify_entry(self, entry):
         entry_type = entry[self.settings.type_field]
         if self.filters.end(entry):
+            data = {"extra_data": {"activity_type": "map"}, "coordinate": self.new_coordinate()}
+            data.update(entry)
+            self.timeline.append(data)
             return "map"
         if not entry_type in self.settings.boards:
             return self.last_board_type
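
Note the side effect added to classify_entry: when the end filter matches, the entry is also appended to the timeline as a synthetic "map" board. Building the defaults first and then calling data.update(entry) lets the real entry fields override them. A toy illustration of that merge (the entry dict is hypothetical):

    data = {"extra_data": {"activity_type": "map"}, "coordinate": None}
    data.update({"timestamp": 42, "coordinate": [13.40, 52.52]})
    # -> {"extra_data": {"activity_type": "map"}, "coordinate": [13.40, 52.52], "timestamp": 42}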
@@ -236,6 +249,8 @@ class ActivityMapper(Analyzer):
         self.track['properties'].update(props)
         self.tracks.append(self.track)
         self.track = self.new_track(props['end_timestamp'])
+        if self.last_coordinate:
+            self.track['coordinates'].append(self.last_coordinate)

     def new_track(self, timestamp):
         return {"type": "LineString", "coordinates": [], "properties": {'start_timestamp': timestamp}}
@@ -305,7 +320,7 @@ class SimulationOrderAnalyzer(Analyzer):

 class SimulationCategorizer(CategorizerStub):  # TODO: refactor categorizer
-    __name__ = "SimulationCategorizer"# TODO: rename -.- (InstanceConfigIDCategorizer)
+    __name__ = "SimulationCategorizer"  # TODO: rename -.- (InstanceConfigIDCategorizer)

     def process(self, entry: dict) -> bool:
         if self.key is "default":
@@ -334,5 +349,3 @@ class SimulationFlagsAnalyzer(Analyzer):
     def result(self, store: ResultStore, name=None) -> None:
         store.add(Result(type(self), self.store, name=name))
-
-

View File

@@ -154,8 +154,9 @@ class ActivityMapperRender(Render):
         print(os.getcwd())
         for result in self.filter(results):
             data = result.get()
-            with open(os.path.join("static", "progress", "data", data['instance']), "w") as out:
-                json.dump(data["store"], out, indent=1)
+            with open(os.path.join("static", "progress", "data", data['instance'] + "_" + str(name) + ".json"),
+                      "w") as out:
+                json.dump(data, out, indent=1)
         return "ok"
@@ -187,7 +188,8 @@ class SimulationGroupRender(Render):
     def render(self, results: List[Result], name=None):
         data = [r.get() for r in self.filter(results)]
         print(name, len(data))
-        graph_plot(list(data), ylabel="simulation retries", title="sequential simulation retries", rotation=None, name=name)
-        #graph_fit(list(data), name=name)
+        # graph_fit(list(data), name=name)
+        graph_plot(list(data), ylabel="simulation retries", title="sequential simulation retries", rotation=None,
+                   name=name)

 result_types = [SimulationOrderAnalyzer]
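
ActivityMapperRender now writes one JSON file per (instance, name) pair instead of dumping the raw store, and the file name matches what my.js requests below static/progress/. A sketch of the path construction, reusing the IDs hard-coded in my.js further down:

    import os

    instance = "ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771"
    name = "de7df5b5-edd5-4070-840f-68854ffab9aa"
    print(os.path.join("static", "progress", "data", instance + "_" + str(name) + ".json"))
    # static/progress/data/<instance>_<name>.json, requested by the page as data/<instance>_<name>.json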

View File

@ -13,7 +13,8 @@
], ],
"analyzers": { "analyzers": {
"analyzers": [ "analyzers": [
"SimulationCategorizer", "BiogamesCategorizer",
"ActivityMapper",
"SimulationFlagsAnalyzer" "SimulationFlagsAnalyzer"
] ]
}, },
@@ -23,6 +24,7 @@
         "LogEntryCountAnalyzer",
         "SimulationOrderAnalyzer",
         "ProgressAnalyzer",
+        "SimulationCategorizer",
         "InstanceConfig"],
     "disabled_analyzers": [
         "LocomotionActionAnalyzer",

View File

@@ -14,13 +14,11 @@ from loaders import LOADERS

 logging.basicConfig(format='%(levelname)s %(name)s:%(message)s', level=logging.DEBUG)
 log: logging.Logger = logging.getLogger(__name__)
-requests_log = logging.getLogger('requests')
-requests_log.setLevel(logging.WARN)
+logging.getLogger('requests').setLevel(logging.WARN)
+logging.getLogger("urllib3").setLevel(logging.WARNING)


-def process_log(log_id: str, settings: LogSettings) -> List[Analyzer]:
-    logfile: str = "data/inst_{id}.{format}".format(id=log_id, format=settings.log_format)
-    logfile = log_id
+def process_log(logfile: str, settings: LogSettings) -> List[Analyzer]:
     loader = LOADERS[settings.log_format]()
     try:
         loader.load(logfile)
@@ -41,60 +39,30 @@ def process_log(log_id: str, settings: LogSettings) -> List[Analyzer]:
     return analyzers


-if __name__ == '__main__':
-    settings: LogSettings = load_settings("biogames2.json")
-    log_ids: List[str] = [
-        "20d4244719404ffab0ca386c76e4b112",
-        "56d9b64144ab44e7b90bf766f3be32e3",
-        "dc2cdc28ca074715b905e4aa5badff10",
-        "e32b16998440475b994ab46d481d3e0c",
-    ]
-    log_ids: List[str] = [
-        # "34fecf49dbaca3401d745fb467",
-        # "44ea194de594cd8d63ac0314be",
-        # "57c444470dbf88605433ca935c",
-        # "78e0c545b594e82edfad55bd7f",
-        # "91abfd4b31a5562b1c66be37d9",
-        "597b704fe9ace475316c345903",
-        "e01a684aa29dff9ddd9705edf8",
-        "fbf9d64ae0bdad0de7efa3eec6",
-        # "fe1331481f85560681f86827ec",
-        "fe1331481f85560681f86827ec"]
-    # "fec57041458e6cef98652df625", ]
-    log_ids = []
-    # with open("/home/clemens/git/ma/test/filtered") as src:
-    with open("/home/agp8x/git/uni/ma/project/data/0000_ref") as src:
-        for line in src:
-            line = line.strip()
-            log_ids.append(line)
+def run_analysis(log_ids: list, settings):
     store: ResultStore = ResultStore()
     for log_id in log_ids:
         for analysis in process_log(log_id, settings):
             log.info("* Result for " + analysis.name())
-            # print(analysis.result())
-            # print(analysis.render())
             analysis.result(store, name=log_id)
-    if False:
-        for r in get_renderer(analyzers.LocomotionActionAnalyzer):
-            r().render(store.get_all())
-    if False:
-        render(analyzers.LocationAnalyzer, store.get_all())
-        # print(json.dumps(store.serializable(), indent=1))
-    if False:
-        render(analyzers.ActivityMapper, store.get_all())
-        render(analyzers.ProgressAnalyzer, store.get_all())
-    if False:
-        from analyzers.postprocessing import graph
-        g = graph.Cache(settings)
-        g.run(store)
-    if False:
-        # render(analyzers.SimulationOrderAnalyzer, store.get_all())
-        for cat in store.get_categories():
-            data = store.get_category(cat)
-            render(analyzers.SimulationOrderAnalyzer, data, name=cat)
-    if False:
+    return store
+
+
+def load_ids(name: str):
+    log_ids = []
+    with open(name) as src:
+        for line in src:
+            line = line.strip()
+            log_ids.append(line)
+    return log_ids
+
+
+def urach_logs(log_ids, settings):
+    return ["data/inst_{id}.{format}".format(id=log_id, format=settings.log_format) for log_id in log_ids]
+
+
+def write_logentry_count_csv():
+    global cat, data, lines, csvfile
     LogEntryCountCSV.summary = None
     for cat in store.get_categories():
         data = store.get_category(cat)
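
The __main__ block further down composes these helpers. An equivalent run that reads IDs from a file instead of the hard-coded list could look like this (the ID file path is a placeholder):

    settings = load_settings("biogames2.json")
    store = run_analysis(urach_logs(load_ids("data/0000_ref"), settings), settings)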
@@ -118,7 +86,10 @@ if __name__ == '__main__':
         writer.writerow(["name"] + [h.split(".")[-1] for h in headers])
         for line in lines:
             writer.writerow(line)
-if True:
+
+
+def write_simulation_flag_csv():
+    global csvfile, result, i
     from datetime import datetime
     json.dump(store.serializable(), open("simus.json", "w"), indent=2)
     with open("simus.csv", "w") as csvfile:
@@ -134,10 +105,68 @@ if __name__ == '__main__':
                 len(i['answers']['universe_state']) if i['answers']['universe_state'] else 0,
                 len(i['selected_actions']) if i['selected_actions'] else 0,
                 i['timestamp'],
-                str(datetime.fromtimestamp(i['timestamp']/1000))
+                str(datetime.fromtimestamp(i['timestamp'] / 1000))
             ))
+
+
+if __name__ == '__main__':
+    settings: LogSettings = load_settings("biogames2.json")
+    log_ids_urach: List[str] = urach_logs([
+        # "34fecf49dbaca3401d745fb467",
+        # "44ea194de594cd8d63ac0314be",
+        # "57c444470dbf88605433ca935c",
+        # "78e0c545b594e82edfad55bd7f",
+        # "91abfd4b31a5562b1c66be37d9",
+        # "597b704fe9ace475316c345903",
+        # "e01a684aa29dff9ddd9705edf8",
+        # "fbf9d64ae0bdad0de7efa3eec6",
+        "fe1331481f85560681f86827ec",
+        # "fe1331481f85560681f86827ec"]
+        "fec57041458e6cef98652df625", ]
+        ,settings)
+    store: ResultStore = run_analysis(log_ids_urach, settings)
+    if False:
+        for r in get_renderer(analyzers.LocomotionActionAnalyzer):
+            r().render(store.get_all())
+    if False:
+        render(analyzers.LocationAnalyzer, store.get_all())
+        # print(json.dumps(store.serializable(), indent=1))
+    if True:
+        for cat in store.get_categories():
+            render(analyzers.ActivityMapper, store.get_category(cat), name=cat)
+        # render(analyzers.ProgressAnalyzer, store.get_all())
+    if False:
+        from analyzers.postprocessing import graph
+        g = graph.Cache(settings)
+        g.run(store)
+    if False:
+        # render(analyzers.SimulationOrderAnalyzer, store.get_all())
+        for cat in store.get_categories():
+            data = store.get_category(cat)
+            render(analyzers.SimulationOrderAnalyzer, data, name=cat)
+    if False:
+        write_logentry_count_csv()
+    if False:
+        write_simulation_flag_csv()
+
+
+    def calc_distance(geojson: str):
+        from shapely.geometry import LineString
+        from shapely.ops import transform
+        from functools import partial
+        import pyproj
+        track = LineString(json.loads(geojson)['coordinates'])
+        project = partial(
+            pyproj.transform,
+            pyproj.Proj(init='EPSG:4326'),
+            pyproj.Proj(init='EPSG:32633'))
+        return transform(project, track).length
+
+
     if False:
-        #json.dump(store.serializable(), open("new.json", "w"), indent=1)
+        # json.dump(store.serializable(), open("new.json", "w"), indent=1)
         from collections import defaultdict
         keys = [
@@ -150,7 +179,8 @@ if __name__ == '__main__':
             "map"
         ]
         import matplotlib.pyplot as plt
-        #results = []
+        # results = []
+
         places = defaultdict(list)
@@ -162,23 +192,24 @@ if __name__ == '__main__':
             print(json.dumps(result, indent=4))
             total = sum(result.values())
             print(total)
-            percentage = defaultdict(lambda :0)
-            minutes = defaultdict(lambda:0)
+            percentage = defaultdict(lambda: 0)
+            minutes = defaultdict(lambda: 0)
             for i in result:
-                percentage[i]= result[i]/total
-                minutes[i] = result[i]/60_000
-            print(json.dumps(percentage,indent=4))
+                percentage[i] = result[i] / total
+                minutes[i] = result[i] / 60_000
+            print(json.dumps(percentage, indent=4))
             if not 'error' in result:
-                #places[log.get()['instance']].append(percentage)
+                # places[log.get()['instance']].append(percentage)
                 places[log.get()['instance']].append(minutes)
+
         for place in places:
-            places[place] = sorted(places[place], key=lambda item:item['map'])
-        dummy = [0]*len(keys)
+            places[place] = sorted(places[place], key=lambda item: item['map'])
+        dummy = [0] * len(keys)
         results = []
         sites = []
         from util.meta_temp import CONFIG_NAMES
         for i in places:
             for j in places[i]:
                 ordered = []
@@ -188,38 +219,37 @@ if __name__ == '__main__':
                 results.append(dummy)
             sites.append(CONFIG_NAMES[i] if i in CONFIG_NAMES else "---")
         size = len(results)
         ind = np.arange(size)
-        width=0.9
+        width = 0.9
         print(results)
         data = list(zip(*results))
         print(data)
         lines = []
-        bottom = [0]*len(results)
+        bottom = [0] * len(results)
         for i in range(0, len(data)):
-            lines.append(plt.bar(ind,data[i], bottom=bottom, width=width)[0])
-            for k,x in enumerate(data[i]):
+            lines.append(plt.bar(ind, data[i], bottom=bottom, width=width)[0])
+            for k, x in enumerate(data[i]):
                 bottom[k] += x
         plt.legend(lines, keys)
         plt.title(", ".join(sites))
         plt.show()
-        #size = len(results)
-        #ind = np.arange(size)
-        #width = 0.9
-        #print(results)
-        #data = list(zip(*results))
-        #print(data)
-        #lines = []
-        #bottom = [0] * len(results)
-        #for i in range(0, len(data)):
+        # size = len(results)
+        # ind = np.arange(size)
+        # width = 0.9
+        # print(results)
+        # data = list(zip(*results))
+        # print(data)
+        # lines = []
+        # bottom = [0] * len(results)
+        # for i in range(0, len(data)):
         # lines.append(plt.bar(ind, data[i], bottom=bottom, width=width)[0])
         # for k, x in enumerate(data[i]):
         # bottom[k] += x
-        #plt.legend(lines, keys)
-        #plt.title("Zwei Spiele in Filderstadt (t1=237min; t2=67min)")
-        #plt.show()
-
+        # plt.legend(lines, keys)
+        # plt.title("Zwei Spiele in Filderstadt (t1=237min; t2=67min)")
+        # plt.show()
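
calc_distance reprojects a WGS84 (EPSG:4326) GeoJSON LineString into metre-based UTM zone 33N (EPSG:32633) and then measures it, so the return value is in metres; the init= keyword is the legacy pyproj 1.x API. A usage sketch with made-up Berlin coordinates (inside zone 33, so the distortion stays small):

    track_json = json.dumps({"type": "LineString",
                             "coordinates": [[13.40, 52.52], [13.41, 52.53]]})
    print(calc_distance(track_json))  # ~1300 (metres)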

View File

@@ -2,14 +2,97 @@
         src="https://code.jquery.com/jquery-3.2.1.min.js"
         integrity="sha256-hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4="
         crossorigin="anonymous"></script>
 <link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"
       integrity="sha512-M2wvCLH6DSRazYeZRIm1JnYyh22purTM+FDB5CsyxtQJYeKq83arPe5wgbNmcFXGqiSH2XR8dT/fJISVA1r/zQ=="
       crossorigin=""/>
 <script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"
         integrity="sha512-lInM/apFSqyy1o6s89K4iQUKg6ppXEgsVxT35HbzUupEVRh2Eu9Wdl4tHj7dZO0s1uvplcYGmt3498TtHq+log=="
         crossorigin=""></script>
+<script src="https://rawgit.com/Leaflet/Leaflet.heat/gh-pages/dist/leaflet-heat.js"></script>
 <script src="my.js"></script>
 <style>
-    .map { width: 512px; height: 256px; }
+    .mapDiv {
+        width: 1024px;
+        height: 768px;
+    }
+
+    .board {
+        width: 32px;
+        height: 32px;
+        display: inline-block;
+        position: relative;
+    }
+
+    .board img {
+        max-width: 100%;
+        max-height: 100%;
+        position: absolute;
+        /*bottom: 0px;*/
+    }
+
+    .board img:hover {
+        max-width: inherit;
+        max-height: inherit;
+        z-index: 99;
+        top: 20px;
+    }
+
+    .highlight {
+        /*what a nice way to highlight*/
+        display: none;
+    }
+
+    .simu {
+        background-color: blue;
+    }
+
+    .question {
+        background-color: orange;
+    }
+
+    .image {
+        background-color: green;
+    }
+
+    .audio {
+        background-color: red;
+    }
+
+    .video {
+        background-color: purple;
+    }
+
+    .other {
+        background-color: brown;
+    }
+
+    .map {
+        background-color: violet;
+    }
+
+    .error {
+        background-color: grey;
+    }
+
+    ul {
+        list-style-type: none;
+        overflow: auto;
+        overflow-y: hidden;
+        display: inline-block;
+        /*max-width:100%;
+        margin: 0 0 1em;
+        white-space: nowrap;
+        height:200px;*/
+    }
+
+    li {
+        display: inline-block;
+        vertical-align: top;
+    }
 </style>
+<div style="font-size:0.1px;position:absolute;bottom:0;">OSM Logo: CC-BY-SA
+    http://wiki.openstreetmap.org/wiki/File:Mag_map-120x120.png
+</div>

View File

@@ -1,71 +1,90 @@
-$.getJSON("tmp3.json", function (data) {
-    var list = $("<ul />");
-    var maps = {};
-    $.each(data, function (index, entry) {
-        //key: instance_id, value: AnlysisResult
-        //value.result.instance: InstanceConfig_id
-        // console.log(key, value[0].result.store[0].timestamp);
-        //$.each(value[0].result.store, function (index, entry) {
-        //console.log(entry);
-        var time = new Date(entry.timestamp);
-        var item = $("<li>", {html: entry.sequence + " @ " + time.toLocaleDateString() + " "+ time.toLocaleTimeString()});
-        var container = $("<p />");
-        if (entry.track.length > 0) {
-            var mapName = "map" + index;
-            //console.log(mapName, entry.track.length);
-            var mapContainer = $("<div />", {id: mapName, class: "map"});
-            var track = [];
-            $.each(entry.track, function (i, elem) {
-                track.push([elem.coordinates[1], elem.coordinates[0]]);
-            });
-            maps[mapName] = track;
-            /* mapContainer.ready(function () {
-                var map = L.map(mapName, {maxZoom: 22});
-                L.control.scale().addTo(map);
-                var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
-                    attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
-                }).addTo(map);
-                var track = [];
-                $.each(entry.track, function (i, elem) {
-                    track.push([elem.coordinates[1], elem.coordinates[0]]);
-                });
-                var layer = L.polyline(track, {color: "green"});
-                console.log(track);
-                L.control.layers(null, [layer]).addTo(map);
-            });*/
-            mapContainer.appendTo(container);
-        }
-        $.each(entry.events, function (i, event) {
-            if ("image" in event) {
-                $("<img />", {src: event.image, height: 200}).appendTo(container);
-            }
-        });
-        container.appendTo(item);
-        item.appendTo(list);
-        //});
-    });
-    list.appendTo("body");
-    var slider = $("<input />", {type: "range" })
-    /*});
-    $(window).on("load", function () {*/
-    // setTimeout(function () {
-    //console.log(maps);
-    $.each(maps, function (mapName, track) {
-        //console.log("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAa");
-        var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
-            attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
-        });
-        var map = L.map(mapName, {layers: [tiles]});
-        L.control.scale().addTo(map);
-        // console.log(mapName, track);
-        var layer = L.polyline(track, {color: "green"}).addTo(map);
-        map.fitBounds(layer.getBounds());
-        //console.log(layer)
-        //L.control.layers({"osm":tiles}, {layer]).addTo(map);
-    });
-    // }, 2000);
-});
+//$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_03b9b6b4-c8ab-4182-8902-1620eebe8889.json", function (data) {
+$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_de7df5b5-edd5-4070-840f-68854ffab9aa.json", function (data) {
+    var images = {};
+    var mapContainer = $("<div />", {id: "mainMap", class: "mapDiv"});
+    mapContainer.appendTo("body");
+    var tiles = {
+        "osm": L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
+            attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
+        }),
+        "openstreetmap": L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
+            attribution: '&copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>'
+        }),
+        "esri sat": L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
+            attribution: 'Tiles &copy; Esri &mdash; Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
+        }),
+        "google sat": L.tileLayer('http://{s}.google.com/vt/lyrs=s&x={x}&y={y}&z={z}', {
+            maxZoom: 20,
+            subdomains: ['mt0', 'mt1', 'mt2', 'mt3']
+        })
+    };
+    var map = L.map("mainMap", {layers: [tiles.osm], maxZoom: 22, maxNativeZoom: 19});
+
+    function styleTrack(feature) {
+        var styles = {};
+        styles.color = data.colors[feature.properties.activity_type];
+        return styles;
+    }
+
+    var highlighted = null;
+
+    function onClick(e) {
+        var start = e.target.feature.geometry.properties.start_timestamp;
+        var end = e.target.feature.geometry.properties.end_timestamp;
+        var changed = highlighted !== e.target.feature;
+        $.each(images, function (timestamp, board) {
+                if ((timestamp >= start && timestamp < end) && changed) {
+                    board.image.first().addClass("highlight");
+                } else {
+                    board.image.removeClass("highlight");
+                    highlighted = null;
+                }
+            }
+        );
+        if (changed) {
+            highlighted = e.target.feature;
+        }
+    }
+
+    var coords = [];
+
+    function onEachFeature(feature, layer) {
+        layer.setStyle(styleTrack(feature));
+        layer.on('click', onClick);
+        if (feature.coordinates.length > 1) {
+            coords = coords.concat(feature.coordinates.map(function (p) {
+                return [p[1], p[0], 0.1];
+            }));
+        }
+    }
+
+    var track = L.geoJSON(data['track'], {
+        //style: styleTrack,
+        onEachFeature: onEachFeature
+    }).addTo(map);
+    map.fitBounds(track.getBounds());
+    var heat = L.heatLayer(coords);
+    L.control.layers(tiles, {"heatmap": heat}).addTo(map);
+    var marker = null;
+    var list = $("<ul />");
+    $.each(data["boards"], function (index, entry) {
+        //console.log(index, entry);
+        var item = $("<li>", {class: entry.extra_data.activity_type});
+        var container = $("<div>", {class: "board"});
+        var image = $("<img>", {src: entry.image.replace("static/progress/", ""), height: 200});
+        image.attr("data-time", entry.timestamp);
+        image.hover(function () {
+            marker = L.geoJSON(entry.coordinate).addTo(map);
+        }, function () {
+            map.removeLayer(marker);
+        });
+        images[entry.timestamp] = {image: image, coordinate: entry.coordinate};
+        image.appendTo(container);
+        container.appendTo(item);
+        item.appendTo(list);
+    });
+    list.appendTo("body");
+});

View File

@@ -54,7 +54,6 @@ def get_board_data(source, instance_id, sequence_id, board_id):
         return {"class": "error"}
     result = {
         "class": instance_data["@class"],
-        "id": instance_data["@id"]
     }
     for i in ["image", "audio", "video"]:
         key = i + "_file"