wörk
parent
885fd770f7
commit
977e8a5eab
|
|
@ -0,0 +1,19 @@
|
||||||
|
class Analyzer:
    """Operate on log entries, one at a time.

    Abstract base class: subclasses implement ``process`` to consume
    entries and ``result`` to publish their findings.
    """

    def __init__(self, settings: LogSettings) -> None:
        """
        :param settings: Configuration for the current analysis run
        """
        self.settings: LogSettings = settings

    def process(self, entry: dict) -> bool:
        """
        Process an entry

        :param entry: Entry to process
        :return: True if consumed, False for further analysis
        """
        raise NotImplementedError()

    def result(self, store: ResultStore, name=None) -> None:
        """
        Publish the findings of this analyzer into the given store.

        :param store: Target store for the results
        :param name: Optional label to categorize the results
        """
        raise NotImplementedError()

    def name(self) -> str:
        """Return the class name of this analyzer instance."""
        # Fix: instances do not carry ``__name__``; the original
        # ``self.__name__`` raised AttributeError at runtime.
        return type(self).__name__
|
||||||
|
|
@ -0,0 +1,13 @@
|
||||||
|
class Render:
    """Base class for rendering analysis results into output lines.

    ``result_types`` restricts which analyzer classes this renderer
    consumes; an empty list means "accept everything".
    """

    # Analyzer classes whose results this renderer accepts (empty = all).
    result_types = []

    def render(self, results: List[Result], name=None) -> List[str]:
        """
        Render the given results into output lines.

        :param results: Results to render
        :param name: Optional label for the rendered output
        :return: Rendered output lines
        """
        # Fix: the original annotation ``[str]`` was a list literal,
        # not a type; ``List[str]`` matches ``List[Result]`` above.
        raise NotImplementedError()

    def filter(self, results: List[Result]):
        """
        Reduce ``results`` to those this renderer is interested in.

        :param results: Results to filter
        :return: ``results`` unchanged when ``result_types`` is empty,
            otherwise an iterator over the matching results
        """
        if not self.result_types:
            return results
        return filter(self.__filter__, results)

    def __filter__(self, obj: Result):
        """Predicate: does ``obj`` originate from an accepted analyzer?"""
        # NOTE(review): dunder-style names are reserved by convention;
        # kept unchanged for backward compatibility with subclasses.
        return obj.analysis() in self.result_types
|
||||||
|
|
@ -1,6 +1,5 @@
|
||||||
|
|
||||||
\section{Location based Games}
|
\section{Location based Games: Put the 'fun' in education}
|
||||||
\subsection{Put the 'fun' in education}
|
|
||||||
Intersection of GIS and gaming technology\cite{Ahlqvist2018}
|
Intersection of GIS and gaming technology\cite{Ahlqvist2018}
|
||||||
\begin{itemize}
|
\begin{itemize}
|
||||||
\item Game actions tied to real-world spatial places ('Break the magic circle')%TODO citation
|
\item Game actions tied to real-world spatial places ('Break the magic circle')%TODO citation
|
||||||
|
|
@ -11,7 +10,7 @@ Intersection of GIS and gaming technology\cite{Ahlqvist2018}
|
||||||
\end{itemize}
|
\end{itemize}
|
||||||
\image{.5\textwidth}{../../PresTeX/images/gg2}{Geogame map view}{img:gg2}
|
\image{.5\textwidth}{../../PresTeX/images/gg2}{Geogame map view}{img:gg2}
|
||||||
|
|
||||||
\subsection{Research with location based games}
|
\section{Research with location based games}
|
||||||
|
|
||||||
\begin{itemize}
|
\begin{itemize}
|
||||||
\item Define mission statement
|
\item Define mission statement
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
\label{sec:scope}
|
\label{sec:scope}
|
||||||
\subsection{Goal definition}
|
\section{Goal definition}
|
||||||
A Framework for the Analysis of Spatial Game Data
|
A Framework for the Analysis of Spatial Game Data
|
||||||
\begin{itemize}
|
\begin{itemize}
|
||||||
\item Framework for analysis
|
\item Framework for analysis
|
||||||
|
|
@ -13,7 +13,7 @@ A Framework for the Analysis of Spatial Game Data
|
||||||
\end{itemize}
|
\end{itemize}
|
||||||
|
|
||||||
|
|
||||||
\subsection{Components}
|
\section{Components}
|
||||||
Prerequisites: Game log
|
Prerequisites: Game log
|
||||||
\begin{itemize}
|
\begin{itemize}
|
||||||
\item Creation
|
\item Creation
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
\section{State of research}
|
\section{State of research}
|
||||||
|
|
||||||
\subsection{Log processing}
|
\section{Log processing}
|
||||||
System administrators and developers face a daily surge of log files from applications, systems, and servers.
|
System administrators and developers face a daily surge of log files from applications, systems, and servers.
|
||||||
For knowledge extraction, a wide range of tools is in constant development for such environments.
|
For knowledge extraction, a wide range of tools is in constant development for such environments.
|
||||||
Currently, an architectural approach with three main components is most frequently applied.
|
Currently, an architectural approach with three main components is most frequently applied.
|
||||||
|
|
@ -23,13 +23,13 @@ StatsD\furl{https://github.com/etsy/statsd} & Prometheus\furl{https://prometheus
|
||||||
\label{tab:logs}
|
\label{tab:logs}
|
||||||
\end{longtable}
|
\end{longtable}
|
||||||
|
|
||||||
\subsubsection{Collection}
|
\subsection{Collection}
|
||||||
Nearly all services designed for log collection offer multiple interfaces for submitting log data.
|
Nearly all services designed for log collection offer multiple interfaces for submitting log data.
|
||||||
By way of illustration, Logstash features a long list of input plugins from streaming files over an HTTP API to proprietary vendor sources like Amazon Web Services (AWS)\furl{https://www.elastic.co/guide/en/logstash/current/input-plugins.html}. \nomenclature{\m{A}mazon \m{W}eb \m{S}ervices}{AWS} \nomenclature{\m{A}pplication \m{P}rogramming \m{I}nterface}{API}\nomenclature{\m{H}yper\m{t}ext \m{T}ransport \m{P}rotocol}{HTTP}
|
By way of illustration, Logstash features a long list of input plugins from streaming files over an HTTP API to proprietary vendor sources like Amazon Web Services (AWS)\furl{https://www.elastic.co/guide/en/logstash/current/input-plugins.html}. \nomenclature{\m{A}mazon \m{W}eb \m{S}ervices}{AWS} \nomenclature{\m{A}pplication \m{P}rogramming \m{I}nterface}{API}\nomenclature{\m{H}yper\m{t}ext \m{T}ransport \m{P}rotocol}{HTTP}
|
||||||
|
|
||||||
Aside from aggregation, the topic of log creation is covered from host-based monitoring solutions like Icinga to application centric approaches with e.g. StatsD embedded in the application source code\furl{https://thenewstack.io/collecting-metrics-using-statsd-a-standard-for-real-time-monitoring/}.
|
Aside from aggregation, the topic of log creation is covered from host-based monitoring solutions like Icinga to application centric approaches with e.g. StatsD embedded in the application source code\furl{https://thenewstack.io/collecting-metrics-using-statsd-a-standard-for-real-time-monitoring/}.
|
||||||
|
|
||||||
\subsubsection{Databases}
|
\subsection{Databases}
|
||||||
The key component for a log processing system is the storage.
|
The key component for a log processing system is the storage.
|
||||||
While relational database management systems (RDBMS) \nomenclature{\m{R}elational \m{D}ata\m{b}ase \m{M}anagement \m{S}ystem}{RDBMS} can be suitable for small-scale solutions, the temporal order of events impose many pitfalls.
|
While relational database management systems (RDBMS) \nomenclature{\m{R}elational \m{D}ata\m{b}ase \m{M}anagement \m{S}ystem}{RDBMS} can be suitable for small-scale solutions, the temporal order of events impose many pitfalls.
|
||||||
For instance, django-monit-collector\furl{https://github.com/nleng/django-monit-collector} as open alternative to the proprietary MMonit cloud service\furl{https://mmonit.com/monit/\#mmonit} assures temporal coherence through lists of timestamps and measurement values stored as JSON strings in a RDBMS. \nomenclature{\m{J}ava\m{s}cript \m{O}bject \m{N}otation}{JSON}
|
For instance, django-monit-collector\furl{https://github.com/nleng/django-monit-collector} as open alternative to the proprietary MMonit cloud service\furl{https://mmonit.com/monit/\#mmonit} assures temporal coherence through lists of timestamps and measurement values stored as JSON strings in a RDBMS. \nomenclature{\m{J}ava\m{s}cript \m{O}bject \m{N}otation}{JSON}
|
||||||
|
|
@ -43,7 +43,7 @@ More recently, alternatives written in modern languages are popular, like Influx
|
||||||
\image{\textwidth}{mgroth}{Weather station plot with RDDtool \cite{RDD}}{img:rdd}
|
\image{\textwidth}{mgroth}{Weather station plot with RDDtool \cite{RDD}}{img:rdd}
|
||||||
\nomenclature{\m{T}ime \m{S}eries \m{D}ata\m{b}ase}{TSDB}
|
\nomenclature{\m{T}ime \m{S}eries \m{D}ata\m{b}ase}{TSDB}
|
||||||
|
|
||||||
\subsubsection{Frontend}
|
\subsection{Frontend}
|
||||||
|
|
||||||
Frontends utilize the powerful query languages of the TSDB systems backing them.
|
Frontends utilize the powerful query languages of the TSDB systems backing them.
|
||||||
Grafana e.g. provides customizable dashboards with graphing and mapping support \cite{komarek2017metric}.
|
Grafana e.g. provides customizable dashboards with graphing and mapping support \cite{komarek2017metric}.
|
||||||
|
|
@ -51,12 +51,12 @@ Additional functionality can be added with plugins, e.g. for new data sources or
|
||||||
The query languages of the data sources are abstracted by a common user interface.
|
The query languages of the data sources are abstracted by a common user interface.
|
||||||
|
|
||||||
|
|
||||||
\subsection{Pedestrian traces}
|
\section{Pedestrian traces}
|
||||||
Analyzing pedestrian movement based on GPS logs is an established technique.
|
Analyzing pedestrian movement based on GPS logs is an established technique.
|
||||||
In the following sections, \autoref{sssec:gps} provides an overview of GPS as data basis, \autoref{sssec:act} highlights some approaches to activity mining and \autoref{sssec:vis} showcases popular visualizations of tempo-spatial data.
|
In the following sections, \autoref{sssec:gps} provides an overview of GPS as data basis, \autoref{sssec:act} highlights some approaches to activity mining and \autoref{sssec:vis} showcases popular visualizations of tempo-spatial data.
|
||||||
\nomenclature{\m{G}lobal \m{P}ositioning \m{S}ystem}{GPS}
|
\nomenclature{\m{G}lobal \m{P}ositioning \m{S}ystem}{GPS}
|
||||||
|
|
||||||
\subsubsection{Data basis: GPS}\label{sssec:gps}
|
\subsection{Data basis: GPS}\label{sssec:gps}
|
||||||
Global navigation satellite systems (GNSS) like GPS, Galileo, GLONASS, or BeiDou are a source of positioning data for mobile users.
|
Global navigation satellite systems (GNSS) like GPS, Galileo, GLONASS, or BeiDou are a source of positioning data for mobile users.
|
||||||
\nomenclature{\m{G}lobal \m{N}avigation \m{S}atellite \m{S}ystems}{GNSS}
|
\nomenclature{\m{G}lobal \m{N}avigation \m{S}atellite \m{S}ystems}{GNSS}
|
||||||
\cite{van_der_Spek_2009} has shown that such signals provide a reliable service in many situations.
|
\cite{van_der_Spek_2009} has shown that such signals provide a reliable service in many situations.
|
||||||
|
|
@ -66,7 +66,7 @@ Therefore, GNSS are suitable instruments for acquiring spatio-temporal data \cit
|
||||||
However, \cite{Ranacher_2015} reminds of systematical overestimates by GPS due to interpolation errors.
|
However, \cite{Ranacher_2015} reminds of systematical overestimates by GPS due to interpolation errors.
|
||||||
To eliminate such biases of one system, \cite{Li2015} describes the combination of multiple GNSS for improved accuracy and reduced convergence time.
|
To eliminate such biases of one system, \cite{Li2015} describes the combination of multiple GNSS for improved accuracy and reduced convergence time.
|
||||||
|
|
||||||
\subsubsection{Activity Mining}\label{sssec:act}
|
\subsection{Activity Mining}\label{sssec:act}
|
||||||
GPS (or GNSS) tracks generally only contain the raw tempo-spatial data (possibly accompanied by metadata like accuracy, visible satellites, etc.).
|
GPS (or GNSS) tracks generally only contain the raw tempo-spatial data (possibly accompanied by metadata like accuracy, visible satellites, etc.).
|
||||||
Any additional information needs either to be logged separately or to be derived from the track data itself.
|
Any additional information needs either to be logged separately or to be derived from the track data itself.
|
||||||
This activity mining allows e.g. the determination of the modes of transport used while creating the track \cite{Gong_2014}.
|
This activity mining allows e.g. the determination of the modes of transport used while creating the track \cite{Gong_2014}.
|
||||||
|
|
@ -77,7 +77,7 @@ Beside points of interest (POIs), individual behaviour patterns can be mined fro
|
||||||
Post-processing of these patterns with machine learning enables predictions of future trajectories \cite{10.1007/978-3-642-23199-5_37}.
|
Post-processing of these patterns with machine learning enables predictions of future trajectories \cite{10.1007/978-3-642-23199-5_37}.
|
||||||
|
|
||||||
|
|
||||||
\subsubsection{Visualization}\label{sssec:vis}
|
\subsection{Visualization}\label{sssec:vis}
|
||||||
|
|
||||||
\image{.81\textwidth}{../../PresTeX/images/strava}{Heatmap: Fitnesstracker\cite{strava}}{img:strava}
|
\image{.81\textwidth}{../../PresTeX/images/strava}{Heatmap: Fitnesstracker\cite{strava}}{img:strava}
|
||||||
|
|
||||||
|
|
@ -88,7 +88,7 @@ Post-processing of these patterns with machine learning enables predictions of f
|
||||||
\image{\textwidth}{../../PresTeX/images/generalization}{Trajectories and generalizations with varying radius parameter \cite{adrienko2011spatial}}{img:generalization}
|
\image{\textwidth}{../../PresTeX/images/generalization}{Trajectories and generalizations with varying radius parameter \cite{adrienko2011spatial}}{img:generalization}
|
||||||
|
|
||||||
|
|
||||||
\subsection{Analyzing games}
|
\section{Analyzing games}
|
||||||
\begin{itemize}
|
\begin{itemize}
|
||||||
\item there's more than heatmaps
|
\item there's more than heatmaps
|
||||||
\item combine position with game actions
|
\item combine position with game actions
|
||||||
|
|
@ -101,7 +101,7 @@ Post-processing of these patterns with machine learning enables predictions of f
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
\subsection{Summary}
|
\section{Summary}
|
||||||
\begin{itemize}
|
\begin{itemize}
|
||||||
\item Log processing: Powerful stacks
|
\item Log processing: Powerful stacks
|
||||||
\item Movement analysis: Large field already explored (GPS influence, Patterns, Behavior recognition, …)
|
\item Movement analysis: Large field already explored (GPS influence, Patterns, Behavior recognition, …)
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,154 @@
|
||||||
Based on the findings in \autoref{sec:solution}, an implementation with Python was realized.
|
Based on the findings in \autoref{sec:solution}, an implementation with Python was realized.
|
||||||
|
The following sections describe the structure and service composition utilized to fulfill the requirements.
|
||||||
|
|
||||||
\section{klassenstruktur}
|
\section{Code structure}
|
||||||
\subsection{Analysis Framework}
|
There are four packages forming the Analysis Framework project:
|
||||||
|
\begin{itemize}
|
||||||
|
\item analysis: Core analysis functionality, including log parsing, analysis, postprocessing and rendering
|
||||||
|
\item clients: Connection classes to game servers to retrieve log files and game configurations
|
||||||
|
\item selector: Web interface for non-expert users
|
||||||
|
\item tasks: Definition of asynchronous tasks
|
||||||
|
\end{itemize}
|
||||||
|
The analysis and clients packages are described in \autoref{sec:analysisframework}, while \autoref{sec:web} features selector and tasks packages.
|
||||||
|
\image{.7\textwidth}{packages}{Project package overview}{img:packages}
|
||||||
|
|
||||||
\subsection{Web Interface}
|
\subsection{Analysis Framework}\label{sec:analysisframework}
|
||||||
|
The internal structure of the analysis package is shown in \autoref{img:pack-analysis}.
|
||||||
|
Besides the subpackages for analysing work (analyzers: \autoref{sec:analysiswork}) and log parsing (loaders: \autoref{sec:loaders}), it contains helper functionalities and finally the Python module \texttt{log\_analyzer} as entrypoint for researches experimenting and outline of the intended workflow.
|
||||||
|
\image{.7\textwidth}{packages-analysis}{analysis package overview}{img:pack-analysis}
|
||||||
|
|
||||||
\subsection{Integration \& Service composition}
|
\subsubsection{Log parsing}\label{sec:loaders}
|
||||||
|
Outlined in \autoref{img:pack-loader}, the parsing of log files into an internal structure happens here.
|
||||||
|
\image{.7\textwidth}{packages-loader}{loader package overview}{img:pack-loader}
|
||||||
|
|
||||||
|
\paragraph{The loader module} holds the definition of the abstract base class \texttt{Loader}.
|
||||||
|
It has two unimplemented methods: \texttt{load} and \texttt{get\_entry}.
|
||||||
|
While the first is issued with a filename as argument to load a log file, the second is then called repeatedly to retrieve a single log entry for the analysis steps.
|
||||||
|
Processing stops when all log entries have been passed from this method.
|
||||||
|
|
||||||
|
The module also defines a showcase implementation loading a JSON file and \texttt{yield}ing it's items.
|
||||||
|
|
||||||
|
\paragraph{Biogames} is for the log files of Biodiv2go, a composite approach was used: The games' log files come as ZIP archive with an SQLite database and possibly media files.
|
||||||
|
The \texttt{SQLiteLoader} contains the logic to handle a plain SQLite file according to the definition of the \texttt{Loader} from above.
|
||||||
|
By extending this class, \texttt{ZipSQLiteLoader} focuses on unzipping the archive and creating a temporary storage location, leaving interpretation of the data to its super class.
|
||||||
|
This avoids code duplication and, with little amount of tweaking, would present a generic way to handle SQLite database files.
|
||||||
|
|
||||||
|
\paragraph{Neocart(ographer)}
|
||||||
|
was the evaluation step described in \autoref{sec:eval}.
|
||||||
|
This \texttt{Loader} deals with some seriously broken XML files.
|
||||||
|
|
||||||
|
\paragraph{Module settings} are stored in the \texttt{\_\_init\_\_} module.
|
||||||
|
This is mainly a mapping to allow references to \texttt{Loader}s in the JSON files for configuration (see \autoref{sec:settings}).
|
||||||
|
|
||||||
|
\subsubsection{Analysis Work package}\label{sec:analysiswork}
|
||||||
|
\autoref{img:pack-analyzers} shows the subpackages of \texttt{analysis.analyzers}.
|
||||||
|
There are subpackages for doing the actual analysis work, as well as for the postprocess and rendering step.
|
||||||
|
Additional the \texttt{settings} module defines the LogSettings class.
|
||||||
|
\image{.7\textwidth}{packages-analysis-analyzers}{analysis.analyzers package overview}{img:pack-analyzers}
|
||||||
|
|
||||||
|
\paragraph{LogSettings}\label{sec:settings}
|
||||||
|
This class holds the configuration for an analysis run:
|
||||||
|
\begin{itemize}
|
||||||
|
\item The type of the log parser to use
|
||||||
|
\item Information about the structure of the parsed log files, e.g.
|
||||||
|
\begin{itemize}
|
||||||
|
\item What is the key of the field to derive the type of the log entry?
|
||||||
|
\item What value does this field hold, when there is spatial information?
|
||||||
|
\item What value does indicate game actions?
|
||||||
|
\item What is the path to obtain spatial information from an spatial entry?
|
||||||
|
\end{itemize}
|
||||||
|
\item The analysis setup:
|
||||||
|
\begin{itemize}
|
||||||
|
\item Which analyzers to use,
|
||||||
|
\item and the order to apply them
|
||||||
|
\end{itemize}
|
||||||
|
\item Variable data to configure the source (see \autoref{sec:source}).
|
||||||
|
\item Rendering methods to apply to the result set
|
||||||
|
\end{itemize}
|
||||||
|
|
||||||
|
The settings are stored as JSON files, and parsed by runtime into a \texttt{LogSetting} object (see \autoref{img:oebkml} for a sample JSON settings file).
|
||||||
|
The helper functions in \texttt{analysis.util} provide a very basic implementation of a query language for Python dictionaries:
|
||||||
|
A dot-separated string defines the path to take through the dictionary, providing basically syntactic sugar to avoid lines like \texttt{entry["instance"]["config"]["@id"]}.
|
||||||
|
As this proves quite difficult to configure using JSON, the path-string \texttt{"instance.config.@id"} is much more deserialization friendly.
|
||||||
|
|
||||||
|
\paragraph{The Analyzer package} defines the work classes to extract information from log entries.
|
||||||
|
The packages' init-module defines the Result and ResultStore classes, as well as the abstract base class for the Analyzers.
|
||||||
|
|
||||||
|
As shown in \autoref{code:anaylzer}, this base class provides the basic mechanics to access the settings.
|
||||||
|
The core feature of this project is condensed in the method stub \texttt{process}.
|
||||||
|
It is fed with a parsed entry from \autoref{sec:loaders}, processes it, possibly updates the internal state of the class, and can then decide to end the processing of the particular log entry or continue to feed down into the remainder of the analysis chain.
|
||||||
|
|
||||||
|
When all log entries of a log file are processed, the \texttt{result} method returns the findings of this analysis instance (see \autoref{par:result}).
|
||||||
|
|
||||||
|
\lstinputlisting[language=python,caption={Analyzer base class},label=code:anaylzer]{code/analyzer.py}
|
||||||
|
|
||||||
|
There are 23 classes implementing analysis functionality, split into modules for generic use, Biodiv2go analysis, and filtering purposes.
|
||||||
|
|
||||||
|
\paragraph{Results}\label{par:result} are stored in a \texttt{Result} object (\texttt{analysis.analyzers.analyzer.\_\_init\_\_}).
|
||||||
|
This class keeps track of the origin of the resulting data to allow filtering for results by arbitrary analzing classes.
|
||||||
|
|
||||||
|
As \autoref{code:anaylzer} shows, the \texttt{Result}s are stored in a \texttt{ResultStore}.
|
||||||
|
This store - defined next to the \texttt{Result} class - provides means to structure the results by arbitrary measures.
|
||||||
|
By passing the store's reference into the analyzers, any analyzer can introduce categorization measures.
|
||||||
|
This allows for example to distinguish several log files by name, or to combine log files and merge the results by events happening during the games' progress.
|
||||||
|
With an default of an dictionary of lists, the API supports a callable factory for arbitrary use.
|
||||||
|
|
||||||
|
\paragraph{Rendering of the Results} is done in the \texttt{render} package.
|
||||||
|
Similar to the Analyzers' package, the render package defines its common base class in the initialization module, as shown in \autoref{code:render}.
|
||||||
|
It provides implementors means to filter the result set to relevant analysis types through the \texttt{filter} methods.
|
||||||
|
Of course, the implementation of the rendering method is left open.
|
||||||
|
|
||||||
|
\lstinputlisting[language=python,caption={Render base class},label=code:render]{code/render.py}
|
||||||
|
|
||||||
|
There are 18 implementations, again split into generic and game-specific ones.
|
||||||
|
|
||||||
|
The most generic renderers just dump the results into JSON files or echo them to the console.
|
||||||
|
A more advanced implementation relies on the \texttt{LocationAnalyzer} and creates a KML file with a track animation (example: \autoref{img:oebge}).
|
||||||
|
Finally, e.g. \texttt{biogames.SimulationGroupRender} performs postprocessing steps on a collection of \texttt{biogames.SimulationOrderAnalyzer} results by creating a graph with matplotlib\furl{https://matplotlib.org/} to discover simulation retries (example: \autoref{img:retries}).
|
||||||
|
|
||||||
|
\subsection{Sources}\label{sec:source} of log files are clients connecting either to game servers directly or other log providers.
|
||||||
|
There is currently a bias towards HTTP clients, as REST APIs are todays go-to default.
|
||||||
|
To acknowledge this bias, the HTTP oriented base class is not defined at package level.
|
||||||
|
The \texttt{Client} originates from the \texttt{client.webclients} package instead.
|
||||||
|
It contains some convenience wrappers to add cookies, headers and URL-completion to HTTP calls as well as handling file downloads.
|
||||||
|
The two implementing classes are designed for Biodiv2go and a Geogames-Team log provider.
|
||||||
|
Using a REST API, the \texttt{Biogames} client integrates seamlessly into the authentication and authorization of the game server.
|
||||||
|
The client acts as proxy for users to avoid issues with cross-site scripting (XSS) or cross-origin resource sharing (CORS).
|
||||||
|
|
||||||
|
The Geogames-Team's geogames like Neocartographer write game logs to files and only have a server running during the active game.
|
||||||
|
Therefore, an additional log providing server was created to allow access to the log files (see also: \autoref{sec:ggt-server}).
|
||||||
|
|
||||||
|
Clients can have arbitrary amounts of options, as all fields in the JSON settings file are passed through.
|
||||||
|
|
||||||
|
\subsection{Web Interface}\label{sec:web}
|
||||||
|
The selector package holds a Flask\furl{http://flask.pocoo.org/} app for an web interface for non-expert users.
|
||||||
|
It utilizes the provided clients (see \autoref{sec:source}) for authentication, and gives users the following options:
|
||||||
|
\begin{itemize}
|
||||||
|
\item Exploring available game logs
|
||||||
|
\item Configuring a new analysis run
|
||||||
|
\item View analysis run status
|
||||||
|
\item View analysis run results
|
||||||
|
\end{itemize}
|
||||||
|
The web interface offers all available clients for the user to choose from.
|
||||||
|
With user provided credentials, the server retrieves the available game logs and offers them, together with the predefined analysis options, to create an new analysis run.
|
||||||
|
When an analysis run is requested, the server issues a new task to be executed (see \autoref{sec:tasks}).
|
||||||
|
|
||||||
|
An overview page lists the status of the tasks from the given user, and provides access to the results once the task is finished.
|
||||||
|
When problems occur, the status page informs the user, too.
|
||||||
|
|
||||||
|
\subsection{Task definition}\label{sec:tasks} in the \texttt{package} provides tasks available for execution.
|
||||||
|
This package is the interface for celery\furl{http://www.celeryproject.org/} workers and issuers.
|
||||||
|
The key point is the task \texttt{analyze} to start new analysis runs.
|
||||||
|
When a new task is scheduled, the issuer puts a task in the Redis DB\furl{https://redis.io/}.
|
||||||
|
A free worker node claims the task and executes it.
|
||||||
|
During the runtime, status updates are stored in the Redis Db to inform the issuer about progress, failures and results artifacts.
|
||||||
|
|
||||||
|
|
||||||
|
\section{Service composition}
|
||||||
|
|
||||||
|
\image{\textwidth}{architecture.pdf}{architecture overview}{img:arch}
|
||||||
|
…
|
||||||
|
\subsection{Geogame Log file provider}\label{sec:ggt-server}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -41,15 +184,13 @@ Rendering
|
||||||
|
|
||||||
\subsection{Examples}
|
\subsection{Examples}
|
||||||
Configuration \& results
|
Configuration \& results
|
||||||
\twofigures{0.5}{../../PresTeX/images/oeb-kml}{Analyzer configuration}{img:oebkml}{../../PresTeX/images/oeb-ge}{Result visualized}{img:oebge}{Example: Generate KML tracks (BioDiv2Go; Oberelsbach2016)}{fig:oeb2016}
|
%\twofigures{0.5}{../../PresTeX/images/oeb-kml}{Analyzer configuration}{img:oebkml}{../../PresTeX/images/oeb-ge}{Result visualized}{img:oebge}{Example: Generate KML tracks (BioDiv2Go; Oberelsbach2016)}{fig:oeb2016}
|
||||||
|
|
||||||
|
|
||||||
ActivityMapper
|
ActivityMapper
|
||||||
\image{.7\textwidth}{../../PresTeX/images/track-fi}{Combined screen activity and spatial progress}{img:trackfi}
|
\image{.7\textwidth}{../../PresTeX/images/track-fi}{Combined screen activity and spatial progress}{img:trackfi}
|
||||||
|
|
||||||
|
|
||||||
Graphs
|
Graphs
|
||||||
\image{\textwidth}{../../PresTeX/images/simu-retries}{Experimentational rounds}{img:retries}
|
|
||||||
\image{\textwidth}{../../PresTeX/images/speed}{Speed distribution}{img:speed}
|
\image{\textwidth}{../../PresTeX/images/speed}{Speed distribution}{img:speed}
|
||||||
\image{.9\textwidth}{../../PresTeX/images/time-rel}{Time distribution}{img:time}
|
\image{.9\textwidth}{../../PresTeX/images/time-rel}{Time distribution}{img:time}
|
||||||
|
|
||||||
\image{\textwidth}{architecture.pdf}{architecture overview}{img:arch}
|
|
||||||
|
|
@ -1,6 +1,15 @@
|
||||||
\section{Setup für Kibana} \label{app:kibana}
|
\section{Examples}
|
||||||
|
\subsection{Configuration}
|
||||||
|
\image{\textwidth}{../../PresTeX/images/oeb-kml}{Analyzer configuration}{img:oebkml}
|
||||||
|
\subsection{Results}
|
||||||
|
\image{\textwidth}{../../PresTeX/images/oeb-ge}{Result visualized}{img:oebge}
|
||||||
|
\image{\textwidth}{../../PresTeX/images/simu-retries}{Experimentational rounds}{img:retries}
|
||||||
|
|
||||||
|
\section{Containers}
|
||||||
|
\subsection{Kibana test setup} \label{app:kibana}
|
||||||
\lstinputlisting[language=yaml,caption={Docker-compose file for Kibana test setup},label=code:kibana]{code/kibana-docker-compose.yml}
|
\lstinputlisting[language=yaml,caption={Docker-compose file for Kibana test setup},label=code:kibana]{code/kibana-docker-compose.yml}
|
||||||
\section{Biogames Server Dockerized}
|
|
||||||
|
\subsection{Biogames server dockerized} \label{app:biogames}
|
||||||
\image{\textwidth}{biogames.pdf}{Dockerized setup for biogames}{img:bd2gdocker}
|
\image{\textwidth}{biogames.pdf}{Dockerized setup for biogames}{img:bd2gdocker}
|
||||||
\lstinputlisting[language=yaml,caption={Docker-compose file for Biogames server},label=code:bd2s]{code/biogames/docker-compose.yml}
|
\lstinputlisting[language=yaml,caption={Docker-compose file for Biogames server},label=code:bd2s]{code/biogames/docker-compose.yml}
|
||||||
\lstinputlisting[language=yaml,caption={Dockerfile for Biogames server},label=code:bd2d]{code/biogames/Dockerfile}
|
\lstinputlisting[language=yaml,caption={Dockerfile for Biogames server},label=code:bd2d]{code/biogames/Dockerfile}
|
||||||
|
|
|
||||||
|
|
@ -13,7 +13,7 @@
|
||||||
\chapter{umsetzung}
|
\chapter{umsetzung}
|
||||||
\input{content/4-implementation}
|
\input{content/4-implementation}
|
||||||
|
|
||||||
\chapter{evaluierung}
|
\chapter{evaluierung}\label{sec:eval}
|
||||||
%\input{content/5-evaluation}
|
%\input{content/5-evaluation}
|
||||||
\section{Portierbarkeit (anderes game)}
|
\section{Portierbarkeit (anderes game)}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,8 @@
|
||||||
|
// Package diagram: analysis.analyzers and its contents.
digraph {
    root [label="analysis.analyzers"];

    // Subpackages and modules of analysis.analyzers.
    root -> analyzer;
    root -> render;
    root -> settings;

    // Plain modules are drawn as boxes.
    settings [shape="box"];
}
|
||||||
|
|
@ -0,0 +1,10 @@
|
||||||
|
// Package diagram: the analysis package and its contents.
digraph {
    root [label="analysis"];

    // Subpackages and modules of analysis.
    root -> analyzers;
    root -> loaders;
    root -> util;
    root -> log_analyzer;

    // Plain modules are drawn as boxes.
    log_analyzer [shape="box"];
}
|
||||||
|
|
@ -0,0 +1,8 @@
|
||||||
|
// Package diagram: analysis.loaders and its modules.
digraph {
    // All children are plain modules (boxes) ...
    node [shape="box"];
    // ... except the package node itself, which stays an ellipse.
    root [label="analysis.loaders", shape="ellipse"];

    root -> biogames;
    root -> loader;
    root -> neocart;
}
|
||||||
|
|
@ -0,0 +1,8 @@
|
||||||
|
// Package diagram: top-level packages of the project.
digraph {
    root [label="/"];

    root -> analysis;
    root -> clients;
    root -> selector;
    root -> tasks;
}
|
||||||
|
|
@ -109,7 +109,7 @@
|
||||||
|
|
||||||
\appendix
|
\appendix
|
||||||
\setstretch{1.5}
|
\setstretch{1.5}
|
||||||
\chapter{Anhang}
|
\chapter{Appendix}
|
||||||
\input{content/appendix}
|
\input{content/appendix}
|
||||||
\setstretch{1.1}
|
\setstretch{1.1}
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue