
Begin updating framework based on another project

Eren Yilmaz, 5 years ago
parent commit 64e30ba14f

+ 1 - 1
README.md

@@ -21,7 +21,7 @@ The server is intended to be run within a python 3.6 environment on the host spe
 
 On the server you can install the required packages using pip:
 ```
-pip3 install bottle requests tabulate
+pip install bottle requests tabulate gevent-websocket passlib joblib
 ```
 and start the server using
 ```

+ 0 - 0
doc/__init__.py


+ 69 - 0
doc/documentation.py

@@ -0,0 +1,69 @@
+# the general syntax is 
+# {attribute1: value, attribute2: value, ...}
+#
+# for example a valid request to /register would look like 
+# {"email": "user123@example.org", "username": "user123", "password": "FILTERED", "preferred_language": "german"}
+# while a valid request to /events would be the empty object {}
+
+cache_translation_required_attributes = ['original_language_iso', 'original_text', 'target_language_iso']
+cache_translation_possible_attributes = ['original_language_iso', 'original_text', 'target_language_iso', 'translated_text']
+
+cities_required_attributes = []
+cities_possible_attributes = []
+
+confirm_translation_required_attributes = ['session_id', 'translation_id']
+confirm_translation_possible_attributes = ['session_id', 'translation_id']
+
+create_location_required_attributes = ['city', 'house_number', 'phone_number', 'street', 'zip_code']
+create_location_possible_attributes = ['city', 'house_number', 'name', 'phone_number', 'room_number', 'street', 'zip_code']
+
+create_organization_required_attributes = ['location_id', 'name']
+create_organization_possible_attributes = ['location_id', 'name']
+
+create_person_required_attributes = ['email', 'first_name', 'location_id', 'name']
+create_person_possible_attributes = ['email', 'first_name', 'location_id', 'name', 'preferred_language_id']
+
+edit_translation_required_attributes = ['new_text', 'session_id', 'translation_id']
+edit_translation_possible_attributes = ['new_text', 'session_id', 'translation_id']
+
+languages_required_attributes = []
+languages_possible_attributes = []
+
+location_info_required_attributes = ['location_id', 'session_id']
+location_info_possible_attributes = ['location_id', 'session_id']
+
+login_required_attributes = ['email', 'password']
+login_possible_attributes = ['email', 'password']
+
+logout_required_attributes = ['session_id']
+logout_possible_attributes = ['session_id']
+
+organization_name_exists_required_attributes = ['name']
+organization_name_exists_possible_attributes = ['name']
+
+promote_required_attributes = ['promoted_user_id', 'role', 'session_id']
+promote_possible_attributes = ['promoted_user_id', 'role', 'session_id']
+
+register_required_attributes = ['password', 'person_id']
+register_possible_attributes = ['password', 'person_id']
+
+translations_required_attributes = []
+translations_possible_attributes = ['confirmed']
+
+update_location_required_attributes = ['location_id', 'session_id']
+update_location_possible_attributes = ['additional_information', 'city', 'house_number', 'location_id', 'name', 'person_id', 'phone_number', 'room_number', 'session_id', 'street', 'zip_code']
+
+update_organization_required_attributes = ['name', 'organization_id', 'session_id']
+update_organization_possible_attributes = ['name', 'organization_id', 'person_id', 'session_id']
+
+update_person_required_attributes = ['person_id', 'session_id']
+update_person_possible_attributes = ['first_name', 'name', 'new_email', 'new_organization_id', 'new_preferred_language_id', 'person_id', 'session_id']
+
+update_user_required_attributes = ['new_password', 'session_id', 'user_id']
+update_user_possible_attributes = ['new_password', 'session_id', 'user_id']
+
+user_info_required_attributes = ['user_id']
+user_info_possible_attributes = ['user_id']
+
+users_required_attributes = ['session_id']
+users_possible_attributes = ['session_id']
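The attribute lists above can be used to validate an incoming request body before dispatching it. A minimal sketch, assuming a hypothetical `validate_request` helper that is not part of this commit:

```
# Hypothetical helper, not part of this commit: checks a JSON body against the
# generated attribute lists in doc/documentation.py.
from doc import documentation


def validate_request(route_name, json_request):
    required = getattr(documentation, route_name + '_required_attributes')
    possible = getattr(documentation, route_name + '_possible_attributes')
    missing = [a for a in required if a not in json_request]
    unknown = [a for a in json_request if a not in possible]
    return missing, unknown


# a login request that forgot the password
print(validate_request('login', {'email': 'user123@example.org'}))  # (['password'], [])
```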

+ 35 - 0
doc/generate.py

@@ -0,0 +1,35 @@
+# read server controller
+import re
+
+if __name__ == '__main__':
+    with open('server_controller.py', 'r') as file:
+        text = file.read().replace('\n', '')
+
+    text = text.split('def ')
+    del text[0]
+    documentation = '''# the general syntax is 
+# {attribute1: value, attribute2: value, ...}
+#
+# for example a valid request to /register would look like 
+# {"email": "user123@example.org", "username": "user123", "password": "FILTERED", "preferred_language": "german"}
+# while a valid request to /events would be the empty object {}
+\n'''
+    for method in sorted(text):
+        method = 'def ' + method
+        method_name = re.search(r"def (.*?)\s*\(", method)[1]
+        if method_name[0] == '_':
+            continue
+        directly_used_arguments = re.findall(r"json_request\['(.*?)'\]", method)
+        missing_attributes = re.search(r"missing_attributes\(json_request, \[((.|\n)*?)\]\)", method)
+        if missing_attributes is None:
+            print('HINT: method', method_name, 'does not require any parameters')
+            required_arguments = []
+        else:
+            required_arguments = re.findall(r"'(.*?)'", missing_attributes[0])
+        required_arguments = sorted(list(set(required_arguments)))
+        possible_arguments = sorted(list(set(directly_used_arguments + required_arguments)))
+        documentation += method_name + '_required_attributes = ' + str(required_arguments) + '\n'
+        documentation += method_name + '_possible_attributes = ' + str(possible_arguments) + '\n\n'
+    with open("doc/documentation.py", "w", newline='\n') as file:
+        file.write(documentation[:-1])
+    print(documentation)

+ 0 - 0
create_new_key.py → jobs/create_new_key.py


+ 0 - 0
create_new_stock.py → jobs/create_new_stock.py


+ 0 - 0
delete_ownable.py → jobs/delete_ownable.py


+ 0 - 0
delete_user.py → jobs/delete_user.py


+ 0 - 0
drop_old_sessions.py → jobs/drop_old_sessions.py


+ 0 - 0
hash_all_users_passwords.py → jobs/hash_all_users_passwords.py


+ 0 - 0
publish_news_item.py → jobs/publish_news_item.py


+ 0 - 0
reset_bank.py → jobs/reset_bank.py


+ 0 - 0
run_db_setup.py → jobs/run_db_setup.py


+ 0 - 0
lib/__init__.py


+ 133 - 0
lib/db_log.py

@@ -0,0 +1,133 @@
+import logging
+import os
+import sqlite3 as db
+import sys
+from math import inf
+from shutil import copyfile
+import time
+from typing import Optional
+import git
+
+DBName = str
+
+connected_dbs = []  # names of databases that currently hold an open connection
+
+# get current commit at start time of program
+repo = git.Repo(search_parent_directories=True)
+CURRENT_SHA = repo.head.object.hexsha
+
+
+class DBLog:
+    def __init__(self, db_name='log.db', create_if_not_exists=True):
+        if db_name in connected_dbs:
+            raise ValueError(f'There is already a connection to {db_name}. '
+                             'If you want to re-use the same connection you can get it from `db_log.connected_dbs`. '
+                             'If you want to disconnect you can call log.disconnect().')
+        self.connection: Optional[db.Connection] = None
+        self.cursor: Optional[db.Cursor] = None
+        self.db_name: Optional[DBName] = None
+
+        db_name = db_name.lower()
+        if not os.path.isfile(db_name) and not create_if_not_exists:
+            raise FileNotFoundError('There is no database with this name.')
+        creating_new_db = not os.path.isfile(db_name)
+        try:
+            db_connection = db.connect(db_name, check_same_thread=False)
+            # db_setup.create_functions(db_connection)
+            # db_setup.set_pragmas(db_connection.cursor())
+            # connection.text_factory = lambda x: x.encode('latin-1')
+        except db.Error as e:
+            print("Database error %s:" % e.args[0])
+            raise
+
+        self.connection = db_connection
+        self.cursor = self.connection.cursor()
+        self.db_name = db_name
+        if creating_new_db:
+            try:
+                if os.path.isfile('/test-db/' + db_name):
+                    print('Using test logs')
+                    copyfile('/test-db/' + db_name, db_name)
+                else:
+                    self.setup()
+            except Exception:
+                if self.connection is not None:
+                    self.connection.rollback()
+                os.remove(db_name)
+                raise
+        self.connected = True
+        self.min_level = -inf
+
+    def disconnect(self, rollback=True):
+        if rollback:
+            self.connection.rollback()
+        else:
+            self.connection.commit()
+        self.connection.close()
+        self.connected = False
+
+    def setup(self):
+        self.cursor.execute('''
+        CREATE TABLE IF NOT EXISTS entries(
+            rowid INTEGER PRIMARY KEY,
+            dt_created DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+            message TEXT NOT NULL,
+            data BLOB, -- can be null
+            pid INTEGER NOT NULL,
+            message_type VARCHAR (25) NOT NULL, -- a plain text message title
+            level INTEGER NOT NULL, -- relates to logging.ERROR and similar ones
+            head_hex_sha VARCHAR -- SHA of the currently checked out commit
+        )
+        ''')
+
+    def log(self,
+            message,
+            level,
+            message_type='generic',
+            data=None,
+            dt_created=None,
+            current_pid=None,
+            current_head_hex_sha=CURRENT_SHA,
+            data_serialization_method=lambda x: x):
+        if level < self.min_level:
+            return
+        if dt_created is None:
+            dt_created = round(time.time())
+        if current_pid is None:
+            current_pid = os.getpid()
+        data: str = data_serialization_method(data)
+        self.cursor.execute('''
+        INSERT INTO entries(message, data, dt_created, pid, head_hex_sha, message_type, level)
+        VALUES (?, ?, ?, ?, ?, ?, ?)
+        ''', (message, data, dt_created, current_pid, current_head_hex_sha, message_type, level))
+
+    def debug(self, message, *args, **kwargs):
+        self.log(message, logging.DEBUG, *args, **kwargs)
+
+    def info(self, message, *args, **kwargs):
+        self.log(message, logging.INFO, *args, **kwargs)
+
+    def warning(self, message, *args, **kwargs):
+        self.log(message, logging.WARNING, *args, **kwargs)
+
+    warn = warning
+
+    def error(self, message, *args, **kwargs):
+        self.log(message, logging.ERROR, *args, **kwargs)
+
+    def critical(self, message, *args, **kwargs):
+        self.log(message, logging.CRITICAL, *args, **kwargs)
+
+    fatal = critical
+
+    def exception(self, msg, *args, data=None, **kwargs):
+        if data is None:
+            data = sys.exc_info()
+        self.error(msg, *args, data=data, **kwargs)
+
+    def commit(self):
+        c = time.clock()
+        self.connection.commit()
+        delta = time.clock() - c
+        print(f'Committing log files took {delta} seconds')
+        return delta
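A minimal usage sketch of the `DBLog` class defined above; the file name and messages are placeholders:

```
# Placeholder usage of lib/db_log.py; 'example_log.db' is an arbitrary file name.
# Note that importing lib.db_log requires GitPython and running inside a git checkout.
from lib.db_log import DBLog

log = DBLog('example_log.db')  # creates the entries table on first use
log.info('server started', message_type='server_start')
try:
    1 / 0
except ZeroDivisionError:
    # data_serialization_method turns the sys.exc_info() tuple into a storable string
    log.exception('division failed', data_serialization_method=str)
log.commit()                   # entries are only persisted on commit
log.disconnect(rollback=False)
```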

+ 165 - 0
lib/print_exc_plus.py

@@ -0,0 +1,165 @@
+import os
+import re
+import sys
+import traceback
+from itertools import islice
+from typing import Sized, Dict, Tuple
+
+from types import FrameType
+
+from lib.threading_timer_decorator import exit_after
+
+try:
+    import numpy
+except ImportError:
+    numpy = None
+
+FORMATTING_OPTIONS = {
+    'MAX_LINE_LENGTH': 1024,
+    'SHORT_LINE_THRESHOLD': 128,
+    'MAX_NEWLINES': 20,
+}
+ID = int
+
+
+# noinspection PyPep8Naming
+def name_or_str(X):
+    try:
+        return re.search(r"<class '?(.*?)'?>", str(X))[1]
+    except TypeError:  # if not found
+        return str(X)
+
+
+@exit_after(2)
+def type_string(x):
+    if numpy is not None and isinstance(x, numpy.ndarray):
+        return name_or_str(type(x)) + str(x.shape)
+    elif isinstance(x, Sized):
+        return name_or_str(type(x)) + f'({len(x)})'
+    else:
+        return name_or_str(type(x))
+
+
+@exit_after(2)
+def to_string_with_timeout(x):
+    return str(x)
+
+
+def nth_index(iterable, value, n):
+    matches = (idx for idx, val in enumerate(iterable) if val == value)
+    return next(islice(matches, n - 1, n), None)
+
+
+def print_exc_plus():
+    """
+    Print the usual traceback information, followed by a listing of all the
+    local variables in each frame.
+    """
+    limit = FORMATTING_OPTIONS['MAX_LINE_LENGTH']
+    max_newlines = FORMATTING_OPTIONS['MAX_NEWLINES']
+    tb = sys.exc_info()[2]
+    if numpy is not None:
+        options = numpy.get_printoptions()
+        numpy.set_printoptions(precision=2, edgeitems=2, floatmode='maxprec', threshold=20, linewidth=120)
+    else:
+        options = {}
+    stack = []
+    long_printed_objs: Dict[ID, Tuple[str, FrameType]] = {}
+
+    while tb:
+        stack.append(tb.tb_frame)
+        tb = tb.tb_next
+    for frame in stack:
+        if frame is not stack[0]:
+            print('-' * 40)
+        try:
+            print("Frame %s in %s at line %s" % (frame.f_code.co_name,
+                                                 os.path.relpath(frame.f_code.co_filename),
+                                                 frame.f_lineno))
+        except ValueError:  # if path is not relative
+            print("Frame %s in %s at line %s" % (frame.f_code.co_name,
+                                                 frame.f_code.co_filename,
+                                                 frame.f_lineno))
+        for key, value in frame.f_locals.items():
+            # We have to be careful not to cause a new error in our error
+            # printer! Calling str() on an unknown object could cause an
+            # error we don't want.
+
+            # noinspection PyBroadException
+            try:
+                key_string = to_string_with_timeout(key)
+            except KeyboardInterrupt:
+                key_string = "<TIMEOUT WHILE PRINTING KEY>"
+            except Exception:
+                key_string = "<ERROR WHILE PRINTING KEY>"
+
+            # noinspection PyBroadException
+            try:
+                type_as_string = type_string(value)
+            except KeyboardInterrupt:
+                type_as_string = "<TIMEOUT WHILE PRINTING TYPE>"
+            except Exception as e:
+                # noinspection PyBroadException
+                try:
+                    type_as_string = f"<{type(e).__name__} WHILE PRINTING TYPE>"
+                except Exception:
+                    type_as_string = "<ERROR WHILE PRINTING TYPE>"
+
+            if id(value) in long_printed_objs:
+                prev_key_string, prev_frame = long_printed_objs[id(value)]
+                if prev_frame is frame:
+                    print("\t%s is the same as '%s'" %
+                          (key_string + ' : ' + type_as_string,
+                           prev_key_string))
+                else:
+                    print("\t%s is the same as '%s' in frame %s in %s at line %s." %
+                          (key_string + ' : ' + type_as_string,
+                           prev_key_string,
+                           prev_frame.f_code.co_name,
+                           os.path.relpath(prev_frame.f_code.co_filename),
+                           prev_frame.f_lineno))
+                continue
+
+            # noinspection PyBroadException
+            try:
+                value_string = to_string_with_timeout(value)
+            except KeyboardInterrupt:
+                value_string = "<TIMEOUT WHILE PRINTING VALUE>"
+            except Exception:
+                value_string = "<ERROR WHILE PRINTING VALUE>"
+            line: str = '\t' + key_string + ' : ' + type_as_string + ' = ' + value_string
+            if limit is not None and len(line) > limit:
+                line = line[:limit - 1] + '...'
+            if max_newlines is not None and line.count('\n') > max_newlines:
+                line = line[:nth_index(line, '\n', max_newlines)].strip() + '... (' + str(
+                    line[nth_index(line, '\n', max_newlines):].count('\n')) + ' more lines)'
+            if len(line) > FORMATTING_OPTIONS['SHORT_LINE_THRESHOLD']:
+                long_printed_objs[id(value)] = key_string, frame
+            print(line)
+
+    traceback.print_exc()
+    if numpy is not None:
+        numpy.set_printoptions(**options)
+
+
+def main():
+    def fun1(c, d, e):
+        return fun2(c, d + e)
+
+    def fun2(g, h):
+        raise RuntimeError
+
+    def fun3(z):
+        return numpy.zeros(shape=z)
+
+    try:
+        import numpy as np
+        fun1(numpy.random.normal(size=(3, 4, 5, 6)), '12321', '123')
+        data = '???' * 100
+        fun3(data)
+    except:
+        print_exc_plus()
+
+
+if __name__ == '__main__':
+    main()

+ 194 - 0
lib/progress_bar.py

@@ -0,0 +1,194 @@
+import functools
+import math
+import time
+from math import floor
+from typing import Iterable, Sized, Iterator
+
+
+class ProgressBar(Sized, Iterable):
+    def __iter__(self) -> Iterator:
+        self.check_if_num_steps_defined()
+        self.current_iteration = -1  # start counting at the end of the first epoch
+        self.current_iterator = iter(self._backend)
+        self.start_time = time.clock()
+        return self
+
+    def __init__(self,
+                 num_steps=None,
+                 prefix='',
+                 suffix='',
+                 line_length=75,
+                 empty_char='-',
+                 fill_char='#',
+                 print_eta=True,
+                 decimals=1):
+        self.decimals = decimals
+        self.line_length = line_length
+        self.suffix = suffix
+        self.empty_char = empty_char
+        self.prefix = prefix
+        self.fill_char = fill_char
+        self.print_eta = print_eta
+        self.current_iteration = 0
+        self.last_printed_value = None
+        self.current_iterator = None
+        self.start_time = time.clock()
+
+        self.set_num_steps(num_steps)  # same logic as the method below, avoids duplicating it here
+
+    def set_num_steps(self, num_steps):
+        try:
+            self._backend = range(num_steps)
+        except TypeError:
+            if isinstance(num_steps, Sized):
+                if isinstance(num_steps, Iterable):
+                    self._backend = num_steps
+                else:
+                    self._backend = range(len(num_steps))
+            elif num_steps is None:
+                self._backend = None
+            else:
+                raise
+
+        assert num_steps is None or isinstance(self._backend, (Iterable, Sized))
+
+    def __len__(self):
+        return len(self._backend)
+
+    def __next__(self):
+        self.print_progress()
+        try:
+            result = next(self.current_iterator)
+            self.increment_iteration()
+            self.print_progress()
+            return result
+        except StopIteration:
+            self.increment_iteration()
+            self.print_progress()
+            raise
+
+    def step(self, num_iterations=1):
+        self.current_iteration += num_iterations
+        self.print_progress()
+
+    def print_progress(self, iteration=None):
+        """
+        Call in a loop to create terminal progress bar
+        @params:
+            iteration   - Optional  : current iteration (Int)
+        """
+        if iteration is not None:
+            self.current_iteration = iteration
+        try:
+            progress = self.current_iteration / len(self)
+        except ZeroDivisionError:
+            progress = 1
+        if self.current_iteration == 0:
+            self.start_time = time.clock()
+        if self.print_eta and progress > 0:
+            time_spent = (time.clock() - self.start_time)
+            eta = time_spent / progress * (1 - progress)
+            if progress == 1:
+                eta = f' T = {int(time_spent / 60):02d}:{round(time_spent % 60):02d}'
+            else:
+                eta = f' ETA {int(eta / 60):02d}:{round(eta % 60):02d}'
+        else:
+            eta = ''
+        percent = ("{0:" + str(4 + self.decimals) + "." + str(self.decimals) + "f}").format(100 * progress)
+        bar_length = self.line_length - len(self.prefix) - len(self.suffix) - len(eta) - 4 - 6
+        try:
+            filled_length = int(bar_length * self.current_iteration // len(self))
+        except ZeroDivisionError:
+            filled_length = bar_length
+        if math.isclose(bar_length * progress, filled_length):
+            overflow = 0
+        else:
+            overflow = bar_length * progress - filled_length
+            overflow *= 10
+            overflow = floor(overflow)
+        assert overflow in range(10), overflow
+        if overflow > 0:
+            bar = self.fill_char * filled_length + str(overflow) + self.empty_char * (bar_length - filled_length - 1)
+        else:
+            bar = self.fill_char * filled_length + self.empty_char * (bar_length - filled_length)
+
+        print_value = '\r{0} |{1}| {2}% {4}{3}'.format(self.prefix, bar, percent, eta, self.suffix)
+        if self.current_iteration == len(self):
+            print_value += '\n'  # Print New Line on Complete
+        if self.last_printed_value == print_value:
+            return
+        self.last_printed_value = print_value
+        print(print_value, end='')
+
+    def increment_iteration(self):
+        self.current_iteration += 1
+        if self.current_iteration > len(self):  # catches the special case at the end of the bar
+            self.current_iteration %= len(self)
+
+    def monitor(self, func=None):
+        """ Decorates the given function func to print a progress bar before and after each call. """
+        if func is None:
+            # Partial application, to be able to specify extra keyword
+            # arguments in decorators
+            return functools.partial(self.monitor)
+
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            self.check_if_num_steps_defined()
+            self.print_progress()
+            result = func(*args, **kwargs)
+            self.increment_iteration()
+            self.print_progress()
+            return result
+
+        return wrapper
+
+    def check_if_num_steps_defined(self):
+        if self._backend is None:
+            raise RuntimeError('You need to specify the number of iterations before starting to iterate. '
+                               'You can either pass it to the constructor or use the method `set_num_steps`.')
+
+
+if __name__ == '__main__':
+    # simply use it while iterating
+    for x in ProgressBar([0.5, 2, 0.5]):
+        time.sleep(x)
+
+    # or call it manually
+    data = [1, 5, 5, 6, 12, 3, 4, 5]
+    y = 0
+    p = ProgressBar(len(data))
+    for x in data:
+        p.print_progress()
+        time.sleep(0.2)
+        y += x
+        p.current_iteration += 1
+        p.print_progress()
+
+    print(y)
+
+    # or simply print the bar on every function call
+    p = ProgressBar()
+
+
+    @p.monitor
+    def heavy_computation(t=0.25):
+        time.sleep(t)
+
+
+    p.set_num_steps(10)  # 10 steps per bar
+    for _ in range(20):  # draws 2 bars
+        heavy_computation(0.25)

+ 106 - 0
lib/threading_timer_decorator.py

@@ -0,0 +1,106 @@
+from __future__ import print_function
+
+import sys
+import threading
+from time import sleep
+
+try:
+    import thread
+except ImportError:
+    import _thread as thread
+
+try:  # use code that works the same in Python 2 and 3
+    range, _print = xrange, print
+
+
+    def print(*args, **kwargs):
+        flush = kwargs.pop('flush', False)
+        _print(*args, **kwargs)
+        if flush:
+            kwargs.get('file', sys.stdout).flush()
+except NameError:
+    pass
+
+
+def cdquit(fn_name):
+    # print to stderr, unbuffered in Python 2.
+    print('{0} took too long'.format(fn_name), file=sys.stderr)
+    sys.stderr.flush()  # Python 3 stderr is likely buffered.
+    thread.interrupt_main()  # raises KeyboardInterrupt
+
+
+def exit_after(s):
+    '''
+    use as decorator to exit process if
+    function takes longer than s seconds
+    '''
+
+    def outer(fn):
+        def inner(*args, **kwargs):
+            timer = threading.Timer(s, cdquit, args=[fn.__name__])
+            timer.start()
+            try:
+                result = fn(*args, **kwargs)
+            finally:
+                timer.cancel()
+            return result
+
+        return inner
+
+    return outer
+
+
+def call_method_with_timeout(method, timeout, *args, **kwargs):
+    return exit_after(timeout)(method)(*args, **kwargs)
+
+
+@exit_after(1)
+def a():
+    print('a')
+
+
+@exit_after(2)
+def b():
+    print('b')
+    sleep(1)
+
+
+@exit_after(3)
+def c():
+    print('c')
+    sleep(2)
+
+
+@exit_after(4)
+def d():
+    print('d started')
+    for i in range(10):
+        sleep(1)
+        print(i)
+
+
+@exit_after(5)
+def countdown(n):
+    print('countdown started', flush=True)
+    for i in range(n, -1, -1):
+        print(i, end=', ', flush=True)
+        sleep(1)
+    print('countdown finished')
+
+
+def main():
+    a()
+    b()
+    c()
+    try:
+        d()
+    except KeyboardInterrupt as error:
+        print('d should not have finished, printing error as expected:')
+        print(error)
+    countdown(3)
+    countdown(10)
+    print('This should not print!!!')
+
+
+if __name__ == '__main__':
+    main()

+ 136 - 0
lib/tuned_cache.py

@@ -0,0 +1,136 @@
+import functools
+import sys
+from copy import deepcopy
+
+assert 'joblib' not in sys.modules, 'Import tuned cache before joblib'
+
+# noinspection PyProtectedMember,PyPep8
+import joblib
+# noinspection PyProtectedMember,PyPep8
+from joblib._compat import PY3_OR_LATER
+# noinspection PyProtectedMember,PyPep8
+from joblib.func_inspect import _clean_win_chars
+# noinspection PyProtectedMember,PyPep8
+from joblib.memory import MemorizedFunc, _FUNCTION_HASHES, NotMemorizedFunc, Memory
+
+_FUNC_NAMES = {}
+
+
+# noinspection SpellCheckingInspection
+class TunedMemory(Memory):
+    def cache(self, func=None, ignore=None, verbose=None, mmap_mode=False):
+        """ Decorates the given function func to only compute its return
+            value for input arguments not cached on disk.
+
+            Parameters
+            ----------
+            func: callable, optional
+                The function to be decorated
+            ignore: list of strings
+                A list of arguments name to ignore in the hashing
+            verbose: integer, optional
+                The verbosity mode of the function. By default that
+                of the memory object is used.
+            mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
+                The memmapping mode used when loading from cache
+                numpy arrays. See numpy.load for the meaning of the
+                arguments. By default that of the memory object is used.
+
+            Returns
+            -------
+            decorated_func: MemorizedFunc object
+                The returned object is a MemorizedFunc object, that is
+                callable (behaves like a function), but offers extra
+                methods for cache lookup and management. See the
+                documentation for :class:`joblib.memory.MemorizedFunc`.
+        """
+        if func is None:
+            # Partial application, to be able to specify extra keyword
+            # arguments in decorators
+            return functools.partial(self.cache, ignore=ignore,
+                                     verbose=verbose, mmap_mode=mmap_mode)
+        if self.store_backend is None:
+            return NotMemorizedFunc(func)
+        if verbose is None:
+            verbose = self._verbose
+        if mmap_mode is False:
+            mmap_mode = self.mmap_mode
+        if isinstance(func, TunedMemorizedFunc):
+            func = func.func
+        return TunedMemorizedFunc(func, location=self.store_backend,
+                                  backend=self.backend,
+                                  ignore=ignore, mmap_mode=mmap_mode,
+                                  compress=self.compress,
+                                  verbose=verbose, timestamp=self.timestamp)
+
+
+class TunedMemorizedFunc(MemorizedFunc):
+    def __call__(self, *args, **kwargs):
+        # Also store in the in-memory store of function hashes
+        if self.func not in _FUNCTION_HASHES:
+            if PY3_OR_LATER:
+                is_named_callable = (hasattr(self.func, '__name__') and
+                                     self.func.__name__ != '<lambda>')
+            else:
+                is_named_callable = (hasattr(self.func, 'func_name') and
+                                     self.func.func_name != '<lambda>')
+            if is_named_callable:
+                # Don't do this for lambda functions or strange callable
+                # objects, as it ends up being too fragile
+                func_hash = self._hash_func()
+                try:
+                    _FUNCTION_HASHES[self.func] = func_hash
+                except TypeError:
+                    # Some callable are not hashable
+                    pass
+
+        # return same result as before
+        return MemorizedFunc.__call__(self, *args, **kwargs)
+
+
+old_get_func_name = joblib.func_inspect.get_func_name
+
+
+def tuned_get_func_name(func, resolv_alias=True, win_characters=True):
+    if (func, resolv_alias, win_characters) not in _FUNC_NAMES:
+        _FUNC_NAMES[(func, resolv_alias, win_characters)] = old_get_func_name(func, resolv_alias, win_characters)
+
+        if len(_FUNC_NAMES) > 1000:
+            # keep cache small and fast; copy the keys so we can delete while iterating
+            for idx, k in enumerate(list(_FUNC_NAMES.keys())):
+                if idx % 2:
+                    del _FUNC_NAMES[k]
+        # print('cache size ', len(_FUNC_NAMES))
+
+    return deepcopy(_FUNC_NAMES[(func, resolv_alias, win_characters)])
+
+
+joblib.func_inspect.get_func_name = tuned_get_func_name
+joblib.memory.get_func_name = tuned_get_func_name
+
+
+def main():
+    class A:
+        test_cache = TunedMemory('.cache/test_cache', verbose=1)
+
+        def __init__(self, a):
+            self.a = a
+            self.compute = self.test_cache.cache(self.compute)
+
+        def compute(self):
+            return self.a + 1
+
+    a1, a2 = A(2), A(2)
+    print(a1.compute())
+    print('---')
+    print(a2.compute())
+    print('---')
+    a1.a = 3
+    print(a1.compute())
+    print('---')
+    print(a2.compute())
+    print('---')
+
+
+if __name__ == '__main__':
+    main()

+ 0 - 0
notes/.keep


+ 26 - 0
routes.py

@@ -0,0 +1,26 @@
+# for example host:port/json/login is a valid route if using the POST method
+valid_post_routes = {
+    'login',
+    'register',
+    'depot',
+    'activate_key',
+    'order',
+    'orders',
+    'news',
+    'trades',
+    'trades_on',
+    'orders_on',
+    'old_orders',
+    'cancel_order',
+    'leaderboard',
+    'tradables',
+    'gift',
+    'change_password'
+}
+
+push_message_types = set()
+
+upload_filtered = set()  # TODO enable upload filter again when accuracy improves
+
+assert len(set(valid_post_routes)) == len(valid_post_routes)
+assert upload_filtered.issubset(valid_post_routes)
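As the comment at the top of the file indicates, each entry corresponds to a POST endpoint under `/json/`. A hedged example of calling one of them with `requests`; host, port and credentials are placeholders:

```
# Example call to one of the routes listed above; 'localhost:8080' and the
# credentials are placeholders, not values taken from this commit.
import requests

response = requests.post('http://localhost:8080/json/login',
                         json={'email': 'user123@example.org', 'password': 'FILTERED'})
print(response.status_code, response.json())
```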

+ 340 - 50
run_server.py

@@ -1,66 +1,356 @@
-import sqlite3
+import datetime
+import json
+import os
+import random
+import re
+import sys
 import time
+from json import JSONDecodeError
+from logging import INFO
+from threading import Thread
+from typing import Dict, Any
 
-from bottle import run, response, route, redirect
+import bottle
+# noinspection PyUnresolvedReferences
+from bottle.ext.websocket import GeventWebSocketServer
+# noinspection PyUnresolvedReferences
+from bottle.ext.websocket import websocket
+from gevent import threading
+from gevent.queue import Queue, Empty
+from gevent.threading import Lock
+from geventwebsocket import WebSocketError
+from geventwebsocket.websocket import WebSocket
 
 import connection
 import model
 import server_controller
-import trading_bot
+from application import ROOT_URL, COPYRIGHT_INFRINGEMENT_PROBABILITY, DB_NAME, logger
+from connection import HttpError
 from debug import debug
-from server_controller import not_found
+from lib.print_exc_plus import print_exc_plus
+from lib.threading_timer_decorator import exit_after
+from routes import valid_post_routes, upload_filtered
+from util import round_to_n, rename, profile_wall_time_instead_if_profiling
+
+FRONTEND_RELATIVE_PATH = '../frontend'
+
+profile_wall_time_instead_if_profiling()
+request_lock = Lock()  # locked until the response to the request is computed
+db_commit_threads = Queue()
+if debug:
+    TIMEOUT = 600
+else:
+    TIMEOUT = 10
+
+assert all(getattr(server_controller, route) for route in valid_post_routes)
+
+
+def reset_global_variables():
+    model.current_connection = None
+    model.current_cursor = None
+    model.current_db_name = None
+    model.current_user_id = None
+    del connection.push_message_queue[:]
+    bottle.response.status = 500
+
+
+@exit_after(TIMEOUT)
+def call_controller_method_with_timeout(method, json_request: Dict[str, Any]):
+    return method(json_request)
+
+
+def _process(path, json_request):
+    start = time.clock()
+    path = path.strip().lower()
+    bottle.response.content_type = 'application/json; charset=latin-1'
+    reset_global_variables()
+    original_request = None
+    # noinspection PyBroadException
+    try:
+        json_request = json_request()
+        original_request = json_request
+        logger.log(path, INFO, message_type='handling_http_request', data=json.dumps({
+            'request': json_request,
+            'start': start,
+        }))
+        if json_request is None:
+            bottle.response.status = 400
+            resp = connection.BadRequest('Only json allowed.')
+        elif path not in valid_post_routes:
+            print('Processing time:', time.clock() - start)
+            resp = connection.NotFound('URL not available')
+        else:
+            model.connect(DB_NAME, create_if_not_exists=True)
+            method_to_call = getattr(server_controller, path)
+            try:
+                resp = call_controller_method_with_timeout(method_to_call, json_request)
+                raise connection.Success(resp)
+            except HttpError as e:
+                bottle.response.status = e.code
+                resp = e
+        if not isinstance(resp.body, dict):
+            raise TypeError('The response body should always be a dict')
+        if resp.code // 100 == 2 and path in upload_filtered and random.random() < COPYRIGHT_INFRINGEMENT_PROBABILITY:
+            resp = connection.UnavailableForLegalReasons('An upload filter detected a copyright infringement. '
+                                                         'If you think this is an error, please try again.')
+        bottle.response.status = resp.code
+        if model.current_connection is not None:
+            if bottle.response.status_code == 200:
+                thread = Thread(target=finish_request, args=[], kwargs={'success': True}, daemon=False)
+            else:
+                thread = Thread(target=finish_request, args=[], kwargs={'success': False}, daemon=False)
+            db_commit_threads.put(thread)
+            thread.start()
+        print('route=' + path, 't=' + str(round_to_n(time.clock() - start, 4)) + 's,',
+              'db=' + str(model.current_db_name))
+        logger.log(path, INFO, message_type='http_request_finished', data=json.dumps({
+            'request': json_request,
+            'response': resp.body,
+            'status': resp.code,
+            'start': start,
+            'end': time.clock(),
+        }))
+        return resp.body
+    except JSONDecodeError:
+        return handle_error('Unable to decode JSON', path, start, original_request)
+    except NotImplementedError:
+        return handle_error('This feature has not been fully implemented yet.', path, start, original_request)
+    except KeyboardInterrupt:
+        if time.clock() - start > TIMEOUT:
+            return handle_error('Processing timeout', path, start, original_request)
+        else:
+            raise
+    except Exception:
+        return handle_error('Unknown error', path, start, original_request)
+
+
+def finish_request(success):
+    if success:
+        model.current_connection.commit()
+        connection.push_messages_in_queue()
+    else:
+        model.current_connection.rollback()
+
 
 if __name__ == '__main__':
     print('sqlite3.version', model.db.version)
-    model.connect()
-
-    valid_routes = ['login',
-                    'register',
-                    'depot',
-                    'activate_key',
-                    'order', 'orders',
-                    'news',
-                    'trades',
-                    'trades_on',
-                    'orders_on',
-                    'old_orders',
-                    'cancel_order',
-                    'leaderboard',
-                    'tradables',
-                    'gift',
-                    'change_password']
-
-
-    @route('/<path>', method='POST')
+    if debug:
+        print('Running server in debug mode...')
+
+    print('Preparing backend API...')
+
+
+    @bottle.route('/json/<path>', method='POST')
     def process(path):
-        start = time.clock()
-        path = path.strip().lower()
-        if path not in valid_routes:
-            print('Processing time:', time.clock() - start)
-            return not_found()
-        response.content_type = 'application/json'
-        method_to_call = getattr(server_controller, path)
-        try:
-            expired_orders = model.drop_expired_orders()
-            trading_bot.notify_expired_orders(expired_orders)
-            resp = method_to_call()
-            if response.status_code == 200:
-                model.connection.commit()
+        with request_lock:
+            wait_for_db_commit_threads()
+            return _process(path, lambda: bottle.request.json)
+
+
+    def wait_for_db_commit_threads():
+        while len(db_commit_threads) > 0:
+            try:
+                t = db_commit_threads.get()
+            except Empty:
+                break
+            t.join()
+
+
+    print('Preparing index page...')
+
+
+    @bottle.route('/', method='GET')
+    def index():
+        if ROOT_URL != '/':
+            bottle.redirect(ROOT_URL)
+
+
+    def handle_error(message, path, start, request, status=500):
+        bottle.response.status = status
+        print_exc_plus()
+        if model.current_connection is not None:
+            model.current_connection.rollback()
+        print('route=' + str(path), 't=' + str(round_to_n(time.clock() - start, 4)) + 's,',
+              'db=' + str(model.current_db_name))
+        logger.exception(path, message_type='http_request', data=json.dumps({
+            'status': status,
+            'start': start,
+            'end': time.clock(),
+            'exception': str(sys.exc_info()),
+            'request': request,
+        }))
+        return connection.InternalServerError(message).body
+
+
+    print('Preparing websocket connections...')
+
+
+    @bottle.get('/websocket', apply=[websocket])
+    def websocket(ws: WebSocket):
+        print('websocket connection', *ws.handler.client_address, datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
+
+        while True:
+            start = time.clock()
+            path = None
+            request_token = None
+            outer_json = None
+
+            # noinspection PyBroadException
+            try:
+                if ws.closed:
+                    connection.ws_cleanup(ws)
+                    break
+                try:
+                    msg = ws.read_message()
+                except ConnectionResetError:
+                    msg = None
+                except WebSocketError as e:
+                    if e.args[0] == 'Unexpected EOF while decoding header':
+                        msg = None
+                    else:
+                        raise
+
+                if msg is not None:  # received some message
+                    with request_lock:
+                        wait_for_db_commit_threads()
+                        msg = bytes(msg)
+                        outer_json = None
+                        outer_json = bottle.json_loads(msg)
+                        path = outer_json['route']
+                        inner_json = outer_json['body']
+                        request_token = outer_json['request_token']
+                        inner_result_json = _process(path, lambda: inner_json)
+
+                        if 'error' in inner_result_json:
+                            status_code = int(inner_result_json['error'][:3])
+                        else:
+                            status_code = 200
+
+                        if model.current_user_id is not None and status_code == 200:
+                            # if there is a user_id involved, associate it with this websocket
+                            user_id = (model.current_db_name, model.current_user_id)
+
+                            if user_id in connection.websockets_for_user:
+                                if ws not in connection.websockets_for_user[user_id]:
+                                    connection.websockets_for_user[user_id].append(ws)
+                            else:
+                                connection.websockets_for_user[user_id] = [ws]
+                            if ws in connection.users_for_websocket:
+                                if user_id not in connection.users_for_websocket[ws]:
+                                    connection.users_for_websocket[ws].append(user_id)
+                            else:
+                                connection.users_for_websocket[ws] = [user_id]
+
+                        outer_result_json = {
+                            'body': inner_result_json,
+                            'http_status_code': status_code,
+                            'request_token': request_token
+                        }
+                        outer_result_json = json.dumps(outer_result_json)
+                        if ws.closed:
+                            connection.ws_cleanup(ws)
+                            break
+                        ws.send(outer_result_json)
+                        print('websocket message',
+                              *ws.handler.client_address,
+                              datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+                              status_code,
+                              len(outer_result_json))
+                else:
+                    connection.ws_cleanup(ws)
+                    break
+            except JSONDecodeError:
+                inner_result_json = handle_error('Unable to decode outer JSON', path, start, outer_json)
+                status_code = 403
+                inner_result_json['http_status_code'] = status_code
+                if request_token is not None:
+                    inner_result_json['request_token'] = request_token
+                inner_result_json = json.dumps(inner_result_json)
+                if ws.closed:
+                    connection.ws_cleanup(ws)
+                    break
+                ws.send(inner_result_json)
+                print('websocket message',
+                      *ws.handler.client_address,
+                      datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+                      status_code,
+                      len(inner_result_json))
+            except Exception:
+                inner_result_json = handle_error('Unknown error', path, start, outer_json)
+                status_code = 500
+                inner_result_json['http_status_code'] = status_code
+                if request_token is not None:
+                    inner_result_json['request_token'] = request_token
+                inner_result_json = json.dumps(inner_result_json)
+                if ws.closed:
+                    connection.ws_cleanup(ws)
+                    break
+                ws.send(inner_result_json)
+                print('websocket message',
+                      *ws.handler.client_address,
+                      datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+                      status_code,
+                      len(inner_result_json))
+
+
+    def _serve_static_directory(route, root, download=False):
+        method_name = ''.join(c for c in root if re.match(r'[A-Za-z]', c))
+        assert method_name not in globals()
+
+        @bottle.route(route, method=['GET', 'OPTIONS'])
+        @rename(method_name)
+        def serve_static_file(filename):
+            # start = time.clock()
+            # logger.log(filename, INFO, message_type='handling_http_request', data=json.dumps({
+            #     'start': start,
+            # }))
+            # try:
+            if filename == 'api.json':
+                return {'endpoint': bottle.request.urlparts[0] + '://' + bottle.request.urlparts[1] + '/json/'}
+            if download:
+                default_name = 'ytm-' + filename
+                return bottle.static_file(filename, root=root, download=default_name)
             else:
-                model.connection.rollback()
-            print('Processing time:', time.clock() - start)
-            return resp
-        except sqlite3.IntegrityError as e:
-            print(e)
-            model.connection.rollback()
-            print('Processing time:', time.clock() - start)
-            return server_controller.internal_server_error('Action violates database constraints.')
+                return bottle.static_file(filename, root=root, download=False)
+            # finally:
+            #     logger.log(filename, INFO, message_type='http_request_finished', data=json.dumps({
+            #         'status': bottle.response.status_code,
+            #         'start': start,
+            #         'end': time.clock(),
+            #     }))
+
+
+    # frontend
+    print('Preparing frontend directories...')
+    for subdir, dirs, files in os.walk(FRONTEND_RELATIVE_PATH):
+        # subdir now has the form   ../frontend/config
+        _serve_static_directory(
+            route=subdir.replace('\\', '/').replace(FRONTEND_RELATIVE_PATH, '') + '/<filename>',
+            root=subdir
+        )
 
+    # app
+    print('Preparing app for download...')
+    _serve_static_directory(
+        route='/app/<filename>',
+        root='../android/app/release',
+        download=True,
+    )
 
-    @route('/', method='GET')
-    def process():
-        redirect('http://koljastrohm-games.com/downloads/orderer_installer.zip')
+    logger.log('Server start', INFO, 'server_start', json.dumps({
+        'host': '0.0.0.0',
+        'port': connection.PORT,
+        'debug': debug,
+    }))
 
+    # commit regularly
+    log_commit_time = logger.commit()
+    log_commit_delay = 15
+    print(f'Committing logfile transaction took {log_commit_time}s, '
+          f'scheduling to run every {log_commit_delay}s')
+    threading.Timer(log_commit_delay, logger.commit).start()
 
-    run(host='0.0.0.0', port=connection.port, debug=debug)
-    model.connection.close()
+    print('Running server...')
+    bottle.run(host='0.0.0.0', port=connection.PORT, debug=debug, server=GeventWebSocketServer)
+    logger.commit()
+    model.cleanup()
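The websocket handler above wraps the same JSON API: each client message carries a `route`, a `body` and a `request_token`, and the reply echoes the token together with an HTTP-like status code. A minimal client sketch, assuming the third-party `websocket-client` package and a placeholder host:

```
# Client-side sketch of the websocket envelope handled above; host/port are
# placeholders and websocket-client is an assumed extra dependency.
import json
import websocket  # pip install websocket-client

ws = websocket.create_connection('ws://localhost:8080/websocket')
ws.send(json.dumps({
    'route': 'login',  # must be one of valid_post_routes
    'body': {'email': 'user123@example.org', 'password': 'FILTERED'},
    'request_token': 'any-client-chosen-id',
}))
reply = json.loads(ws.recv())
print(reply['http_status_code'], reply['request_token'], reply['body'])
ws.close()
```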

+ 0 - 0
test/.keep