changeset 6015:968f2d4214e8

db: remove SQLAlchemy Migrate library and RhodeCode migration scripts

It is no longer used or referenced.
author Søren Løvborg <sorenl@unity3d.com>
date Wed, 18 May 2016 14:34:07 +0200
parents c7ef77ab2f95
children c436f337e253
files LICENSE.md MANIFEST.in kallithea/lib/dbmigrate/__init__.py kallithea/lib/dbmigrate/migrate.cfg kallithea/lib/dbmigrate/migrate/__init__.py kallithea/lib/dbmigrate/migrate/changeset/__init__.py kallithea/lib/dbmigrate/migrate/changeset/ansisql.py kallithea/lib/dbmigrate/migrate/changeset/constraint.py kallithea/lib/dbmigrate/migrate/changeset/databases/__init__.py kallithea/lib/dbmigrate/migrate/changeset/databases/firebird.py kallithea/lib/dbmigrate/migrate/changeset/databases/mysql.py kallithea/lib/dbmigrate/migrate/changeset/databases/oracle.py kallithea/lib/dbmigrate/migrate/changeset/databases/postgres.py kallithea/lib/dbmigrate/migrate/changeset/databases/sqlite.py kallithea/lib/dbmigrate/migrate/changeset/databases/visitor.py kallithea/lib/dbmigrate/migrate/changeset/schema.py kallithea/lib/dbmigrate/migrate/exceptions.py kallithea/lib/dbmigrate/migrate/versioning/__init__.py kallithea/lib/dbmigrate/migrate/versioning/api.py kallithea/lib/dbmigrate/migrate/versioning/cfgparse.py kallithea/lib/dbmigrate/migrate/versioning/config.py kallithea/lib/dbmigrate/migrate/versioning/genmodel.py kallithea/lib/dbmigrate/migrate/versioning/migrate_repository.py kallithea/lib/dbmigrate/migrate/versioning/pathed.py kallithea/lib/dbmigrate/migrate/versioning/repository.py kallithea/lib/dbmigrate/migrate/versioning/schema.py kallithea/lib/dbmigrate/migrate/versioning/schemadiff.py kallithea/lib/dbmigrate/migrate/versioning/script/__init__.py kallithea/lib/dbmigrate/migrate/versioning/script/base.py kallithea/lib/dbmigrate/migrate/versioning/script/py.py kallithea/lib/dbmigrate/migrate/versioning/script/sql.py kallithea/lib/dbmigrate/migrate/versioning/shell.py kallithea/lib/dbmigrate/migrate/versioning/template.py kallithea/lib/dbmigrate/migrate/versioning/templates/__init__.py kallithea/lib/dbmigrate/migrate/versioning/templates/manage.py_tmpl kallithea/lib/dbmigrate/migrate/versioning/templates/manage/default.py_tmpl kallithea/lib/dbmigrate/migrate/versioning/templates/manage/pylons.py_tmpl kallithea/lib/dbmigrate/migrate/versioning/templates/repository/__init__.py kallithea/lib/dbmigrate/migrate/versioning/templates/repository/default/README kallithea/lib/dbmigrate/migrate/versioning/templates/repository/default/__init__.py kallithea/lib/dbmigrate/migrate/versioning/templates/repository/default/migrate.cfg kallithea/lib/dbmigrate/migrate/versioning/templates/repository/default/versions/__init__.py kallithea/lib/dbmigrate/migrate/versioning/templates/repository/pylons/README kallithea/lib/dbmigrate/migrate/versioning/templates/repository/pylons/__init__.py kallithea/lib/dbmigrate/migrate/versioning/templates/repository/pylons/migrate.cfg kallithea/lib/dbmigrate/migrate/versioning/templates/repository/pylons/versions/__init__.py kallithea/lib/dbmigrate/migrate/versioning/templates/script/__init__.py kallithea/lib/dbmigrate/migrate/versioning/templates/script/default.py_tmpl kallithea/lib/dbmigrate/migrate/versioning/templates/script/pylons.py_tmpl kallithea/lib/dbmigrate/migrate/versioning/templates/sql_script/default.py_tmpl kallithea/lib/dbmigrate/migrate/versioning/templates/sql_script/pylons.py_tmpl kallithea/lib/dbmigrate/migrate/versioning/util/__init__.py kallithea/lib/dbmigrate/migrate/versioning/util/importpath.py kallithea/lib/dbmigrate/migrate/versioning/util/keyedinstance.py kallithea/lib/dbmigrate/migrate/versioning/version.py kallithea/lib/dbmigrate/schema/__init__.py kallithea/lib/dbmigrate/schema/db_1_1_0.py kallithea/lib/dbmigrate/schema/db_1_2_0.py 
kallithea/lib/dbmigrate/schema/db_1_3_0.py kallithea/lib/dbmigrate/schema/db_1_4_0.py kallithea/lib/dbmigrate/schema/db_1_5_0.py kallithea/lib/dbmigrate/schema/db_1_5_2.py kallithea/lib/dbmigrate/schema/db_1_6_0.py kallithea/lib/dbmigrate/schema/db_1_7_0.py kallithea/lib/dbmigrate/schema/db_1_8_0.py kallithea/lib/dbmigrate/schema/db_2_0_0.py kallithea/lib/dbmigrate/schema/db_2_0_1.py kallithea/lib/dbmigrate/schema/db_2_0_2.py kallithea/lib/dbmigrate/schema/db_2_1_0.py kallithea/lib/dbmigrate/schema/db_2_2_0.py kallithea/lib/dbmigrate/schema/db_2_2_3.py kallithea/lib/dbmigrate/versions/001_initial_release.py kallithea/lib/dbmigrate/versions/002_version_1_1_0.py kallithea/lib/dbmigrate/versions/003_version_1_2_0.py kallithea/lib/dbmigrate/versions/004_version_1_3_0.py kallithea/lib/dbmigrate/versions/005_version_1_3_0.py kallithea/lib/dbmigrate/versions/006_version_1_4_0.py kallithea/lib/dbmigrate/versions/007_version_1_4_0.py kallithea/lib/dbmigrate/versions/008_version_1_5_0.py kallithea/lib/dbmigrate/versions/009_version_1_5_1.py kallithea/lib/dbmigrate/versions/010_version_1_5_2.py kallithea/lib/dbmigrate/versions/011_version_1_6_0.py kallithea/lib/dbmigrate/versions/012_version_1_7_0.py kallithea/lib/dbmigrate/versions/013_version_1_7_0.py kallithea/lib/dbmigrate/versions/014_version_1_7_1.py kallithea/lib/dbmigrate/versions/015_version_1_8_0.py kallithea/lib/dbmigrate/versions/016_version_2_0_0.py kallithea/lib/dbmigrate/versions/017_version_2_0_0.py kallithea/lib/dbmigrate/versions/018_version_2_0_0.py kallithea/lib/dbmigrate/versions/019_version_2_0_0.py kallithea/lib/dbmigrate/versions/020_version_2_0_1.py kallithea/lib/dbmigrate/versions/021_version_2_0_2.py kallithea/lib/dbmigrate/versions/022_version_2_0_2.py kallithea/lib/dbmigrate/versions/023_version_2_1_0.py kallithea/lib/dbmigrate/versions/024_version_2_1_0.py kallithea/lib/dbmigrate/versions/025_version_2_1_0.py kallithea/lib/dbmigrate/versions/026_version_2_2_0.py kallithea/lib/dbmigrate/versions/027_version_2_2_0.py kallithea/lib/dbmigrate/versions/028_version_2_2_3.py kallithea/lib/dbmigrate/versions/029_version_2_2_3.py kallithea/lib/dbmigrate/versions/030_version_2_2_3.py kallithea/lib/dbmigrate/versions/031_version_2_2_3.py kallithea/lib/dbmigrate/versions/__init__.py
diffstat 94 files changed, 0 insertions(+), 35924 deletions(-)
--- a/LICENSE.md	Mon Jul 18 14:08:43 2016 +0200
+++ b/LICENSE.md	Wed May 18 14:34:07 2016 +0200
@@ -255,19 +255,6 @@
 
 
 
-Migrate
--------
-
-Kallithea incorporates in kallithea/lib/dbmigrate/migrate parts of the Python
-system called [Migrate or sqlalchemy-migrate](https://github.com/stackforge/sqlalchemy-migrate),
-which is:
-
-Copyright (c) 2009 Evan Rosson, Jan Dittberner, Domen Kožar
-
-and licensed under the MIT-permissive license, which is
-[included in this distribution](MIT-Permissive-License.txt).
-
-
 Icon fonts
 ----------
 
--- a/MANIFEST.in	Mon Jul 18 14:08:43 2016 +0200
+++ b/MANIFEST.in	Wed May 18 14:34:07 2016 +0200
@@ -12,7 +12,6 @@
 include           kallithea/bin/template.ini.mako
 include           kallithea/config/deployment.ini_tmpl
 recursive-include kallithea/i18n *
-recursive-include kallithea/lib/dbmigrate *.py_tmpl README migrate.cfg
 recursive-include kallithea/public *
 recursive-include kallithea/templates *
 recursive-include kallithea/tests/fixtures *
--- a/kallithea/lib/dbmigrate/__init__.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Database migration modules
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Dec 11, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import logging
-
-from kallithea.lib.utils import BasePasterCommand, Command, add_cache
-from kallithea.lib.db_manage import DbManage
-
-log = logging.getLogger(__name__)
-
-
-class UpgradeDb(BasePasterCommand):
-    """Command used for paster to upgrade our database to newer version
-    """
-
-    max_args = 1
-    min_args = 1
-
-    usage = "CONFIG_FILE"
-    summary = "Upgrades current db to newer version"
-    group_name = "Kallithea"
-
-    parser = Command.standard_parser(verbose=True)
-
-    def command(self):
-        from pylons import config
-        add_cache(config)
-
-        db_uri = config['sqlalchemy.db1.url']
-        dbmanage = DbManage(log_sql=True, dbconf=db_uri,
-                            root=config['here'], tests=False,
-                            cli_args=self.options.__dict__)
-        dbmanage.upgrade()
-
-    def update_parser(self):
-        self.parser.add_option('--sql',
-                      action='store_true',
-                      dest='just_sql',
-                      help="Prints upgrade sql for further investigation",
-                      default=False)
-
-        self.parser.add_option('--force-yes',
-                           action='store_true',
-                           dest='force_ask',
-                           default=None,
-                           help='Force yes to every question')
-        self.parser.add_option('--force-no',
-                           action='store_false',
-                           dest='force_ask',
-                           default=None,
-                           help='Force no to every question')
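For reference, the removed UpgradeDb paster command amounted to reading the database URL from the application ini and handing it to DbManage.upgrade(). Below is a minimal sketch of that flow with the Pylons plumbing replaced by plain ConfigParser; the ini path, the "app:main" section name and the cli_args contents are illustrative assumptions, while the DbManage arguments mirror the removed command() method above.

```python
# Python 2, matching the codebase of that era.
from ConfigParser import ConfigParser

from kallithea.lib.db_manage import DbManage

cp = ConfigParser()
cp.read('my.ini')                                   # hypothetical config file
db_uri = cp.get('app:main', 'sqlalchemy.db1.url')   # section name is an assumption

# Arguments copied from the removed command(); cli_args values are illustrative.
dbmanage = DbManage(log_sql=True, dbconf=db_uri,
                    root='.', tests=False,
                    cli_args={'force_ask': True, 'just_sql': False})
dbmanage.upgrade()
```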
--- a/kallithea/lib/dbmigrate/migrate.cfg	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-[db_settings]
-# Used to identify which repository this database is versioned under.
-# You can use the name of your project.
-repository_id=kallithea_db_migrations
-
-# The name of the database table used to track the schema version.
-# This name shouldn't already be used by your project.
-# If this is changed once a database is under version control, you'll need to
-# change the table name in each database too.
-version_table=db_migrate_version
-
-# When committing a change script, Migrate will attempt to generate the
-# sql for all supported databases; normally, if one of them fails - probably
-# because you don't have that database installed - it is ignored and the
-# commit continues, perhaps ending successfully.
-# Databases in this list MUST compile successfully during a commit, or the
-# entire commit will fail. List the databases your application will actually
-# be using to ensure your updates to that database work properly.
-# This must be a list; example: ['postgres','sqlite']
-required_dbs=['sqlite']
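The repository described by this migrate.cfg (together with the versions/ scripts removed below) was driven through the vendored versioning API. A rough sketch of that usage follows; it was only meaningful before this changeset, the database URL is a placeholder, and version_control/db_version/upgrade are the standard sqlalchemy-migrate entry points the vendored copy exposed.

```python
from kallithea.lib.dbmigrate.migrate.versioning import api

db_url = 'sqlite:///kallithea.db'        # placeholder database URL
repository = 'kallithea/lib/dbmigrate'   # directory holding migrate.cfg and versions/

api.version_control(db_url, repository)     # create the db_migrate_version table
print(api.db_version(db_url, repository))   # current schema version
api.upgrade(db_url, repository)             # apply versions/NNN_*.py up to the latest
```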
--- a/kallithea/lib/dbmigrate/migrate/__init__.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-"""
-   SQLAlchemy migrate provides two APIs :mod:`migrate.versioning` for
-   database schema version and repository management and
-   :mod:`migrate.changeset` that allows to define database schema changes
-   using Python.
-"""
-
-from kallithea.lib.dbmigrate.migrate.versioning import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-__version__ = '0.7.3.dev'
--- a/kallithea/lib/dbmigrate/migrate/changeset/__init__.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-"""
-   This module extends SQLAlchemy and provides additional DDL [#]_
-   support.
-
-   .. [#] SQL Data Definition Language
-"""
-import re
-import warnings
-
-import sqlalchemy
-from sqlalchemy import __version__ as _sa_version
-
-warnings.simplefilter('always', DeprecationWarning)
-
-_sa_version = tuple(int(re.match("\d+", x).group(0))
-                    for x in _sa_version.split("."))
-SQLA_06 = _sa_version >= (0, 6)
-SQLA_07 = _sa_version >= (0, 7)
-
-del re
-del _sa_version
-
-from kallithea.lib.dbmigrate.migrate.changeset.schema import *
-from kallithea.lib.dbmigrate.migrate.changeset.constraint import *
-
-sqlalchemy.schema.Table.__bases__ += (ChangesetTable,)
-sqlalchemy.schema.Column.__bases__ += (ChangesetColumn,)
-sqlalchemy.schema.Index.__bases__ += (ChangesetIndex,)
-
-sqlalchemy.schema.DefaultClause.__bases__ += (ChangesetDefaultClause,)
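The last four lines above graft Migrate's mixins onto SQLAlchemy's own classes by appending to __bases__. The following self-contained toy illustrates that technique; all class names here are simplified stand-ins, not the real SQLAlchemy or Migrate classes.

```python
class SchemaItem(object):
    """Stand-in for a shared base class (as sqlalchemy.schema.SchemaItem is)."""

class Table(SchemaItem):
    """Stand-in for sqlalchemy.schema.Table."""
    def __init__(self, name):
        self.name = name

class ChangesetTable(object):
    """Stand-in for the Migrate mixin that adds ALTER-style operations."""
    def rename(self, new_name):
        print('ALTER TABLE %s RENAME TO %s' % (self.name, new_name))
        self.name = new_name

# After this assignment every Table instance, existing or future, has .rename().
Table.__bases__ += (ChangesetTable,)

Table('users').rename('members')   # prints: ALTER TABLE users RENAME TO members
```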
--- a/kallithea/lib/dbmigrate/migrate/changeset/ansisql.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,293 +0,0 @@
-"""
-   Extensions to SQLAlchemy for altering existing tables.
-
-   At the moment, this isn't so much based off of ANSI as much as
-   things that just happen to work with multiple databases.
-"""
-import StringIO
-
-import sqlalchemy as sa
-from sqlalchemy.schema import SchemaVisitor
-from sqlalchemy.engine.default import DefaultDialect
-from sqlalchemy.sql import ClauseElement
-from sqlalchemy.schema import (ForeignKeyConstraint,
-                               PrimaryKeyConstraint,
-                               CheckConstraint,
-                               UniqueConstraint,
-                               Index)
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.changeset import constraint
-
-from sqlalchemy.schema import AddConstraint, DropConstraint
-from sqlalchemy.sql.compiler import DDLCompiler
-SchemaGenerator = SchemaDropper = DDLCompiler
-
-
-class AlterTableVisitor(SchemaVisitor):
-    """Common operations for ``ALTER TABLE`` statements."""
-
-    # engine.Compiler looks for .statement
-    # when it spawns off a new compiler
-    statement = ClauseElement()
-
-    def append(self, s):
-        """Append content to the SchemaIterator's query buffer."""
-
-        self.buffer.write(s)
-
-    def execute(self):
-        """Execute the contents of the SchemaIterator's buffer."""
-        try:
-            return self.connection.execute(self.buffer.getvalue())
-        finally:
-            self.buffer.truncate(0)
-
-    def __init__(self, dialect, connection, **kw):
-        self.connection = connection
-        self.buffer = StringIO.StringIO()
-        self.preparer = dialect.identifier_preparer
-        self.dialect = dialect
-
-    def traverse_single(self, elem):
-        ret = super(AlterTableVisitor, self).traverse_single(elem)
-        if ret:
-            # adapt to 0.6 which uses a string-returning
-            # object
-            self.append(" %s" % ret)
-
-    def _to_table(self, param):
-        """Returns the table object for the given param object."""
-        if isinstance(param, (sa.Column, sa.Index, sa.schema.Constraint)):
-            ret = param.table
-        else:
-            ret = param
-        return ret
-
-    def start_alter_table(self, param):
-        """Returns the start of an ``ALTER TABLE`` SQL-Statement.
-
-        Use the param object to determine the table name and use it
-        for building the SQL statement.
-
-        :param param: object to determine the table from
-        :type param: :class:`sqlalchemy.Column`, :class:`sqlalchemy.Index`,
-          :class:`sqlalchemy.schema.Constraint`, :class:`sqlalchemy.Table`,
-          or string (table name)
-        """
-        table = self._to_table(param)
-        self.append('\nALTER TABLE %s ' % self.preparer.format_table(table))
-        return table
-
-
-class ANSIColumnGenerator(AlterTableVisitor, SchemaGenerator):
-    """Extends ansisql generator for column creation (alter table add col)"""
-
-    def visit_column(self, column):
-        """Create a column (table already exists).
-
-        :param column: column object
-        :type column: :class:`sqlalchemy.Column` instance
-        """
-        if column.default is not None:
-            self.traverse_single(column.default)
-
-        table = self.start_alter_table(column)
-        self.append("ADD ")
-
-        self.append(self.get_column_specification(column))
-
-        for cons in column.constraints:
-            self.traverse_single(cons)
-        self.execute()
-
-        # ALTER TABLE STATEMENTS
-
-        # add indexes and unique constraints
-        if column.index_name:
-            Index(column.index_name,column).create()
-        elif column.unique_name:
-            constraint.UniqueConstraint(column,
-                                        name=column.unique_name).create()
-
-        # SA bounds FK constraints to table, add manually
-        for fk in column.foreign_keys:
-            self.add_foreignkey(fk.constraint)
-
-        # add primary key constraint if needed
-        if column.primary_key_name:
-            cons = constraint.PrimaryKeyConstraint(column,
-                                                   name=column.primary_key_name)
-            cons.create()
-
-    def add_foreignkey(self, fk):
-        self.connection.execute(AddConstraint(fk))
-
-class ANSIColumnDropper(AlterTableVisitor, SchemaDropper):
-    """Extends ANSI SQL dropper for column dropping (``ALTER TABLE
-    DROP COLUMN``).
-    """
-
-    def visit_column(self, column):
-        """Drop a column from its table.
-
-        :param column: the column object
-        :type column: :class:`sqlalchemy.Column`
-        """
-        table = self.start_alter_table(column)
-        self.append('DROP COLUMN %s' % self.preparer.format_column(column))
-        self.execute()
-
-
-class ANSISchemaChanger(AlterTableVisitor, SchemaGenerator):
-    """Manages changes to existing schema elements.
-
-    Note that columns are schema elements; ``ALTER TABLE ADD COLUMN``
-    is in SchemaGenerator.
-
-    All items may be renamed. Columns can also have many of their properties -
-    type, for example - changed.
-
-    Each function is passed a tuple, containing (object, name); where
-    object is a type of object you'd expect for that function
-    (ie. table for visit_table) and name is the object's new
-    name. NONE means the name is unchanged.
-    """
-
-    def visit_table(self, table):
-        """Rename a table. Other ops aren't supported."""
-        self.start_alter_table(table)
-        self.append("RENAME TO %s" % self.preparer.quote(table.new_name,
-                                                         table.quote))
-        self.execute()
-
-    def visit_index(self, index):
-        """Rename an index"""
-        if hasattr(self, '_validate_identifier'):
-            # SA <= 0.6.3
-            self.append("ALTER INDEX %s RENAME TO %s" % (
-                    self.preparer.quote(
-                        self._validate_identifier(
-                            index.name, True), index.quote),
-                    self.preparer.quote(
-                        self._validate_identifier(
-                            index.new_name, True), index.quote)))
-        else:
-            # SA >= 0.6.5
-            self.append("ALTER INDEX %s RENAME TO %s" % (
-                    self.preparer.quote(
-                        self._index_identifier(
-                            index.name), index.quote),
-                    self.preparer.quote(
-                        self._index_identifier(
-                            index.new_name), index.quote)))
-        self.execute()
-
-    def visit_column(self, delta):
-        """Rename/change a column."""
-        # ALTER COLUMN is implemented as several ALTER statements
-        keys = delta.keys()
-        if 'type' in keys:
-            self._run_subvisit(delta, self._visit_column_type)
-        if 'nullable' in keys:
-            self._run_subvisit(delta, self._visit_column_nullable)
-        if 'server_default' in keys:
-            # Skip 'default': only handle server-side defaults, others
-            # are managed by the app, not the db.
-            self._run_subvisit(delta, self._visit_column_default)
-        if 'name' in keys:
-            self._run_subvisit(delta, self._visit_column_name, start_alter=False)
-
-    def _run_subvisit(self, delta, func, start_alter=True):
-        """Runs visit method based on what needs to be changed on column"""
-        table = self._to_table(delta.table)
-        col_name = delta.current_name
-        if start_alter:
-            self.start_alter_column(table, col_name)
-        ret = func(table, delta.result_column, delta)
-        self.execute()
-
-    def start_alter_column(self, table, col_name):
-        """Starts ALTER COLUMN"""
-        self.start_alter_table(table)
-        self.append("ALTER COLUMN %s " % self.preparer.quote(col_name, table.quote))
-
-    def _visit_column_nullable(self, table, column, delta):
-        nullable = delta['nullable']
-        if nullable:
-            self.append("DROP NOT NULL")
-        else:
-            self.append("SET NOT NULL")
-
-    def _visit_column_default(self, table, column, delta):
-        default_text = self.get_column_default_string(column)
-        if default_text is not None:
-            self.append("SET DEFAULT %s" % default_text)
-        else:
-            self.append("DROP DEFAULT")
-
-    def _visit_column_type(self, table, column, delta):
-        type_ = delta['type']
-        type_text = str(type_.compile(dialect=self.dialect))
-        self.append("TYPE %s" % type_text)
-
-    def _visit_column_name(self, table, column, delta):
-        self.start_alter_table(table)
-        col_name = self.preparer.quote(delta.current_name, table.quote)
-        new_name = self.preparer.format_column(delta.result_column)
-        self.append('RENAME COLUMN %s TO %s' % (col_name, new_name))
-
-
-class ANSIConstraintCommon(AlterTableVisitor):
-    """
-    Migrate's constraints require a separate creation function from
-    SA's: Migrate's constraints are created independently of a table;
-    SA's are created at the same time as the table.
-    """
-
-    def get_constraint_name(self, cons):
-        """Gets a name for the given constraint.
-
-        If the name is already set it will be used otherwise the
-        constraint's :meth:`autoname <migrate.changeset.constraint.ConstraintChangeset.autoname>`
-        method is used.
-
-        :param cons: constraint object
-        """
-        if cons.name is not None:
-            ret = cons.name
-        else:
-            ret = cons.name = cons.autoname()
-        return self.preparer.quote(ret, cons.quote)
-
-    def visit_migrate_primary_key_constraint(self, *p, **k):
-        self._visit_constraint(*p, **k)
-
-    def visit_migrate_foreign_key_constraint(self, *p, **k):
-        self._visit_constraint(*p, **k)
-
-    def visit_migrate_check_constraint(self, *p, **k):
-        self._visit_constraint(*p, **k)
-
-    def visit_migrate_unique_constraint(self, *p, **k):
-        self._visit_constraint(*p, **k)
-
-class ANSIConstraintGenerator(ANSIConstraintCommon, SchemaGenerator):
-    def _visit_constraint(self, constraint):
-        constraint.name = self.get_constraint_name(constraint)
-        self.append(self.process(AddConstraint(constraint)))
-        self.execute()
-
-class ANSIConstraintDropper(ANSIConstraintCommon, SchemaDropper):
-    def _visit_constraint(self, constraint):
-        constraint.name = self.get_constraint_name(constraint)
-        self.append(self.process(DropConstraint(constraint, cascade=constraint.cascade)))
-        self.execute()
-
-
-class ANSIDialect(DefaultDialect):
-    columngenerator = ANSIColumnGenerator
-    columndropper = ANSIColumnDropper
-    schemachanger = ANSISchemaChanger
-    constraintgenerator = ANSIConstraintGenerator
-    constraintdropper = ANSIConstraintDropper
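These ANSI visitors were what ultimately backed the column-level changeset API. The sketch below shows that end-user face; it is only meaningful with the pre-removal package and the SQLAlchemy 0.7-era stack it targeted, and the table and column names are made up.

```python
import sqlalchemy as sa
# Importing the changeset package monkey-patches Table/Column (see its __init__.py above).
from kallithea.lib.dbmigrate.migrate.changeset import *

engine = sa.create_engine('sqlite:///:memory:')
meta = sa.MetaData(bind=engine)          # bind= was still supported in that SQLAlchemy era
users = sa.Table('users', meta, sa.Column('user_id', sa.Integer, primary_key=True))
meta.create_all()

email = sa.Column('email', sa.String(255))
email.create(users)   # ANSIColumnGenerator: ALTER TABLE users ADD email VARCHAR(255)
email.drop()          # column dropper; on SQLite this recreates the table instead
```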
--- a/kallithea/lib/dbmigrate/migrate/changeset/constraint.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,200 +0,0 @@
-"""
-   This module defines standalone schema constraint classes.
-"""
-from sqlalchemy import schema
-
-from kallithea.lib.dbmigrate.migrate.exceptions import *
-
-
-class ConstraintChangeset(object):
-    """Base class for Constraint classes."""
-
-    def _normalize_columns(self, cols, table_name=False):
-        """Given: column objects or names; return col names and
-        (maybe) a table"""
-        colnames = []
-        table = None
-        for col in cols:
-            if isinstance(col, schema.Column):
-                if col.table is not None and table is None:
-                    table = col.table
-                if table_name:
-                    col = '.'.join((col.table.name, col.name))
-                else:
-                    col = col.name
-            colnames.append(col)
-        return colnames, table
-
-    def __do_imports(self, visitor_name, *a, **kw):
-        engine = kw.pop('engine', self.table.bind)
-        from kallithea.lib.dbmigrate.migrate.changeset.databases.visitor import (get_engine_visitor,
-                                                                                 run_single_visitor)
-        visitorcallable = get_engine_visitor(engine, visitor_name)
-        run_single_visitor(engine, visitorcallable, self, *a, **kw)
-
-    def create(self, *a, **kw):
-        """Create the constraint in the database.
-
-        :param engine: the database engine to use. If this is \
-        :keyword:`None` the instance's engine will be used
-        :type engine: :class:`sqlalchemy.engine.base.Engine`
-        :param connection: reuse connection instead of creating new one.
-        :type connection: :class:`sqlalchemy.engine.base.Connection` instance
-        """
-        # TODO: set the parent here instead of in __init__
-        self.__do_imports('constraintgenerator', *a, **kw)
-
-    def drop(self, *a, **kw):
-        """Drop the constraint from the database.
-
-        :param engine: the database engine to use. If this is
-          :keyword:`None` the instance's engine will be used
-        :param cascade: Issue CASCADE drop if database supports it
-        :type engine: :class:`sqlalchemy.engine.base.Engine`
-        :type cascade: bool
-        :param connection: reuse connection instead of creating new one.
-        :type connection: :class:`sqlalchemy.engine.base.Connection` instance
-        :returns: Instance with cleared columns
-        """
-        self.cascade = kw.pop('cascade', False)
-        self.__do_imports('constraintdropper', *a, **kw)
-        # the spirit of Constraint objects is that they
-        # are immutable (just like in a DB.  they're only ADDed
-        # or DROPped).
-        #self.columns.clear()
-        return self
-
-
-class PrimaryKeyConstraint(ConstraintChangeset, schema.PrimaryKeyConstraint):
-    """Construct PrimaryKeyConstraint
-
-    Migrate's additional parameters:
-
-    :param cols: Columns in constraint.
-    :param table: If columns are passed as strings, this kw is required
-    :type table: Table instance
-    :type cols: strings or Column instances
-    """
-
-    __migrate_visit_name__ = 'migrate_primary_key_constraint'
-
-    def __init__(self, *cols, **kwargs):
-        colnames, table = self._normalize_columns(cols)
-        table = kwargs.pop('table', table)
-        super(PrimaryKeyConstraint, self).__init__(*colnames, **kwargs)
-        if table is not None:
-            self._set_parent(table)
-
-    def autoname(self):
-        """Mimic the database's automatic constraint names"""
-        return "%s_pkey" % self.table.name
-
-
-class ForeignKeyConstraint(ConstraintChangeset, schema.ForeignKeyConstraint):
-    """Construct ForeignKeyConstraint
-
-    Migrate's additional parameters:
-
-    :param columns: Columns in constraint
-    :param refcolumns: Columns that this FK refers to in another table.
-    :param table: If columns are passed as strings, this kw is required
-    :type table: Table instance
-    :type columns: list of strings or Column instances
-    :type refcolumns: list of strings or Column instances
-    """
-
-    __migrate_visit_name__ = 'migrate_foreign_key_constraint'
-
-    def __init__(self, columns, refcolumns, *args, **kwargs):
-        colnames, table = self._normalize_columns(columns)
-        table = kwargs.pop('table', table)
-        refcolnames, reftable = self._normalize_columns(refcolumns,
-                                                        table_name=True)
-        super(ForeignKeyConstraint, self).__init__(
-            colnames, refcolnames, *args,**kwargs
-        )
-        if table is not None:
-            self._set_parent(table)
-
-    @property
-    def referenced(self):
-        return [e.column for e in self.elements]
-
-    @property
-    def reftable(self):
-        return self.referenced[0].table
-
-    def autoname(self):
-        """Mimic the database's automatic constraint names"""
-        if hasattr(self.columns, 'keys'):
-            # SA <= 0.5
-            firstcol = self.columns[self.columns.keys()[0]]
-            ret = "%(table)s_%(firstcolumn)s_fkey" % dict(
-                table=firstcol.table.name,
-                firstcolumn=firstcol.name,)
-        else:
-            # SA >= 0.6
-            ret = "%(table)s_%(firstcolumn)s_fkey" % dict(
-                table=self.table.name,
-                firstcolumn=self.columns[0],)
-        return ret
-
-
-class CheckConstraint(ConstraintChangeset, schema.CheckConstraint):
-    """Construct CheckConstraint
-
-    Migrate's additional parameters:
-
-    :param sqltext: Plain SQL text to check condition
-    :param columns: If not name is applied, you must supply this kw \
-    to autoname constraint
-    :param table: If columns are passed as strings, this kw is required
-    :type table: Table instance
-    :type columns: list of Columns instances
-    :type sqltext: string
-    """
-
-    __migrate_visit_name__ = 'migrate_check_constraint'
-
-    def __init__(self, sqltext, *args, **kwargs):
-        cols = kwargs.pop('columns', [])
-        if not cols and not kwargs.get('name', False):
-            raise InvalidConstraintError('You must either set "name"'
-                'parameter or "columns" to autogenarate it.')
-        colnames, table = self._normalize_columns(cols)
-        table = kwargs.pop('table', table)
-        schema.CheckConstraint.__init__(self, sqltext, *args, **kwargs)
-        if table is not None:
-            self._set_parent(table)
-        self.colnames = colnames
-
-    def autoname(self):
-        return "%(table)s_%(cols)s_check" % \
-            dict(table=self.table.name, cols="_".join(self.colnames))
-
-
-class UniqueConstraint(ConstraintChangeset, schema.UniqueConstraint):
-    """Construct UniqueConstraint
-
-    Migrate's additional parameters:
-
-    :param cols: Columns in constraint.
-    :param table: If columns are passed as strings, this kw is required
-    :type table: Table instance
-    :type cols: strings or Column instances
-
-    .. versionadded:: 0.6.0
-    """
-
-    __migrate_visit_name__ = 'migrate_unique_constraint'
-
-    def __init__(self, *cols, **kwargs):
-        self.colnames, table = self._normalize_columns(cols)
-        table = kwargs.pop('table', table)
-        super(UniqueConstraint, self).__init__(*self.colnames, **kwargs)
-        if table is not None:
-            self._set_parent(table)
-
-    def autoname(self):
-        """Mimic the database's automatic constraint names"""
-        return "%s_%s_key" % (self.table.name, '_'.join(self.colnames))
--- a/kallithea/lib/dbmigrate/migrate/changeset/databases/__init__.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-"""
-   This module contains database dialect specific changeset
-   implementations.
-"""
-__all__ = [
-    'postgres',
-    'sqlite',
-    'mysql',
-    'oracle',
-]
--- a/kallithea/lib/dbmigrate/migrate/changeset/databases/firebird.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,93 +0,0 @@
-"""
-   Firebird database specific implementations of changeset classes.
-"""
-from sqlalchemy.databases import firebird as sa_base
-from sqlalchemy.schema import PrimaryKeyConstraint
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.changeset import ansisql
-
-
-FBSchemaGenerator = sa_base.FBDDLCompiler
-
-class FBColumnGenerator(FBSchemaGenerator, ansisql.ANSIColumnGenerator):
-    """Firebird column generator implementation."""
-
-
-class FBColumnDropper(ansisql.ANSIColumnDropper):
-    """Firebird column dropper implementation."""
-
-    def visit_column(self, column):
-        """Firebird supports 'DROP col' instead of 'DROP COLUMN col' syntax
-
-        Drop primary key and unique constraints if dropped column is referencing it."""
-        if column.primary_key:
-            if column.table.primary_key.columns.contains_column(column):
-                column.table.primary_key.drop()
-                # TODO: recreate primary key if it references more than this column
-
-        for index in column.table.indexes:
-            # "column in index.columns" causes problems as all
-            # column objects compare equal and return a SQL expression
-            if column.name in [col.name for col in index.columns]:
-                index.drop()
-                # TODO: recreate index if it references more than this column
-
-        for cons in column.table.constraints:
-            if isinstance(cons,PrimaryKeyConstraint):
-                # will be deleted only when the column its on
-                # is deleted!
-                continue
-
-            should_drop = column.name in cons.columns
-            if should_drop:
-                self.start_alter_table(column)
-                self.append("DROP CONSTRAINT ")
-                self.append(self.preparer.format_constraint(cons))
-                self.execute()
-            # TODO: recreate unique constraint if it references more than this column
-
-        self.start_alter_table(column)
-        self.append('DROP %s' % self.preparer.format_column(column))
-        self.execute()
-
-
-class FBSchemaChanger(ansisql.ANSISchemaChanger):
-    """Firebird schema changer implementation."""
-
-    def visit_table(self, table):
-        """Rename table not supported"""
-        raise exceptions.NotSupportedError(
-            "Firebird does not support renaming tables.")
-
-    def _visit_column_name(self, table, column, delta):
-        self.start_alter_table(table)
-        col_name = self.preparer.quote(delta.current_name, table.quote)
-        new_name = self.preparer.format_column(delta.result_column)
-        self.append('ALTER COLUMN %s TO %s' % (col_name, new_name))
-
-    def _visit_column_nullable(self, table, column, delta):
-        """Changing NULL is not supported"""
-        # TODO: http://www.firebirdfaq.org/faq103/
-        raise exceptions.NotSupportedError(
-            "Firebird does not support altering NULL behavior.")
-
-
-class FBConstraintGenerator(ansisql.ANSIConstraintGenerator):
-    """Firebird constraint generator implementation."""
-
-
-class FBConstraintDropper(ansisql.ANSIConstraintDropper):
-    """Firebird constraint dropper implementation."""
-
-    def cascade_constraint(self, constraint):
-        """Cascading constraints is not supported"""
-        raise exceptions.NotSupportedError(
-            "Firebird does not support cascading constraints")
-
-
-class FBDialect(ansisql.ANSIDialect):
-    columngenerator = FBColumnGenerator
-    columndropper = FBColumnDropper
-    schemachanger = FBSchemaChanger
-    constraintgenerator = FBConstraintGenerator
-    constraintdropper = FBConstraintDropper
--- a/kallithea/lib/dbmigrate/migrate/changeset/databases/mysql.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,65 +0,0 @@
-"""
-   MySQL database specific implementations of changeset classes.
-"""
-
-from sqlalchemy.databases import mysql as sa_base
-from sqlalchemy import types as sqltypes
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.changeset import ansisql
-
-
-MySQLSchemaGenerator = sa_base.MySQLDDLCompiler
-
-class MySQLColumnGenerator(MySQLSchemaGenerator, ansisql.ANSIColumnGenerator):
-    pass
-
-
-class MySQLColumnDropper(ansisql.ANSIColumnDropper):
-    pass
-
-
-class MySQLSchemaChanger(MySQLSchemaGenerator, ansisql.ANSISchemaChanger):
-
-    def visit_column(self, delta):
-        table = delta.table
-        colspec = self.get_column_specification(delta.result_column)
-        if delta.result_column.autoincrement:
-            primary_keys = [c for c in table.primary_key.columns
-                       if (c.autoincrement and
-                            isinstance(c.type, sqltypes.Integer) and
-                            not c.foreign_keys)]
-
-            if primary_keys:
-                first = primary_keys.pop(0)
-                if first.name == delta.current_name:
-                    colspec += " AUTO_INCREMENT"
-        old_col_name = self.preparer.quote(delta.current_name, table.quote)
-
-        self.start_alter_table(table)
-
-        self.append("CHANGE COLUMN %s " % old_col_name)
-        self.append(colspec)
-        self.execute()
-
-    def visit_index(self, param):
-        # If MySQL can do this, I can't find how
-        raise exceptions.NotSupportedError("MySQL cannot rename indexes")
-
-
-class MySQLConstraintGenerator(ansisql.ANSIConstraintGenerator):
-    pass
-
-
-class MySQLConstraintDropper(MySQLSchemaGenerator, ansisql.ANSIConstraintDropper):
-    def visit_migrate_check_constraint(self, *p, **k):
-        raise exceptions.NotSupportedError("MySQL does not support CHECK"
-            " constraints, use triggers instead.")
-
-
-class MySQLDialect(ansisql.ANSIDialect):
-    columngenerator = MySQLColumnGenerator
-    columndropper = MySQLColumnDropper
-    schemachanger = MySQLSchemaChanger
-    constraintgenerator = MySQLConstraintGenerator
-    constraintdropper = MySQLConstraintDropper
--- a/kallithea/lib/dbmigrate/migrate/changeset/databases/oracle.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,111 +0,0 @@
-"""
-   Oracle database specific implementations of changeset classes.
-"""
-import sqlalchemy as sa
-from sqlalchemy.databases import oracle as sa_base
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.changeset import ansisql, SQLA_06
-
-
-if not SQLA_06:
-    OracleSchemaGenerator = sa_base.OracleSchemaGenerator
-else:
-    OracleSchemaGenerator = sa_base.OracleDDLCompiler
-
-
-class OracleColumnGenerator(OracleSchemaGenerator, ansisql.ANSIColumnGenerator):
-    pass
-
-
-class OracleColumnDropper(ansisql.ANSIColumnDropper):
-    pass
-
-
-class OracleSchemaChanger(OracleSchemaGenerator, ansisql.ANSISchemaChanger):
-
-    def get_column_specification(self, column, **kwargs):
-        # Ignore the NOT NULL generated
-        override_nullable = kwargs.pop('override_nullable', None)
-        if override_nullable:
-            orig = column.nullable
-            column.nullable = True
-        ret = super(OracleSchemaChanger, self).get_column_specification(
-            column, **kwargs)
-        if override_nullable:
-            column.nullable = orig
-        return ret
-
-    def visit_column(self, delta):
-        keys = delta.keys()
-
-        if 'name' in keys:
-            self._run_subvisit(delta,
-                               self._visit_column_name,
-                               start_alter=False)
-
-        if len(set(('type', 'nullable', 'server_default')).intersection(keys)):
-            self._run_subvisit(delta,
-                               self._visit_column_change,
-                               start_alter=False)
-
-    def _visit_column_change(self, table, column, delta):
-        # Oracle cannot drop a default once created, but it can set it
-        # to null.  We'll do that if default=None
-        # http://forums.oracle.com/forums/message.jspa?messageID=1273234#1273234
-        dropdefault_hack = (column.server_default is None \
-                                and 'server_default' in delta.keys())
-        # Oracle apparently doesn't like it when we say "not null" if
-        # the column's already not null. Fudge it, so we don't need a
-        # new function
-        notnull_hack = ((not column.nullable) \
-                            and ('nullable' not in delta.keys()))
-        # We need to specify NULL if we're removing a NOT NULL
-        # constraint
-        null_hack = (column.nullable and ('nullable' in delta.keys()))
-
-        if dropdefault_hack:
-            column.server_default = sa.PassiveDefault(sa.sql.null())
-        if notnull_hack:
-            column.nullable = True
-        colspec = self.get_column_specification(column,
-            override_nullable=null_hack)
-        if null_hack:
-            colspec += ' NULL'
-        if notnull_hack:
-            column.nullable = False
-        if dropdefault_hack:
-            column.server_default = None
-
-        self.start_alter_table(table)
-        self.append("MODIFY (")
-        self.append(colspec)
-        self.append(")")
-
-
-class OracleConstraintCommon(object):
-
-    def get_constraint_name(self, cons):
-        # Oracle constraints can't guess their name like other DBs
-        if not cons.name:
-            raise exceptions.NotSupportedError(
-                "Oracle constraint names must be explicitly stated")
-        return cons.name
-
-
-class OracleConstraintGenerator(OracleConstraintCommon,
-                                ansisql.ANSIConstraintGenerator):
-    pass
-
-
-class OracleConstraintDropper(OracleConstraintCommon,
-                              ansisql.ANSIConstraintDropper):
-    pass
-
-
-class OracleDialect(ansisql.ANSIDialect):
-    columngenerator = OracleColumnGenerator
-    columndropper = OracleColumnDropper
-    schemachanger = OracleSchemaChanger
-    constraintgenerator = OracleConstraintGenerator
-    constraintdropper = OracleConstraintDropper
--- a/kallithea/lib/dbmigrate/migrate/changeset/databases/postgres.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,43 +0,0 @@
-"""
-   `PostgreSQL`_ database specific implementations of changeset classes.
-
-   .. _`PostgreSQL`: http://www.postgresql.org/
-"""
-from kallithea.lib.dbmigrate.migrate.changeset import ansisql
-
-
-from sqlalchemy.databases import postgresql as sa_base
-PGSchemaGenerator = sa_base.PGDDLCompiler
-
-
-class PGColumnGenerator(PGSchemaGenerator, ansisql.ANSIColumnGenerator):
-    """PostgreSQL column generator implementation."""
-    pass
-
-
-class PGColumnDropper(ansisql.ANSIColumnDropper):
-    """PostgreSQL column dropper implementation."""
-    pass
-
-
-class PGSchemaChanger(ansisql.ANSISchemaChanger):
-    """PostgreSQL schema changer implementation."""
-    pass
-
-
-class PGConstraintGenerator(ansisql.ANSIConstraintGenerator):
-    """PostgreSQL constraint generator implementation."""
-    pass
-
-
-class PGConstraintDropper(ansisql.ANSIConstraintDropper):
-    """PostgreSQL constraint dropper implementation."""
-    pass
-
-
-class PGDialect(ansisql.ANSIDialect):
-    columngenerator = PGColumnGenerator
-    columndropper = PGColumnDropper
-    schemachanger = PGSchemaChanger
-    constraintgenerator = PGConstraintGenerator
-    constraintdropper = PGConstraintDropper
--- a/kallithea/lib/dbmigrate/migrate/changeset/databases/sqlite.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,153 +0,0 @@
-"""
-   `SQLite`_ database specific implementations of changeset classes.
-
-   .. _`SQLite`: http://www.sqlite.org/
-"""
-from UserDict import DictMixin
-from copy import copy
-
-from sqlalchemy.databases import sqlite as sa_base
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.changeset import ansisql, SQLA_06
-
-SQLiteSchemaGenerator = sa_base.SQLiteDDLCompiler
-
-
-class SQLiteCommon(object):
-
-    def _not_supported(self, op):
-        raise exceptions.NotSupportedError("SQLite does not support "
-            "%s; see http://www.sqlite.org/lang_altertable.html" % op)
-
-
-class SQLiteHelper(SQLiteCommon):
-
-    def recreate_table(self,table,column=None,delta=None):
-        table_name = self.preparer.format_table(table)
-
-        # we remove all indexes so as not to have
-        # problems during copy and re-create
-        for index in table.indexes:
-            index.drop()
-
-        self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name)
-        self.execute()
-
-        insertion_string = self._modify_table(table, column, delta)
-
-        table.create(bind=self.connection)
-        self.append(insertion_string % {'table_name': table_name})
-        self.execute()
-        self.append('DROP TABLE migration_tmp')
-        self.execute()
-
-    def visit_column(self, delta):
-        if isinstance(delta, DictMixin):
-            column = delta.result_column
-            table = self._to_table(delta.table)
-        else:
-            column = delta
-            table = self._to_table(column.table)
-
-        self.recreate_table(table,column,delta)
-
-class SQLiteColumnGenerator(SQLiteSchemaGenerator,
-                            ansisql.ANSIColumnGenerator,
-                            # at the end so we get the normal
-                            # visit_column by default
-                            SQLiteHelper,
-                            SQLiteCommon
-                            ):
-    """SQLite ColumnGenerator"""
-
-    def _modify_table(self, table, column, delta):
-        columns = ' ,'.join(map(
-                self.preparer.format_column,
-                [c for c in table.columns if c.name!=column.name]))
-        return ('INSERT INTO %%(table_name)s (%(cols)s) '
-                'SELECT %(cols)s from migration_tmp')%{'cols':columns}
-
-    def visit_column(self,column):
-        if column.foreign_keys:
-            SQLiteHelper.visit_column(self,column)
-        else:
-            super(SQLiteColumnGenerator,self).visit_column(column)
-
-class SQLiteColumnDropper(SQLiteHelper, ansisql.ANSIColumnDropper):
-    """SQLite ColumnDropper"""
-
-    def _modify_table(self, table, column, delta):
-
-        columns = ' ,'.join(map(self.preparer.format_column, table.columns))
-        return 'INSERT INTO %(table_name)s SELECT ' + columns + \
-            ' from migration_tmp'
-
-    def visit_column(self,column):
-        # For SQLite, we *have* to remove the column here so the table
-        # is re-created properly.
-        column.remove_from_table(column.table,unset_table=False)
-        super(SQLiteColumnDropper,self).visit_column(column)
-
-
-class SQLiteSchemaChanger(SQLiteHelper, ansisql.ANSISchemaChanger):
-    """SQLite SchemaChanger"""
-
-    def _modify_table(self, table, column, delta):
-        return 'INSERT INTO %(table_name)s SELECT * from migration_tmp'
-
-    def visit_index(self, index):
-        """Does not support ALTER INDEX"""
-        self._not_supported('ALTER INDEX')
-
-
-class SQLiteConstraintGenerator(ansisql.ANSIConstraintGenerator, SQLiteHelper, SQLiteCommon):
-
-    def visit_migrate_primary_key_constraint(self, constraint):
-        tmpl = "CREATE UNIQUE INDEX %s ON %s ( %s )"
-        cols = ', '.join(map(self.preparer.format_column, constraint.columns))
-        tname = self.preparer.format_table(constraint.table)
-        name = self.get_constraint_name(constraint)
-        msg = tmpl % (name, tname, cols)
-        self.append(msg)
-        self.execute()
-
-    def _modify_table(self, table, column, delta):
-        return 'INSERT INTO %(table_name)s SELECT * from migration_tmp'
-
-    def visit_migrate_foreign_key_constraint(self, *p, **k):
-        self.recreate_table(p[0].table)
-
-    def visit_migrate_unique_constraint(self, *p, **k):
-        self.recreate_table(p[0].table)
-
-
-class SQLiteConstraintDropper(ansisql.ANSIColumnDropper,
-                              SQLiteCommon,
-                              ansisql.ANSIConstraintCommon):
-
-    def visit_migrate_primary_key_constraint(self, constraint):
-        tmpl = "DROP INDEX %s "
-        name = self.get_constraint_name(constraint)
-        msg = tmpl % (name)
-        self.append(msg)
-        self.execute()
-
-    def visit_migrate_foreign_key_constraint(self, *p, **k):
-        self._not_supported('ALTER TABLE DROP CONSTRAINT')
-
-    def visit_migrate_check_constraint(self, *p, **k):
-        self._not_supported('ALTER TABLE DROP CONSTRAINT')
-
-    def visit_migrate_unique_constraint(self, *p, **k):
-        self._not_supported('ALTER TABLE DROP CONSTRAINT')
-
-
-# TODO: technically primary key is a NOT NULL + UNIQUE constraint, should add NOT NULL to index
-
-class SQLiteDialect(ansisql.ANSIDialect):
-    columngenerator = SQLiteColumnGenerator
-    columndropper = SQLiteColumnDropper
-    schemachanger = SQLiteSchemaChanger
-    constraintgenerator = SQLiteConstraintGenerator
-    constraintdropper = SQLiteConstraintDropper
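The removed SQLiteHelper.recreate_table() worked around SQLite's limited ALTER TABLE support by renaming the table aside, recreating it, copying the rows back and dropping the temporary copy. Below is a standalone sketch of that same recipe using only the stdlib sqlite3 module; the table and column names are made up for illustration.

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.executescript("""
    CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, obsolete TEXT);
    INSERT INTO users (name, obsolete) VALUES ('alice', 'x');
    INSERT INTO users (name, obsolete) VALUES ('bob', 'y');

    -- the four steps recreate_table() issued, here used to drop a column:
    ALTER TABLE users RENAME TO migration_tmp;
    CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT);
    INSERT INTO users (id, name) SELECT id, name FROM migration_tmp;
    DROP TABLE migration_tmp;
""")
print(conn.execute('SELECT id, name FROM users').fetchall())   # rows without the dropped column
```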
--- a/kallithea/lib/dbmigrate/migrate/changeset/databases/visitor.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,78 +0,0 @@
-"""
-   Module for visitor class mapping.
-"""
-import sqlalchemy as sa
-
-from kallithea.lib.dbmigrate.migrate.changeset import ansisql
-from kallithea.lib.dbmigrate.migrate.changeset.databases import (sqlite,
-                                         postgres,
-                                         mysql,
-                                         oracle,
-                                         firebird)
-
-
-# Map SA dialects to the corresponding Migrate extensions
-DIALECTS = {
-    "default": ansisql.ANSIDialect,
-    "sqlite": sqlite.SQLiteDialect,
-    "postgres": postgres.PGDialect,
-    "postgresql": postgres.PGDialect,
-    "mysql": mysql.MySQLDialect,
-    "oracle": oracle.OracleDialect,
-    "firebird": firebird.FBDialect,
-}
-
-
-def get_engine_visitor(engine, name):
-    """
-    Get the visitor implementation for the given database engine.
-
-    :param engine: SQLAlchemy Engine
-    :param name: Name of the visitor
-    :type name: string
-    :type engine: Engine
-    :returns: visitor
-    """
-    # TODO: link to supported visitors
-    return get_dialect_visitor(engine.dialect, name)
-
-
-def get_dialect_visitor(sa_dialect, name):
-    """
-    Get the visitor implementation for the given dialect.
-
-    Finds the visitor implementation based on the dialect class and
-    returns and instance initialized with the given name.
-
-    Binds dialect specific preparer to visitor.
-    """
-
-    # map sa dialect to migrate dialect and return visitor
-    sa_dialect_name = getattr(sa_dialect, 'name', 'default')
-    migrate_dialect_cls = DIALECTS[sa_dialect_name]
-    visitor = getattr(migrate_dialect_cls, name)
-
-    # bind preparer
-    visitor.preparer = sa_dialect.preparer(sa_dialect)
-
-    return visitor
-
-def run_single_visitor(engine, visitorcallable, element,
-    connection=None, **kwargs):
-    """Taken from :meth:`sqlalchemy.engine.base.Engine._run_single_visitor`
-    with support for migrate visitors.
-    """
-    if connection is None:
-        conn = engine.contextual_connect(close_with_result=False)
-    else:
-        conn = connection
-    visitor = visitorcallable(engine.dialect, conn)
-    try:
-        if hasattr(element, '__migrate_visit_name__'):
-            fn = getattr(visitor, 'visit_' + element.__migrate_visit_name__)
-        else:
-            fn = getattr(visitor, 'visit_' + element.__visit_name__)
-        fn(element, **kwargs)
-    finally:
-        if connection is None:
-            conn.close()
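A small sketch of the dispatch performed above: map an Engine's dialect name through DIALECTS and fetch the requested visitor class from it. This again assumes the pre-removal vendored package and the old SQLAlchemy stack it targeted; the URL is a placeholder.

```python
import sqlalchemy as sa
from kallithea.lib.dbmigrate.migrate.changeset.databases.visitor import get_engine_visitor

engine = sa.create_engine('sqlite:///:memory:')
visitor = get_engine_visitor(engine, 'columngenerator')
print(visitor)   # -> SQLiteColumnGenerator, ready to be driven via run_single_visitor()
```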
--- a/kallithea/lib/dbmigrate/migrate/changeset/schema.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,655 +0,0 @@
-"""
-   Schema module providing common schema operations.
-"""
-import warnings
-
-from UserDict import DictMixin
-
-import sqlalchemy
-
-from sqlalchemy.schema import ForeignKeyConstraint
-from sqlalchemy.schema import UniqueConstraint
-
-from kallithea.lib.dbmigrate.migrate.exceptions import *
-from kallithea.lib.dbmigrate.migrate.changeset import SQLA_06, SQLA_07
-from kallithea.lib.dbmigrate.migrate.changeset.databases.visitor import (get_engine_visitor,
-                                                                         run_single_visitor)
-
-
-__all__ = [
-    'create_column',
-    'drop_column',
-    'alter_column',
-    'rename_table',
-    'rename_index',
-    'ChangesetTable',
-    'ChangesetColumn',
-    'ChangesetIndex',
-    'ChangesetDefaultClause',
-    'ColumnDelta',
-]
-
-def create_column(column, table=None, *p, **kw):
-    """Create a column, given the table.
-
-    API to :meth:`ChangesetColumn.create`.
-    """
-    if table is not None:
-        return table.create_column(column, *p, **kw)
-    return column.create(*p, **kw)
-
-
-def drop_column(column, table=None, *p, **kw):
-    """Drop a column, given the table.
-
-    API to :meth:`ChangesetColumn.drop`.
-    """
-    if table is not None:
-        return table.drop_column(column, *p, **kw)
-    return column.drop(*p, **kw)
-
-
-def rename_table(table, name, engine=None, **kw):
-    """Rename a table.
-
-    If Table instance is given, engine is not used.
-
-    API to :meth:`ChangesetTable.rename`.
-
-    :param table: Table to be renamed.
-    :param name: New name for Table.
-    :param engine: Engine instance.
-    :type table: string or Table instance
-    :type name: string
-    :type engine: obj
-    """
-    table = _to_table(table, engine)
-    table.rename(name, **kw)
-
-
-def rename_index(index, name, table=None, engine=None, **kw):
-    """Rename an index.
-
-    If Index instance is given,
-    table and engine are not used.
-
-    API to :meth:`ChangesetIndex.rename`.
-
-    :param index: Index to be renamed.
-    :param name: New name for index.
-    :param table: Table to which Index is referred.
-    :param engine: Engine instance.
-    :type index: string or Index instance
-    :type name: string
-    :type table: string or Table instance
-    :type engine: obj
-    """
-    index = _to_index(index, table, engine)
-    index.rename(name, **kw)
-
-
-def alter_column(*p, **k):
-    """Alter a column.
-
-    This is a helper function that creates a :class:`ColumnDelta` and
-    runs it.
-
-    :argument column:
-      The name of the column to be altered or a
-      :class:`ChangesetColumn` column representing it.
-
-    :param table:
-      A :class:`~sqlalchemy.schema.Table` or table name to
-      for the table where the column will be changed.
-
-    :param engine:
-      The :class:`~sqlalchemy.engine.base.Engine` to use for table
-      reflection and schema alterations.
-
-    :returns: A :class:`ColumnDelta` instance representing the change.
-
-
-    """
-
-    if 'table' not in k and isinstance(p[0], sqlalchemy.Column):
-        k['table'] = p[0].table
-    if 'engine' not in k:
-        k['engine'] = k['table'].bind
-
-    # deprecation
-    if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
-        warnings.warn(
-            "Passing a Column object to alter_column is deprecated."
-            " Just pass in keyword parameters instead.",
-            MigrateDeprecationWarning
-            )
-    engine = k['engine']
-
-    # enough tests seem to break when metadata is always altered
-    # that this crutch has to be left in until they can be sorted
-    # out
-    k['alter_metadata']=True
-
-    delta = ColumnDelta(*p, **k)
-
-    visitorcallable = get_engine_visitor(engine, 'schemachanger')
-    engine._run_visitor(visitorcallable, delta)
-
-    return delta
-
-
-def _to_table(table, engine=None):
-    """Return if instance of Table, else construct new with metadata"""
-    if isinstance(table, sqlalchemy.Table):
-        return table
-
-    # Given: table name, maybe an engine
-    meta = sqlalchemy.MetaData()
-    if engine is not None:
-        meta.bind = engine
-    return sqlalchemy.Table(table, meta)
-
-
-def _to_index(index, table=None, engine=None):
-    """Return if instance of Index, else construct new with metadata"""
-    if isinstance(index, sqlalchemy.Index):
-        return index
-
-    # Given: index name; table name required
-    table = _to_table(table, engine)
-    ret = sqlalchemy.Index(index)
-    ret.table = table
-    return ret
-
-
-class ColumnDelta(DictMixin, sqlalchemy.schema.SchemaItem):
-    """Extracts the differences between two columns/column-parameters
-
-        May receive parameters arranged in several different ways:
-
-        * **current_column, new_column, \*p, \*\*kw**
-            Additional parameters can be specified to override column
-            differences.
-
-        * **current_column, \*p, \*\*kw**
-            Additional parameters alter current_column. The table is taken
-            from the current_column object.
-            If current_name is specified, the column is renamed from
-            current_name to current_column.name.
-
-        * **current_col_name, \*p, \*\*kw**
-            The ``table`` keyword argument must be specified.
-
-        :param table: Table to which the current Column is bound. \
-        If a table name is given, reflection will be used.
-        :type table: string or Table instance
-
-        :param metadata: A :class:`MetaData` instance to store
-                         reflected table names
-
-        :param engine: When reflecting tables, either engine or metadata must \
-        be specified to acquire engine object.
-        :type engine: :class:`Engine` instance
-        :returns: A :class:`ColumnDelta` instance that exposes the altered attributes of \
-        `result_column` through a :func:`dict`-like interface.
-
-        * :class:`ColumnDelta`.result_column is altered column with new attributes
-
-        * :class:`ColumnDelta`.current_name is current name of column in db
-
-
-    """
-
-    # Column attributes that can be altered
-    diff_keys = ('name', 'type', 'primary_key', 'nullable',
-        'server_onupdate', 'server_default', 'autoincrement')
-    diffs = dict()
-    __visit_name__ = 'column'
-
-    def __init__(self, *p, **kw):
-        # 'alter_metadata' is not a public api. It exists purely
-        # as a crutch until the tests that fail when 'alter_metadata'
-        # behaviour always happens can be sorted out
-        self.alter_metadata = kw.pop("alter_metadata", False)
-
-        self.meta = kw.pop("metadata", None)
-        self.engine = kw.pop("engine", None)
-
-        # Things are initialized differently depending on how many column
-        # parameters are given. Figure out how many and call the appropriate
-        # method.
-        if len(p) >= 1 and isinstance(p[0], sqlalchemy.Column):
-            # At least one column specified
-            if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
-                # Two columns specified
-                diffs = self.compare_2_columns(*p, **kw)
-            else:
-                # Exactly one column specified
-                diffs = self.compare_1_column(*p, **kw)
-        else:
-            # Zero columns specified
-            if not len(p) or not isinstance(p[0], basestring):
-                raise ValueError("First argument must be column name")
-            diffs = self.compare_parameters(*p, **kw)
-
-        self.apply_diffs(diffs)
-
-    def __repr__(self):
-        return '<ColumnDelta altermetadata=%r, %s>' % (
-            self.alter_metadata,
-            super(ColumnDelta, self).__repr__()
-            )
-
-    def __getitem__(self, key):
-        if key not in self.keys():
-            raise KeyError("No such diff key, available: %s" % self.diffs )
-        return getattr(self.result_column, key)
-
-    def __setitem__(self, key, value):
-        if key not in self.keys():
-            raise KeyError("No such diff key, available: %s" % self.diffs )
-        setattr(self.result_column, key, value)
-
-    def __delitem__(self, key):
-        raise NotImplementedError
-
-    def keys(self):
-        return self.diffs.keys()
-
-    def compare_parameters(self, current_name, *p, **k):
-        """Compares Column objects with reflection"""
-        self.table = k.pop('table')
-        self.result_column = self._table.c.get(current_name)
-        if len(p):
-            k = self._extract_parameters(p, k, self.result_column)
-        return k
-
-    def compare_1_column(self, col, *p, **k):
-        """Compares one Column object"""
-        self.table = k.pop('table', None)
-        if self.table is None:
-            self.table = col.table
-        self.result_column = col
-        if len(p):
-            k = self._extract_parameters(p, k, self.result_column)
-        return k
-
-    def compare_2_columns(self, old_col, new_col, *p, **k):
-        """Compares two Column objects"""
-        self.process_column(new_col)
-        self.table = k.pop('table', None)
-        # we cannot use bool() on table in SA06
-        if self.table is None:
-            self.table = old_col.table
-        if self.table is None:
-            self.table = new_col.table
-        self.result_column = old_col
-
-        # set differences
-        # leave out some stuff for later comp
-        for key in (set(self.diff_keys) - set(('type',))):
-            val = getattr(new_col, key, None)
-            if getattr(self.result_column, key, None) != val:
-                k.setdefault(key, val)
-
-        # inspect types
-        if not self.are_column_types_eq(self.result_column.type, new_col.type):
-            k.setdefault('type', new_col.type)
-
-        if len(p):
-            k = self._extract_parameters(p, k, self.result_column)
-        return k
-
-    def apply_diffs(self, diffs):
-        """Populate dict and column object with new values"""
-        self.diffs = diffs
-        for key in self.diff_keys:
-            if key in diffs:
-                setattr(self.result_column, key, diffs[key])
-
-        self.process_column(self.result_column)
-
-        # create an instance of class type if not yet
-        if 'type' in diffs and callable(self.result_column.type):
-            self.result_column.type = self.result_column.type()
-
-        # add column to the table
-        if self.table is not None and self.alter_metadata:
-            self.result_column.add_to_table(self.table)
-
-    def are_column_types_eq(self, old_type, new_type):
-        """Compares two types to be equal"""
-        ret = old_type.__class__ == new_type.__class__
-
-        # String length is a special case
-        if ret and isinstance(new_type, sqlalchemy.types.String):
-            ret = (getattr(old_type, 'length', None) == \
-                       getattr(new_type, 'length', None))
-        return ret
-
-    def _extract_parameters(self, p, k, column):
-        """Extracts data from p and modifies diffs"""
-        p = list(p)
-        while len(p):
-            if isinstance(p[0], basestring):
-                k.setdefault('name', p.pop(0))
-            elif isinstance(p[0], sqlalchemy.types.AbstractType):
-                k.setdefault('type', p.pop(0))
-            elif callable(p[0]):
-                p[0] = p[0]()
-            else:
-                break
-
-        if len(p):
-            new_col = column.copy_fixed()
-            new_col._init_items(*p)
-            k = self.compare_2_columns(column, new_col, **k)
-        return k
-
-    def process_column(self, column):
-        """Processes default values for column"""
-        # XXX: this is a snippet from SA processing of positional parameters
-        toinit = list()
-
-        if column.server_default is not None:
-            if isinstance(column.server_default, sqlalchemy.FetchedValue):
-                toinit.append(column.server_default)
-            else:
-                toinit.append(sqlalchemy.DefaultClause(column.server_default))
-        if column.server_onupdate is not None:
-            if isinstance(column.server_onupdate, sqlalchemy.FetchedValue):
-                toinit.append(column.server_onupdate)
-            else:
-                toinit.append(sqlalchemy.DefaultClause(column.server_onupdate,
-                                            for_update=True))
-        if toinit:
-            column._init_items(*toinit)
-
-    def _get_table(self):
-        return getattr(self, '_table', None)
-
-    def _set_table(self, table):
-        if isinstance(table, basestring):
-            if self.alter_metadata:
-                if not self.meta:
-                    raise ValueError("metadata must be specified for table"
-                        " reflection when using alter_metadata")
-                meta = self.meta
-                if self.engine:
-                    meta.bind = self.engine
-            else:
-                if not self.engine and not self.meta:
-                    raise ValueError("engine or metadata must be specified"
-                        " to reflect tables")
-                if not self.engine:
-                    self.engine = self.meta.bind
-                meta = sqlalchemy.MetaData(bind=self.engine)
-            self._table = sqlalchemy.Table(table, meta, autoload=True)
-        elif isinstance(table, sqlalchemy.Table):
-            self._table = table
-            if not self.alter_metadata:
-                self._table.meta = sqlalchemy.MetaData(bind=self._table.bind)
-    def _get_result_column(self):
-        return getattr(self, '_result_column', None)
-
-    def _set_result_column(self, column):
-        """Set Column to Table based on alter_metadata evaluation."""
-        self.process_column(column)
-        if not hasattr(self, 'current_name'):
-            self.current_name = column.name
-        if self.alter_metadata:
-            self._result_column = column
-        else:
-            self._result_column = column.copy_fixed()
-
-    table = property(_get_table, _set_table)
-    result_column = property(_get_result_column, _set_result_column)
-
-
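A rough sketch of the three calling conventions described in the ``ColumnDelta`` docstring; the engine and the ``account`` table with its ``age`` column are assumed for illustration::

    import sqlalchemy
    from kallithea.lib.dbmigrate.migrate.changeset.schema import ColumnDelta

    engine = sqlalchemy.create_engine('sqlite:///example.db')    # assumed database
    meta = sqlalchemy.MetaData(bind=engine)
    account = sqlalchemy.Table('account', meta, autoload=True)   # assumed table

    # current_column, new_column: the delta is whatever differs between the two.
    delta = ColumnDelta(account.c.age,
                        sqlalchemy.Column('age', sqlalchemy.String(40)),
                        table=account)

    # current_column plus keyword overrides.
    delta = ColumnDelta(account.c.age, type=sqlalchemy.String(40))

    # current_col_name: the column is looked up on the table given as a keyword.
    delta = ColumnDelta('age', type=sqlalchemy.String(40), table=account)

    print delta.keys()           # the altered attributes, e.g. ['type']
    print delta.result_column    # the column carrying the new values
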
-class ChangesetTable(object):
-    """Changeset extensions to SQLAlchemy tables."""
-
-    def create_column(self, column, *p, **kw):
-        """Creates a column.
-
-        The column parameter may be a column definition or the name of
-        a column in this table.
-
-        API to :meth:`ChangesetColumn.create`
-
-        :param column: Column to be created
-        :type column: Column instance or string
-        """
-        if not isinstance(column, sqlalchemy.Column):
-            # It's a column name
-            column = getattr(self.c, str(column))
-        column.create(table=self, *p, **kw)
-
-    def drop_column(self, column, *p, **kw):
-        """Drop a column, given its name or definition.
-
-        API to :meth:`ChangesetColumn.drop`
-
-        :param column: Column to be dropped
-        :type column: Column instance or string
-        """
-        if not isinstance(column, sqlalchemy.Column):
-            # It's a column name
-            try:
-                column = getattr(self.c, str(column))
-            except AttributeError:
-                # That column isn't part of the table. We don't need
-                # its entire definition to drop the column, just its
-                # name, so create a dummy column with the same name.
-                column = sqlalchemy.Column(str(column), sqlalchemy.Integer())
-        column.drop(table=self, *p, **kw)
-
-    def rename(self, name, connection=None, **kwargs):
-        """Rename this table.
-
-        :param name: New name of the table.
-        :type name: string
-        :param connection: reuse connection instead of creating new one.
-        :type connection: :class:`sqlalchemy.engine.base.Connection` instance
-        """
-        engine = self.bind
-        self.new_name = name
-        visitorcallable = get_engine_visitor(engine, 'schemachanger')
-        run_single_visitor(engine, visitorcallable, self, connection, **kwargs)
-
-        # Fix metadata registration
-        self.name = name
-        self.deregister()
-        self._set_parent(self.metadata)
-
-    def _meta_key(self):
-        """Get the meta key for this table."""
-        return sqlalchemy.schema._get_table_key(self.name, self.schema)
-
-    def deregister(self):
-        """Remove this table from its metadata"""
-        if SQLA_07:
-            self.metadata._remove_table(self.name, self.schema)
-        else:
-            key = self._meta_key()
-            meta = self.metadata
-            if key in meta.tables:
-                del meta.tables[key]
-
-
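In normal use these methods are not called on ``ChangesetTable`` directly; the ``migrate.changeset`` package is expected to mix this class into ``sqlalchemy.Table``, so a plain reflected table gains them. A sketch, with an assumed engine and ``account`` table::

    import sqlalchemy
    from kallithea.lib.dbmigrate.migrate import changeset   # importing is assumed to install the mix-ins

    engine = sqlalchemy.create_engine('sqlite:///example.db')    # assumed database
    meta = sqlalchemy.MetaData(bind=engine)
    account = sqlalchemy.Table('account', meta, autoload=True)   # assumed table

    account.create_column(sqlalchemy.Column('email', sqlalchemy.String(120)))
    account.drop_column('email')
    account.rename('account_archive')
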
-class ChangesetColumn(object):
-    """Changeset extensions to SQLAlchemy columns."""
-
-    def alter(self, *p, **k):
-        """Makes a call to :func:`alter_column` for the column this
-        method is called on.
-        """
-        if 'table' not in k:
-            k['table'] = self.table
-        if 'engine' not in k:
-            k['engine'] = k['table'].bind
-        return alter_column(self, *p, **k)
-
-    def create(self, table=None, index_name=None, unique_name=None,
-               primary_key_name=None, populate_default=True, connection=None, **kwargs):
-        """Create this column in the database.
-
-        Assumes the given table exists. ``ALTER TABLE ADD COLUMN``,
-        for most databases.
-
-        :param table: Table instance to create on.
-        :param index_name: Creates :class:`ChangesetIndex` on this column.
-        :param unique_name: Creates \
-:class:`~migrate.changeset.constraint.UniqueConstraint` on this column.
-        :param primary_key_name: Creates \
-:class:`~migrate.changeset.constraint.PrimaryKeyConstraint` on this column.
-        :param populate_default: If True, the created column will be \
-populated with its default value
-        :param connection: reuse connection instead of creating new one.
-        :type table: Table instance
-        :type index_name: string
-        :type unique_name: string
-        :type primary_key_name: string
-        :type populate_default: bool
-        :type connection: :class:`sqlalchemy.engine.base.Connection` instance
-
-        :returns: self
-        """
-        self.populate_default = populate_default
-        self.index_name = index_name
-        self.unique_name = unique_name
-        self.primary_key_name = primary_key_name
-        for cons in ('index_name', 'unique_name', 'primary_key_name'):
-            self._check_sanity_constraints(cons)
-
-        self.add_to_table(table)
-        engine = self.table.bind
-        visitorcallable = get_engine_visitor(engine, 'columngenerator')
-        engine._run_visitor(visitorcallable, self, connection, **kwargs)
-
-        # TODO: reuse existing connection
-        if self.populate_default and self.default is not None:
-            stmt = table.update().values({self: engine._execute_default(self.default)})
-            engine.execute(stmt)
-
-        return self
-
-    def drop(self, table=None, connection=None, **kwargs):
-        """Drop this column from the database, leaving its table intact.
-
-        ``ALTER TABLE DROP COLUMN``, for most databases.
-
-        :param connection: reuse connection instead of creating new one.
-        :type connection: :class:`sqlalchemy.engine.base.Connection` instance
-        """
-        if table is not None:
-            self.table = table
-        engine = self.table.bind
-        visitorcallable = get_engine_visitor(engine, 'columndropper')
-        engine._run_visitor(visitorcallable, self, connection, **kwargs)
-        self.remove_from_table(self.table, unset_table=False)
-        self.table = None
-        return self
-
-    def add_to_table(self, table):
-        if table is not None and self.table is None:
-            if SQLA_07:
-                table.append_column(self)
-            else:
-                self._set_parent(table)
-
-    def _col_name_in_constraint(self,cons,name):
-        return False
-
-    def remove_from_table(self, table, unset_table=True):
-        # TODO: remove primary keys, constraints, etc
-        if unset_table:
-            self.table = None
-
-        to_drop = set()
-        for index in table.indexes:
-            columns = []
-            for col in index.columns:
-                if col.name!=self.name:
-                    columns.append(col)
-            if columns:
-                index.columns=columns
-            else:
-                to_drop.add(index)
-        table.indexes = table.indexes - to_drop
-
-        to_drop = set()
-        for cons in table.constraints:
-            # TODO: deal with other types of constraint
-            if isinstance(cons,(ForeignKeyConstraint,
-                                UniqueConstraint)):
-                for col_name in cons.columns:
-                    if not isinstance(col_name,basestring):
-                        col_name = col_name.name
-                    if self.name==col_name:
-                        to_drop.add(cons)
-        table.constraints = table.constraints - to_drop
-
-        if table.c.contains_column(self):
-            if SQLA_07:
-                table._columns.remove(self)
-            else:
-                table.c.remove(self)
-
-    # TODO: this is fixed in 0.6
-    def copy_fixed(self, **kw):
-        """Create a copy of this ``Column``, with all attributes."""
-        return sqlalchemy.Column(self.name, self.type, self.default,
-            key=self.key,
-            primary_key=self.primary_key,
-            nullable=self.nullable,
-            quote=self.quote,
-            index=self.index,
-            unique=self.unique,
-            onupdate=self.onupdate,
-            autoincrement=self.autoincrement,
-            server_default=self.server_default,
-            server_onupdate=self.server_onupdate,
-            *[c.copy(**kw) for c in self.constraints])
-
-    def _check_sanity_constraints(self, name):
-        """Check if constraints names are correct"""
-        obj = getattr(self, name)
-        if (getattr(self, name[:-5]) and not obj):
-            raise InvalidConstraintError("Column.create() accepts index_name,"
-            " primary_key_name and unique_name to generate constraints")
-        if not isinstance(obj, basestring) and obj is not None:
-            raise InvalidConstraintError(
-            "%s argument for column must be constraint name" % name)
-
-
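The column-level counterpart, again assuming the mix-ins are installed and an ``account`` table exists; the column names are illustrative::

    import sqlalchemy
    from kallithea.lib.dbmigrate.migrate import changeset   # importing is assumed to install the mix-ins

    engine = sqlalchemy.create_engine('sqlite:///example.db')    # assumed database
    meta = sqlalchemy.MetaData(bind=engine)
    account = sqlalchemy.Table('account', meta, autoload=True)   # assumed table

    col = sqlalchemy.Column('nickname', sqlalchemy.String(50))
    col.create(account)                       # ALTER TABLE account ADD COLUMN nickname
    account.c.nickname.alter(name='alias')    # delegates to alter_column() above
    account.c.alias.drop()                    # ALTER TABLE account DROP COLUMN alias
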
-class ChangesetIndex(object):
-    """Changeset extensions to SQLAlchemy Indexes."""
-
-    __visit_name__ = 'index'
-
-    def rename(self, name, connection=None, **kwargs):
-        """Change the name of an index.
-
-        :param name: New name of the Index.
-        :type name: string
-        :param connection: reuse connection instead of creating new one.
-        :type connection: :class:`sqlalchemy.engine.base.Connection` instance
-        """
-        engine = self.table.bind
-        self.new_name = name
-        visitorcallable = get_engine_visitor(engine, 'schemachanger')
-        engine._run_visitor(visitorcallable, self, connection, **kwargs)
-        self.name = name
-
-
-class ChangesetDefaultClause(object):
-    """Implements comparison between :class:`DefaultClause` instances"""
-
-    def __eq__(self, other):
-        if isinstance(other, self.__class__):
-            return self.arg == other.arg
-        return False
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
--- a/kallithea/lib/dbmigrate/migrate/exceptions.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,84 +0,0 @@
-"""
-   Provide exception classes for :mod:`migrate`
-"""
-
-
-class Error(Exception):
-    """Error base class."""
-
-
-class ApiError(Error):
-    """Base class for API errors."""
-
-
-class KnownError(ApiError):
-    """A known error condition."""
-
-
-class UsageError(ApiError):
-    """A known error condition where help should be displayed."""
-
-
-class ControlledSchemaError(Error):
-    """Base class for controlled schema errors."""
-
-
-class InvalidVersionError(ControlledSchemaError):
-    """Invalid version number."""
-
-
-class DatabaseNotControlledError(ControlledSchemaError):
-    """Database should be under version control, but it's not."""
-
-
-class DatabaseAlreadyControlledError(ControlledSchemaError):
-    """Database shouldn't be under version control, but it is"""
-
-
-class WrongRepositoryError(ControlledSchemaError):
-    """This database is under version control by another repository."""
-
-
-class NoSuchTableError(ControlledSchemaError):
-    """The table does not exist."""
-
-
-class PathError(Error):
-    """Base class for path errors."""
-
-
-class PathNotFoundError(PathError):
-    """A path with no file was required; found a file."""
-
-
-class PathFoundError(PathError):
-    """A path with a file was required; found no file."""
-
-
-class RepositoryError(Error):
-    """Base class for repository errors."""
-
-
-class InvalidRepositoryError(RepositoryError):
-    """Invalid repository error."""
-
-
-class ScriptError(Error):
-    """Base class for script errors."""
-
-
-class InvalidScriptError(ScriptError):
-    """Invalid script error."""
-
-
-# migrate.changeset
-
-class NotSupportedError(Error):
-    """Not supported error"""
-
-
-class InvalidConstraintError(Error):
-    """Invalid constraint error"""
-
-class MigrateDeprecationWarning(DeprecationWarning):
-    """Warning for deprecated features in Migrate"""
--- a/kallithea/lib/dbmigrate/migrate/versioning/__init__.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-"""
-   This package provides functionality to create and manage
-   repositories of database schema changesets and to apply these
-   changesets to databases.
-"""
--- a/kallithea/lib/dbmigrate/migrate/versioning/api.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,384 +0,0 @@
-"""
-   This module provides an external API to the versioning system.
-
-   .. versionchanged:: 0.6.0
-    :func:`migrate.versioning.api.test` and schema diff functions
-    changed order of positional arguments so all accept `url` and `repository`
-    as first arguments.
-
-   .. versionchanged:: 0.5.4
-    ``--preview_sql`` displays source file when using SQL scripts.
-    If Python script is used, it runs the action with mocked engine and
-    returns captured SQL statements.
-
-   .. versionchanged:: 0.5.4
-    Deprecated ``--echo`` parameter in favour of new
-    :func:`migrate.versioning.util.construct_engine` behavior.
-"""
-
-# Dear migrate developers,
-#
-# please do not comment this module using sphinx syntax because its
-# docstrings are presented as user help and most users cannot
-# interpret sphinx annotated ReStructuredText.
-#
-# Thanks,
-# Jan Dittberner
-
-import sys
-import inspect
-import logging
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.versioning import repository, schema, version, \
-    script as script_ # command name conflict
-from kallithea.lib.dbmigrate.migrate.versioning.util import catch_known_errors, with_engine
-
-
-log = logging.getLogger(__name__)
-command_desc = {
-    'help': 'displays help on a given command',
-    'create': 'create an empty repository at the specified path',
-    'script': 'create an empty change Python script',
-    'script_sql': 'create empty change SQL scripts for given database',
-    'version': 'display the latest version available in a repository',
-    'db_version': 'show the current version of the repository under version control',
-    'source': 'display the Python code for a particular version in this repository',
-    'version_control': 'mark a database as under this repository\'s version control',
-    'upgrade': 'upgrade a database to a later version',
-    'downgrade': 'downgrade a database to an earlier version',
-    'drop_version_control': 'removes version control from a database',
-    'manage': 'creates a Python script that runs Migrate with a set of default values',
-    'test': 'performs the upgrade and downgrade command on the given database',
-    'compare_model_to_db': 'compare MetaData against the current database state',
-    'create_model': 'dump the current database as a Python model to stdout',
-    'make_update_script_for_model': 'create a script changing the old MetaData to the new (current) MetaData',
-    'update_db_from_model': 'modify the database to match the structure of the current MetaData',
-}
-__all__ = command_desc.keys()
-
-Repository = repository.Repository
-ControlledSchema = schema.ControlledSchema
-VerNum = version.VerNum
-PythonScript = script_.PythonScript
-SqlScript = script_.SqlScript
-
-
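Besides the command-line usage documented in the docstrings below, the same functions can be called programmatically. A sketch, where the repository path, project name and database URL are illustrative assumptions::

    from kallithea.lib.dbmigrate.migrate.versioning import api

    url = 'sqlite:///example.db'            # assumed database
    repo = '/tmp/example_repository'        # assumed repository location

    api.create(repo, 'example')             # new, empty change-script repository
    api.script('Add account table', repo)   # versions/001_Add_account_table.py
    api.version_control(url, repo)          # install migrate_version table at version 0
    api.upgrade(url, repo)                  # run upgrade() of every pending script
    print api.db_version(url, repo)         # version now recorded in the database
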
-# deprecated
-def help(cmd=None, **opts):
-    """%prog help COMMAND
-
-    Displays help on a given command.
-    """
-    if cmd is None:
-        raise exceptions.UsageError(None)
-    try:
-        func = globals()[cmd]
-    except KeyError:
-        raise exceptions.UsageError(
-            "'%s' isn't a valid command. Try 'help COMMAND'" % cmd)
-    ret = func.__doc__
-    if sys.argv[0]:
-        ret = ret.replace('%prog', sys.argv[0])
-    return ret
-
-@catch_known_errors
-def create(repository, name, **opts):
-    """%prog create REPOSITORY_PATH NAME [--table=TABLE]
-
-    Create an empty repository at the specified path.
-
-    You can specify the version_table to be used; by default, it is
-    'migrate_version'.  This table is created in all version-controlled
-    databases.
-    """
-    repo_path = Repository.create(repository, name, **opts)
-
-
-@catch_known_errors
-def script(description, repository, **opts):
-    """%prog script DESCRIPTION REPOSITORY_PATH
-
-    Create an empty change script using the next unused version number
-    appended with the given description.
-
-    For instance, manage.py script "Add initial tables" creates:
-    repository/versions/001_Add_initial_tables.py
-    """
-    repo = Repository(repository)
-    repo.create_script(description, **opts)
-
-
-@catch_known_errors
-def script_sql(database, description, repository, **opts):
-    """%prog script_sql DATABASE DESCRIPTION REPOSITORY_PATH
-
-    Create empty change SQL scripts for given DATABASE, where DATABASE
-    is either specific ('postgresql', 'mysql', 'oracle', 'sqlite', etc.)
-    or generic ('default').
-
-    For instance, manage.py script_sql postgresql description creates:
-    repository/versions/001_description_postgresql_upgrade.sql and
-    repository/versions/001_description_postgresql_downgrade.sql
-    """
-    repo = Repository(repository)
-    repo.create_script_sql(database, description, **opts)
-
-
-def version(repository, **opts):
-    """%prog version REPOSITORY_PATH
-
-    Display the latest version available in a repository.
-    """
-    repo = Repository(repository)
-    return repo.latest
-
-
-@with_engine
-def db_version(url, repository, **opts):
-    """%prog db_version URL REPOSITORY_PATH
-
-    Show the current version of the repository with the given
-    connection string, under version control of the specified
-    repository.
-
-    The url should be any valid SQLAlchemy connection string.
-    """
-    engine = opts.pop('engine')
-    schema = ControlledSchema(engine, repository)
-    return schema.version
-
-
-def source(version, dest=None, repository=None, **opts):
-    """%prog source VERSION [DESTINATION] --repository=REPOSITORY_PATH
-
-    Display the Python code for a particular version in this
-    repository.  Save it to the file at DESTINATION or, if omitted,
-    send to stdout.
-    """
-    if repository is None:
-        raise exceptions.UsageError("A repository must be specified")
-    repo = Repository(repository)
-    ret = repo.version(version).script().source()
-    if dest is not None:
-        dest = open(dest, 'w')
-        dest.write(ret)
-        dest.close()
-        ret = None
-    return ret
-
-
-def upgrade(url, repository, version=None, **opts):
-    """%prog upgrade URL REPOSITORY_PATH [VERSION] [--preview_py|--preview_sql]
-
-    Upgrade a database to a later version.
-
-    This runs the upgrade() function defined in your change scripts.
-
-    By default, the database is updated to the latest available
-    version. You may specify a version instead, if you wish.
-
-    You may preview the Python or SQL code to be executed, rather than
-    actually executing it, using the appropriate 'preview' option.
-    """
-    err = "Cannot upgrade a database of version %s to version %s. " \
-        "Try 'downgrade' instead."
-    return _migrate(url, repository, version, upgrade=True, err=err, **opts)
-
-
-def downgrade(url, repository, version, **opts):
-    """%prog downgrade URL REPOSITORY_PATH VERSION [--preview_py|--preview_sql]
-
-    Downgrade a database to an earlier version.
-
-    This is the reverse of upgrade; this runs the downgrade() function
-    defined in your change scripts.
-
-    You may preview the Python or SQL code to be executed, rather than
-    actually executing it, using the appropriate 'preview' option.
-    """
-    err = "Cannot downgrade a database of version %s to version %s. " \
-        "Try 'upgrade' instead."
-    return _migrate(url, repository, version, upgrade=False, err=err, **opts)
-
-@with_engine
-def test(url, repository, **opts):
-    """%prog test URL REPOSITORY_PATH [VERSION]
-
-    Performs the upgrade and downgrade commands on the given
-    database. This is not a real test and may leave the database in a
-    bad state. You should therefore run it against a copy of
-    your database.
-    """
-    engine = opts.pop('engine')
-    repos = Repository(repository)
-
-    # Upgrade
-    log.info("Upgrading...")
-    script = repos.version(None).script(engine.name, 'upgrade')
-    script.run(engine, 1)
-    log.info("done")
-
-    log.info("Downgrading...")
-    script = repos.version(None).script(engine.name, 'downgrade')
-    script.run(engine, -1)
-    log.info("done")
-    log.info("Success")
-
-
-@with_engine
-def version_control(url, repository, version=None, **opts):
-    """%prog version_control URL REPOSITORY_PATH [VERSION]
-
-    Mark a database as under this repository's version control.
-
-    Once a database is under version control, schema changes should
-    only be done via change scripts in this repository.
-
-    This creates the table version_table in the database.
-
-    The url should be any valid SQLAlchemy connection string.
-
-    By default, the database begins at version 0 and is assumed to be
-    empty.  If the database is not empty, you may specify a version at
-    which to begin instead. No attempt is made to verify this
-    version's correctness - the database schema is expected to be
-    identical to what it would be if the database were created from
-    scratch.
-    """
-    engine = opts.pop('engine')
-    ControlledSchema.create(engine, repository, version)
-
-
-@with_engine
-def drop_version_control(url, repository, **opts):
-    """%prog drop_version_control URL REPOSITORY_PATH
-
-    Removes version control from a database.
-    """
-    engine = opts.pop('engine')
-    schema = ControlledSchema(engine, repository)
-    schema.drop()
-
-
-def manage(file, **opts):
-    """%prog manage FILENAME [VARIABLES...]
-
-    Creates a script that runs Migrate with a set of default values.
-
-    For example::
-
-        %prog manage manage.py --repository=/path/to/repository \
---url=sqlite:///project.db
-
-    would create the script manage.py. The following two commands
-    would then have exactly the same results::
-
-        python manage.py version
-        %prog version --repository=/path/to/repository
-    """
-    Repository.create_manage_file(file, **opts)
-
-
-@with_engine
-def compare_model_to_db(url, repository, model, **opts):
-    """%prog compare_model_to_db URL REPOSITORY_PATH MODEL
-
-    Compare the current model (assumed to be a module level variable
-    of type sqlalchemy.MetaData) against the current database.
-
-    NOTE: This is EXPERIMENTAL.
-    """  # TODO: get rid of EXPERIMENTAL label
-    engine = opts.pop('engine')
-    return ControlledSchema.compare_model_to_db(engine, model, repository)
-
-
-@with_engine
-def create_model(url, repository, **opts):
-    """%prog create_model URL REPOSITORY_PATH [DECLERATIVE=True]
-
-    Dump the current database as a Python model to stdout.
-
-    NOTE: This is EXPERIMENTAL.
-    """  # TODO: get rid of EXPERIMENTAL label
-    engine = opts.pop('engine')
-    declarative = opts.get('declarative', False)
-    return ControlledSchema.create_model(engine, repository, declarative)
-
-
-@catch_known_errors
-@with_engine
-def make_update_script_for_model(url, repository, oldmodel, model, **opts):
-    """%prog make_update_script_for_model URL OLDMODEL MODEL REPOSITORY_PATH
-
-    Create a script changing the old Python model to the new (current)
-    Python model, sending to stdout.
-
-    NOTE: This is EXPERIMENTAL.
-    """  # TODO: get rid of EXPERIMENTAL label
-    engine = opts.pop('engine')
-    return PythonScript.make_update_script_for_model(
-        engine, oldmodel, model, repository, **opts)
-
-
-@with_engine
-def update_db_from_model(url, repository, model, **opts):
-    """%prog update_db_from_model URL REPOSITORY_PATH MODEL
-
-    Modify the database to match the structure of the current Python
-    model. This also sets the db_version number to the latest in the
-    repository.
-
-    NOTE: This is EXPERIMENTAL.
-    """  # TODO: get rid of EXPERIMENTAL label
-    engine = opts.pop('engine')
-    schema = ControlledSchema(engine, repository)
-    schema.update_db_from_model(model)
-
-@with_engine
-def _migrate(url, repository, version, upgrade, err, **opts):
-    engine = opts.pop('engine')
-    url = str(engine.url)
-    schema = ControlledSchema(engine, repository)
-    version = _migrate_version(schema, version, upgrade, err)
-
-    changeset = schema.changeset(version)
-    for ver, change in changeset:
-        nextver = ver + changeset.step
-        log.info('%s -> %s... ', ver, nextver)
-
-        if opts.get('preview_sql'):
-            if isinstance(change, PythonScript):
-                log.info(change.preview_sql(url, changeset.step, **opts))
-            elif isinstance(change, SqlScript):
-                log.info(change.source())
-
-        elif opts.get('preview_py'):
-            if not isinstance(change, PythonScript):
-                raise exceptions.UsageError("Python source can be only displayed"
-                    " for python migration files")
-            source_ver = max(ver, nextver)
-            module = schema.repository.version(source_ver).script().module
-            funcname = upgrade and "upgrade" or "downgrade"
-            func = getattr(module, funcname)
-            log.info(inspect.getsource(func))
-        else:
-            schema.runchange(ver, change, changeset.step)
-            log.info('done')
-
-
-def _migrate_version(schema, version, upgrade, err):
-    if version is None:
-        return version
-    # Version is specified: ensure we're upgrading in the right direction
-    # (current version < target version for upgrading; reverse for down)
-    version = VerNum(version)
-    cur = schema.version
-    if upgrade is not None:
-        if upgrade:
-            direction = cur <= version
-        else:
-            direction = cur >= version
-        if not direction:
-            raise exceptions.KnownError(err % (cur, version))
-    return version
--- a/kallithea/lib/dbmigrate/migrate/versioning/cfgparse.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,27 +0,0 @@
-"""
-   Configuration parser module.
-"""
-
-from ConfigParser import ConfigParser
-
-from kallithea.lib.dbmigrate.migrate.versioning.config import *
-from kallithea.lib.dbmigrate.migrate.versioning import pathed
-
-
-class Parser(ConfigParser):
-    """A project configuration file."""
-
-    def to_dict(self, sections=None):
-        """It's easier to access config values like dictionaries"""
-        return self._sections
-
-
-class Config(pathed.Pathed, Parser):
-    """Configuration class."""
-
-    def __init__(self, path, *p, **k):
-        """Confirm the config file exists; read it."""
-        self.require_found(path)
-        pathed.Pathed.__init__(self, path)
-        Parser.__init__(self, *p, **k)
-        self.read(path)
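A brief sketch of reading a repository's ``migrate.cfg`` through this class; the path is an assumption::

    from kallithea.lib.dbmigrate.migrate.versioning.cfgparse import Config

    cfg = Config('/tmp/example_repository/migrate.cfg')   # assumed path
    print cfg.get('db_settings', 'version_table')         # e.g. 'migrate_version'
    print cfg.to_dict().keys()                             # parsed section names
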
--- a/kallithea/lib/dbmigrate/migrate/versioning/config.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,14 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
-from sqlalchemy.util import OrderedDict
-
-
-__all__ = ['databases', 'operations']
-
-databases = ('sqlite', 'postgres', 'mysql', 'oracle', 'mssql', 'firebird')
-
-# Map operation names to function names
-operations = OrderedDict()
-operations['upgrade'] = 'upgrade'
-operations['downgrade'] = 'downgrade'
--- a/kallithea/lib/dbmigrate/migrate/versioning/genmodel.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,284 +0,0 @@
-"""
-Code to generate a Python model from a database or differences
-between a model and database.
-
-Some of this is borrowed heavily from the AutoCode project at:
-http://code.google.com/p/sqlautocode/
-"""
-
-import sys
-import logging
-
-import sqlalchemy
-
-from kallithea.lib.dbmigrate import migrate
-from kallithea.lib.dbmigrate.migrate import changeset
-
-
-log = logging.getLogger(__name__)
-HEADER = """
-## File autogenerated by genmodel.py
-
-from sqlalchemy import *
-meta = MetaData()
-"""
-
-DECLARATIVE_HEADER = """
-## File autogenerated by genmodel.py
-
-from sqlalchemy import *
-from sqlalchemy.ext import declarative
-
-Base = declarative.declarative_base()
-"""
-
-
-class ModelGenerator(object):
-    """Various transformations from an A, B diff.
-
-    In the implementation, A tends to be called the model and B
-    the database (although this is not true of all diffs).
-    The diff is directionless, but transformations apply the diff
-    in a particular direction, described in the method name.
-    """
-
-    def __init__(self, diff, engine, declarative=False):
-        self.diff = diff
-        self.engine = engine
-        self.declarative = declarative
-
-    def column_repr(self, col):
-        kwarg = []
-        if col.key != col.name:
-            kwarg.append('key')
-        if col.primary_key:
-            col.primary_key = True  # otherwise it dumps it as 1
-            kwarg.append('primary_key')
-        if not col.nullable:
-            kwarg.append('nullable')
-        if col.onupdate:
-            kwarg.append('onupdate')
-        if col.default:
-            if col.primary_key:
-                # I found that PostgreSQL automatically creates a
-                # default value for the sequence, but let's not show
-                # that.
-                pass
-            else:
-                kwarg.append('default')
-        args = ['%s=%r' % (k, getattr(col, k)) for k in kwarg]
-
-        # crs: not sure if this is good idea, but it gets rid of extra
-        # u''
-        name = col.name.encode('utf8')
-
-        type_ = col.type
-        for cls in col.type.__class__.__mro__:
-            if cls.__module__ == 'sqlalchemy.types' and \
-                not cls.__name__.isupper():
-                if cls is not type_.__class__:
-                    type_ = cls()
-                break
-
-        type_repr = repr(type_)
-        if type_repr.endswith('()'):
-            type_repr = type_repr[:-2]
-
-        constraints = [repr(cn) for cn in col.constraints]
-
-        data = {
-            'name': name,
-            'commonStuff': ', '.join([type_repr] + constraints + args),
-        }
-
-        if self.declarative:
-            return """%(name)s = Column(%(commonStuff)s)""" % data
-        else:
-            return """Column(%(name)r, %(commonStuff)s)""" % data
-
-    def _getTableDefn(self, table, metaName='meta'):
-        out = []
-        tableName = table.name
-        if self.declarative:
-            out.append("class %(table)s(Base):" % {'table': tableName})
-            out.append("    __tablename__ = '%(table)s'\n" %
-                            {'table': tableName})
-            for col in table.columns:
-                out.append("    %s" % self.column_repr(col))
-            out.append('\n')
-        else:
-            out.append("%(table)s = Table('%(table)s', %(meta)s," %
-                       {'table': tableName, 'meta': metaName})
-            for col in table.columns:
-                out.append("    %s," % self.column_repr(col))
-            out.append(")\n")
-        return out
-
-    def _get_tables(self,missingA=False,missingB=False,modified=False):
-        to_process = []
-        for bool_,names,metadata in (
-            (missingA,self.diff.tables_missing_from_A,self.diff.metadataB),
-            (missingB,self.diff.tables_missing_from_B,self.diff.metadataA),
-            (modified,self.diff.tables_different,self.diff.metadataA),
-                ):
-            if bool_:
-                for name in names:
-                    yield metadata.tables.get(name)
-
-    def genBDefinition(self):
-        """Generates the source code for a definition of B.
-
-        Assumes a diff where A is empty.
-
-        Was: toPython. Assume database (B) is current and model (A) is empty.
-        """
-
-        out = []
-        if self.declarative:
-            out.append(DECLARATIVE_HEADER)
-        else:
-            out.append(HEADER)
-        out.append("")
-        for table in self._get_tables(missingA=True):
-            out.extend(self._getTableDefn(table))
-        return '\n'.join(out)
-
-    def genB2AMigration(self, indent='    '):
-        """Generate a migration from B to A.
-
-        Was: toUpgradeDowngradePython
-        Assume model (A) is most current and database (B) is out-of-date.
-        """
-
-        decls = ['from migrate.changeset import schema',
-                 'pre_meta = MetaData()',
-                 'post_meta = MetaData()',
-                ]
-        upgradeCommands = ['pre_meta.bind = migrate_engine',
-                           'post_meta.bind = migrate_engine']
-        downgradeCommands = list(upgradeCommands)
-
-        for tn in self.diff.tables_missing_from_A:
-            pre_table = self.diff.metadataB.tables[tn]
-            decls.extend(self._getTableDefn(pre_table, metaName='pre_meta'))
-            upgradeCommands.append(
-                "pre_meta.tables[%(table)r].drop()" % {'table': tn})
-            downgradeCommands.append(
-                "pre_meta.tables[%(table)r].create()" % {'table': tn})
-
-        for tn in self.diff.tables_missing_from_B:
-            post_table = self.diff.metadataA.tables[tn]
-            decls.extend(self._getTableDefn(post_table, metaName='post_meta'))
-            upgradeCommands.append(
-                "post_meta.tables[%(table)r].create()" % {'table': tn})
-            downgradeCommands.append(
-                "post_meta.tables[%(table)r].drop()" % {'table': tn})
-
-        for (tn, td) in self.diff.tables_different.iteritems():
-            if td.columns_missing_from_A or td.columns_different:
-                pre_table = self.diff.metadataB.tables[tn]
-                decls.extend(self._getTableDefn(
-                    pre_table, metaName='pre_meta'))
-            if td.columns_missing_from_B or td.columns_different:
-                post_table = self.diff.metadataA.tables[tn]
-                decls.extend(self._getTableDefn(
-                    post_table, metaName='post_meta'))
-
-            for col in td.columns_missing_from_A:
-                upgradeCommands.append(
-                    'pre_meta.tables[%r].columns[%r].drop()' % (tn, col))
-                downgradeCommands.append(
-                    'pre_meta.tables[%r].columns[%r].create()' % (tn, col))
-            for col in td.columns_missing_from_B:
-                upgradeCommands.append(
-                    'post_meta.tables[%r].columns[%r].create()' % (tn, col))
-                downgradeCommands.append(
-                    'post_meta.tables[%r].columns[%r].drop()' % (tn, col))
-            for modelCol, databaseCol, modelDecl, databaseDecl in td.columns_different:
-                upgradeCommands.append(
-                    'assert False, "Can\'t alter columns: %s:%s=>%s"' % (
-                    tn, modelCol.name, databaseCol.name))
-                downgradeCommands.append(
-                    'assert False, "Can\'t alter columns: %s:%s=>%s"' % (
-                    tn, modelCol.name, databaseCol.name))
-
-        return (
-            '\n'.join(decls),
-            '\n'.join('%s%s' % (indent, line) for line in upgradeCommands),
-            '\n'.join('%s%s' % (indent, line) for line in downgradeCommands))
-
-    def _db_can_handle_this_change(self,td):
-        """Check if the database can handle going from B to A."""
-
-        if (td.columns_missing_from_B
-            and not td.columns_missing_from_A
-            and not td.columns_different):
-            # Even sqlite can handle column additions.
-            return True
-        else:
-            return not self.engine.url.drivername.startswith('sqlite')
-
-    def runB2A(self):
-        """Goes from B to A.
-
-        Was: applyModel. Apply model (A) to current database (B).
-        """
-
-        meta = sqlalchemy.MetaData(self.engine)
-
-        for table in self._get_tables(missingA=True):
-            table = table.tometadata(meta)
-            table.drop()
-        for table in self._get_tables(missingB=True):
-            table = table.tometadata(meta)
-            table.create()
-        for modelTable in self._get_tables(modified=True):
-            tableName = modelTable.name
-            modelTable = modelTable.tometadata(meta)
-            dbTable = self.diff.metadataB.tables[tableName]
-
-            td = self.diff.tables_different[tableName]
-
-            if self._db_can_handle_this_change(td):
-
-                for col in td.columns_missing_from_B:
-                    modelTable.columns[col].create()
-                for col in td.columns_missing_from_A:
-                    dbTable.columns[col].drop()
-                # XXX handle column changes here.
-            else:
-                # Sqlite doesn't support drop column, so you have to
-                # do more: create temp table, copy data to it, drop
-                # old table, create new table, copy data back.
-                #
-                # I wonder if this is guaranteed to be unique?
-                tempName = '_temp_%s' % modelTable.name
-
-                def getCopyStatement():
-                    preparer = self.engine.dialect.preparer
-                    commonCols = []
-                    for modelCol in modelTable.columns:
-                        if modelCol.name in dbTable.columns:
-                            commonCols.append(modelCol.name)
-                    commonColsStr = ', '.join(commonCols)
-                    return 'INSERT INTO %s (%s) SELECT %s FROM %s' % \
-                        (tableName, commonColsStr, commonColsStr, tempName)
-
-                # Move the data in one transaction, so that we don't
-                # leave the database in a nasty state.
-                connection = self.engine.connect()
-                trans = connection.begin()
-                try:
-                    connection.execute(
-                        'CREATE TEMPORARY TABLE %s as SELECT * from %s' % \
-                            (tempName, modelTable.name))
-                    # make sure the drop takes place inside our
-                    # transaction with the bind parameter
-                    modelTable.drop(bind=connection)
-                    modelTable.create(bind=connection)
-                    connection.execute(getCopyStatement())
-                    connection.execute('DROP TABLE %s' % tempName)
-                    trans.commit()
-                except:
-                    trans.rollback()
-                    raise
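A sketch of driving ``ModelGenerator`` by hand, assuming the companion ``schemadiff`` module's ``getDiffOfModelAgainstDatabase`` helper and an illustrative SQLite database::

    import sqlalchemy
    from kallithea.lib.dbmigrate.migrate.versioning import genmodel, schemadiff

    engine = sqlalchemy.create_engine('sqlite:///example.db')   # assumed database
    model_meta = sqlalchemy.MetaData()                           # an (empty) Python model

    diff = schemadiff.getDiffOfModelAgainstDatabase(model_meta, engine)
    # With an empty model every database table is "missing from A", so this
    # dumps the whole database as declarative model source.
    print genmodel.ModelGenerator(diff, engine, declarative=True).genBDefinition()
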
--- a/kallithea/lib/dbmigrate/migrate/versioning/migrate_repository.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,100 +0,0 @@
-"""
-   Script to migrate a repository created with sqlalchemy-migrate <= 0.4.4
-   to the new repository schema. This shouldn't use any other migrate
-   modules, so that it can work in any version.
-"""
-
-import os
-import sys
-import logging
-
-log = logging.getLogger(__name__)
-
-
-def usage():
-    """Gives usage information."""
-    print """Usage: %(prog)s repository-to-migrate
-
-    Upgrade your repository to the new flat format.
-
-    NOTE: You should probably make a backup before running this.
-    """ % {'prog': sys.argv[0]}
-
-    sys.exit(1)
-
-
-def delete_file(filepath):
-    """Deletes a file and prints a message."""
-    log.info('Deleting file: %s', filepath)
-    os.remove(filepath)
-
-
-def move_file(src, tgt):
-    """Moves a file and prints a message."""
-    log.info('Moving file %s to %s', src, tgt)
-    if os.path.exists(tgt):
-        raise Exception(
-            'Cannot move file %s because target %s already exists' % \
-                (src, tgt))
-    os.rename(src, tgt)
-
-
-def delete_directory(dirpath):
-    """Delete a directory and print a message."""
-    log.info('Deleting directory: %s', dirpath)
-    os.rmdir(dirpath)
-
-
-def migrate_repository(repos):
-    """Does the actual migration to the new repository format."""
-    log.info('Migrating repository at: %s to new format', repos)
-    versions = '%s/versions' % repos
-    dirs = os.listdir(versions)
-    # Only use int's in list.
-    numdirs = [int(dirname) for dirname in dirs if dirname.isdigit()]
-    numdirs.sort()  # Sort list.
-    for dirname in numdirs:
-        origdir = '%s/%s' % (versions, dirname)
-        log.info('Working on directory: %s', origdir)
-        files = os.listdir(origdir)
-        files.sort()
-        for filename in files:
-            # Delete compiled Python files.
-            if filename.endswith('.pyc') or filename.endswith('.pyo'):
-                delete_file('%s/%s' % (origdir, filename))
-
-            # Delete empty __init__.py files.
-            origfile = '%s/__init__.py' % origdir
-            if os.path.exists(origfile) and len(open(origfile).read()) == 0:
-                delete_file(origfile)
-
-            # Move sql upgrade scripts.
-            if filename.endswith('.sql'):
-                version, dbms, operation = filename.split('.', 3)[0:3]
-                origfile = '%s/%s' % (origdir, filename)
-                # For instance:  2.postgres.upgrade.sql ->
-                #  002_postgres_upgrade.sql
-                tgtfile = '%s/%03d_%s_%s.sql' % (
-                    versions, int(version), dbms, operation)
-                move_file(origfile, tgtfile)
-
-        # Move Python upgrade script.
-        pyfile = '%s.py' % dirname
-        pyfilepath = '%s/%s' % (origdir, pyfile)
-        if os.path.exists(pyfilepath):
-            tgtfile = '%s/%03d.py' % (versions, int(dirname))
-            move_file(pyfilepath, tgtfile)
-
-        # Try to remove directory. Will fail if it's not empty.
-        delete_directory(origdir)
-
-
-def main():
-    """Main function to be called when using this script."""
-    if len(sys.argv) != 2:
-        usage()
-    migrate_repository(sys.argv[1])
-
-
-if __name__ == '__main__':
-    main()
--- a/kallithea/lib/dbmigrate/migrate/versioning/pathed.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,75 +0,0 @@
-"""
-   A path/directory class.
-"""
-
-import os
-import shutil
-import logging
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.versioning.config import *
-from kallithea.lib.dbmigrate.migrate.versioning.util import KeyedInstance
-
-
-log = logging.getLogger(__name__)
-
-class Pathed(KeyedInstance):
-    """
-    A class associated with a path/directory tree.
-
-    Only one instance of this class may exist for a particular file;
-    __new__ will return an existing instance if possible
-    """
-    parent = None
-
-    @classmethod
-    def _key(cls, path):
-        return str(path)
-
-    def __init__(self, path):
-        self.path = path
-        if self.__class__.parent is not None:
-            self._init_parent(path)
-
-    def _init_parent(self, path):
-        """Try to initialize this object's parent, if it has one"""
-        parent_path = self.__class__._parent_path(path)
-        self.parent = self.__class__.parent(parent_path)
-        log.debug("Getting parent %r:%r", self.__class__.parent, parent_path)
-        self.parent._init_child(path, self)
-
-    def _init_child(self, child, path):
-        """Run when a child of this object is initialized.
-
-        Parameters: the child object; the path to this object (its
-        parent)
-        """
-
-    @classmethod
-    def _parent_path(cls, path):
-        """
-        Fetch the path of this object's parent from this object's path.
-        """
-        # os.path.dirname(), but strip directories like files (like
-        # unix basename)
-        #
-        # Treat directories like files...
-        if path[-1] == '/':
-            path = path[:-1]
-        ret = os.path.dirname(path)
-        return ret
-
-    @classmethod
-    def require_notfound(cls, path):
-        """Ensures a given path does not already exist"""
-        if os.path.exists(path):
-            raise exceptions.PathFoundError(path)
-
-    @classmethod
-    def require_found(cls, path):
-        """Ensures a given path already exists"""
-        if not os.path.exists(path):
-            raise exceptions.PathNotFoundError(path)
-
-    def __str__(self):
-        return self.path
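A sketch of the one-instance-per-path behaviour described in the class docstring above; ``ConfigFile`` is a hypothetical subclass created only for illustration, and the path is assumed::

    from kallithea.lib.dbmigrate.migrate.versioning.pathed import Pathed

    class ConfigFile(Pathed):
        """Hypothetical Pathed subclass, defined here only for illustration."""

    a = ConfigFile('/tmp/example_repository/migrate.cfg')
    b = ConfigFile('/tmp/example_repository/migrate.cfg')
    assert a is b     # KeyedInstance returns the cached instance for an identical path
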
--- a/kallithea/lib/dbmigrate/migrate/versioning/repository.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,247 +0,0 @@
-"""
-   SQLAlchemy migrate repository management.
-"""
-import os
-import shutil
-import string
-import logging
-
-from pkg_resources import resource_filename
-from tempita import Template as TempitaTemplate
-
-import kallithea
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.versioning import version, pathed, cfgparse
-from kallithea.lib.dbmigrate.migrate.versioning.template import Template
-from kallithea.lib.dbmigrate.migrate.versioning.config import *
-
-
-log = logging.getLogger(__name__)
-
-class Changeset(dict):
-    """A collection of changes to be applied to a database.
-
-    Changesets are bound to a repository and manage a set of
-    scripts from that repository.
-
-    Behaves like a dict, for the most part. Keys are ordered based on step value.
-    """
-
-    def __init__(self, start, *changes, **k):
-        """
-        Give a start version; the step defaults to 1 and must be set to -1 for a downgrade changeset.
-        """
-        self.step = k.pop('step', 1)
-        self.start = version.VerNum(start)
-        self.end = self.start
-        for change in changes:
-            self.add(change)
-
-    def __iter__(self):
-        return iter(self.items())
-
-    def keys(self):
-        """
-        In a series of upgrades x -> y, the keys are the starting versions x, sorted (descending when downgrading).
-        """
-        ret = super(Changeset, self).keys()
-        # Reverse order if downgrading
-        ret.sort(reverse=(self.step < 1))
-        return ret
-
-    def values(self):
-        return [self[k] for k in self.keys()]
-
-    def items(self):
-        return zip(self.keys(), self.values())
-
-    def add(self, change):
-        """Add new change to changeset"""
-        key = self.end
-        self.end += self.step
-        self[key] = change
-
-    def run(self, *p, **k):
-        """Run the changeset scripts"""
-        for _version, script in self:
-            script.run(*p, **k)
-
-
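A sketch of how a ``Changeset`` orders its work; plain strings stand in for the script objects a repository would normally supply::

    from kallithea.lib.dbmigrate.migrate.versioning.repository import Changeset

    # Upgrading: keys are the starting versions, iterated in ascending order.
    cs = Changeset(2, 'script_3', 'script_4', step=1)
    for ver, change in cs:
        print ver, change          # 2 script_3, then 3 script_4

    # Downgrading: with step=-1 the same pairs come back in reverse order.
    cs = Changeset(4, 'script_4', 'script_3', step=-1)
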
-class Repository(pathed.Pathed):
-    """A project's change script repository"""
-
-    _config = 'migrate.cfg'
-    _versions = 'versions'
-
-    def __init__(self, path):
-        log.debug('Loading repository %s...', path)
-        self.verify(path)
-        super(Repository, self).__init__(path)
-        self.config = cfgparse.Config(os.path.join(self.path, self._config))
-        self.versions = version.Collection(os.path.join(self.path,
-                                                      self._versions))
-        log.debug('Repository %s loaded successfully', path)
-        log.debug('Config: %r', self.config.to_dict())
-
-    @classmethod
-    def verify(cls, path):
-        """
-        Ensure the target path is a valid repository.
-
-        :raises: :exc:`InvalidRepositoryError <migrate.exceptions.InvalidRepositoryError>`
-        """
-        # Ensure the existence of required files
-        try:
-            cls.require_found(path)
-            cls.require_found(os.path.join(path, cls._config))
-            cls.require_found(os.path.join(path, cls._versions))
-        except exceptions.PathNotFoundError as e:
-            raise exceptions.InvalidRepositoryError(path)
-
-    @classmethod
-    def prepare_config(cls, tmpl_dir, name, options=None):
-        """
-        Prepare a project configuration file for a new project.
-
-        :param tmpl_dir: Path to Repository template
-        :param name: Repository name
-        :param options: Extra values substituted into the config template
-        :type tmpl_dir: string
-        :type name: string
-        :type options: dict
-        :returns: Populated config file
-        """
-        if options is None:
-            options = {}
-        options.setdefault('version_table', 'migrate_version')
-        options.setdefault('repository_id', name)
-        options.setdefault('required_dbs', [])
-        options.setdefault('use_timestamp_numbering', False)
-
-        tmpl = open(os.path.join(tmpl_dir, cls._config)).read()
-        ret = TempitaTemplate(tmpl).substitute(options)
-
-        # cleanup
-        del options['__template_name__']
-
-        return ret
-
-    @classmethod
-    def create(cls, path, name, **opts):
-        """Create a repository at a specified path"""
-        cls.require_notfound(path)
-        theme = opts.pop('templates_theme', None)
-        t_path = opts.pop('templates_path', None)
-
-        # Create repository
-        tmpl_dir = Template(t_path).get_repository(theme=theme)
-        shutil.copytree(tmpl_dir, path)
-
-        # Edit config defaults
-        config_text = cls.prepare_config(tmpl_dir, name, options=opts)
-        fd = open(os.path.join(path, cls._config), 'w')
-        fd.write(config_text)
-        fd.close()
-
-        opts['repository_name'] = name
-
-        # Create a management script
-        manager = os.path.join(path, 'manage.py')
-        Repository.create_manage_file(manager, templates_theme=theme,
-            templates_path=t_path, **opts)
-
-        return cls(path)
-
-    def create_script(self, description, **k):
-        """API to :meth:`migrate.versioning.version.Collection.create_new_python_version`"""
-
-        k['use_timestamp_numbering'] = self.use_timestamp_numbering
-        self.versions.create_new_python_version(description, **k)
-
-    def create_script_sql(self, database, description, **k):
-        """API to :meth:`migrate.versioning.version.Collection.create_new_sql_version`"""
-        k['use_timestamp_numbering'] = self.use_timestamp_numbering
-        self.versions.create_new_sql_version(database, description, **k)
-
-    @property
-    def latest(self):
-        """API to :attr:`migrate.versioning.version.Collection.latest`"""
-        return self.versions.latest
-
-    @property
-    def version_table(self):
-        """Returns version_table name specified in config"""
-        return self.config.get('db_settings', 'version_table')
-
-    @property
-    def id(self):
-        """Returns repository id specified in config"""
-        # Adjust the value read from kallithea/lib/dbmigrate/migrate.cfg, normally "kallithea_db_migrations"
-        s = self.config.get('db_settings', 'repository_id')
-        if s == "kallithea_db_migrations":
-            s = kallithea.DB_MIGRATIONS
-        return s
-
-    @property
-    def use_timestamp_numbering(self):
-        """Returns use_timestamp_numbering specified in config"""
-        if self.config.has_option('db_settings', 'use_timestamp_numbering'):
-            return self.config.getboolean('db_settings', 'use_timestamp_numbering')
-        return False
-
-    def version(self, *p, **k):
-        """API to :attr:`migrate.versioning.version.Collection.version`"""
-        return self.versions.version(*p, **k)
-
-    @classmethod
-    def clear(cls):
-        # TODO: deletes repo
-        super(Repository, cls).clear()
-        version.Collection.clear()
-
-    def changeset(self, database, start, end=None):
-        """Create a changeset to migrate this database from ver. start to end/latest.
-
-        :param database: name of database to generate changeset
-        :param start: version to start at
-        :param end: version to end at (latest if None given)
-        :type database: string
-        :type start: int
-        :type end: int
-        :returns: :class:`Changeset instance <migrate.versioning.repository.Changeset>`
-        """
-        start = version.VerNum(start)
-
-        if end is None:
-            end = self.latest
-        else:
-            end = version.VerNum(end)
-
-        if start <= end:
-            step = 1
-            range_mod = 1
-            op = 'upgrade'
-        else:
-            step = -1
-            range_mod = 0
-            op = 'downgrade'
-
-        versions = range(start + range_mod, end + range_mod, step)
-        changes = [self.version(v).script(database, op) for v in versions]
-        ret = Changeset(start, step=step, *changes)
-        return ret
-
-    @classmethod
-    def create_manage_file(cls, file_, **opts):
-        """Create a project management script (manage.py)
-
-        :param file_: Destination file to be written
-        :param opts: Options that are passed to :func:`migrate.versioning.shell.main`
-        """
-        mng_file = Template(opts.pop('templates_path', None)) \
-            .get_manage(theme=opts.pop('templates_theme', None))
-
-        tmpl = open(mng_file).read()
-        fd = open(file_, 'w')
-        fd.write(TempitaTemplate(tmpl).substitute(opts))
-        fd.close()
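The Changeset class above is essentially a dict keyed by the version each script starts from, walked in step order. A minimal standalone sketch of that ordering in modern Python (illustrative names only, not the removed class):

    class MiniChangeset(dict):
        """Illustrative stand-in for the removed Changeset: keys are start versions."""
        def __init__(self, start, *changes, **kw):
            self.step = kw.pop('step', 1)
            self.start = self.end = start
            for change in changes:
                self.add(change)

        def add(self, change):
            self[self.end] = change          # script keyed by the version it upgrades from
            self.end += self.step

        def ordered(self):
            # ascending for upgrades, descending for downgrades
            return [(v, self[v]) for v in sorted(self, reverse=self.step < 0)]

    up = MiniChangeset(3, 'script_3_to_4', 'script_4_to_5')             # upgrade 3 -> 5
    down = MiniChangeset(5, 'script_5_to_4', 'script_4_to_3', step=-1)  # downgrade 5 -> 3
    assert [v for v, _ in up.ordered()] == [3, 4]
    assert [v for v, _ in down.ordered()] == [5, 4]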
--- a/kallithea/lib/dbmigrate/migrate/versioning/schema.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,221 +0,0 @@
-"""
-   Database schema version management.
-"""
-import sys
-import logging
-
-from sqlalchemy import (Table, Column, MetaData, String, Text, Integer,
-    create_engine)
-from sqlalchemy.sql import and_
-from sqlalchemy import exc as sa_exceptions
-from sqlalchemy.sql import bindparam
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.changeset import SQLA_07
-from kallithea.lib.dbmigrate.migrate.versioning import genmodel, schemadiff
-from kallithea.lib.dbmigrate.migrate.versioning.repository import Repository
-from kallithea.lib.dbmigrate.migrate.versioning.util import load_model
-from kallithea.lib.dbmigrate.migrate.versioning.version import VerNum
-
-
-log = logging.getLogger(__name__)
-
-
-class ControlledSchema(object):
-    """A database under version control"""
-
-    def __init__(self, engine, repository):
-        if isinstance(repository, basestring):
-            repository = Repository(repository)
-        self.engine = engine
-        self.repository = repository
-        self.meta = MetaData(engine)
-        self.load()
-
-    def __eq__(self, other):
-        """Compare two schemas by repositories and versions"""
-        return (self.repository is other.repository \
-            and self.version == other.version)
-
-    def load(self):
-        """Load controlled schema version info from DB"""
-        tname = self.repository.version_table
-        try:
-            if not hasattr(self, 'table') or self.table is None:
-                self.table = Table(tname, self.meta, autoload=True)
-
-            result = self.engine.execute(self.table.select(
-                self.table.c.repository_id == str(self.repository.id)))
-
-            data = list(result)[0]
-        except:
-            cls, exc, tb = sys.exc_info()
-            raise exceptions.DatabaseNotControlledError, exc.__str__(), tb
-
-        self.version = data['version']
-        return data
-
-    def drop(self):
-        """
-        Remove version control from a database.
-        """
-        if SQLA_07:
-            try:
-                self.table.drop()
-            except sa_exceptions.DatabaseError:
-                raise exceptions.DatabaseNotControlledError(str(self.table))
-        else:
-            try:
-                self.table.drop()
-            except sa_exceptions.SQLError:
-                raise exceptions.DatabaseNotControlledError(str(self.table))
-
-    def changeset(self, version=None):
-        """API to Changeset creation.
-
-        Uses self.version for start version and engine.name
-        to get database name.
-        """
-        database = self.engine.name
-        start_ver = self.version
-        changeset = self.repository.changeset(database, start_ver, version)
-        return changeset
-
-    def runchange(self, ver, change, step):
-        startver = ver
-        endver = ver + step
-        # Current database version must be correct! Don't run if corrupt!
-        if self.version != startver:
-            raise exceptions.InvalidVersionError("%s is not %s" % \
-                                                     (self.version, startver))
-        # Run the change
-        change.run(self.engine, step)
-
-        # Update/refresh database version
-        self.update_repository_table(startver, endver)
-        self.load()
-
-    def update_repository_table(self, startver, endver):
-        """Update version_table with new information"""
-        update = self.table.update(and_(self.table.c.version == int(startver),
-             self.table.c.repository_id == str(self.repository.id)))
-        self.engine.execute(update, version=int(endver))
-
-    def upgrade(self, version=None):
-        """
-        Upgrade (or downgrade) to a specified version, or latest version.
-        """
-        changeset = self.changeset(version)
-        for ver, change in changeset:
-            self.runchange(ver, change, changeset.step)
-
-    def update_db_from_model(self, model):
-        """
-        Modify the database to match the structure of the current Python model.
-        """
-        model = load_model(model)
-
-        diff = schemadiff.getDiffOfModelAgainstDatabase(
-            model, self.engine, excludeTables=[self.repository.version_table]
-            )
-        genmodel.ModelGenerator(diff,self.engine).runB2A()
-
-        self.update_repository_table(self.version, int(self.repository.latest))
-
-        self.load()
-
-    @classmethod
-    def create(cls, engine, repository, version=None):
-        """
-        Declare a database to be under a repository's version control.
-
-        :raises: :exc:`DatabaseAlreadyControlledError`
-        :returns: :class:`ControlledSchema`
-        """
-        # Confirm that the version # is valid: positive, integer,
-        # exists in repos
-        if isinstance(repository, basestring):
-            repository = Repository(repository)
-        version = cls._validate_version(repository, version)
-        table = cls._create_table_version(engine, repository, version)
-        # TODO: history table
-        # Load repository information and return
-        return cls(engine, repository)
-
-    @classmethod
-    def _validate_version(cls, repository, version):
-        """
-        Ensures this is a valid version number for this repository.
-
-        :raises: :exc:`InvalidVersionError` if invalid
-        :return: valid version number
-        """
-        if version is None:
-            version = 0
-        try:
-            version = VerNum(version) # raises valueerror
-            if version < 0 or version > repository.latest:
-                raise ValueError()
-        except ValueError:
-            raise exceptions.InvalidVersionError(version)
-        return version
-
-    @classmethod
-    def _create_table_version(cls, engine, repository, version):
-        """
-        Creates the versioning table in a database.
-
-        :raises: :exc:`DatabaseAlreadyControlledError`
-        """
-        # Create tables
-        tname = repository.version_table
-        meta = MetaData(engine)
-
-        table = Table(
-            tname, meta,
-            Column('repository_id', String(250), primary_key=True),
-            Column('repository_path', Text),
-            Column('version', Integer), )
-
-        # there can be multiple repositories/schemas in the same db
-        if not table.exists():
-            table.create()
-
-        # test for existing repository_id
-        s = table.select(table.c.repository_id == bindparam("repository_id"))
-        result = engine.execute(s, repository_id=repository.id)
-        if result.fetchone():
-            raise exceptions.DatabaseAlreadyControlledError
-
-        # Insert data
-        engine.execute(table.insert().values(
-                           repository_id=repository.id,
-                           repository_path=repository.path,
-                           version=int(version)))
-        return table
-
-    @classmethod
-    def compare_model_to_db(cls, engine, model, repository):
-        """
-        Compare the current model against the current database.
-        """
-        if isinstance(repository, basestring):
-            repository = Repository(repository)
-        model = load_model(model)
-
-        diff = schemadiff.getDiffOfModelAgainstDatabase(
-            model, engine, excludeTables=[repository.version_table])
-        return diff
-
-    @classmethod
-    def create_model(cls, engine, repository, declarative=False):
-        """
-        Dump the current database as a Python model.
-        """
-        if isinstance(repository, basestring):
-            repository = Repository(repository)
-
-        diff = schemadiff.getDiffOfModelAgainstDatabase(
-            MetaData(), engine, excludeTables=[repository.version_table]
-            )
-        return genmodel.ModelGenerator(diff, engine, declarative).genBDefinition()
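ControlledSchema above amounts to bookkeeping in a one-row-per-repository version table. A rough sketch of the same bookkeeping with current SQLAlchemy (1.4+ style assumed; the repository id, path and versions are illustrative values):

    from sqlalchemy import (Column, Integer, MetaData, String, Table, Text,
                            create_engine, select)

    engine = create_engine("sqlite://")          # in-memory database for the demo
    meta = MetaData()
    version_table = Table(
        "migrate_version", meta,
        Column("repository_id", String(250), primary_key=True),
        Column("repository_path", Text),
        Column("version", Integer),
    )
    meta.create_all(engine)

    with engine.begin() as conn:
        # put the database "under version control": record its current schema version
        conn.execute(version_table.insert().values(
            repository_id="example_repo", repository_path="/tmp/repo", version=0))
        # after an upgrade script has run, bump the stored version
        conn.execute(version_table.update()
                     .where(version_table.c.repository_id == "example_repo")
                     .values(version=1))
        assert conn.execute(select(version_table.c.version)).scalar_one() == 1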
--- a/kallithea/lib/dbmigrate/migrate/versioning/schemadiff.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,295 +0,0 @@
-"""
-   Schema differencing support.
-"""
-
-import logging
-import sqlalchemy
-
-from kallithea.lib.dbmigrate.migrate.changeset import SQLA_06
-from sqlalchemy.types import Float
-
-log = logging.getLogger(__name__)
-
-
-def getDiffOfModelAgainstDatabase(metadata, engine, excludeTables=None):
-    """
-    Return differences of model against database.
-
-    :return: object which will evaluate to :keyword:`True` if there \
-      are differences else :keyword:`False`.
-    """
-    db_metadata = sqlalchemy.MetaData(engine)
-    db_metadata.reflect()
-
-    # sqlite will include a dynamically generated 'sqlite_sequence' table if
-    # there are autoincrement sequences in the database; this should not be
-    # compared.
-    if engine.dialect.name == 'sqlite':
-        if 'sqlite_sequence' in db_metadata.tables:
-            db_metadata.remove(db_metadata.tables['sqlite_sequence'])
-
-    return SchemaDiff(metadata, db_metadata,
-                      labelA='model',
-                      labelB='database',
-                      excludeTables=excludeTables)
-
-
-def getDiffOfModelAgainstModel(metadataA, metadataB, excludeTables=None):
-    """
-    Return differences of model against another model.
-
-    :return: object which will evaluate to :keyword:`True` if there \
-      are differences else :keyword:`False`.
-    """
-    return SchemaDiff(metadataA, metadataB, excludeTables=excludeTables)
-
-
-class ColDiff(object):
-    """
-    Container for differences in one :class:`~sqlalchemy.schema.Column`
-    between two :class:`~sqlalchemy.schema.Table` instances, ``A``
-    and ``B``.
-
-    .. attribute:: col_A
-
-      The :class:`~sqlalchemy.schema.Column` object for A.
-
-    .. attribute:: col_B
-
-      The :class:`~sqlalchemy.schema.Column` object for B.
-
-    .. attribute:: type_A
-
-      The most generic type of the :class:`~sqlalchemy.schema.Column`
-      object in A.
-
-    .. attribute:: type_B
-
-      The most generic type of the :class:`~sqlalchemy.schema.Column`
-      object in B.
-
-    """
-
-    diff = False
-
-    def __init__(self,col_A,col_B):
-        self.col_A = col_A
-        self.col_B = col_B
-
-        self.type_A = col_A.type
-        self.type_B = col_B.type
-
-        self.affinity_A = self.type_A._type_affinity
-        self.affinity_B = self.type_B._type_affinity
-
-        if self.affinity_A is not self.affinity_B:
-            self.diff = True
-            return
-
-        if isinstance(self.type_A,Float) or isinstance(self.type_B,Float):
-            if not (isinstance(self.type_A,Float) and isinstance(self.type_B,Float)):
-                self.diff=True
-                return
-
-        for attr in ('precision','scale','length'):
-            A = getattr(self.type_A,attr,None)
-            B = getattr(self.type_B,attr,None)
-            if not (A is None or B is None) and A!=B:
-                self.diff=True
-                return
-
-    def __nonzero__(self):
-        return self.diff
-
-class TableDiff(object):
-    """
-    Container for differences in one :class:`~sqlalchemy.schema.Table`
-    between two :class:`~sqlalchemy.schema.MetaData` instances, ``A``
-    and ``B``.
-
-    .. attribute:: columns_missing_from_A
-
-      A sequence of column names that were found in B but weren't in
-      A.
-
-    .. attribute:: columns_missing_from_B
-
-      A sequence of column names that were found in A but weren't in
-      B.
-
-    .. attribute:: columns_different
-
-      A dictionary containing information about columns that were
-      found to be different.
-      It maps column names to :class:`ColDiff` objects describing the
-      differences found.
-    """
-    __slots__ = (
-        'columns_missing_from_A',
-        'columns_missing_from_B',
-        'columns_different',
-        )
-
-    def __nonzero__(self):
-        return bool(
-            self.columns_missing_from_A or
-            self.columns_missing_from_B or
-            self.columns_different
-            )
-
-class SchemaDiff(object):
-    """
-    Compute the difference between two :class:`~sqlalchemy.schema.MetaData`
-    objects.
-
-    The string representation of a :class:`SchemaDiff` will summarise
-    the changes found between the two
-    :class:`~sqlalchemy.schema.MetaData` objects.
-
-    The length of a :class:`SchemaDiff` will give the number of
-    changes found, enabling it to be used much like a boolean in
-    expressions.
-
-    :param metadataA:
-      First :class:`~sqlalchemy.schema.MetaData` to compare.
-
-    :param metadataB:
-      Second :class:`~sqlalchemy.schema.MetaData` to compare.
-
-    :param labelA:
-      The label to use in messages about the first
-      :class:`~sqlalchemy.schema.MetaData`.
-
-    :param labelB:
-      The label to use in messages about the second
-      :class:`~sqlalchemy.schema.MetaData`.
-
-    :param excludeTables:
-      A sequence of table names to exclude.
-
-    .. attribute:: tables_missing_from_A
-
-      A sequence of table names that were found in B but weren't in
-      A.
-
-    .. attribute:: tables_missing_from_B
-
-      A sequence of table names that were found in A but weren't in
-      B.
-
-    .. attribute:: tables_different
-
-      A dictionary containing information about tables that were found
-      to be different.
-      It maps table names to :class:`TableDiff` objects describing the
-      differences found.
-    """
-
-    def __init__(self,
-                 metadataA, metadataB,
-                 labelA='metadataA',
-                 labelB='metadataB',
-                 excludeTables=None):
-
-        self.metadataA, self.metadataB = metadataA, metadataB
-        self.labelA, self.labelB = labelA, labelB
-        self.label_width = max(len(labelA),len(labelB))
-        excludeTables = set(excludeTables or [])
-
-        A_table_names = set(metadataA.tables.keys())
-        B_table_names = set(metadataB.tables.keys())
-
-        self.tables_missing_from_A = sorted(
-            B_table_names - A_table_names - excludeTables
-            )
-        self.tables_missing_from_B = sorted(
-            A_table_names - B_table_names - excludeTables
-            )
-
-        self.tables_different = {}
-        for table_name in A_table_names.intersection(B_table_names):
-
-            td = TableDiff()
-
-            A_table = metadataA.tables[table_name]
-            B_table = metadataB.tables[table_name]
-
-            A_column_names = set(A_table.columns.keys())
-            B_column_names = set(B_table.columns.keys())
-
-            td.columns_missing_from_A = sorted(
-                B_column_names - A_column_names
-                )
-
-            td.columns_missing_from_B = sorted(
-                A_column_names - B_column_names
-                )
-
-            td.columns_different = {}
-
-            for col_name in A_column_names.intersection(B_column_names):
-
-                cd = ColDiff(
-                    A_table.columns.get(col_name),
-                    B_table.columns.get(col_name)
-                    )
-
-                if cd:
-                    td.columns_different[col_name]=cd
-
-            # XXX - index and constraint differences should
-            #       be checked for here
-
-            if td:
-                self.tables_different[table_name]=td
-
-    def __str__(self):
-        """ Summarize differences. """
-        out = []
-        column_template ='      %%%is: %%r' % self.label_width
-
-        for names,label in (
-            (self.tables_missing_from_A,self.labelA),
-            (self.tables_missing_from_B,self.labelB),
-            ):
-            if names:
-                out.append(
-                    '  tables missing from %s: %s' % (
-                        label,', '.join(sorted(names))
-                        )
-                    )
-
-        for name,td in sorted(self.tables_different.items()):
-            out.append(
-               '  table with differences: %s' % name
-               )
-            for names,label in (
-                (td.columns_missing_from_A,self.labelA),
-                (td.columns_missing_from_B,self.labelB),
-                ):
-                if names:
-                    out.append(
-                        '    %s missing these columns: %s' % (
-                            label,', '.join(sorted(names))
-                            )
-                        )
-            for name,cd in td.columns_different.items():
-                out.append('    column with differences: %s' % name)
-                out.append(column_template % (self.labelA,cd.col_A))
-                out.append(column_template % (self.labelB,cd.col_B))
-
-        if out:
-            out.insert(0, 'Schema diffs:')
-            return '\n'.join(out)
-        else:
-            return 'No schema diffs'
-
-    def __len__(self):
-        """
-        Used in bool evaluation, return of 0 means no diffs.
-        """
-        return (
-            len(self.tables_missing_from_A) +
-            len(self.tables_missing_from_B) +
-            len(self.tables_different)
-            )
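Leaving type comparison aside, the table and column diffing above reduces to set arithmetic over the names in two MetaData objects. A small illustrative sketch (the table definitions are made up):

    from sqlalchemy import Column, Integer, MetaData, String, Table

    model = MetaData()
    Table("users", model,
          Column("id", Integer, primary_key=True), Column("name", String(50)))

    database = MetaData()
    Table("users", database, Column("id", Integer, primary_key=True))
    Table("migrate_version", database, Column("version", Integer))

    exclude = {"migrate_version"}                  # bookkeeping table, never compared
    a = set(model.tables) - exclude
    b = set(database.tables) - exclude

    print("tables missing from model:", sorted(b - a))       # []
    print("tables missing from database:", sorted(a - b))    # []
    for name in a & b:
        cols_a = set(model.tables[name].columns.keys())
        cols_b = set(database.tables[name].columns.keys())
        print(name, "columns missing from database:", sorted(cols_a - cols_b))  # ['name']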
--- a/kallithea/lib/dbmigrate/migrate/versioning/script/__init__.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
-from kallithea.lib.dbmigrate.migrate.versioning.script.base import BaseScript
-from kallithea.lib.dbmigrate.migrate.versioning.script.py import PythonScript
-from kallithea.lib.dbmigrate.migrate.versioning.script.sql import SqlScript
--- a/kallithea/lib/dbmigrate/migrate/versioning/script/base.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,57 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-import logging
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.versioning.config import operations
-from kallithea.lib.dbmigrate.migrate.versioning import pathed
-
-
-log = logging.getLogger(__name__)
-
-class BaseScript(pathed.Pathed):
-    """Base class for other types of scripts.
-    All scripts have the following properties:
-
-    source (script.source())
-      The source code of the script
-    version (script.version())
-      The version number of the script
-    operations (script.operations())
-      The operations defined by the script: upgrade(), downgrade() or both.
-      Returns a tuple of operations.
-      An operation can also be checked for with e.g. script.operation(Script.ops.up).
-    """ # TODO: sphinxfy this and implement it correctly
-
-    def __init__(self, path):
-        log.debug('Loading script %s...', path)
-        self.verify(path)
-        super(BaseScript, self).__init__(path)
-        log.debug('Script %s loaded successfully', path)
-
-    @classmethod
-    def verify(cls, path):
-        """Ensure this is a valid script
-        This version simply ensures the script file's existence
-
-        :raises: :exc:`InvalidScriptError <migrate.exceptions.InvalidScriptError>`
-        """
-        try:
-            cls.require_found(path)
-        except:
-            raise exceptions.InvalidScriptError(path)
-
-    def source(self):
-        """:returns: source code of the script.
-        :rtype: string
-        """
-        fd = open(self.path)
-        ret = fd.read()
-        fd.close()
-        return ret
-
-    def run(self, engine):
-        """Core of each BaseScript subclass.
-        This method executes the script.
-        """
-        raise NotImplementedError()
--- a/kallithea/lib/dbmigrate/migrate/versioning/script/py.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,160 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
-import shutil
-import warnings
-import logging
-import inspect
-from StringIO import StringIO
-
-from kallithea.lib.dbmigrate import migrate
-from kallithea.lib.dbmigrate.migrate.versioning import genmodel, schemadiff
-from kallithea.lib.dbmigrate.migrate.versioning.config import operations
-from kallithea.lib.dbmigrate.migrate.versioning.template import Template
-from kallithea.lib.dbmigrate.migrate.versioning.script import base
-from kallithea.lib.dbmigrate.migrate.versioning.util import import_path, load_model, with_engine
-from kallithea.lib.dbmigrate.migrate.exceptions import MigrateDeprecationWarning, InvalidScriptError, ScriptError
-
-log = logging.getLogger(__name__)
-__all__ = ['PythonScript']
-
-
-class PythonScript(base.BaseScript):
-    """Base for Python scripts"""
-
-    @classmethod
-    def create(cls, path, **opts):
-        """Create an empty migration script at specified path
-
-        :returns: :class:`PythonScript instance <migrate.versioning.script.py.PythonScript>`"""
-        cls.require_notfound(path)
-
-        src = Template(opts.pop('templates_path', None)).get_script(theme=opts.pop('templates_theme', None))
-        shutil.copy(src, path)
-
-        return cls(path)
-
-    @classmethod
-    def make_update_script_for_model(cls, engine, oldmodel,
-                                     model, repository, **opts):
-        """Create a migration script based on difference between two SA models.
-
-        :param repository: path to migrate repository
-        :param oldmodel: dotted.module.name:SAClass or SAClass object
-        :param model: dotted.module.name:SAClass or SAClass object
-        :param engine: SQLAlchemy engine
-        :type repository: string or :class:`Repository instance <migrate.versioning.repository.Repository>`
-        :type oldmodel: string or Class
-        :type model: string or Class
-        :type engine: Engine instance
-        :returns: Upgrade / Downgrade script
-        :rtype: string
-        """
-
-        if isinstance(repository, basestring):
-            # oh dear, an import cycle!
-            from kallithea.lib.dbmigrate.migrate.versioning.repository import Repository
-            repository = Repository(repository)
-
-        oldmodel = load_model(oldmodel)
-        model = load_model(model)
-
-        # Compute differences.
-        diff = schemadiff.getDiffOfModelAgainstModel(
-            model,
-            oldmodel,
-            excludeTables=[repository.version_table])
-        # TODO: diff can be False (there is no difference?)
-        decls, upgradeCommands, downgradeCommands = \
-            genmodel.ModelGenerator(diff,engine).genB2AMigration()
-
-        # Store differences into file.
-        src = Template(opts.pop('templates_path', None)).get_script(opts.pop('templates_theme', None))
-        f = open(src)
-        contents = f.read()
-        f.close()
-
-        # generate source
-        search = 'def upgrade(migrate_engine):'
-        contents = contents.replace(search, '\n\n'.join((decls, search)), 1)
-        if upgradeCommands:
-            contents = contents.replace('    pass', upgradeCommands, 1)
-        if downgradeCommands:
-            contents = contents.replace('    pass', downgradeCommands, 1)
-        return contents
-
-    @classmethod
-    def verify_module(cls, path):
-        """Ensure path is a valid script
-
-        :param path: Script location
-        :type path: string
-        :raises: :exc:`InvalidScriptError <migrate.exceptions.InvalidScriptError>`
-        :returns: Python module
-        """
-        # Try to import and get the upgrade() func
-        module = import_path(path)
-        try:
-            assert callable(module.upgrade)
-        except Exception as e:
-            raise InvalidScriptError(path + ': %s' % str(e))
-        return module
-
-    def preview_sql(self, url, step, **args):
-        """Mocks SQLAlchemy Engine to store all executed calls in a string
-        and runs :meth:`PythonScript.run <migrate.versioning.script.py.PythonScript.run>`
-
-        :returns: SQL file
-        """
-        buf = StringIO()
-        args['engine_arg_strategy'] = 'mock'
-        args['engine_arg_executor'] = lambda s, p = '': buf.write(str(s) + p)
-
-        @with_engine
-        def go(url, step, **kw):
-            engine = kw.pop('engine')
-            self.run(engine, step)
-            return buf.getvalue()
-
-        return go(url, step, **args)
-
-    def run(self, engine, step):
-        """Core method of Script file.
-        Executes :func:`update` or :func:`downgrade` functions
-
-        :param engine: SQLAlchemy Engine
-        :param step: Operation to run
-        :type engine: string
-        :type step: int
-        """
-        if step > 0:
-            op = 'upgrade'
-        elif step < 0:
-            op = 'downgrade'
-        else:
-            raise ScriptError("%d is not a valid step" % step)
-
-        funcname = base.operations[op]
-        script_func = self._func(funcname)
-
-        # check for old way of using engine
-        if not inspect.getargspec(script_func)[0]:
-            raise TypeError("upgrade/downgrade functions must accept engine"
-                " parameter (since version 0.5.4)")
-
-        script_func(engine)
-
-    @property
-    def module(self):
-        """Calls :meth:`migrate.versioning.script.py.verify_module`
-        and returns it.
-        """
-        if not hasattr(self, '_module'):
-            self._module = self.verify_module(self.path)
-        return self._module
-
-    def _func(self, funcname):
-        if not hasattr(self.module, funcname):
-            msg = "Function '%s' is not defined in this script"
-            raise ScriptError(msg % funcname)
-        return getattr(self.module, funcname)
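PythonScript.run() above is a sign check on step followed by a call into the migration module. A tiny standalone sketch of that dispatch; the demo module here is fabricated:

    import types

    def run_script(module, engine, step):
        """Call module.upgrade or module.downgrade depending on the sign of step."""
        if step > 0:
            op = module.upgrade
        elif step < 0:
            op = module.downgrade
        else:
            raise ValueError("%d is not a valid step" % step)
        op(engine)   # the migration functions receive the engine as their argument

    demo = types.SimpleNamespace(upgrade=lambda e: print("upgrade on", e),
                                 downgrade=lambda e: print("downgrade on", e))
    run_script(demo, "fake-engine", 1)    # upgrade on fake-engine
    run_script(demo, "fake-engine", -1)   # downgrade on fake-engine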
--- a/kallithea/lib/dbmigrate/migrate/versioning/script/sql.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,49 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-import logging
-import shutil
-
-from kallithea.lib.dbmigrate.migrate.versioning.script import base
-from kallithea.lib.dbmigrate.migrate.versioning.template import Template
-
-
-log = logging.getLogger(__name__)
-
-class SqlScript(base.BaseScript):
-    """A file containing plain SQL statements."""
-
-    @classmethod
-    def create(cls, path, **opts):
-        """Create an empty migration script at specified path
-
-        :returns: :class:`SqlScript instance <migrate.versioning.script.sql.SqlScript>`"""
-        cls.require_notfound(path)
-
-        src = Template(opts.pop('templates_path', None)).get_sql_script(theme=opts.pop('templates_theme', None))
-        shutil.copy(src, path)
-        return cls(path)
-
-    # TODO: why is step parameter even here?
-    def run(self, engine, step=None, executemany=True):
-        """Runs SQL script through raw dbapi execute call"""
-        text = self.source()
-        # Don't rely on SA's autocommit here
-        # (SA uses .startswith to check if a commit is needed. What if script
-        # starts with a comment?)
-        conn = engine.connect()
-        try:
-            trans = conn.begin()
-            try:
-                # HACK: SQLite doesn't allow multiple statements through
-                # its execute() method, but it provides executescript() instead
-                dbapi = conn.engine.raw_connection()
-                if executemany and getattr(dbapi, 'executescript', None):
-                    dbapi.executescript(text)
-                else:
-                    conn.execute(text)
-                trans.commit()
-            except:
-                trans.rollback()
-                raise
-        finally:
-            conn.close()
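The HACK comment above refers to a DB-API quirk: sqlite's execute() rejects multi-statement strings, while executescript() accepts them. A self-contained illustration with the standard-library sqlite3 module (the SQL is an invented example):

    import sqlite3

    script = """
    CREATE TABLE example (id INTEGER PRIMARY KEY, name TEXT);
    INSERT INTO example (name) VALUES ('one');
    INSERT INTO example (name) VALUES ('two');
    """

    conn = sqlite3.connect(":memory:")
    try:
        # execute() only accepts a single statement; executescript() runs them all
        conn.executescript(script)
        print(conn.execute("SELECT COUNT(*) FROM example").fetchone()[0])  # 2
    finally:
        conn.close()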
--- a/kallithea/lib/dbmigrate/migrate/versioning/shell.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,214 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
-"""The migrate command-line tool."""
-
-import sys
-import inspect
-import logging
-from optparse import OptionParser, BadOptionError
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.versioning import api
-from kallithea.lib.dbmigrate.migrate.versioning.config import *
-from kallithea.lib.dbmigrate.migrate.versioning.util import asbool
-
-
-alias = dict(
-    s=api.script,
-    vc=api.version_control,
-    dbv=api.db_version,
-    v=api.version,
-)
-
-def alias_setup():
-    global alias
-    for key, val in alias.iteritems():
-        setattr(api, key, val)
-alias_setup()
-
-
-class PassiveOptionParser(OptionParser):
-
-    def _process_args(self, largs, rargs, values):
-        """little hack to support all --some_option=value parameters"""
-
-        while rargs:
-            arg = rargs[0]
-            if arg == "--":
-                del rargs[0]
-                return
-            elif arg[0:2] == "--":
-                # if parser does not know about the option
-                # pass it along (make it anonymous)
-                try:
-                    opt = arg.split('=', 1)[0]
-                    self._match_long_opt(opt)
-                except BadOptionError:
-                    largs.append(arg)
-                    del rargs[0]
-                else:
-                    self._process_long_opt(rargs, values)
-            elif arg[:1] == "-" and len(arg) > 1:
-                self._process_short_opts(rargs, values)
-            elif self.allow_interspersed_args:
-                largs.append(arg)
-                del rargs[0]
-
-def main(argv=None, **kwargs):
-    """Shell interface to :mod:`migrate.versioning.api`.
-
-    kwargs are default options that can be overridden by passing
-    --some_option on the command line.
-
-    :param disable_logging: Skip logging configuration (otherwise migrate
-        configures logging itself)
-    :type disable_logging: bool
-    """
-    if argv is None:
-        argv = list(sys.argv[1:])
-    commands = list(api.__all__)
-    commands.sort()
-
-    usage = """%%prog COMMAND ...
-
-    Available commands:
-        %s
-
-    Enter "%%prog help COMMAND" for information on a particular command.
-    """ % '\n\t'.join(["%s - %s" % (command.ljust(28), api.command_desc.get(command)) for command in commands])
-
-    parser = PassiveOptionParser(usage=usage)
-    parser.add_option("-d", "--debug",
-                     action="store_true",
-                     dest="debug",
-                     default=False,
-                     help="Shortcut to turn on DEBUG mode for logging")
-    parser.add_option("-q", "--disable_logging",
-                      action="store_true",
-                      dest="disable_logging",
-                      default=False,
-                      help="Use this option to disable logging configuration")
-    help_commands = ['help', '-h', '--help']
-    HELP = False
-
-    try:
-        command = argv.pop(0)
-        if command in help_commands:
-            HELP = True
-            command = argv.pop(0)
-    except IndexError:
-        parser.print_help()
-        return
-
-    command_func = getattr(api, command, None)
-    if command_func is None or command.startswith('_'):
-        parser.error("Invalid command %s" % command)
-
-    parser.set_usage(inspect.getdoc(command_func))
-    f_args, f_varargs, f_kwargs, f_defaults = inspect.getargspec(command_func)
-    for arg in f_args:
-        parser.add_option(
-            "--%s" % arg,
-            dest=arg,
-            action='store',
-            type="string")
-
-    # display help of the current command
-    if HELP:
-        parser.print_help()
-        return
-
-    options, args = parser.parse_args(argv)
-
-    # override kwargs with anonymous parameters
-    override_kwargs = dict()
-    for arg in list(args):
-        if arg.startswith('--'):
-            args.remove(arg)
-            if '=' in arg:
-                opt, value = arg[2:].split('=', 1)
-            else:
-                opt = arg[2:]
-                value = True
-            override_kwargs[opt] = value
-
-    # override kwargs with options if user is overwriting
-    for key, value in options.__dict__.iteritems():
-        if value is not None:
-            override_kwargs[key] = value
-
-    # arguments that function accepts without passed kwargs
-    f_required = list(f_args)
-    candidates = dict(kwargs)
-    candidates.update(override_kwargs)
-    for key, value in candidates.iteritems():
-        if key in f_args:
-            f_required.remove(key)
-
-    # map function arguments to parsed arguments
-    for arg in args:
-        try:
-            kw = f_required.pop(0)
-        except IndexError:
-            parser.error("Too many arguments for command %s: %s" % (command,
-                                                                    arg))
-        kwargs[kw] = arg
-
-    # apply overrides
-    kwargs.update(override_kwargs)
-
-    # configure options
-    for key, value in options.__dict__.iteritems():
-        kwargs.setdefault(key, value)
-
-    # configure logging
-    if not asbool(kwargs.pop('disable_logging', False)):
-        # filter: log records <= INFO go to stdout, the rest to stderr
-        class SingleLevelFilter(logging.Filter):
-            def __init__(self, min=None, max=None):
-                self.min = min or 0
-                self.max = max or 100
-
-            def filter(self, record):
-                return self.min <= record.levelno <= self.max
-
-        logger = logging.getLogger()
-        h1 = logging.StreamHandler(sys.stdout)
-        f1 = SingleLevelFilter(max=logging.INFO)
-        h1.addFilter(f1)
-        h2 = logging.StreamHandler(sys.stderr)
-        f2 = SingleLevelFilter(min=logging.WARN)
-        h2.addFilter(f2)
-        logger.addHandler(h1)
-        logger.addHandler(h2)
-
-        if options.debug:
-            logger.setLevel(logging.DEBUG)
-        else:
-            logger.setLevel(logging.INFO)
-
-    log = logging.getLogger(__name__)
-
-    # check if all args are given
-    try:
-        num_defaults = len(f_defaults)
-    except TypeError:
-        num_defaults = 0
-    f_args_default = f_args[len(f_args) - num_defaults:]
-    required = list(set(f_required) - set(f_args_default))
-    if required:
-        parser.error("Not enough arguments for command %s: %s not specified" \
-            % (command, ', '.join(required)))
-
-    # handle command
-    try:
-        ret = command_func(**kwargs)
-        if ret is not None:
-            log.info(ret)
-    except (exceptions.UsageError, exceptions.KnownError) as e:
-        parser.error(e.args[0])
-
-if __name__ == "__main__":
-    main()
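The SingleLevelFilter arrangement above (INFO and below to stdout, warnings and errors to stderr) is plain stdlib logging and can be reproduced standalone; a brief sketch:

    import logging
    import sys

    class SingleLevelFilter(logging.Filter):
        def __init__(self, min=None, max=None):
            self.min = min or 0
            self.max = max or 100

        def filter(self, record):
            return self.min <= record.levelno <= self.max

    logger = logging.getLogger("demo")
    out = logging.StreamHandler(sys.stdout)
    out.addFilter(SingleLevelFilter(max=logging.INFO))     # DEBUG/INFO only
    err = logging.StreamHandler(sys.stderr)
    err.addFilter(SingleLevelFilter(min=logging.WARN))     # WARNING and above only
    logger.addHandler(out)
    logger.addHandler(err)
    logger.setLevel(logging.INFO)

    logger.info("this line goes to stdout")
    logger.warning("this line goes to stderr")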
--- a/kallithea/lib/dbmigrate/migrate/versioning/template.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,94 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
-import os
-import shutil
-import sys
-
-from pkg_resources import resource_filename
-
-from kallithea.lib.dbmigrate.migrate.versioning.config import *
-from kallithea.lib.dbmigrate.migrate.versioning import pathed
-
-
-class Collection(pathed.Pathed):
-    """A collection of templates of a specific type"""
-    _mask = None
-
-    def get_path(self, file):
-        return os.path.join(self.path, str(file))
-
-
-class RepositoryCollection(Collection):
-    _mask = '%s'
-
-class ScriptCollection(Collection):
-    _mask = '%s.py_tmpl'
-
-class ManageCollection(Collection):
-    _mask = '%s.py_tmpl'
-
-class SQLScriptCollection(Collection):
-    _mask = '%s.py_tmpl'
-
-class Template(pathed.Pathed):
-    """Finds the paths/packages of various Migrate templates.
-
-    :param path: Templates are loaded from the
-        kallithea.lib.dbmigrate.migrate.versioning.templates package if `path` is not provided.
-    """
-    pkg = 'kallithea.lib.dbmigrate.migrate.versioning.templates'
-    _manage = 'manage.py_tmpl'
-
-    def __new__(cls, path=None):
-        if path is None:
-            path = cls._find_path(cls.pkg)
-        return super(Template, cls).__new__(cls, path)
-
-    def __init__(self, path=None):
-        if path is None:
-            path = Template._find_path(self.pkg)
-        super(Template, self).__init__(path)
-        self.repository = RepositoryCollection(os.path.join(path, 'repository'))
-        self.script = ScriptCollection(os.path.join(path, 'script'))
-        self.manage = ManageCollection(os.path.join(path, 'manage'))
-        self.sql_script = SQLScriptCollection(os.path.join(path, 'sql_script'))
-
-    @classmethod
-    def _find_path(cls, pkg):
-        """Returns absolute path to dotted python package."""
-        tmp_pkg = pkg.rsplit('.', 1)
-
-        if len(tmp_pkg) != 1:
-            return resource_filename(tmp_pkg[0], tmp_pkg[1])
-        else:
-            return resource_filename(tmp_pkg[0], '')
-
-    def _get_item(self, collection, theme=None):
-        """Locates and returns collection.
-
-        :param collection: name of collection to locate
-        :param type_: type of subfolder in collection (defaults to "_default")
-        :returns: (package, source)
-        :rtype: str, str
-        """
-        item = getattr(self, collection)
-        theme_mask = getattr(item, '_mask')
-        theme = theme_mask % (theme or 'default')
-        return item.get_path(theme)
-
-    def get_repository(self, *a, **kw):
-        """Calls self._get_item('repository', *a, **kw)"""
-        return self._get_item('repository', *a, **kw)
-
-    def get_script(self, *a, **kw):
-        """Calls self._get_item('script', *a, **kw)"""
-        return self._get_item('script', *a, **kw)
-
-    def get_sql_script(self, *a, **kw):
-        """Calls self._get_item('sql_script', *a, **kw)"""
-        return self._get_item('sql_script', *a, **kw)
-
-    def get_manage(self, *a, **kw):
-        """Calls self._get_item('manage', *a, **kw)"""
-        return self._get_item('manage', *a, **kw)
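Template lookup above combines a collection directory with a theme name run through a per-collection filename mask. A hypothetical sketch of that resolution (paths and masks here are invented for illustration):

    import os

    MASKS = {"repository": "%s", "script": "%s.py_tmpl",
             "manage": "%s.py_tmpl", "sql_script": "%s.py_tmpl"}

    def get_template(base_dir, collection, theme=None):
        """Resolve a template path: <base>/<collection>/<mask % theme>."""
        return os.path.join(base_dir, collection, MASKS[collection] % (theme or "default"))

    print(get_template("/opt/templates", "script"))                # /opt/templates/script/default.py_tmpl
    print(get_template("/opt/templates", "repository", "pylons"))  # /opt/templates/repository/pylons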
--- a/kallithea/lib/dbmigrate/migrate/versioning/templates/manage.py_tmpl	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-#!/usr/bin/env python2
-from migrate.versioning.shell import main
-
-if __name__ == '__main__':
-    main(%(defaults)s)
--- a/kallithea/lib/dbmigrate/migrate/versioning/templates/manage/default.py_tmpl	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-#!/usr/bin/env python2
-from migrate.versioning.shell import main
-
-{{py:
-_vars = locals().copy()
-del _vars['__template_name__']
-_vars.pop('repository_name', None)
-defaults = ", ".join(["%s='%s'" % var for var in _vars.iteritems()])
-}}
-
-if __name__ == '__main__':
-    main({{ defaults }})
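These *.py_tmpl files are rendered with Tempita (imported as TempitaTemplate in repository.py above), which evaluates {{ ... }} expressions against the values passed to substitute(). A small illustrative use, assuming Tempita is installed:

    from tempita import Template

    tmpl = Template("Hello, {{ name }}! Upgrading to version {{ version + 1 }}.")
    print(tmpl.substitute(name="migrate", version=4))
    # Hello, migrate! Upgrading to version 5.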
--- a/kallithea/lib/dbmigrate/migrate/versioning/templates/manage/pylons.py_tmpl	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-import sys
-
-from sqlalchemy import engine_from_config
-from paste.deploy.loadwsgi import ConfigLoader
-
-from migrate.versioning.shell import main
-from {{ locals().pop('repository_name') }}.model import migrations
-
-
-if '-c' in sys.argv:
-    pos = sys.argv.index('-c')
-    conf_path = sys.argv[pos + 1]
-    del sys.argv[pos:pos + 2]
-else:
-    conf_path = 'development.ini'
-
-{{py:
-_vars = locals().copy()
-del _vars['__template_name__']
-defaults = ", ".join(["%s='%s'" % var for var in _vars.iteritems()])
-}}
-
-conf_dict = ConfigLoader(conf_path).parser._sections['app:main']
-
-# migrate supports passing url as an existing Engine instance (since 0.6.0)
-# usage: migrate -c path/to/config.ini COMMANDS
-if __name__ == '__main__':
-    main(url=engine_from_config(conf_dict), repository=migrations.__path__[0],{{ defaults }})
--- a/kallithea/lib/dbmigrate/migrate/versioning/templates/repository/default/README	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,4 +0,0 @@
-This is a database migration repository.
-
-More information at
-http://code.google.com/p/sqlalchemy-migrate/
--- a/kallithea/lib/dbmigrate/migrate/versioning/templates/repository/default/migrate.cfg	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,25 +0,0 @@
-[db_settings]
-# Used to identify which repository this database is versioned under.
-# You can use the name of your project.
-repository_id={{ locals().pop('repository_id') }}
-
-# The name of the database table used to track the schema version.
-# This name shouldn't already be used by your project.
-# If this is changed once a database is under version control, you'll need to
-# change the table name in each database too.
-version_table={{ locals().pop('version_table') }}
-
-# When committing a change script, Migrate will attempt to generate the
-# sql for all supported databases; normally, if one of them fails - probably
-# because you don't have that database installed - it is ignored and the
-# commit continues, perhaps ending successfully.
-# Databases in this list MUST compile successfully during a commit, or the
-# entire commit will fail. List the databases your application will actually
-# be using to ensure your updates to that database work properly.
-# This must be a list; example: ['postgres','sqlite']
-required_dbs={{ locals().pop('required_dbs') }}
-
-# When creating new change scripts, Migrate will stamp the new script with
-# a version number. By default this is latest_version + 1. You can set this
-# to 'true' to tell Migrate to use the UTC timestamp instead.
-use_timestamp_numbering='false'
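The [db_settings] keys above are read through Python's standard ConfigParser-style API (the removed cfgparse module wrapped it). A rough sketch of reading the same keys from inline sample content rather than a real migrate.cfg:

    import configparser
    import textwrap

    sample = textwrap.dedent("""
        [db_settings]
        repository_id=kallithea_db_migrations
        version_table=migrate_version
        required_dbs=[]
    """)

    parser = configparser.ConfigParser()
    parser.read_string(sample)
    print(parser.get("db_settings", "repository_id"))   # kallithea_db_migrations
    print(parser.get("db_settings", "version_table"))   # migrate_version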
--- a/kallithea/lib/dbmigrate/migrate/versioning/templates/repository/pylons/README	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,4 +0,0 @@
-This is a database migration repository.
-
-More information at
-http://code.google.com/p/sqlalchemy-migrate/
--- a/kallithea/lib/dbmigrate/migrate/versioning/templates/repository/pylons/migrate.cfg	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-[db_settings]
-# Used to identify which repository this database is versioned under.
-# You can use the name of your project.
-repository_id={{ locals().pop('repository_id') }}
-
-# The name of the database table used to track the schema version.
-# This name shouldn't already be used by your project.
-# If this is changed once a database is under version control, you'll need to
-# change the table name in each database too.
-version_table={{ locals().pop('version_table') }}
-
-# When committing a change script, Migrate will attempt to generate the
-# sql for all supported databases; normally, if one of them fails - probably
-# because you don't have that database installed - it is ignored and the
-# commit continues, perhaps ending successfully.
-# Databases in this list MUST compile successfully during a commit, or the
-# entire commit will fail. List the databases your application will actually
-# be using to ensure your updates to that database work properly.
-# This must be a list; example: ['postgres','sqlite']
-required_dbs={{ locals().pop('required_dbs') }}
--- a/kallithea/lib/dbmigrate/migrate/versioning/templates/script/default.py_tmpl	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-from sqlalchemy import *
-from migrate import *
-
-def upgrade(migrate_engine):
-    # Upgrade operations go here. Don't create your own engine; bind migrate_engine
-    # to your metadata
-    pass
-
-def downgrade(migrate_engine):
-    # Operations to reverse the above upgrade go here.
-    pass
--- a/kallithea/lib/dbmigrate/migrate/versioning/templates/script/pylons.py_tmpl	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-from sqlalchemy import *
-from migrate import *
-
-def upgrade(migrate_engine):
-    # Upgrade operations go here. Don't create your own engine; bind migrate_engine
-    # to your metadata
-    pass
-
-def downgrade(migrate_engine):
-    # Operations to reverse the above upgrade go here.
-    pass
--- a/kallithea/lib/dbmigrate/migrate/versioning/util/__init__.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,179 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-""".. currentmodule:: migrate.versioning.util"""
-
-import warnings
-import logging
-from decorator import decorator
-from pkg_resources import EntryPoint
-
-from sqlalchemy import create_engine
-from sqlalchemy.engine import Engine
-from sqlalchemy.pool import StaticPool
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.versioning.util.keyedinstance import KeyedInstance
-from kallithea.lib.dbmigrate.migrate.versioning.util.importpath import import_path
-
-
-log = logging.getLogger(__name__)
-
-def load_model(dotted_name):
-    """Import module and use module-level variable".
-
-    :param dotted_name: path to model in form of string: ``some.python.module:Class``
-
-    .. versionchanged:: 0.5.4
-
-    """
-    if isinstance(dotted_name, basestring):
-        if ':' not in dotted_name:
-            # backwards compatibility
-            warnings.warn('model should be in form of module.model:User '
-                'and not module.model.User', exceptions.MigrateDeprecationWarning)
-            dotted_name = ':'.join(dotted_name.rsplit('.', 1))
-        return EntryPoint.parse('x=%s' % dotted_name).load(False)
-    else:
-        # Assume it's already loaded.
-        return dotted_name
-
-def asbool(obj):
-    """Do everything to use object as bool"""
-    if isinstance(obj, basestring):
-        obj = obj.strip().lower()
-        if obj in ['true', 'yes', 'on', 'y', 't', '1']:
-            return True
-        elif obj in ['false', 'no', 'off', 'n', 'f', '0']:
-            return False
-        else:
-            raise ValueError("String is not true/false: %r" % obj)
-    if obj in (True, False):
-        return bool(obj)
-    else:
-        raise ValueError("String is not true/false: %r" % obj)
-
-def guess_obj_type(obj):
-    """Do everything to guess object type from string
-
-    Tries to convert to `int`, then `bool`, and returns the object unchanged if neither succeeds.
-
-    .. versionadded: 0.5.4
-    """
-
-    result = None
-
-    try:
-        result = int(obj)
-    except:
-        pass
-
-    if result is None:
-        try:
-            result = asbool(obj)
-        except:
-            pass
-
-    if result is not None:
-        return result
-    else:
-        return obj
-
-@decorator
-def catch_known_errors(f, *a, **kw):
-    """Decorator that catches known api errors
-
-    .. versionadded: 0.5.4
-    """
-
-    try:
-        return f(*a, **kw)
-    except exceptions.PathFoundError as e:
-        raise exceptions.KnownError("The path %s already exists" % e.args[0])
-
-def construct_engine(engine, **opts):
-    """.. versionadded:: 0.5.4
-
-    Constructs and returns SQLAlchemy engine.
-
-    Currently, there are 2 ways to pass create_engine options to :mod:`migrate.versioning.api` functions:
-
-    :param engine: connection string or an existing engine
-    :param engine_dict: python dictionary of options to pass to `create_engine`
-    :param engine_arg_*: keyword parameters to pass to `create_engine` (evaluated with :func:`migrate.versioning.util.guess_obj_type`)
-    :type engine_dict: dict
-    :type engine: string or Engine instance
-    :type engine_arg_*: string
-    :returns: SQLAlchemy Engine
-
-    .. note::
-
-        keyword parameters override ``engine_dict`` values.
-
-    """
-    if isinstance(engine, Engine):
-        return engine
-    elif not isinstance(engine, basestring):
-        raise ValueError("you need to pass either an existing engine or a database uri")
-
-    # get options for create_engine
-    if opts.get('engine_dict') and isinstance(opts['engine_dict'], dict):
-        kwargs = opts['engine_dict']
-    else:
-        kwargs = dict()
-
-    # DEPRECATED: handle echo the old way
-    echo = asbool(opts.get('echo', False))
-    if echo:
-        warnings.warn('echo=True parameter is deprecated, pass '
-            'engine_arg_echo=True or engine_dict={"echo": True}',
-            exceptions.MigrateDeprecationWarning)
-        kwargs['echo'] = echo
-
-    # parse keyword arguments
-    for key, value in opts.iteritems():
-        if key.startswith('engine_arg_'):
-            kwargs[key[11:]] = guess_obj_type(value)
-
-    log.debug('Constructing engine')
-    # TODO: return create_engine(engine, poolclass=StaticPool, **kwargs)
-    # seems like 0.5.x branch does not work with engine.dispose and staticpool
-    return create_engine(engine, **kwargs)
-
-@decorator
-def with_engine(f, *a, **kw):
-    """Decorator for :mod:`migrate.versioning.api` functions
-    to safely close resources after function usage.
-
-    Passes engine parameters to :func:`construct_engine` and
-    resulting parameter is available as kw['engine'].
-
-    Engine is disposed after wrapped function is executed.
-
-    .. versionadded: 0.6.0
-    """
-    url = a[0]
-    engine = construct_engine(url, **kw)
-
-    try:
-        kw['engine'] = engine
-        return f(*a, **kw)
-    finally:
-        if isinstance(engine, Engine) and engine is not url:
-            log.debug('Disposing SQLAlchemy engine %s', engine)
-            engine.dispose()
-
-
-class Memoize:
-    """Memoize(fn) - an instance which acts like fn but memoizes its arguments
-       Will only work on functions with non-mutable arguments
-
-       ActiveState Code 52201
-    """
-    def __init__(self, fn):
-        self.fn = fn
-        self.memo = {}
-
-    def __call__(self, *args):
-        if not self.memo.has_key(args):
-            self.memo[args] = self.fn(*args)
-        return self.memo[args]
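Memoize above is the old ActiveState recipe; for hashable arguments (the same restriction the original has), functools.lru_cache gives the same effect on Python 3.2+. A brief sketch:

    from functools import lru_cache

    @lru_cache(maxsize=None)
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    print(fib(30))            # 832040; repeat calls with the same argument hit the cache
    print(fib.cache_info())   # hit/miss statistics, e.g. CacheInfo(hits=28, misses=31, ...)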
--- a/kallithea/lib/dbmigrate/migrate/versioning/util/importpath.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-import os
-import sys
-
-def import_path(fullpath):
-    """ Import a file with full path specification. Allows one to
-        import from anywhere, something __import__ does not do.
-    """
-    # http://zephyrfalcon.org/weblog/arch_d7_2002_08_31.html
-    path, filename = os.path.split(fullpath)
-    filename, ext = os.path.splitext(filename)
-    sys.path.append(path)
-    module = __import__(filename)
-    reload(module) # Might be out of date during tests
-    del sys.path[-1]
-    return module
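The sys.path append plus reload() approach above is Python 2 era; on Python 3 the usual way to import a file by absolute path goes through importlib. A minimal sketch (the module name is chosen arbitrarily):

    import importlib.util

    def import_path(fullpath, name="migration_script"):
        """Import a single .py file given its filesystem path."""
        spec = importlib.util.spec_from_file_location(name, fullpath)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module

    # usage (the path is hypothetical):
    # mod = import_path("/path/to/versions/001_add_table.py")
    # mod.upgrade(engine)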
--- a/kallithea/lib/dbmigrate/migrate/versioning/util/keyedinstance.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,36 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
-class KeyedInstance(object):
-    """A class whose instances have a unique identifier of some sort
-    No two instances with the same unique ID should exist - if we try to create
-    a second instance, the first should be returned.
-    """
-
-    _instances = dict()
-
-    def __new__(cls, *p, **k):
-        instances = cls._instances
-        clskey = str(cls)
-        if clskey not in instances:
-            instances[clskey] = dict()
-        instances = instances[clskey]
-
-        key = cls._key(*p, **k)
-        if key not in instances:
-            instances[key] = super(KeyedInstance, cls).__new__(cls)
-        return instances[key]
-
-    @classmethod
-    def _key(cls, *p, **k):
-        """Given a unique identifier, return a dictionary key
-        This should be overridden by child classes, to specify which parameters
-        should determine an object's uniqueness
-        """
-        raise NotImplementedError()
-
-    @classmethod
-    def clear(cls):
-        # Allow cls.clear() as well as uniqueInstance.clear(cls)
-        if str(cls) in cls._instances:
-            del cls._instances[str(cls)]
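KeyedInstance above implements a per-key singleton: constructing with the same key yields the same object. A compact standalone sketch of the idea (class names are illustrative, not the removed ones):

    class Keyed(object):
        """Return an existing instance when the key has been seen before."""
        _instances = {}

        def __new__(cls, *p, **k):
            key = (cls, cls._key(*p, **k))
            if key not in Keyed._instances:
                Keyed._instances[key] = super(Keyed, cls).__new__(cls)
            return Keyed._instances[key]

    class Repo(Keyed):
        @classmethod
        def _key(cls, path):
            return str(path)

        def __init__(self, path):
            self.path = path

    assert Repo("/tmp/a") is Repo("/tmp/a")      # same key -> same object
    assert Repo("/tmp/a") is not Repo("/tmp/b")  # different key -> different object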
--- a/kallithea/lib/dbmigrate/migrate/versioning/version.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,238 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
-import os
-import re
-import shutil
-import logging
-
-from kallithea.lib.dbmigrate.migrate import exceptions
-from kallithea.lib.dbmigrate.migrate.versioning import pathed, script
-from datetime import datetime
-
-
-log = logging.getLogger(__name__)
-
-class VerNum(object):
-    """A version number that behaves like a string and int at the same time"""
-
-    _instances = dict()
-
-    def __new__(cls, value):
-        val = str(value)
-        if val not in cls._instances:
-            cls._instances[val] = super(VerNum, cls).__new__(cls)
-        ret = cls._instances[val]
-        return ret
-
-    def __init__(self,value):
-        self.value = str(int(value))
-        if self < 0:
-            raise ValueError("Version number cannot be negative")
-
-    def __add__(self, value):
-        ret = int(self) + int(value)
-        return VerNum(ret)
-
-    def __sub__(self, value):
-        return self + (int(value) * -1)
-
-    def __cmp__(self, value):
-        return int(self) - int(value)
-
-    def __repr__(self):
-        return "<VerNum(%s)>" % self.value
-
-    def __str__(self):
-        return str(self.value)
-
-    def __int__(self):
-        return int(self.value)
-
-
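A brief sketch of the VerNum semantics defined above (Python 2, assuming the class as shown): instances are interned per string value, and arithmetic and comparison go through the integer value:

    v = VerNum(3)
    assert v + 1 == VerNum(4)        # __add__ returns another VerNum
    assert v - 1 == 2                # __cmp__ compares by int value
    assert VerNum(3) is v            # same value -> same interned instance
    assert str(v) == '3' and int(v) == 3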
-class Collection(pathed.Pathed):
-    """A collection of versioning scripts in a repository"""
-
-    FILENAME_WITH_VERSION = re.compile(r'^(\d{3,}).*')
-
-    def __init__(self, path):
-        """Collect current version scripts in repository
-        and store them in self.versions
-        """
-        super(Collection, self).__init__(path)
-
-        # Create temporary list of files, allowing skipped version numbers.
-        files = os.listdir(path)
-        if '1' in files:
-            # deprecation
-            raise Exception('It looks like you have a repository in the old '
-                'format (with directories for each version). '
-                'Please convert repository before proceeding.')
-
-        tempVersions = dict()
-        for filename in files:
-            match = self.FILENAME_WITH_VERSION.match(filename)
-            if match:
-                num = int(match.group(1))
-                tempVersions.setdefault(num, []).append(filename)
-            else:
-                pass  # Must be a helper file or something, let's ignore it.
-
-        # Create the versions member where the keys
-        # are VerNum's and the values are Version's.
-        self.versions = dict()
-        for num, files in tempVersions.items():
-            self.versions[VerNum(num)] = Version(num, path, files)
-
-    @property
-    def latest(self):
-        """:returns: Latest version in Collection"""
-        return max([VerNum(0)] + self.versions.keys())
-
-    def _next_ver_num(self, use_timestamp_numbering):
-        if use_timestamp_numbering:
-            return VerNum(int(datetime.utcnow().strftime('%Y%m%d%H%M%S')))
-        else:
-            return self.latest + 1
-
-    def create_new_python_version(self, description, **k):
-        """Create Python files for new version"""
-        ver = self._next_ver_num(k.pop('use_timestamp_numbering', False))
-        extra = str_to_filename(description)
-
-        if extra:
-            if extra == '_':
-                extra = ''
-            elif not extra.startswith('_'):
-                extra = '_%s' % extra
-
-        filename = '%03d%s.py' % (ver, extra)
-        filepath = self._version_path(filename)
-
-        script.PythonScript.create(filepath, **k)
-        self.versions[ver] = Version(ver, self.path, [filename])
-
-    def create_new_sql_version(self, database, description, **k):
-        """Create SQL files for new version"""
-        ver = self._next_ver_num(k.pop('use_timestamp_numbering', False))
-        self.versions[ver] = Version(ver, self.path, [])
-
-        extra = str_to_filename(description)
-
-        if extra:
-            if extra == '_':
-                extra = ''
-            elif not extra.startswith('_'):
-                extra = '_%s' % extra
-
-        # Create new files.
-        for op in ('upgrade', 'downgrade'):
-            filename = '%03d%s_%s_%s.sql' % (ver, extra, database, op)
-            filepath = self._version_path(filename)
-            script.SqlScript.create(filepath, **k)
-            self.versions[ver].add_script(filepath)
-
-    def version(self, vernum=None):
-        """Returns latest Version if vernum is not given.
-        Otherwise, returns wanted version"""
-        if vernum is None:
-            vernum = self.latest
-        return self.versions[VerNum(vernum)]
-
-    @classmethod
-    def clear(cls):
-        super(Collection, cls).clear()
-
-    def _version_path(self, ver):
-        """Returns path of file in versions repository"""
-        return os.path.join(self.path, str(ver))
-
-
-class Version(object):
-    """A single version in a collection
-    :param vernum: Version Number
-    :param path: Path to script files
-    :param filelist: List of scripts
-    :type vernum: int, VerNum
-    :type path: string
-    :type filelist: list
-    """
-
-    def __init__(self, vernum, path, filelist):
-        self.version = VerNum(vernum)
-
-        # Collect scripts in this folder
-        self.sql = dict()
-        self.python = None
-
-        for script in filelist:
-            self.add_script(os.path.join(path, script))
-
-    def script(self, database=None, operation=None):
-        """Returns SQL or Python Script"""
-        for db in (database, 'default'):
-            # Try to return a .sql script first
-            try:
-                return self.sql[db][operation]
-            except KeyError:
-                continue  # No .sql script exists
-
-        # TODO: maybe add force Python parameter?
-        ret = self.python
-
-        assert ret is not None, \
-            "There is no script for %d version" % self.version
-        return ret
-
-    def add_script(self, path):
-        """Add script to Collection/Version"""
-        if path.endswith(Extensions.py):
-            self._add_script_py(path)
-        elif path.endswith(Extensions.sql):
-            self._add_script_sql(path)
-
-    SQL_FILENAME = re.compile(r'^.*\.sql')
-
-    def _add_script_sql(self, path):
-        basename = os.path.basename(path)
-        match = self.SQL_FILENAME.match(basename)
-
-        if match:
-            basename = basename.replace('.sql', '')
-            parts = basename.split('_')
-            if len(parts) < 3:
-                raise exceptions.ScriptError(
-                    "Invalid SQL script name %s " % basename + \
-                    "(needs to be ###_description_database_operation.sql)")
-            version = parts[0]
-            op = parts[-1]
-            dbms = parts[-2]
-        else:
-            raise exceptions.ScriptError(
-                "Invalid SQL script name %s " % basename + \
-                "(needs to be ###_description_database_operation.sql)")
-
-        # File the script into a dictionary
-        self.sql.setdefault(dbms, {})[op] = script.SqlScript(path)
-
-    def _add_script_py(self, path):
-        if self.python is not None:
-            raise exceptions.ScriptError('You can only have one Python script '
-                'per version, but you have: %s and %s' % (self.python, path))
-        self.python = script.PythonScript(path)
-
-
-class Extensions:
-    """A namespace for file extensions"""
-    py = 'py'
-    sql = 'sql'
-
-def str_to_filename(s):
-    """Replaces spaces, (double and single) quotes
-    and double underscores to underscores
-    """
-
-    s = s.replace(' ', '_').replace('"', '_').replace("'", '_').replace(".", "_")
-    while '__' in s:
-        s = s.replace('__', '_')
-    return s
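Taken together, str_to_filename() and the '%03d%s.py' pattern used by Collection.create_new_python_version() produce version script names like the following; the description string and version number are invented for illustration:

    extra = str_to_filename('add "user" table v2.0')     # -> 'add_user_table_v2_0'
    filename = '%03d%s.py' % (2, '_' + extra)             # create_new_python_version() prefixes '_'
    assert filename == '002_add_user_table_v2_0.py'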
--- a/kallithea/lib/dbmigrate/schema/__init__.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Schemas for migrations
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Nov 1, 2011
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
--- a/kallithea/lib/dbmigrate/schema/db_1_1_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,93 +0,0 @@
-from sqlalchemy import *
-from sqlalchemy.orm import relation, class_mapper
-from sqlalchemy.orm.session import Session
-from kallithea.model.meta import Base
-
-class BaseModel(object):
-    """Base Model for all classess
-
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session.query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def getAll(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session.delete(obj)
-        Session.commit()
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'),
-                      UniqueConstraint('user_id', 'follows_user_id')
-                      , {'useexisting':True})
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey(u'repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey(u'users.user_id'), nullable=True, unique=None, default=None)
-
-    user = relation('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relation('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relation('Repository')
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (UniqueConstraint('cache_key'), {'useexisting':True})
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    cache_key = Column("cache_key", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_args = Column("cache_args", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-
-    def __init__(self, cache_key, cache_args=''):
-        self.cache_key = cache_key
-        self.cache_args = cache_args
-        self.cache_active = False
-
-    def __repr__(self):
-        return "<CacheInvalidation('%s:%s')>" % (self.cache_id, self.cache_key)
--- a/kallithea/lib/dbmigrate/schema/db_1_2_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1097 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_1_2_0
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea <=1.2.X
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import logging
-import datetime
-import traceback
-from datetime import date
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    generate_api_key, safe_unicode
-from kallithea.lib.exceptions import UserGroupsAssignedException
-from kallithea.lib.compat import json
-
-from kallithea.model.meta import Base, Session
-from kallithea.lib.caching_query import FromCache
-
-from kallithea import DB_PREFIX
-
-log = logging.getLogger(__name__)
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-class ModelSerializer(json.JSONEncoder):
-    """
-    Simple Serializer for JSON,
-
-    usage::
-
-        to make object customized for serialization implement a __json__
-        method that will return a dict for serialization into json
-
-    example::
-
-        class Task(object):
-
-            def __init__(self, name, value):
-                self.name = name
-                self.value = value
-
-            def __json__(self):
-                return dict(name=self.name,
-                            value=self.value)
-
-    """
-
-    def default(self, obj):
-
-        if hasattr(obj, '__json__'):
-            return obj.__json__()
-        else:
-            return json.JSONEncoder.default(self, obj)
-
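The docstring above sketches a Task class; making the round trip explicit (the stdlib json module is used here in place of kallithea.lib.compat.json, purely for illustration):

    import json

    class Task(object):
        def __init__(self, name, value):
            self.name, self.value = name, value

        def __json__(self):
            return dict(name=self.name, value=self.value)

    # ModelSerializer.default() falls back to __json__() for unknown objects:
    json.dumps(Task('rescan', 1), cls=ModelSerializer)   # -> JSON string with "name" and "value"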
-class BaseModel(object):
-    """Base Model for all classes
-
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session.query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def getAll(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session.delete(obj)
-        Session.commit()
-
-
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (UniqueConstraint('app_settings_name'), {'extend_existing':True})
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, k='', v=''):
-        self.app_settings_name = k
-        self.app_settings_value = v
-
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        if v == 'ldap_active':
-            v = str2bool(v)
-        return v
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    def __repr__(self):
-        return "<%s('%s:%s')>" % (self.__class__.__name__,
-                                  self.app_settings_name, self.app_settings_value)
-
-
-    @classmethod
-    def get_by_name(cls, ldap_key):
-        return cls.query() \
-            .filter(cls.app_settings_name == ldap_key).scalar()
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_ldap_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('ldap_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name:row.app_settings_value})
-
-        return fd
-
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (UniqueConstraint('ui_key'), {'extend_existing':True})
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'pretxnchangegroup.push_logger'
-    HOOK_PULL = 'preoutgoing.pull_logger'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key)
-
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE,
-                                    cls.HOOK_REPO_SIZE,
-                                    cls.HOOK_PUSH, cls.HOOK_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE,
-                                    cls.HOOK_REPO_SIZE,
-                                    cls.HOOK_PUSH, cls.HOOK_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key).scalar() or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session.add(new_ui)
-        Session.commit()
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'extend_existing':True})
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=None)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    email = Column("email", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    user_log = relationship('UserLog', cascade='all')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.name, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.name, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    def __repr__(self):
-        try:
-            return "<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                             self.user_id, self.username)
-        except:
-            return self.__class__.__name__
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False):
-        if case_insensitive:
-            return Session.query(cls).filter(cls.username.ilike(username)).scalar()
-        else:
-            return Session.query(cls).filter(cls.username == username).scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key):
-        return cls.query().filter(cls.api_key == api_key).one()
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-
-        self.last_login = datetime.datetime.now()
-        Session.add(self)
-        Session.commit()
-        log.debug('updated user %s lastlogin', self.username)
-
-    @classmethod
-    def create(cls, form_data):
-        from kallithea.lib.auth import get_crypt_password
-
-        try:
-            new_user = cls()
-            for k, v in form_data.items():
-                if k == 'password':
-                    v = get_crypt_password(v)
-                setattr(new_user, k, v)
-
-            new_user.api_key = generate_api_key()
-            Session.add(new_user)
-            Session.commit()
-            return new_user
-        except:
-            log.error(traceback.format_exc())
-            Session.rollback()
-            raise
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = {'extend_existing':True}
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-    repository_name = Column("repository_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(length=1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    @property
-    def action_as_day(self):
-        return date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = {'extend_existing':True}
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-
-    def __repr__(self):
-        return '<userGroup(%s)>' % (self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.users_group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.users_group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache("sql_cache_short",
-                                          "get_user_%s" % group_name))
-        return gr.scalar()
-
-
-    @classmethod
-    def get(cls, users_group_id, cache=False):
-        users_group = cls.query()
-        if cache:
-            users_group = users_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % users_group_id))
-        return users_group.get(users_group_id)
-
-    @classmethod
-    def create(cls, form_data):
-        try:
-            new_users_group = cls()
-            for k, v in form_data.items():
-                setattr(new_users_group, k, v)
-
-            Session.add(new_users_group)
-            Session.commit()
-            return new_users_group
-        except:
-            log.error(traceback.format_exc())
-            Session.rollback()
-            raise
-
-    @classmethod
-    def update(cls, users_group_id, form_data):
-
-        try:
-            users_group = cls.get(users_group_id, cache=False)
-
-            for k, v in form_data.items():
-                if k == 'users_group_members':
-                    users_group.members = []
-                    Session.flush()
-                    members_list = []
-                    if v:
-                        v = [v] if isinstance(v, basestring) else v
-                        for u_id in set(v):
-                            member = UserGroupMember(users_group_id, u_id)
-                            members_list.append(member)
-                    setattr(users_group, 'members', members_list)
-                setattr(users_group, k, v)
-
-            Session.add(users_group)
-            Session.commit()
-        except:
-            log.error(traceback.format_exc())
-            Session.rollback()
-            raise
-
-    @classmethod
-    def delete(cls, users_group_id):
-        try:
-
-            # check if this group is not assigned to repo
-            assigned_groups = UserGroupRepoToPerm.query() \
-                .filter(UserGroupRepoToPerm.users_group_id ==
-                        users_group_id).all()
-
-            if assigned_groups:
-                raise UserGroupsAssignedException('RepoGroup assigned to %s' %
-                                                   assigned_groups)
-
-            users_group = cls.get(users_group_id, cache=False)
-            Session.delete(users_group)
-            Session.commit()
-        except:
-            log.error(traceback.format_exc())
-            Session.rollback()
-            raise
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = {'extend_existing':True}
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-    @staticmethod
-    def add_user_to_group(group, user):
-        ugm = UserGroupMember()
-        ugm.users_group = group
-        ugm.user = user
-        Session.add(ugm)
-        Session.commit()
-        return ugm
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (UniqueConstraint('repo_name'), {'extend_existing':True},)
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default='hg')
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')
-
-    logs = relationship('UserLog', cascade='all')
-
-    def __repr__(self):
-        return "<%s('%s:%s')>" % (self.__class__.__name__,
-                                  self.repo_id, self.repo_name)
-
-    @classmethod
-    def url_sep(cls):
-        return '/'
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session.query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.one()
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session.query(Ui).filter(Ui.ui_key ==
-                                              cls.url_sep())
-        q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session.query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*p)
-
-    def get_new_name(self, repo_name):
-        """
-        returns new full repository name based on assigned group and new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates an db based ui object for this repository
-        """
-        from mercurial import ui
-        from mercurial import config
-        baseui = ui.ui()
-
-        #clean the baseui object
-        baseui._ocfg = config.config()
-        baseui._ucfg = config.config()
-        baseui._tcfg = config.config()
-
-
-        ret = Ui.query() \
-            .options(FromCache("sql_cache_short", "repository_repo_ui")).all()
-
-        hg_ui = ret
-        for ui_ in hg_ui:
-            if ui_.ui_active:
-                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
-                          ui_.ui_key, ui_.ui_value)
-                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
-
-        return baseui
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    @property
-    def invalidate(self):
-        return CacheInvalidation.invalidate(self.repo_name)
-
-    def set_invalidate(self):
-        """
-        set a cache for invalidation for this instance
-        """
-        CacheInvalidation.set_invalidate(self.repo_name)
-
-    @LazyProperty
-    def scm_instance(self):
-        return self.__get_instance()
-
-    @property
-    def scm_instance_cached(self):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-
-        inv = self.invalidate
-        if inv is not None:
-            region_invalidate(_c, None, rn)
-            # update our cache
-            CacheInvalidation.set_valid(inv.cache_key)
-        return _c(rn)
-
-    def __get_instance(self):
-
-        repo_full_path = self.repo_full_path
-
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository', alias)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in db and not in '
-                      'filesystem run rescan repositories with '
-                      '"destroy old data " option from admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-
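The path handling above boils down to a simple join; a self-contained sketch of what repo_full_path computes, with an invented base path and repository name:

    import os

    base_path = '/srv/repositories'        # hypothetical result of Repository.base_path()
    repo_name = 'backend/tools/myrepo'     # stored with '/' as Repository.url_sep()
    repo_full_path = os.path.join(base_path, *repo_name.split('/'))
    # On a POSIX system: '/srv/repositories/backend/tools/myrepo'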
-class Group(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (UniqueConstraint('group_name', 'group_parent_id'),
-                      CheckConstraint('group_id != group_parent_id'), {'extend_existing':True},)
-    __mapper_args__ = {'order_by':'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    parent_group = relationship('Group', remote_side=group_id)
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __repr__(self):
-        return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                  self.group_name)
-
-    @classmethod
-    def groups_choices(cls):
-        from webhelpers.html import literal as _literal
-        repo_groups = [('', '')]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in cls.query().all()])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return '/'
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache("sql_cache_short",
-                                          "get_group_%s" % group_name))
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return Group.query().filter(Group.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(Group.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(Group.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query().filter(Repository.group == self)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return Group.url_sep().join(path_prefix + [group_name])
-
-
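Group.get_new_name() (like Repository.get_new_name() above) just re-joins path segments; a self-contained sketch with invented names:

    path_prefix = ['projects', 'backend']    # hypothetical full_path_splitted of the parent group
    new_full_name = '/'.join(path_prefix + ['api-tools'])
    assert new_full_name == 'projects/backend/api-tools'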
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = {'extend_existing':True}
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __repr__(self):
-        return "<%s('%s:%s')>" % (self.__class__.__name__,
-                                  self.permission_id, self.permission_name)
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (UniqueConstraint('user_id', 'repository_id'), {'extend_existing':True})
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (UniqueConstraint('user_id', 'permission_id'), {'extend_existing':True})
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission')
-
-    @classmethod
-    def has_perm(cls, user_id, perm):
-        if not isinstance(perm, Permission):
-            raise Exception('perm needs to be an instance of Permission class')
-
-        return cls.query().filter(cls.user_id == user_id) \
-            .filter(cls.permission == perm).scalar() is not None
-
-    @classmethod
-    def grant_perm(cls, user_id, perm):
-        if not isinstance(perm, Permission):
-            raise Exception('perm needs to be an instance of Permission class')
-
-        new = cls()
-        new.user_id = user_id
-        new.permission = perm
-        try:
-            Session.add(new)
-            Session.commit()
-        except:
-            Session.rollback()
-
-
-    @classmethod
-    def revoke_perm(cls, user_id, perm):
-        if not isinstance(perm, Permission):
-            raise Exception('perm needs to be an instance of Permission class')
-
-        try:
-            cls.query().filter(cls.user_id == user_id) \
-                .filter(cls.permission == perm).delete()
-            Session.commit()
-        except:
-            Session.rollback()
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), {'extend_existing':True})
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    def __repr__(self):
-        return '<userGroup:%s => %s >' % (self.users_group, self.repository)
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = {'extend_existing':True}
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-    @classmethod
-    def has_perm(cls, users_group_id, perm):
-        if not isinstance(perm, Permission):
-            raise Exception('perm needs to be an instance of Permission class')
-
-        return cls.query().filter(cls.users_group_id ==
-                                         users_group_id) \
-                                         .filter(cls.permission == perm) \
-                                         .scalar() is not None
-
-    @classmethod
-    def grant_perm(cls, users_group_id, perm):
-        if not isinstance(perm, Permission):
-            raise Exception('perm needs to be an instance of Permission class')
-
-        new = cls()
-        new.users_group_id = users_group_id
-        new.permission = perm
-        try:
-            Session.add(new)
-            Session.commit()
-        except:
-            Session.rollback()
-
-
-    @classmethod
-    def revoke_perm(cls, users_group_id, perm):
-        if not isinstance(perm, Permission):
-            raise Exception('perm needs to be an instance of Permission class')
-
-        try:
-            cls.query().filter(cls.users_group_id == users_group_id) \
-                .filter(cls.permission == perm).delete()
-            Session.commit()
-        except:
-            Session.rollback()
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'group_to_perm'
-    __table_args__ = (UniqueConstraint('group_id', 'permission_id'), {'extend_existing':True})
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing':True})
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'),
-                      UniqueConstraint('user_id', 'follows_user_id')
-                      , {'extend_existing':True})
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing':True})
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-
-    def __init__(self, cache_key, cache_args=''):
-        self.cache_key = cache_key
-        self.cache_args = cache_args
-        self.cache_active = False
-
-    def __repr__(self):
-        return "<%s('%s:%s')>" % (self.__class__.__name__,
-                                  self.cache_id, self.cache_key)
-
-    @classmethod
-    def invalidate(cls, key):
-        """
-        Returns Invalidation object if this given key should be invalidated
-        None otherwise. `cache_active = False` means that this cache
-        state is not valid and needs to be invalidated
-
-        :param key:
-        """
-        return cls.query() \
-                .filter(CacheInvalidation.cache_key == key) \
-                .filter(CacheInvalidation.cache_active == False) \
-                .scalar()
-
-    @classmethod
-    def set_invalidate(cls, key):
-        """
-        Mark this Cache key for invalidation
-
-        :param key:
-        """
-
-        log.debug('marking %s for invalidation', key)
-        inv_obj = Session.query(cls) \
-            .filter(cls.cache_key == key).scalar()
-        if inv_obj:
-            inv_obj.cache_active = False
-        else:
-            log.debug('cache key not found in invalidation db -> creating one')
-            inv_obj = CacheInvalidation(key)
-
-        try:
-            Session.add(inv_obj)
-            Session.commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session.rollback()
-
-    @classmethod
-    def set_valid(cls, key):
-        """
-        Mark this cache key as active and currently cached
-
-        :param key:
-        """
-        inv_obj = Session.query(CacheInvalidation) \
-            .filter(CacheInvalidation.cache_key == key).scalar()
-        inv_obj.cache_active = True
-        Session.add(inv_obj)
-        Session.commit()
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = {'extend_existing':True}
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
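A hedged outline of the cache-invalidation round trip that Repository.scm_instance_cached and CacheInvalidation above implement together; it assumes a configured Session and is not runnable standalone:

    repo_name = 'myrepo'                              # hypothetical cache key
    CacheInvalidation.set_invalidate(repo_name)       # writer marks the cached SCM instance stale

    inv = CacheInvalidation.invalidate(repo_name)     # reader: row with cache_active == False, or None
    if inv is not None:
        # scm_instance_cached drops the beaker 'long_term' region entry,
        # rebuilds the instance, then marks the key valid again:
        CacheInvalidation.set_valid(inv.cache_key)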
--- a/kallithea/lib/dbmigrate/schema/db_1_3_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1322 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_1_3_0
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea <=1.3.X
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-
-"""
-
-
-import os
-import logging
-import datetime
-import traceback
-from collections import defaultdict
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-import hashlib
-
-from kallithea import DB_PREFIX
-
-log = logging.getLogger(__name__)
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class ModelSerializer(json.JSONEncoder):
-    """
-    Simple serializer for JSON.
-
-    usage::
-
-        To make an object customized for serialization, implement a __json__
-        method that returns a dict for serialization into JSON.
-
-    example::
-
-        class Task(object):
-
-            def __init__(self, name, value):
-                self.name = name
-                self.value = value
-
-            def __json__(self):
-                return dict(name=self.name,
-                            value=self.value)
-
-    """
-
-    def default(self, obj):
-
-        if hasattr(obj, '__json__'):
-            return obj.__json__()
-        else:
-            return json.JSONEncoder.default(self, obj)
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        for k, val in getattr(self, '__json__', lambda: {})().iteritems():
-            d[k] = val
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session.query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def getAll(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session.delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            return safe_str(self.__unicode__())
-        return '<DB:%s>' % (self.__class__.__name__)
-
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, k='', v=''):
-        self.app_settings_name = k
-        self.app_settings_value = v
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        if self.app_settings_name == 'ldap_active':
-            v = str2bool(v)
-        return v
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value
-        )
-
-    @classmethod
-    def get_by_name(cls, ldap_key):
-        return cls.query() \
-            .filter(cls.app_settings_name == ldap_key).scalar()
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_ldap_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('ldap_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name:row.app_settings_value})
-
-        return fd
-
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'pretxnchangegroup.push_logger'
-    HOOK_PULL = 'preoutgoing.pull_logger'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key)
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE,
-                                    cls.HOOK_REPO_SIZE,
-                                    cls.HOOK_PUSH, cls.HOOK_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE,
-                                    cls.HOOK_REPO_SIZE,
-                                    cls.HOOK_PUSH, cls.HOOK_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key).scalar() or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session.add(new_ui)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=None)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    user_log = relationship('UserLog', cascade='all')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications created by this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.name, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.name, self.lastname)
-                if (self.name and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.name, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.name, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                     self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        return q.scalar()
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % email))
-        return q.scalar()
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session.add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    def __json__(self):
-        return dict(
-            user_id=self.user_id,
-            first_name=self.name,
-            last_name=self.lastname,
-            email=self.email,
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(length=1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-
-    def __unicode__(self):
-        return u'<userGroup(%s)>' % (self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, users_group_id, cache=False):
-        users_group = cls.query()
-        if cache:
-            users_group = users_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % users_group_id))
-        return users_group.get(users_group_id)
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default='hg')
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')
-
-    logs = relationship('UserLog')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__,self.repo_id,
-                                   self.repo_name)
-
-    @classmethod
-    def url_sep(cls):
-        return '/'
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session.query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session.query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session.query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*p)
-
-    def get_new_name(self, repo_name):
-        """
-        returns new full repository name based on assigned group and new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from mercurial import ui
-        from mercurial import config
-        baseui = ui.ui()
-
-        #clean the baseui object
-        baseui._ocfg = config.config()
-        baseui._ucfg = config.config()
-        baseui._tcfg = config.config()
-
-        ret = Ui.query() \
-            .options(FromCache("sql_cache_short", "repository_repo_ui")).all()
-
-        hg_ui = ret
-        for ui_ in hg_ui:
-            if ui_.ui_active:
-                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
-                          ui_.ui_key, ui_.ui_value)
-                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
-
-        return baseui
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    @property
-    def invalidate(self):
-        return CacheInvalidation.invalidate(self.repo_name)
-
-    def set_invalidate(self):
-        """
-        Mark this instance's cache for invalidation
-        """
-        CacheInvalidation.set_invalidate(self.repo_name)
-
-    @LazyProperty
-    def scm_instance(self):
-        return self.__get_instance()
-
-    @property
-    def scm_instance_cached(self):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-        log.debug('Getting cached instance of repo')
-        inv = self.invalidate
-        if inv is not None:
-            region_invalidate(_c, None, rn)
-            # update our cache
-            CacheInvalidation.set_valid(inv.cache_key)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository', alias)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the database but not on '
-                      'the filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                  self.group_name)
-
-    @classmethod
-    def groups_choices(cls):
-        from webhelpers.html import literal as _literal
-        repo_groups = [('', '')]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in cls.query().all()])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return '/'
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session.query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session.query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session.add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<user:%s => %s >' % (self.user, self.repository)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session.add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine':'InnoDB',
-          'mysql_charset': 'utf8'}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, cache_args=''):
-        self.cache_key = cache_key
-        self.cache_args = cache_args
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__,
-                                  self.cache_id, self.cache_key)
-    @classmethod
-    def clear_cache(cls):
-        cls.query().delete()
-
-    @classmethod
-    def _get_key(cls, key):
-        """
-        Wrapper for generating a key, together with a prefix
-
-        :param key:
-        """
-        import kallithea
-        prefix = ''
-        iid = kallithea.CONFIG.get('instance_id')
-        if iid:
-            prefix = iid
-        return "%s%s" % (prefix, key), prefix, key.rstrip('_README')
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.cache_key == key).scalar()
-
-    @classmethod
-    def _get_or_create_key(cls, key, prefix, org_key):
-        inv_obj = Session.query(cls).filter(cls.cache_key == key).scalar()
-        if not inv_obj:
-            try:
-                inv_obj = CacheInvalidation(key, org_key)
-                Session.add(inv_obj)
-                Session.commit()
-            except Exception:
-                log.error(traceback.format_exc())
-                Session.rollback()
-        return inv_obj
-
-    @classmethod
-    def invalidate(cls, key):
-        """
-        Returns the CacheInvalidation object if the given key should be
-        invalidated, None otherwise. `cache_active = False` means that this
-        cache state is not valid and needs to be invalidated
-
-        :param key:
-        """
-
-        key, _prefix, _org_key = cls._get_key(key)
-        inv = cls._get_or_create_key(key, _prefix, _org_key)
-
-        if inv and inv.cache_active is False:
-            return inv
-
-    @classmethod
-    def set_invalidate(cls, key):
-        """
-        Mark this Cache key for invalidation
-
-        :param key:
-        """
-
-        key, _prefix, _org_key = cls._get_key(key)
-        inv_objs = Session.query(cls).filter(cls.cache_args == _org_key).all()
-        log.debug('marking %s key[s] %s for invalidation', len(inv_objs),
-                                                             _org_key)
-        try:
-            for inv_obj in inv_objs:
-                if inv_obj:
-                    inv_obj.cache_active = False
-
-                Session.add(inv_obj)
-            Session.commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session.rollback()
-
-    @classmethod
-    def set_valid(cls, key):
-        """
-        Mark this cache key as active and currently cached
-
-        :param key:
-        """
-        inv_obj = cls.get_by_key(key)
-        inv_obj.cache_active = True
-        Session.add(inv_obj)
-        Session.commit()
-
-
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=False)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', Unicode(25000), nullable=False)
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-
-    @classmethod
-    def get_users(cls, revision):
-        """
-        Returns the users associated with this ChangesetComment, i.e. those
-        who actually commented
-
-        :param cls:
-        :param revision:
-        """
-        return Session.query(User) \
-                .filter(cls.revision == revision) \
-                .join(ChangesetComment.author).all()
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', Unicode(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session.add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session.add(self)
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine':'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
-
-## this is a migration from 1_4_0, but it is included here to overcome a problem
-## with attaching a FK to this from 1_3_0
-
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
--- a/kallithea/lib/dbmigrate/schema/db_1_4_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1814 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_1_4_0
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea <=1.4.X
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-
-import os
-import logging
-import datetime
-import traceback
-import hashlib
-import time
-from collections import defaultdict
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_suffix
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-from kallithea import DB_PREFIX
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            return safe_str(self.__unicode__())
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, k='', v=''):
-        self.app_settings_name = k
-        self.app_settings_value = v
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        if self.app_settings_name == 'ldap_active':
-            v = str2bool(v)
-        return v
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key)
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_ldap_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('ldap_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
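# Illustrative sketch (not part of the removed file): the hybrid property above
# always coerces values to unicode on write and special-cases 'ldap_active' on
# read via str2bool(). Hedged example, assuming a configured Session:

sett = Setting.get_by_name_or_create('ldap_active')
sett.app_settings_value = 'true'                  # stored as unicode by the setter
Session().add(sett)
Session().commit()
Setting.get_by_name('ldap_active').app_settings_value   # -> True (str2bool on read)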
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
-                                   self.ui_value)
-
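# Illustrative sketch (not part of the removed file): create_or_update_hook()
# above upserts a row in the 'hooks' ui section but does not commit by itself.
# The hook name and callable path below are made-up examples:

Ui.create_or_update_hook('changegroup.mail_notify', 'python:myhooks.mail_notify')
Session().commit()
Ui.get_custom_hooks()   # includes the hook above, since it is not in the builtin list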
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    DEFAULT_USER = 'default'
-    DEFAULT_PERMISSIONS = [
-        'hg.register.manual_activate', 'hg.create.repository',
-        'hg.fork.repository', 'repository.read', 'group.read'
-    ]
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-
-    user_log = relationship('UserLog', cascade='all')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications assigned to this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    #extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                     self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        return q.scalar()
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            active=user.active,
-            admin=user.admin,
-            ldap_dn=user.ldap_dn,
-            last_login=user.last_login,
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
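# Illustrative sketch (not part of the removed file): get_by_email() above first
# checks the primary `email` column and then falls back to the extra addresses
# in UserEmailMap (defined just below). The address is a made-up example:

user = User.get_by_email('Someone@Example.COM', case_insensitive=True)
if user is not None:
    print user.username_and_name    # e.g. "someone (First Last)"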
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check that this email is not already a user's main email
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is already present in the users table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-
-    def __unicode__(self):
-        return u'<userGroup(%s)>' % (self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, users_group_id, cache=False):
-        users_group = cls.query()
-        if cache:
-            users_group = users_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % users_group_id))
-        return users_group.get(users_group_id)
-
-    def get_api_data(self):
-        users_group = self
-
-        data = dict(
-            users_group_id=users_group.users_group_id,
-            group_name=users_group.users_group_name,
-            active=users_group.users_group_active,
-        )
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   self.repo_name)
-
-    @hybrid_property
-    def locked(self):
-        # should always return [user_id, time_locked]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it
-        actually exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*p)
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        returns the new full repository name based on the assigned group and the new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def inject_ui(cls, repo, extras={}):
-        from kallithea.lib.vcs.backends.hg import MercurialRepository
-        from kallithea.lib.vcs.backends.git import GitRepository
-        required = (MercurialRepository, GitRepository)
-        if not isinstance(repo, required):
-            raise Exception('repo must be instance of %s' % ', '.join(c.__name__ for c in required))
-
-        # inject ui extra param to log this action via push logger
-        for k, v in extras.items():
-            repo._repo.ui.setconfig('extras', k, v)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if the given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None
-        )
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id):
-        repo.locked = [user_id, time.time()]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns landing changeset, or if that doesn't exist returns the tip
-        """
-        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
-        return cs
-
-    def update_last_change(self, last_change=None):
-        if last_change is None:
-            last_change = datetime.datetime.now()
-        if self.updated_on is None or self.updated_on != last_change:
-            log.debug('updated repo %s with new date %s', self, last_change)
-            self.updated_on = last_change
-            Session().add(self)
-            Session().commit()
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        :type revisions: list
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have an open pull request without a status?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    @property
-    def invalidate(self):
-        return CacheInvalidation.invalidate(self.repo_name)
-
-    def set_invalidate(self):
-        """
-        mark the cache of this instance for invalidation
-        """
-        CacheInvalidation.set_invalidate(repo_name=self.repo_name)
-
-    @LazyProperty
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, cache_map=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-        log.debug('Getting cached instance of repo')
-
-        if cache_map:
-            # get using prefilled cache_map
-            invalidate_repo = cache_map[self.repo_name]
-            if invalidate_repo:
-                invalidate_repo = (None if invalidate_repo.cache_active
-                                   else invalidate_repo)
-        else:
-            # get from invalidate
-            invalidate_repo = self.invalidate
-
-        if invalidate_repo is not None:
-            region_invalidate(_c, None, rn)
-            # update our cache
-            CacheInvalidation.set_valid(invalidate_repo.cache_key)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository', alias)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository exists in the database but not on '
-                      'the filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
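# Illustrative sketch (not part of the removed file): the `locked` hybrid
# property above serializes [user_id, timestamp] into the `locked` column as
# 'user_id:timestamp', and lock()/unlock() commit immediately. Hedged example,
# assuming `repo` is a Repository instance and `user` a User:

Repository.lock(repo, user.user_id)     # stores e.g. '3:1463570047.0'
locked_by, locked_since = repo.locked   # -> (int user id, timestamp string)
Repository.unlock(repo)                 # clears the lock again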
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                  self.group_name)
-
-    @classmethod
-    def groups_choices(cls, check_perms=False):
-        from webhelpers.html import literal as _literal
-        from kallithea.model.scm import ScmModel
-        groups = cls.query().all()
-        if check_perms:
-            # filter to the groups the user has access to; this is done
-            # magically inside ScmModel based on the current user
-            groups = ScmModel().get_repos_groups(groups)
-        repo_groups = [('', '')]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s levels',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively return all groups, with the repositories in those groups
-        """
-        all_ = []
-
-        def _get_members(root_gr):
-            for r in root_gr.repositories:
-                all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
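# Illustrative sketch (not part of the removed file): group names are stored as
# URL_SEP-joined paths, so get_new_name() above rebuilds the full path from the
# parent group. Hedged example, assuming URL_SEP is '/' and a group
# 'projects/web' exists:

gr = RepoGroup.get_by_group_name('projects/web')
gr.full_path_splitted        # -> ['projects', 'web']
gr.get_new_name('frontend')  # -> 'projects/frontend' (parent path + new name)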
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    PERMS = [
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository Group no access')),
-        ('group.read', _('Repository Group read access')),
-        ('group.write', _('Repository Group write access')),
-        ('group.admin', _('Repository Group admin access')),
-
-        ('hg.admin', _('Kallithea Administrator')),
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-        ('hg.register.none', _('Register disabled')),
-        ('hg.register.manual_activate', _('Register new user with Kallithea '
-                                          'with manual activation')),
-
-        ('hg.register.auto_activate', _('Register new user with Kallithea '
-                                        'with auto activation')),
-    ]
-
-    # defines which permissions are more important; the higher the weight, the more important
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1
-    }
-
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
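# Illustrative sketch (not part of the removed file): PERM_WEIGHTS above lets
# callers pick the strongest of several granted permissions:

granted = ['repository.read', 'repository.write']
max(granted, key=Permission.PERM_WEIGHTS.get)   # -> 'repository.write' (weight 3 > 1)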
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<user:%s => %s >' % (self.user, self.repository)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8'}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, cache_args=''):
-        self.cache_key = cache_key
-        self.cache_args = cache_args
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__,
-                                  self.cache_id, self.cache_key)
-
-    @property
-    def prefix(self):
-        _split = self.cache_key.split(self.cache_args, 1)
-        if _split and len(_split) == 2:
-            return _split[0]
-        return ''
-
-    @classmethod
-    def clear_cache(cls):
-        cls.query().delete()
-
-    @classmethod
-    def _get_key(cls, key):
-        """
-        Wrapper for generating a key, together with a prefix
-
-        :param key:
-        """
-        import kallithea
-        prefix = ''
-        org_key = key
-        iid = kallithea.CONFIG.get('instance_id')
-        if iid:
-            prefix = iid
-
-        return "%s%s" % (prefix, key), prefix, org_key
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.cache_key == key).scalar()
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        return cls.query().filter(cls.cache_args == repo_name).all()
-
-    @classmethod
-    def _get_or_create_key(cls, key, repo_name, commit=True):
-        inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar()
-        if not inv_obj:
-            try:
-                inv_obj = CacheInvalidation(key, repo_name)
-                Session().add(inv_obj)
-                if commit:
-                    Session().commit()
-            except Exception:
-                log.error(traceback.format_exc())
-                Session().rollback()
-        return inv_obj
-
-    @classmethod
-    def invalidate(cls, key):
-        """
-        Returns an Invalidation object if the given key should be invalidated,
-        None otherwise. `cache_active = False` means that this cache
-        state is not valid and needs to be invalidated
-
-        :param key:
-        """
-        repo_name = key
-        repo_name = remove_suffix(repo_name, '_README')
-        repo_name = remove_suffix(repo_name, '_RSS')
-        repo_name = remove_suffix(repo_name, '_ATOM')
-
-        # adds instance prefix
-        key, _prefix, _org_key = cls._get_key(key)
-        inv = cls._get_or_create_key(key, repo_name)
-
-        if inv and inv.cache_active is False:
-            return inv
-
-    @classmethod
-    def set_invalidate(cls, key=None, repo_name=None):
-        """
-        Mark this cache key for invalidation, either by key or for whole
-        cache sets based on repo_name
-
-        :param key:
-        :param repo_name:
-        """
-        if key:
-            key, _prefix, _org_key = cls._get_key(key)
-            inv_objs = Session().query(cls).filter(cls.cache_key == key).all()
-        elif repo_name:
-            inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-
-        log.debug('marking %s key[s] for invalidation based on key=%s,repo_name=%s',
-                  len(inv_objs), key, repo_name)
-        try:
-            for inv_obj in inv_objs:
-                inv_obj.cache_active = False
-                Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def set_valid(cls, key):
-        """
-        Mark this cache key as active and currently cached
-
-        :param key:
-        """
-        inv_obj = cls.get_by_key(key)
-        inv_obj.cache_active = True
-        Session().add(inv_obj)
-        Session().commit()
-
-    @classmethod
-    def get_cache_map(cls):
-
-        class cachemapdict(dict):
-
-            def __init__(self, *args, **kwargs):
-                fixkey = kwargs.get('fixkey')
-                if fixkey:
-                    del kwargs['fixkey']
-                self.fixkey = fixkey
-                super(cachemapdict, self).__init__(*args, **kwargs)
-
-            def __getattr__(self, name):
-                key = name
-                if self.fixkey:
-                    key, _prefix, _org_key = cls._get_key(key)
-                if key in self.__dict__:
-                    return self.__dict__[key]
-                else:
-                    return self[key]
-
-            def __getitem__(self, key):
-                if self.fixkey:
-                    key, _prefix, _org_key = cls._get_key(key)
-                try:
-                    return super(cachemapdict, self).__getitem__(key)
-                except KeyError:
-                    return
-
-        cache_map = cachemapdict(fixkey=True)
-        for obj in cls.query().all():
-            cache_map[obj.cache_key] = cachemapdict(obj.get_dict())
-        return cache_map
-
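# Illustrative sketch (not part of the removed file): the invalidate()/set_valid()
# pair above drives the cached scm_instance in Repository.scm_instance_cached().
# 'myrepo' is a made-up repository name; keys get an instance_id prefix if one
# is configured:

inv = CacheInvalidation.invalidate('myrepo')    # row is auto-created on first use
if inv is not None:                             # cache_active == False -> stale
    # ... rebuild whatever was cached for this key ...
    CacheInvalidation.set_valid(inv.cache_key)  # mark the key as fresh again
CacheInvalidation.set_invalidate(repo_name='myrepo')   # later: mark all keys stale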
-
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns the users associated with this ChangesetComment, i.e. those
-        who actually commented
-
-        :param cls:
-        :param revision:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
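-        # the revision list is persisted as a single colon-separated string in _revisions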
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    def __json__(self):
-        return dict(
-          revisions=self.revisions
-        )
-
-
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
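-        # link each recipient to the notification through a UserNotification association row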
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
--- a/kallithea/lib/dbmigrate/schema/db_1_5_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1841 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_1_5_0
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea <=1.5.2
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import logging
-import datetime
-import traceback
-import hashlib
-import time
-from collections import defaultdict
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_suffix, remove_prefix
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-from kallithea import DB_PREFIX
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
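-# _hash_key produces an md5 hex digest of a key; used below to build short, stable cache key suffixes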
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return a dict with keys and values corresponding
-        to this model's data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            return safe_str(self.__unicode__())
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, k='', v=''):
-        self.app_settings_name = k
-        self.app_settings_value = v
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
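-        # the settings named below are boolean flags stored as strings; coerce them on read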
-        v = self._app_settings_value
-        if self.app_settings_name in ["ldap_active",
-                                      "default_repo_enable_statistics",
-                                      "default_repo_enable_locking",
-                                      "default_repo_private",
-                                      "default_repo_enable_downloads"]:
-            v = str2bool(v)
-        return v
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key)
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_ldap_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('ldap_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
-                                   self.ui_value)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    DEFAULT_USER = 'default'
-    DEFAULT_PERMISSIONS = [
-        'hg.register.manual_activate', 'hg.create.repository',
-        'hg.fork.repository', 'repository.read', 'group.read'
-    ]
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications assigned to this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    # extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                     self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        return q.scalar()
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            active=user.active,
-            admin=user.admin,
-            ldap_dn=user.ldap_dn,
-            last_login=user.last_login,
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check that this email is not already the main one
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is already present in the user table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-
-    def __unicode__(self):
-        return u'<userGroup(%s)>' % (self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, users_group_id, cache=False):
-        users_group = cls.query()
-        if cache:
-            users_group = users_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % users_group_id))
-        return users_group.get(users_group_id)
-
-    def get_api_data(self):
-        users_group = self
-
-        data = dict(
-            users_group_id=users_group.users_group_id,
-            group_name=users_group.users_group_name,
-            active=users_group.users_group_active,
-        )
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   self.repo_name)
-
-    @hybrid_property
-    def locked(self):
-        # always should return [user_id, timelocked]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
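-        # persist as a single "user_id:unix_timestamp" string, or NULL when clearing the lock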
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*p)
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        returns the new full repository name based on the assigned group and the new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def inject_ui(cls, repo, extras={}):
-        from kallithea.lib.vcs.backends.hg import MercurialRepository
-        from kallithea.lib.vcs.backends.git import GitRepository
-        required = (MercurialRepository, GitRepository)
-        if not isinstance(repo, required):
-            raise Exception('repo must be instance of %s' % ', '.join(c.__name__ for c in required))
-
-        # inject ui extra param to log this action via push logger
-        for k, v in extras.items():
-            repo._repo.ui.setconfig('extras', k, v)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None
-        )
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id):
-        repo.locked = [user_id, time.time()]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns landing changeset, or if that doesn't exist returns the tip
-        """
-        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
-        return cs
-
-    def update_last_change(self, last_change=None):
-        if last_change is None:
-            last_change = datetime.datetime.now()
-        if self.updated_on is None or self.updated_on != last_change:
-            log.debug('updated repo %s with new date %s', self, last_change)
-            self.updated_on = last_change
-            Session().add(self)
-            Session().commit()
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        :type revisions: list
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe there is an open new pull request without a status yet?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    @property
-    def invalidate(self):
-        return CacheInvalidation.invalidate(self.repo_name)
-
-    def set_invalidate(self):
-        """
-        set a cache invalidation marker for this instance
-        """
-        CacheInvalidation.set_invalidate(repo_name=self.repo_name)
-
-    @LazyProperty
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, cache_map=None):
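-        # _c() is memoized per repository name in Beaker's 'long_term' cache region;
-        # a pending invalidation record below forces the region to be refreshed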
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-        log.debug('Getting cached instance of repo')
-
-        if cache_map:
-            # get using prefilled cache_map
-            invalidate_repo = cache_map[self.repo_name]
-            if invalidate_repo:
-                invalidate_repo = (None if invalidate_repo.cache_active
-                                   else invalidate_repo)
-        else:
-            # get from invalidate
-            invalidate_repo = self.invalidate
-
-        if invalidate_repo is not None:
-            region_invalidate(_c, None, rn)
-            # update our cache
-            CacheInvalidation.set_valid(invalidate_repo.cache_key)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository', alias)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the db but not on the '
-                      'filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                  self.group_name)
-
-    @classmethod
-    def groups_choices(cls, check_perms=False):
-        from webhelpers.html import literal as _literal
-        from kallithea.model.scm import ScmModel
-        groups = cls.query().all()
-        if check_perms:
-            # filter to the groups the user has access to; this is done
-            # magically inside ScmModel based on the current user
-            groups = ScmModel().get_repos_groups(groups)
-        repo_groups = [('', '')]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively return all groups, with the repositories in those groups
-        """
-        all_ = []
-
-        def _get_members(root_gr):
-            for r in root_gr.repositories:
-                all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    PERMS = [
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository Group no access')),
-        ('group.read', _('Repository Group read access')),
-        ('group.write', _('Repository Group write access')),
-        ('group.admin', _('Repository Group admin access')),
-
-        ('hg.admin', _('Kallithea Administrator')),
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-        ('hg.register.none', _('Register disabled')),
-        ('hg.register.manual_activate', _('Register new user with Kallithea '
-                                          'with manual activation')),
-
-        ('hg.register.auto_activate', _('Register new user with Kallithea '
-                                        'with auto activation')),
-    ]
-
-    # defines which permissions are more important; the higher the weight, the more important
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1
-    }
-
-    DEFAULT_USER_PERMISSIONS = [
-        'repository.read',
-        'group.read',
-        'hg.create.repository',
-        'hg.fork.repository',
-        'hg.register.manual_activate',
-    ]
-
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<user:%s => %s >' % (self.user, self.repository)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8'}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, cache_args=''):
-        self.cache_key = cache_key
-        self.cache_args = cache_args
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__,
-                                  self.cache_id, self.cache_key)
-
-    @property
-    def prefix(self):
-        _split = self.cache_key.split(self.cache_args, 1)
-        if _split and len(_split) == 2:
-            return _split[0]
-        return ''
-
-    @classmethod
-    def clear_cache(cls):
-        cls.query().delete()
-
-    @classmethod
-    def _get_key(cls, key):
-        """
-        Wrapper for generating a key, together with a prefix
-
-        :param key:
-        """
-        import kallithea
-        prefix = ''
-        org_key = key
-        iid = kallithea.CONFIG.get('instance_id')
-        if iid:
-            prefix = iid
-
-        return "%s%s" % (prefix, key), prefix, org_key
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.cache_key == key).scalar()
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        return cls.query().filter(cls.cache_args == repo_name).all()
-
-    @classmethod
-    def _get_or_create_key(cls, key, repo_name, commit=True):
-        inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar()
-        if not inv_obj:
-            try:
-                inv_obj = CacheInvalidation(key, repo_name)
-                Session().add(inv_obj)
-                if commit:
-                    Session().commit()
-            except Exception:
-                log.error(traceback.format_exc())
-                Session().rollback()
-        return inv_obj
-
-    @classmethod
-    def invalidate(cls, key):
-        """
-        Returns an Invalidation object if the given key should be invalidated,
-        None otherwise. `cache_active = False` means that this cache
-        state is not valid and needs to be invalidated
-
-        :param key:
-        """
-        repo_name = key
-        repo_name = remove_suffix(repo_name, '_README')
-        repo_name = remove_suffix(repo_name, '_RSS')
-        repo_name = remove_suffix(repo_name, '_ATOM')
-
-        # adds instance prefix
-        key, _prefix, _org_key = cls._get_key(key)
-        inv = cls._get_or_create_key(key, repo_name)
-
-        if inv and inv.cache_active is False:
-            return inv
-
-    @classmethod
-    def set_invalidate(cls, key=None, repo_name=None):
-        """
-        Mark cache keys for invalidation, either by a single key or by whole
-        cache sets based on repo_name
-
-        :param key:
-        """
-        if key:
-            key, _prefix, _org_key = cls._get_key(key)
-            inv_objs = Session().query(cls).filter(cls.cache_key == key).all()
-        elif repo_name:
-            inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-
-        log.debug('marking %s key[s] for invalidation based on key=%s,repo_name=%s',
-                  len(inv_objs), key, repo_name)
-        try:
-            for inv_obj in inv_objs:
-                inv_obj.cache_active = False
-                Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def set_valid(cls, key):
-        """
-        Mark this cache key as active and currently cached
-
-        :param key:
-        """
-        inv_obj = cls.get_by_key(key)
-        inv_obj.cache_active = True
-        Session().add(inv_obj)
-        Session().commit()
-
-    @classmethod
-    def get_cache_map(cls):
-
-        class cachemapdict(dict):
-
-            def __init__(self, *args, **kwargs):
-                fixkey = kwargs.get('fixkey')
-                if fixkey:
-                    del kwargs['fixkey']
-                self.fixkey = fixkey
-                super(cachemapdict, self).__init__(*args, **kwargs)
-
-            def __getattr__(self, name):
-                key = name
-                if self.fixkey:
-                    key, _prefix, _org_key = cls._get_key(key)
-                if key in self.__dict__:
-                    return self.__dict__[key]
-                else:
-                    return self[key]
-
-            def __getitem__(self, key):
-                if self.fixkey:
-                    key, _prefix, _org_key = cls._get_key(key)
-                try:
-                    return super(cachemapdict, self).__getitem__(key)
-                except KeyError:
-                    return
-
-        cache_map = cachemapdict(fixkey=True)
-        for obj in cls.query().all():
-            cache_map[obj.cache_key] = cachemapdict(obj.get_dict())
-        return cache_map
-
-
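
For orientation, the CacheInvalidation machinery above builds cache keys by prepending an optional instance prefix (the `instance_id` config value) and recovers the repository name by stripping the `_README`/`_RSS`/`_ATOM` suffixes. A minimal standalone sketch of that scheme; `make_key` and `repo_name_from_key` are illustrative helper names, not names from the deleted module:

    # Sketch of the key scheme used by CacheInvalidation._get_key()/invalidate().
    def make_key(key, instance_id=None):
        # mirror _get_key(): optional instance prefix + original key
        prefix = instance_id or ''
        return "%s%s" % (prefix, key), prefix, key

    def repo_name_from_key(key):
        # mirror invalidate(): strip the per-view suffixes to get the repo name
        for suffix in ('_README', '_RSS', '_ATOM'):
            if key.endswith(suffix):
                key = key[:-len(suffix)]
        return key

    print(make_key('myrepo_RSS', instance_id='prod1'))  # ('prod1myrepo_RSS', 'prod1', 'myrepo_RSS')
    print(repo_name_from_key('myrepo_RSS'))             # 'myrepo'
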
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns users associated with this ChangesetComment, i.e. those
-        who actually commented
-
-        :param cls:
-        :param revision:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    def __json__(self):
-        return dict(
-          revisions=self.revisions
-        )
-
-
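
The `revisions` hybrid property above packs the ordered revision list into a single colon-joined string stored in the `revisions` text column (sized for roughly 500 revisions). A minimal sketch of the same round-trip, independent of SQLAlchemy:

    # Round-trip of the colon-joined storage used by PullRequest.revisions.
    def pack_revisions(revs):
        return ':'.join(revs)       # what the setter writes to _revisions

    def unpack_revisions(raw):
        return raw.split(':')       # what the getter returns

    raw = pack_revisions(['abc123', 'def456'])
    assert unpack_revisions(raw) == ['abc123', 'def456']
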
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
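
DbMigrateVersion above mirrors the bookkeeping table in which SQLAlchemy Migrate recorded the current schema version. A minimal sketch of inspecting it with plain SQLAlchemy; the engine URL is illustrative, and only the table and column names come from the model above:

    # Inspect the schema version recorded in the 'db_migrate_version' table.
    from sqlalchemy import create_engine, text

    engine = create_engine('sqlite:///kallithea.db')  # illustrative database URL
    with engine.connect() as conn:
        rows = conn.execute(text(
            'SELECT repository_id, repository_path, version FROM db_migrate_version'))
        for repository_id, repository_path, version in rows:
            print(repository_id, version)
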
--- a/kallithea/lib/dbmigrate/schema/db_1_5_2.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1962 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_1_5_2
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea <=1.5.X
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import logging
-import datetime
-import traceback
-import hashlib
-import time
-from collections import defaultdict
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_suffix, remove_prefix
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-from kallithea import DB_PREFIX
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            return safe_str(self.__unicode__())
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
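
BaseModel.get_dict() above serializes all mapped columns and then merges in whatever an optional __json__() hook returns, with the hook's values winning on key clashes. A standalone sketch of that merge order using a plain object instead of a mapped model:

    # Sketch of the BaseModel.get_dict() merge: columns first, __json__() extras second.
    class FakeRepo(object):
        repo_id = 1
        repo_name = 'myrepo'

        def __json__(self):
            return {'landing_rev': 'tip'}   # extra, non-column field

        def get_dict(self):
            d = {'repo_id': self.repo_id, 'repo_name': self.repo_name}
            extra = getattr(self, '__json__', None)
            if callable(extra):
                d.update(extra())           # __json__ values override on clashes
            return d

    print(FakeRepo().get_dict())
    # {'repo_id': 1, 'repo_name': 'myrepo', 'landing_rev': 'tip'}
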
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, k='', v=''):
-        self.app_settings_name = k
-        self.app_settings_value = v
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        if self.app_settings_name in ["ldap_active",
-                                      "default_repo_enable_statistics",
-                                      "default_repo_enable_locking",
-                                      "default_repo_private",
-                                      "default_repo_enable_downloads"]:
-            v = str2bool(v)
-        return v
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key)
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_ldap_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('ldap_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-
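
Setting.app_settings_value above stores every value as unicode text and, for a fixed whitelist of flag settings, coerces the stored string back to a boolean on read via str2bool. A minimal sketch of that behavior; str2bool is re-implemented here for illustration and is simpler than the helper in kallithea.lib.utils2:

    # Sketch of the read-side coercion done by Setting.app_settings_value.
    BOOL_SETTINGS = ('ldap_active', 'default_repo_private')  # subset of the whitelist

    def str2bool(v):
        # simplified stand-in for kallithea.lib.utils2.str2bool
        return str(v).strip().lower() in ('true', 'yes', 'on', 'y', 't', '1')

    def read_setting(name, stored_value):
        if name in BOOL_SETTINGS:
            return str2bool(stored_value)
        return stored_value

    assert read_setting('ldap_active', u'True') is True
    assert read_setting('title', u'Kallithea') == u'Kallithea'
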
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
-                                   self.ui_value)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    DEFAULT_USER = 'default'
-    DEFAULT_PERMISSIONS = [
-        'hg.register.manual_activate', 'hg.create.repository',
-        'hg.fork.repository', 'repository.read', 'group.read'
-    ]
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications assigned to this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    #extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def ip_addresses(self):
-        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
-        return [x.ip_addr for x in ret]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                     self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        return q.scalar()
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    def get_api_data(self):
-        """
-        Common function for generating user-related data for the API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            active=user.active,
-            admin=user.admin,
-            ldap_dn=user.ldap_dn,
-            last_login=user.last_login,
-            ip_addresses=user.ip_addresses
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
-
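
User.get_by_email() above first queries the users table's own email column and only falls back to the extra addresses in UserEmailMap when that misses, optionally matching case-insensitively. The lookup order as a standalone sketch over plain dicts (the sample addresses are made up):

    # Sketch of the two-step lookup in User.get_by_email().
    PRIMARY = {'admin@example.com': 'admin'}   # users.email          -> username
    EXTRA = {'root@example.com': 'admin'}      # user_email_map.email -> username

    def get_by_email(email, case_insensitive=False):
        def lookup(table, needle):
            if case_insensitive:
                table = dict((k.lower(), v) for k, v in table.items())
                needle = needle.lower()
            return table.get(needle)

        user = lookup(PRIMARY, email)
        if user is None:                       # fall back to the alternate emails
            user = lookup(EXTRA, email)
        return user

    assert get_by_email('Root@Example.com', case_insensitive=True) == 'admin'
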
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check if this email is not main one
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is present in user table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserIpMap(Base, BaseModel):
-    __tablename__ = 'user_ip_map'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'ip_addr'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    __mapper_args__ = {}
-
-    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    user = relationship('User', lazy='joined')
-
-    @classmethod
-    def _get_ip_range(cls, ip_addr):
-        from kallithea.lib import ipaddr
-        net = ipaddr.IPv4Network(ip_addr)
-        return [str(net.network), str(net.broadcast)]
-
-    def __json__(self):
-        return dict(
-          ip_addr=self.ip_addr,
-          ip_range=self._get_ip_range(self.ip_addr)
-        )
-
-
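
UserIpMap._get_ip_range() above expands an address or CIDR block into its network and broadcast addresses using the bundled ipaddr module. The same result with the standard-library ipaddress module, shown only as an illustrative substitute for what the deleted code imports:

    # Stdlib equivalent of UserIpMap._get_ip_range() (original uses kallithea.lib.ipaddr).
    import ipaddress

    def get_ip_range(ip_addr):
        net = ipaddress.IPv4Network(ip_addr, strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    print(get_ip_range(u'192.168.1.0/24'))  # ['192.168.1.0', '192.168.1.255']
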
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-
-    def __unicode__(self):
-        return u'<userGroup(%s)>' % (self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, users_group_id, cache=False):
-        users_group = cls.query()
-        if cache:
-            users_group = users_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % users_group_id))
-        return users_group.get(users_group_id)
-
-    def get_api_data(self):
-        users_group = self
-
-        data = dict(
-            users_group_id=users_group.users_group_id,
-            group_name=users_group.users_group_name,
-            active=users_group.users_group_active,
-        )
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
-
-    @hybrid_property
-    def locked(self):
-        # should always return [user_id, timelocked]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
-    @hybrid_property
-    def changeset_cache(self):
-        from kallithea.lib.vcs.backends.base import EmptyChangeset
-        dummy = EmptyChangeset().__json__()
-        if not self._changeset_cache:
-            return dummy
-        try:
-            return json.loads(self._changeset_cache)
-        except TypeError:
-            return dummy
-
-    @changeset_cache.setter
-    def changeset_cache(self, val):
-        try:
-            self._changeset_cache = json.dumps(val)
-        except:
-            log.error(traceback.format_exc())
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def normalize_repo_name(cls, repo_name):
-        """
-        Normalizes OS-specific repo_name to the format stored internally in the
-        database, using URL_SEP
-
-        :param cls:
-        :param repo_name:
-        """
-        return cls.url_sep().join(repo_name.split(os.sep))
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*p)
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        returns new full repository name based on assigned group and new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def inject_ui(cls, repo, extras={}):
-        from kallithea.lib.vcs.backends.hg import MercurialRepository
-        from kallithea.lib.vcs.backends.git import GitRepository
-        required = (MercurialRepository, GitRepository)
-        if not isinstance(repo, required):
-            raise Exception('repo must be instance of %s' % ', '.join(c.__name__ for c in required))
-
-        # inject ui extra param to log this action via push logger
-        for k, v in extras.items():
-            repo._repo.ui.setconfig('extras', k, v)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None,
-            enable_statistics=repo.enable_statistics,
-            enable_locking=repo.enable_locking,
-            enable_downloads=repo.enable_downloads,
-            last_changeset=repo.changeset_cache
-        )
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id):
-        repo.locked = [user_id, time.time()]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    def clone_url(self, **override):
-        import kallithea.lib.helpers as h
-        from urlparse import urlparse
-        import urllib
-        parsed_url = urlparse(h.canonical_url('home'))
-        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
-        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
-        args = {
-           'user': '',
-           'pass': '',
-           'scheme': parsed_url.scheme,
-           'netloc': parsed_url.netloc,
-           'prefix': decoded_path,
-           'path': self.repo_name
-        }
-
-        args.update(override)
-        return default_clone_uri % args
-
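
To make the URL template above concrete, here is a minimal standalone sketch (not code from the file above) of the same substitution; scheme, host and repository name are invented placeholders, and the prefix assumes the application is served from the site root:

    # Sketch of the clone URL template used by clone_url(); values are invented.
    default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
    args = {
        'user': '',
        'pass': '',
        'scheme': 'https',
        'netloc': 'example.com',
        'prefix': '/',               # path of canonical_url('home'), assumed root
        'path': 'group/my-repo',     # self.repo_name
    }
    print(default_clone_uri % args)  # https://example.com/group/my-repo
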
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns landing changeset, or if that doesn't exist returns the tip
-        """
-        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
-        return cs
-
-    def update_changeset_cache(self, cs_cache=None):
-        """
-        Update cache of last changeset for repository, keys should be::
-
-            short_id
-            raw_id
-            revision
-            message
-            date
-            author
-
-        :param cs_cache:
-        """
-        from kallithea.lib.vcs.backends.base import BaseChangeset
-        if cs_cache is None:
-            cs_cache = EmptyChangeset()
-            # use no-cache version here
-            scm_repo = self.scm_instance_no_cache()
-            if scm_repo:
-                cs_cache = scm_repo.get_changeset()
-
-        if isinstance(cs_cache, BaseChangeset):
-            cs_cache = cs_cache.__json__()
-
-        if (cs_cache != self.changeset_cache or not self.changeset_cache):
-            _default = datetime.datetime.fromtimestamp(0)
-            last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
-            self.changeset_cache = cs_cache
-            Session().add(self)
-            Session().commit()
-        else:
-            log.debug('Skipping repo:%s already with latest changes',
-                      self.repo_name)
-
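
A hedged sketch (not code from the file above) of the dict shape described by the docstring of update_changeset_cache(); the values are invented and the exact output of BaseChangeset.__json__() may differ:

    import datetime

    # Illustrative cs_cache dict with the keys listed in the docstring.
    cs_cache = {
        'short_id': 'deadbeefcafe',
        'raw_id': 'deadbeefcafe0000000000000000000000000000',
        'revision': 42,
        'message': 'example commit message',
        'date': datetime.datetime(2013, 1, 1, 12, 0, 0),
        'author': 'Example Author <author@example.com>',
    }

    # update_changeset_cache() then uses 'date' (or the epoch as a fallback)
    # as the repository's new updated_on value.
    last_change = cs_cache.get('date') or datetime.datetime.fromtimestamp(0)
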
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        :type revisions: list
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have a newly opened pull request without a status?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
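
For reference, a small sketch (not code from the file above) of the mapping built by statuses(): revision id -> [status, status label, pull request id, pull request repo name]; all keys and values are invented:

    # Illustrative shape of the dict returned by statuses(); values are invented.
    grouped = {
        '1111111111111111111111111111111111111111':
            ['under_review', 'Under Review', 7, 'group/other-repo'],
        '2222222222222222222222222222222222222222':
            ['approved', 'Approved', None, None],
    }
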
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    @property
-    def invalidate(self):
-        return CacheInvalidation.invalidate(self.repo_name)
-
-    def set_invalidate(self):
-        """
-        set a cache for invalidation for this instance
-        """
-        CacheInvalidation.set_invalidate(repo_name=self.repo_name)
-
-    def scm_instance_no_cache(self):
-        return self.__get_instance()
-
-    @LazyProperty
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, cache_map=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-        log.debug('Getting cached instance of repo')
-
-        if cache_map:
-            # get using prefilled cache_map
-            invalidate_repo = cache_map[self.repo_name]
-            if invalidate_repo:
-                invalidate_repo = (None if invalidate_repo.cache_active
-                                   else invalidate_repo)
-        else:
-            # get from invalidate
-            invalidate_repo = self.invalidate
-
-        if invalidate_repo is not None:
-            region_invalidate(_c, None, rn)
-            # update our cache
-            CacheInvalidation.set_valid(invalidate_repo.cache_key)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository', alias)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the database but not on '
-                      'the filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                  self.group_name)
-
-    @classmethod
-    def groups_choices(cls, check_perms=False):
-        from webhelpers.html import literal as _literal
-        from kallithea.model.scm import ScmModel
-        groups = cls.query().all()
-        if check_perms:
-            # filter to the groups the user has access to; this is done
-            # inside ScmModel based on the current user
-            groups = ScmModel().get_repos_groups(groups)
-        repo_groups = [('', '')]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursive return all groups, with repositories in those groups
-        """
-        all_ = []
-
-        def _get_members(root_gr):
-            for r in root_gr.repositories:
-                all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    PERMS = [
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository Group no access')),
-        ('group.read', _('Repository Group read access')),
-        ('group.write', _('Repository Group write access')),
-        ('group.admin', _('Repository Group admin access')),
-
-        ('hg.admin', _('Kallithea Administrator')),
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-        ('hg.register.none', _('Register disabled')),
-        ('hg.register.manual_activate', _('Register new user with Kallithea '
-                                          'with manual activation')),
-
-        ('hg.register.auto_activate', _('Register new user with Kallithea '
-                                        'with auto activation')),
-    ]
-
-    # defines which permissions are more important; higher weight means more important
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository':1
-    }
-
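
As a minimal sketch of how such weights can be used (the helper below is illustrative, not code from this schema file), the most important of several granted permissions can be picked with max():

    # Illustrative helper: pick the most important permission by weight.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,
    }

    def strongest(perm_names, weights=PERM_WEIGHTS):
        return max(perm_names, key=lambda p: weights.get(p, -1))

    print(strongest(['repository.read', 'repository.write']))  # repository.write
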
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<user:%s => %s >' % (self.user, self.repository)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8'}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, cache_args=''):
-        self.cache_key = cache_key
-        self.cache_args = cache_args
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__,
-                                  self.cache_id, self.cache_key)
-
-    @property
-    def prefix(self):
-        _split = self.cache_key.split(self.cache_args, 1)
-        if _split and len(_split) == 2:
-            return _split[0]
-        return ''
-
-    @classmethod
-    def clear_cache(cls):
-        cls.query().delete()
-
-    @classmethod
-    def _get_key(cls, key):
-        """
-        Wrapper for generating a key, together with a prefix
-
-        :param key:
-        """
-        import kallithea
-        prefix = ''
-        org_key = key
-        iid = kallithea.CONFIG.get('instance_id')
-        if iid:
-            prefix = iid
-
-        return "%s%s" % (prefix, key), prefix, org_key
-
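
A standalone sketch (not code from the file above) of the key composition done by _get_key(), assuming a hypothetical instance_id of 'node1'; the real value, if any, comes from kallithea.CONFIG:

    # Sketch of CacheInvalidation._get_key(); 'node1' is a hypothetical instance_id.
    def _get_key(key, instance_id='node1'):
        prefix = instance_id or ''
        return "%s%s" % (prefix, key), prefix, key

    print(_get_key('group/my-repo'))
    # ('node1group/my-repo', 'node1', 'group/my-repo')
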
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.cache_key == key).scalar()
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        return cls.query().filter(cls.cache_args == repo_name).all()
-
-    @classmethod
-    def _get_or_create_key(cls, key, repo_name, commit=True):
-        inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar()
-        if not inv_obj:
-            try:
-                inv_obj = CacheInvalidation(key, repo_name)
-                Session().add(inv_obj)
-                if commit:
-                    Session().commit()
-            except Exception:
-                log.error(traceback.format_exc())
-                Session().rollback()
-        return inv_obj
-
-    @classmethod
-    def invalidate(cls, key):
-        """
-        Returns Invalidation object if this given key should be invalidated
-        None otherwise. `cache_active = False` means that this cache
-        state is not valid and needs to be invalidated
-
-        :param key:
-        """
-        repo_name = key
-        repo_name = remove_suffix(repo_name, '_README')
-        repo_name = remove_suffix(repo_name, '_RSS')
-        repo_name = remove_suffix(repo_name, '_ATOM')
-
-        # adds instance prefix
-        key, _prefix, _org_key = cls._get_key(key)
-        inv = cls._get_or_create_key(key, repo_name)
-
-        if inv and inv.cache_active is False:
-            return inv
-
-    @classmethod
-    def set_invalidate(cls, key=None, repo_name=None):
-        """
-        Mark this Cache key for invalidation, either by key or whole
-        cache sets based on repo_name
-
-        :param key:
-        """
-        if key:
-            key, _prefix, _org_key = cls._get_key(key)
-            inv_objs = Session().query(cls).filter(cls.cache_key == key).all()
-        elif repo_name:
-            inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-
-        log.debug('marking %s key[s] for invalidation based on key=%s,repo_name=%s',
-                  len(inv_objs), key, repo_name)
-        try:
-            for inv_obj in inv_objs:
-                inv_obj.cache_active = False
-                Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def set_valid(cls, key):
-        """
-        Mark this cache key as active and currently cached
-
-        :param key:
-        """
-        inv_obj = cls.get_by_key(key)
-        inv_obj.cache_active = True
-        Session().add(inv_obj)
-        Session().commit()
-
-    @classmethod
-    def get_cache_map(cls):
-
-        class cachemapdict(dict):
-
-            def __init__(self, *args, **kwargs):
-                fixkey = kwargs.get('fixkey')
-                if fixkey:
-                    del kwargs['fixkey']
-                self.fixkey = fixkey
-                super(cachemapdict, self).__init__(*args, **kwargs)
-
-            def __getattr__(self, name):
-                key = name
-                if self.fixkey:
-                    key, _prefix, _org_key = cls._get_key(key)
-                if key in self.__dict__:
-                    return self.__dict__[key]
-                else:
-                    return self[key]
-
-            def __getitem__(self, key):
-                if self.fixkey:
-                    key, _prefix, _org_key = cls._get_key(key)
-                try:
-                    return super(cachemapdict, self).__getitem__(key)
-                except KeyError:
-                    return
-
-        cache_map = cachemapdict(fixkey=True)
-        for obj in cls.query().all():
-            cache_map[obj.cache_key] = cachemapdict(obj.get_dict())
-        return cache_map
-
-
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns user associated with this ChangesetComment. ie those
-        who actually commented
-
-        :param cls:
-        :param revision:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
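
The hybrid property above stores the revision list as one colon-joined string (hence the 500-revision note on the column); a minimal round-trip sketch with invented ids, not code from the file above:

    # Round trip of the colon-joined storage used by PullRequest.revisions.
    revs = ['aaa111', 'bbb222', 'ccc333']
    stored = ':'.join(revs)            # value kept in the 'revisions' column
    assert stored.split(':') == revs
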
-    @property
-    def org_ref_parts(self):
-        return self.org_ref.split(':')
-
-    @property
-    def other_ref_parts(self):
-        return self.other_ref.split(':')
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    def __json__(self):
-        return dict(
-          revisions=self.revisions
-        )
-
-
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
--- a/kallithea/lib/dbmigrate/schema/db_1_6_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2041 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_1_6_0
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea <=1.5.X
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import logging
-import datetime
-import traceback
-import hashlib
-import time
-from collections import defaultdict
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_suffix, remove_prefix, time_to_datetime
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-from kallithea import DB_PREFIX
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return a dict with keys and values corresponding
-        to this model's data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
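
A small stand-in illustration (not code from the file above) of how get_dict() merges plain column values with an optional __json__() payload; the class below is not a real model:

    # Stand-in model mirroring the merge done by BaseModel.get_dict().
    class FakeModel(object):
        user_id = 1
        username = 'example'

        def _get_keys(self):
            return ['user_id', 'username']

        def __json__(self):
            return {'full_name': 'Example User'}

        def get_dict(self):
            d = dict((k, getattr(self, k)) for k in self._get_keys())
            d.update(self.__json__())   # extra __json__ fields are merged in
            return d

    print(FakeModel().get_dict())
    # contains user_id, username and the extra full_name key
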
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            return safe_str(self.__unicode__())
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, k='', v=''):
-        self.app_settings_name = k
-        self.app_settings_value = v
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        if self.app_settings_name in ["ldap_active",
-                                      "default_repo_enable_statistics",
-                                      "default_repo_enable_locking",
-                                      "default_repo_private",
-                                      "default_repo_enable_downloads"]:
-            v = str2bool(v)
-        return v
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
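
The getter above coerces a handful of boolean-ish settings from their stored string form; below is a simplified stand-in for the str2bool helper (the real one lives in kallithea.lib.utils2 and may accept other spellings), not code from the file above:

    # Simplified stand-in for str2bool as used by the app_settings_value getter.
    def str2bool(value):
        return str(value).strip().lower() in ('true', 'yes', 'on', 'y', 't', '1')

    print(str2bool(u'True'))   # True
    print(str2bool(u'false'))  # False
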
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key)
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings!')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_ldap_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('ldap_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
-                                   self.ui_value)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    DEFAULT_USER = 'default'
-    DEFAULT_PERMISSIONS = [
-        'hg.register.manual_activate', 'hg.create.repository',
-        'hg.fork.repository', 'repository.read', 'group.read'
-    ]
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications assigned to this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    #extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def ip_addresses(self):
-        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
-        return [x.ip_addr for x in ret]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    @property
-    def AuthUser(self):
-        """
-        Returns instance of AuthUser for this user
-        """
-        from kallithea.lib.auth import AuthUser
-        return AuthUser(user_id=self.user_id, api_key=self.api_key,
-                        username=self.username)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                     self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        return q.scalar()
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    @classmethod
-    def get_from_cs_author(cls, author):
-        """
-        Tries to get a User object out of a commit author string
-
-        :param author:
-        """
-        from kallithea.lib.helpers import email, author_name
-        # If the author string contains a valid email, see if it belongs to a user in the system
-        _email = email(author)
-        if _email:
-            user = cls.get_by_email(_email, case_insensitive=True)
-            if user:
-                return user
-        # Maybe we can match by username?
-        _author = author_name(author)
-        user = cls.get_by_username(_author, case_insensitive=True)
-        if user:
-            return user
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            active=user.active,
-            admin=user.admin,
-            ldap_dn=user.ldap_dn,
-            last_login=user.last_login,
-            ip_addresses=user.ip_addresses
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
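For reference, a minimal standalone sketch of the email-normalization pattern used by the removed User._email / User.email pair above: the setter lower-cases the address so later lookups can be case-insensitive. The _EmailHolder class is hypothetical and not part of the schema; only the getter/setter behaviour is taken from the code in this diff.

    from sqlalchemy.ext.hybrid import hybrid_property

    class _EmailHolder(object):
        # hypothetical stand-in, not an ORM-mapped class
        _email = None

        @hybrid_property
        def email(self):
            return self._email

        @email.setter
        def email(self, val):
            # store addresses lower-cased; empty values become None
            self._email = val.lower() if val else None

    h = _EmailHolder()
    h.email = 'Someone@Example.COM'
    assert h.email == 'someone@example.com'
    h.email = ''
    assert h.email is None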
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check that this email is not already a user's main email
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is already present in the user table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserIpMap(Base, BaseModel):
-    __tablename__ = 'user_ip_map'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'ip_addr'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    __mapper_args__ = {}
-
-    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    user = relationship('User', lazy='joined')
-
-    @classmethod
-    def _get_ip_range(cls, ip_addr):
-        from kallithea.lib import ipaddr
-        net = ipaddr.IPNetwork(address=ip_addr)
-        return [str(net.network), str(net.broadcast)]
-
-    def __json__(self):
-        return dict(
-          ip_addr=self.ip_addr,
-          ip_range=self._get_ip_range(self.ip_addr)
-        )
-
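The removed UserIpMap._get_ip_range above relies on the bundled kallithea.lib.ipaddr module. As a rough illustration only, the same [network, broadcast] pair can be computed with the standard library ipaddress module; this is a substitute for the sketch, not what the deleted code used.

    import ipaddress

    def ip_range(ip_addr):
        # '192.168.1.0/24' (or a bare host address) -> first and last address
        # of the covering network, mirroring the pair returned above
        net = ipaddress.ip_network(ip_addr, strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    assert ip_range(u'192.168.1.0/24') == ['192.168.1.0', '192.168.1.255']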
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-
-    def __unicode__(self):
-        return u'<userGroup(%s)>' % (self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, users_group_id, cache=False):
-        users_group = cls.query()
-        if cache:
-            users_group = users_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % users_group_id))
-        return users_group.get(users_group_id)
-
-    def get_api_data(self):
-        users_group = self
-
-        data = dict(
-            users_group_id=users_group.users_group_id,
-            group_name=users_group.users_group_name,
-            active=users_group.users_group_active,
-        )
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class RepositoryField(Base, BaseModel):
-    __tablename__ = 'repositories_fields'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
-
-    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-    field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None))
-    field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_type = Column("field_type", String(256), nullable=False, unique=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repository = relationship('Repository')
-
-    @property
-    def field_key_prefixed(self):
-        return 'ex_%s' % self.field_key
-
-    @classmethod
-    def un_prefix_key(cls, key):
-        if key.startswith(cls.PREFIX):
-            return key[len(cls.PREFIX):]
-        return key
-
-    @classmethod
-    def get_by_key_name(cls, key, repo):
-        row = cls.query() \
-                .filter(cls.repository == repo) \
-                .filter(cls.field_key == key).scalar()
-        return row
-
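Extra repository fields above are posted from forms with the 'ex_' prefix so they cannot clash with built-in form fields. A tiny sketch of the prefix round trip performed by field_key_prefixed and un_prefix_key (standalone helpers, not the model methods themselves):

    PREFIX = 'ex_'

    def prefix_key(key):
        # mirrors field_key_prefixed above
        return '%s%s' % (PREFIX, key)

    def un_prefix_key(key):
        # mirrors RepositoryField.un_prefix_key above
        return key[len(PREFIX):] if key.startswith(PREFIX) else key

    assert prefix_key('ticket_url') == 'ex_ticket_url'
    assert un_prefix_key('ex_ticket_url') == 'ticket_url'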
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-    extra_fields = relationship('RepositoryField',
-                                cascade="all, delete, delete-orphan")
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
-
-    @hybrid_property
-    def locked(self):
-        # should always return a [user_id, time_locked] pair
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
-    @hybrid_property
-    def changeset_cache(self):
-        from kallithea.lib.vcs.backends.base import EmptyChangeset
-        dummy = EmptyChangeset().__json__()
-        if not self._changeset_cache:
-            return dummy
-        try:
-            return json.loads(self._changeset_cache)
-        except TypeError:
-            return dummy
-
-    @changeset_cache.setter
-    def changeset_cache(self, val):
-        try:
-            self._changeset_cache = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def normalize_repo_name(cls, repo_name):
-        """
-        Normalizes OS-specific repo_name to the format stored internally in the
-        database, using URL_SEP
-
-        :param cls:
-        :param repo_name:
-        """
-        return cls.url_sep().join(repo_name.split(os.sep))
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name, self.repo_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it
-        actually exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*map(safe_unicode, p))
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        Returns the new full repository name based on the assigned group and the new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        Returns True if the given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None,
-            enable_statistics=repo.enable_statistics,
-            enable_locking=repo.enable_locking,
-            enable_downloads=repo.enable_downloads,
-            last_changeset=repo.changeset_cache,
-            locked_by=User.get(self.locked[0]).get_api_data() \
-                if self.locked[0] else None,
-            locked_date=time_to_datetime(self.locked[1]) \
-                if self.locked[1] else None
-        )
-        rc_config = Setting.get_app_settings()
-        repository_fields = str2bool(rc_config.get('repository_fields'))
-        if repository_fields:
-            for f in self.extra_fields:
-                data[f.field_key_prefixed] = f.field_value
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id):
-        repo.locked = [user_id, time.time()]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def getlock(cls, repo):
-        return repo.locked
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    def clone_url(self, **override):
-        import kallithea.lib.helpers as h
-        from urlparse import urlparse
-        import urllib
-        parsed_url = urlparse(h.canonical_url('home'))
-        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
-        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
-        args = {
-           'user': '',
-           'pass': '',
-           'scheme': parsed_url.scheme,
-           'netloc': parsed_url.netloc,
-           'prefix': decoded_path,
-           'path': self.repo_name
-        }
-
-        args.update(override)
-        return default_clone_uri % args
-
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns the landing changeset, or the tip if that doesn't exist
-        """
-        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
-        return cs
-
-    def update_changeset_cache(self, cs_cache=None):
-        """
-        Update cache of last changeset for repository, keys should be::
-
-            short_id
-            raw_id
-            revision
-            message
-            date
-            author
-
-        :param cs_cache:
-        """
-        from kallithea.lib.vcs.backends.base import BaseChangeset
-        if cs_cache is None:
-            cs_cache = EmptyChangeset()
-            # use no-cache version here
-            scm_repo = self.scm_instance_no_cache()
-            if scm_repo:
-                cs_cache = scm_repo.get_changeset()
-
-        if isinstance(cs_cache, BaseChangeset):
-            cs_cache = cs_cache.__json__()
-
-        if (cs_cache != self.changeset_cache or not self.changeset_cache):
-            _default = datetime.datetime.fromtimestamp(0)
-            last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
-            self.changeset_cache = cs_cache
-            Session().add(self)
-            Session().commit()
-        else:
-            log.debug('Skipping repo:%s already with latest changes',
-                      self.repo_name)
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        :type revisions: list
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have an open pull request without a status yet?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    def _repo_size(self):
-        from kallithea.lib import helpers as h
-        log.debug('calculating repository size...')
-        return h.format_byte_size(self.scm_instance.size)
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    @property
-    def invalidate(self):
-        return CacheInvalidation.invalidate(self.repo_name)
-
-    def set_invalidate(self):
-        """
-        Mark all cache keys for this repository for invalidation
-        """
-        CacheInvalidation.set_invalidate(repo_name=self.repo_name)
-
-    def scm_instance_no_cache(self):
-        return self.__get_instance()
-
-    @LazyProperty
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, cache_map=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-        log.debug('Getting cached instance of repo')
-
-        if cache_map:
-            # get using prefilled cache_map
-            invalidate_repo = cache_map[self.repo_name]
-            if invalidate_repo:
-                invalidate_repo = (None if invalidate_repo.cache_active
-                                   else invalidate_repo)
-        else:
-            # get from invalidate
-            invalidate_repo = self.invalidate
-
-        if invalidate_repo is not None:
-            region_invalidate(_c, None, rn)
-            # update our cache
-            CacheInvalidation.set_valid(invalidate_repo.cache_key)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository from %s',
-                      alias, repo_full_path)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the database but not on the '
-                      'filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
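As the removed Repository.locked hybrid property above shows, a lock is persisted in the locked column as a single 'user_id:timestamp' string. A minimal sketch of that round trip as plain functions, outside the ORM; the names below are hypothetical:

    import time

    def serialize_lock(user_id, lock_time):
        # mirrors the locked setter above: join the pair with ':'
        return ':'.join(map(str, [user_id, lock_time]))

    def parse_lock(raw):
        # mirrors the locked getter above: an empty value means "not locked"
        if not raw:
            return [None, None]
        user_id, lock_time = raw.split(':')
        return int(user_id), lock_time

    raw = serialize_lock(42, time.time())
    user_id, lock_time = parse_lock(raw)
    assert user_id == 42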
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                  self.group_name)
-
-    @classmethod
-    def groups_choices(cls, groups=None, show_empty_group=True):
-        from webhelpers.html import literal as _literal
-        if not groups:
-            groups = cls.query().all()
-
-        repo_groups = []
-        if show_empty_group:
-            repo_groups = [('-1', '-- %s --' % _('top level'))]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def _recursive_objects(self, include_repos=True):
-        all_ = []
-
-        def _get_members(root_gr):
-            if include_repos:
-                for r in root_gr.repositories:
-                    all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively returns this group and all its child groups, with the repositories in those groups
-        """
-        return self._recursive_objects()
-
-    def recursive_groups(self):
-        """
-        Returns all child groups of this group, including children of children
-        """
-        return self._recursive_objects(include_repos=False)
-
-    def get_new_name(self, group_name):
-        """
-        Returns the new full group name based on the parent group and the new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
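The removed RepoGroup model above stores nested group paths as a single group_name joined by URL_SEP. A small sketch of how get_new_name builds the new full name from the parent's path; the helper and the assumption that URL_SEP is '/' are illustrative only:

    URL_SEP = '/'  # assumed value of the module-level constant

    def new_full_group_name(parent_full_name, group_name):
        # mirrors RepoGroup.get_new_name: prefix with the parent's path parts
        path_prefix = parent_full_name.split(URL_SEP) if parent_full_name else []
        return URL_SEP.join(path_prefix + [group_name])

    assert new_full_group_name('projects/internal', 'tools') == 'projects/internal/tools'
    assert new_full_group_name(None, 'tools') == 'tools'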
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    PERMS = [
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository group no access')),
-        ('group.read', _('Repository group read access')),
-        ('group.write', _('Repository group write access')),
-        ('group.admin', _('Repository group admin access')),
-
-        ('hg.admin', _('Kallithea Administrator')),
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-        ('hg.register.none', _('Register disabled')),
-        ('hg.register.manual_activate', _('Register new user with Kallithea '
-                                          'with manual activation')),
-
-        ('hg.register.auto_activate', _('Register new user with Kallithea '
-                                        'with auto activation')),
-    ]
-
-    # defines which permissions are more important; the higher the weight, the more important the permission
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1,
-    }
-
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
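PERM_WEIGHTS above ranks permissions by strength, which makes it easy to pick the strongest of several applicable permissions. A tiny illustration; the strongest() helper is hypothetical, only the (partial) weight table comes from the removed code:

    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,
    }

    def strongest(perms):
        # pick the permission with the highest weight
        return max(perms, key=PERM_WEIGHTS.get)

    assert strongest(['repository.read', 'repository.write']) == 'repository.write'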
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<user:%s => %s >' % (self.user, self.repository)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8'}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    # cache_id, not used
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    # cache_key as created by _get_cache_key
-    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # cache_args is usually a repo_name, possibly with _README/_RSS/_ATOM suffix
-    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # instance sets cache_active True when it is caching, other instances set cache_active to False to invalidate
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, cache_args=''):
-        self.cache_key = cache_key
-        self.cache_args = cache_args
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__,
-                                  self.cache_id, self.cache_key)
-
-    def get_prefix(self):
-        """
-        Guess the prefix that might have been used in _get_cache_key to generate self.cache_key.
-        Only used for informational purposes in repo_edit.html.
-        """
-        _split = self.cache_key.split(self.cache_args, 1)
-        if len(_split) == 2:
-            return _split[0]
-        return ''
-
-    @classmethod
-    def _get_cache_key(cls, key):
-        """
-        Wrapper for generating a unique cache key for this instance and "key".
-        """
-        import kallithea
-        prefix = kallithea.CONFIG.get('instance_id', '')
-        return "%s%s" % (prefix, key)
-
-    @classmethod
-    def _get_or_create_inv_obj(cls, key, repo_name, commit=True):
-        inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar()
-        if not inv_obj:
-            try:
-                inv_obj = CacheInvalidation(key, repo_name)
-                Session().add(inv_obj)
-                if commit:
-                    Session().commit()
-            except Exception:
-                log.error(traceback.format_exc())
-                Session().rollback()
-        return inv_obj
-
-    @classmethod
-    def invalidate(cls, key):
-        """
-        Returns an Invalidation object if the given key should be invalidated,
-        None otherwise. `cache_active = False` means that this cache
-        state is not valid and needs to be invalidated
-
-        :param key:
-        """
-        repo_name = key
-        repo_name = remove_suffix(repo_name, '_README')
-        repo_name = remove_suffix(repo_name, '_RSS')
-        repo_name = remove_suffix(repo_name, '_ATOM')
-
-        cache_key = cls._get_cache_key(key)
-        inv = cls._get_or_create_inv_obj(cache_key, repo_name)
-
-        if inv and not inv.cache_active:
-            return inv
-
-    @classmethod
-    def set_invalidate(cls, key=None, repo_name=None):
-        """
-        Mark this cache key for invalidation, either by key or for whole
-        cache sets based on repo_name
-
-        :param key:
-        """
-        invalidated_keys = []
-        if key:
-            assert not repo_name
-            cache_key = cls._get_cache_key(key)
-            inv_objs = Session().query(cls).filter(cls.cache_key == cache_key).all()
-        else:
-            assert repo_name
-            inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-
-        try:
-            for inv_obj in inv_objs:
-                inv_obj.cache_active = False
-                log.debug('marking %s key for invalidation based on key=%s,repo_name=%s',
-                  inv_obj, key, safe_str(repo_name))
-                invalidated_keys.append(inv_obj.cache_key)
-                Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-        return invalidated_keys
-
-    @classmethod
-    def set_valid(cls, key):
-        """
-        Mark this cache key as active and currently cached
-
-        :param key:
-        """
-        inv_obj = cls.query().filter(cls.cache_key == key).scalar()
-        inv_obj.cache_active = True
-        Session().add(inv_obj)
-        Session().commit()
-
-    @classmethod
-    def get_cache_map(cls):
-
-        class cachemapdict(dict):
-
-            def __init__(self, *args, **kwargs):
-                self.fixkey = kwargs.pop('fixkey', False)
-                super(cachemapdict, self).__init__(*args, **kwargs)
-
-            def __getattr__(self, name):
-                cache_key = name
-                if self.fixkey:
-                    cache_key = cls._get_cache_key(name)
-                if cache_key in self.__dict__:
-                    return self.__dict__[cache_key]
-                else:
-                    return self[cache_key]
-
-            def __getitem__(self, name):
-                cache_key = name
-                if self.fixkey:
-                    cache_key = cls._get_cache_key(name)
-                try:
-                    return super(cachemapdict, self).__getitem__(cache_key)
-                except KeyError:
-                    return None
-
-        cache_map = cachemapdict(fixkey=True)
-        for obj in cls.query().all():
-            cache_map[obj.cache_key] = cachemapdict(obj.get_dict())
-        return cache_map
-
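The removed CacheInvalidation model above builds its cache_key by prefixing the key (a repo_name, optionally with a _README/_RSS/_ATOM suffix) with the configured instance_id, and get_prefix recovers that prefix again. A small sketch of the key scheme, with a hypothetical instance_id value:

    INSTANCE_ID = 'instance1'  # hypothetical value of CONFIG['instance_id']

    def make_cache_key(key):
        # mirrors CacheInvalidation._get_cache_key above
        return '%s%s' % (INSTANCE_ID, key)

    def guess_prefix(cache_key, cache_args):
        # mirrors get_prefix above: whatever precedes cache_args in cache_key
        parts = cache_key.split(cache_args, 1)
        return parts[0] if len(parts) == 2 else ''

    key = make_cache_key('group/repo_RSS')
    assert key == 'instance1group/repo_RSS'
    assert guess_prefix(key, 'group/repo') == 'instance1'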
-
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns the users associated with this ChangesetComment, i.e. those
-        who actually commented
-
-        :param cls:
-        :param revision:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
-    @property
-    def org_ref_parts(self):
-        return self.org_ref.split(':')
-
-    @property
-    def other_ref_parts(self):
-        return self.other_ref.split(':')
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    @property
-    def last_review_status(self):
-        return self.statuses[-1].status if self.statuses else ''
-
-    def __json__(self):
-        return dict(
-          revisions=self.revisions
-        )
-
-
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
--- a/kallithea/lib/dbmigrate/schema/db_1_7_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2223 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_1_7_0
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import time
-import logging
-import datetime
-import traceback
-import hashlib
-import collections
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_prefix, time_to_datetime
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-from kallithea import DB_PREFIX
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        # deprecated and left for backward compatibility
-        return cls.get_all()
-
-    @classmethod
-    def get_all(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            return safe_str(self.__unicode__())
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, k='', v=''):
-        self.app_settings_name = k
-        self.app_settings_value = v
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        if self.app_settings_name in ["ldap_active",
-                                      "default_repo_enable_statistics",
-                                      "default_repo_enable_locking",
-                                      "default_repo_private",
-                                      "default_repo_enable_downloads"]:
-            v = str2bool(v)
-        return v
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key)
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_ldap_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('ldap_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
-                                   self.ui_value)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    DEFAULT_USER = 'default'
-
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications assigned to this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    #extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def ip_addresses(self):
-        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
-        return [x.ip_addr for x in ret]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    @property
-    def AuthUser(self):
-        """
-        Returns instance of AuthUser for this user
-        """
-        from kallithea.lib.auth import AuthUser
-        return AuthUser(user_id=self.user_id, api_key=self.api_key,
-                        username=self.username)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                     self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        return q.scalar()
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    @classmethod
-    def get_from_cs_author(cls, author):
-        """
-        Tries to get a User object out of the commit author string
-
-        :param author:
-        """
-        from kallithea.lib.helpers import email, author_name
-        # Valid email in the attribute passed, see if they're in the system
-        _email = email(author)
-        if _email:
-            user = cls.get_by_email(_email, case_insensitive=True)
-            if user:
-                return user
-        # Maybe we can match by username?
-        _author = author_name(author)
-        user = cls.get_by_username(_author, case_insensitive=True)
-        if user:
-            return user
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    @classmethod
-    def get_first_admin(cls):
-        user = User.query().filter(User.admin == True).first()
-        if user is None:
-            raise Exception('Missing administrative account!')
-        return user
-
-    @classmethod
-    def get_default_user(cls, cache=False):
-        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
-        if user is None:
-            raise Exception('Missing default account!')
-        return user
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            active=user.active,
-            admin=user.admin,
-            ldap_dn=user.ldap_dn,
-            last_login=user.last_login,
-            ip_addresses=user.ip_addresses
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check that this email is not the main one
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is present in user table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserIpMap(Base, BaseModel):
-    __tablename__ = 'user_ip_map'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'ip_addr'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    __mapper_args__ = {}
-
-    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    user = relationship('User', lazy='joined')
-
-    @classmethod
-    def _get_ip_range(cls, ip_addr):
-        from kallithea.lib import ipaddr
-        net = ipaddr.IPNetwork(address=ip_addr)
-        return [str(net.network), str(net.broadcast)]
-
-    def __json__(self):
-        return dict(
-          ip_addr=self.ip_addr,
-          ip_range=self._get_ip_range(self.ip_addr)
-        )
-
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.repository_name,
-                                      self.action)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    user_user_group_to_perm = relationship('UserUserGroupToPerm ', cascade='all')
-    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
-
-    user = relationship('User')
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.users_group_id,
-                                      self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, users_group_id, cache=False):
-        users_group = cls.query()
-        if cache:
-            users_group = users_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % users_group_id))
-        return users_group.get(users_group_id)
-
-    def get_api_data(self):
-        users_group = self
-
-        data = dict(
-            users_group_id=users_group.users_group_id,
-            group_name=users_group.users_group_name,
-            active=users_group.users_group_active,
-        )
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class RepositoryField(Base, BaseModel):
-    __tablename__ = 'repositories_fields'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
-
-    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-    field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None))
-    field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_type = Column("field_type", String(256), nullable=False, unique=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repository = relationship('Repository')
-
-    @property
-    def field_key_prefixed(self):
-        return 'ex_%s' % self.field_key
-
-    @classmethod
-    def un_prefix_key(cls, key):
-        if key.startswith(cls.PREFIX):
-            return key[len(cls.PREFIX):]
-        return key
-
-    @classmethod
-    def get_by_key_name(cls, key, repo):
-        row = cls.query() \
-                .filter(cls.repository == repo) \
-                .filter(cls.field_key == key).scalar()
-        return row
-
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-    extra_fields = relationship('RepositoryField',
-                                cascade="all, delete, delete-orphan")
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
-
-    @hybrid_property
-    def locked(self):
-        # always should return [user_id, timelocked]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
-    @hybrid_property
-    def changeset_cache(self):
-        from kallithea.lib.vcs.backends.base import EmptyChangeset
-        dummy = EmptyChangeset().__json__()
-        if not self._changeset_cache:
-            return dummy
-        try:
-            return json.loads(self._changeset_cache)
-        except TypeError:
-            return dummy
-
-    @changeset_cache.setter
-    def changeset_cache(self, val):
-        try:
-            self._changeset_cache = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def normalize_repo_name(cls, repo_name):
-        """
-        Normalizes os specific repo_name to the format internally stored inside
-        database using URL_SEP
-
-        :param cls:
-        :param repo_name:
-        """
-        return cls.url_sep().join(repo_name.split(os.sep))
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name, self.repo_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*map(safe_unicode, p))
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        returns new full repository name based on assigned group and new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None,
-            enable_statistics=repo.enable_statistics,
-            enable_locking=repo.enable_locking,
-            enable_downloads=repo.enable_downloads,
-            last_changeset=repo.changeset_cache,
-            locked_by=User.get(self.locked[0]).get_api_data() \
-                if self.locked[0] else None,
-            locked_date=time_to_datetime(self.locked[1]) \
-                if self.locked[1] else None
-        )
-        rc_config = Setting.get_app_settings()
-        repository_fields = str2bool(rc_config.get('repository_fields'))
-        if repository_fields:
-            for f in self.extra_fields:
-                data[f.field_key_prefixed] = f.field_value
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id, lock_time=None):
-        if not lock_time:
-            lock_time = time.time()
-        repo.locked = [user_id, lock_time]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def getlock(cls, repo):
-        return repo.locked
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    def clone_url(self, **override):
-        import kallithea.lib.helpers as h
-        from urlparse import urlparse
-        import urllib
-        parsed_url = urlparse(h.canonical_url('home'))
-        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
-        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
-        args = {
-           'user': '',
-           'pass': '',
-           'scheme': parsed_url.scheme,
-           'netloc': parsed_url.netloc,
-           'prefix': decoded_path,
-           'path': self.repo_name
-        }
-
-        args.update(override)
-        return default_clone_uri % args
-
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns landing changeset, or if that doesn't exist returns the tip
-        """
-        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
-        return cs
-
-    def update_changeset_cache(self, cs_cache=None):
-        """
-        Update cache of last changeset for repository, keys should be::
-
-            short_id
-            raw_id
-            revision
-            message
-            date
-            author
-
-        :param cs_cache:
-        """
-        from kallithea.lib.vcs.backends.base import BaseChangeset
-        if cs_cache is None:
-            cs_cache = EmptyChangeset()
-            # use no-cache version here
-            scm_repo = self.scm_instance_no_cache()
-            if scm_repo:
-                cs_cache = scm_repo.get_changeset()
-
-        if isinstance(cs_cache, BaseChangeset):
-            cs_cache = cs_cache.__json__()
-
-        if (cs_cache != self.changeset_cache or not self.changeset_cache):
-            _default = datetime.datetime.fromtimestamp(0)
-            last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
-            self.changeset_cache = cs_cache
-            Session().add(self)
-            Session().commit()
-        else:
-            log.debug('Skipping repo:%s already with latest changes',
-                      self.repo_name)
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = collections.defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have an open new pull request without a status?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    def _repo_size(self):
-        from kallithea.lib import helpers as h
-        log.debug('calculating repository size...')
-        return h.format_byte_size(self.scm_instance.size)
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    def set_invalidate(self):
-        """
-        Mark caches of this repo as invalid.
-        """
-        CacheInvalidation.set_invalidate(self.repo_name)
-
-    def scm_instance_no_cache(self):
-        return self.__get_instance()
-
-    @property
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, valid_cache_keys=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-
-        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
-        if not valid:
-            log.debug('Cache for %s invalidated, getting new object', rn)
-            region_invalidate(_c, None, rn)
-        else:
-            log.debug('Getting obj for %s from cache', rn)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository from %s',
-                      alias, repo_full_path)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the db but not on the '
-                      'filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-    user = relationship('User')
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                      self.group_name)
-
-    @classmethod
-    def groups_choices(cls, groups=None, show_empty_group=True):
-        from webhelpers.html import literal as _literal
-        if not groups:
-            groups = cls.query().all()
-
-        repo_groups = []
-        if show_empty_group:
-            repo_groups = [('-1', '-- %s --' % _('top level'))]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def _recursive_objects(self, include_repos=True):
-        all_ = []
-
-        def _get_members(root_gr):
-            if include_repos:
-                for r in root_gr.repositories:
-                    all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively returns all groups, including the repositories in those groups
-        """
-        return self._recursive_objects()
-
-    def recursive_groups(self):
-        """
-        Returns all child groups of this group, including children of children
-        """
-        return self._recursive_objects(include_repos=False)
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-
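For illustration, here is a minimal standalone sketch of the path composition done by get_new_name above: the new full name is the parent's path segments plus the new leaf name, joined with the URL separator. The plain function and sample names below are hypothetical stand-ins, not the actual model code.

URL_SEP = '/'

def compose_group_name(parent_full_path, new_leaf_name):
    # parent_full_path is None or '' for top-level groups
    prefix = parent_full_path.split(URL_SEP) if parent_full_path else []
    return URL_SEP.join(prefix + [new_leaf_name])

assert compose_group_name(None, 'projects') == 'projects'
assert compose_group_name('projects/backend', 'api') == 'projects/backend/api'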
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    PERMS = [
-        ('hg.admin', _('Kallithea Administrator')),
-
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository group no access')),
-        ('group.read', _('Repository group read access')),
-        ('group.write', _('Repository group write access')),
-        ('group.admin', _('Repository group admin access')),
-
-        ('usergroup.none', _('User group no access')),
-        ('usergroup.read', _('User group read access')),
-        ('usergroup.write', _('User group write access')),
-        ('usergroup.admin', _('User group admin access')),
-
-        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
-        ('hg.repogroup.create.true', _('Repository Group creation enabled')),
-
-        ('hg.usergroup.create.false', _('User Group creation disabled')),
-        ('hg.usergroup.create.true', _('User Group creation enabled')),
-
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-
-        ('hg.register.none', _('Registration disabled')),
-        ('hg.register.manual_activate', _('User Registration with manual account activation')),
-        ('hg.register.auto_activate', _('User Registration with automatic account activation')),
-
-        ('hg.extern_activate.manual', _('Manual activation of external account')),
-        ('hg.extern_activate.auto', _('Automatic activation of external account')),
-
-    ]
-
-    # definition of system default permissions for the DEFAULT user
-    DEFAULT_USER_PERMISSIONS = [
-        'repository.read',
-        'group.read',
-        'usergroup.read',
-        'hg.create.repository',
-        'hg.fork.repository',
-        'hg.register.manual_activate',
-        'hg.extern_activate.auto',
-    ]
-
-    # Weight defines which permissions are more important:
-    # the higher the number, the more important the permission.
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'usergroup.none': 0,
-        'usergroup.read': 1,
-        'usergroup.write': 3,
-        'usergroup.admin': 4,
-        'hg.repogroup.create.false': 0,
-        'hg.repogroup.create.true': 1,
-
-        'hg.usergroup.create.false': 0,
-        'hg.usergroup.create.true': 1,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1
-    }
-
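To show how a weight table like PERM_WEIGHTS can be used, here is a small sketch (not the code Kallithea actually uses for permission resolution) that picks the strongest of several granted permission names by comparing weights, higher weight winning:

# Trimmed copy of the weight table above, for the sketch only.
PERM_WEIGHTS = {
    'repository.none': 0,
    'repository.read': 1,
    'repository.write': 3,
    'repository.admin': 4,
}

def strongest_permission(perm_names):
    # Pick the permission with the highest weight.
    return max(perm_names, key=lambda p: PERM_WEIGHTS[p])

assert strongest_permission(['repository.read', 'repository.write']) == 'repository.write'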
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_user_group_perms(cls, default_user_id):
-        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
-         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
-         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserUserGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.repository)
-
-
-class UserUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    user_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, user_group, permission):
-        n = cls()
-        n.user = user
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.user_group)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.permission)
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_group_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
-        CheckConstraint('target_user_group_id != user_group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
-    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, target_user_group, user_group, permission):
-        n = cls()
-        n.target_user_group = target_user_group
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8'}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    # cache_id, not used
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    # cache_key as created by _get_cache_key
-    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # cache_args is a repo_name
-    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # instance sets cache_active True when it is caching,
-    # other instances set cache_active to False to indicate that this cache is invalid
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, repo_name=''):
-        self.cache_key = cache_key
-        self.cache_args = repo_name
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (self.__class__.__name__,
-                            self.cache_id, self.cache_key, self.cache_active)
-
-    def _cache_key_partition(self):
-        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
-        return prefix, repo_name, suffix
-
-    def get_prefix(self):
-        """
-        get prefix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[0]
-
-    def get_suffix(self):
-        """
-        get suffix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[2]
-
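get_prefix() and get_suffix() simply partition the stored cache_key around the repo name kept in cache_args; a quick illustration with made-up values:

# Illustration only: the instance prefix and key suffix below are made up.
cache_key = 'instance1repo/name_README'
cache_args = 'repo/name'
prefix, repo_name, suffix = cache_key.partition(cache_args)
assert (prefix, repo_name, suffix) == ('instance1', 'repo/name', '_README')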
-    @classmethod
-    def clear_cache(cls):
-        """
-        Delete all cache keys from database.
-        Should only be run when all instances are down and all entries thus stale.
-        """
-        cls.query().delete()
-        Session().commit()
-
-    @classmethod
-    def _get_cache_key(cls, key):
-        """
-        Wrapper for generating a unique cache key for this instance and "key".
-        key must/will start with a repo_name, which will be stored in .cache_args.
-        """
-        import kallithea
-        prefix = kallithea.CONFIG.get('instance_id', '')
-        return "%s%s" % (prefix, key)
-
-    @classmethod
-    def set_invalidate(cls, repo_name):
-        """
-        Mark all caches of a repo as invalid in the database.
-        """
-        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-
-        try:
-            for inv_obj in inv_objs:
-                log.debug('marking %s key for invalidation based on repo_name=%s',
-                          inv_obj, safe_str(repo_name))
-                inv_obj.cache_active = False
-                Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
-        """
-        Mark this cache key as active and currently cached.
-        Return True if the existing cache registration still was valid.
-        Return False to indicate that it had been invalidated and caches should be refreshed.
-        """
-
-        key = (repo_name + '_' + kind) if kind else repo_name
-        cache_key = cls._get_cache_key(key)
-
-        if valid_cache_keys and cache_key in valid_cache_keys:
-            return True
-
-        try:
-            inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
-            if not inv_obj:
-                inv_obj = CacheInvalidation(cache_key, repo_name)
-            was_valid = inv_obj.cache_active
-            inv_obj.cache_active = True
-            Session().add(inv_obj)
-            Session().commit()
-            return was_valid
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-            return False
-
-    @classmethod
-    def get_valid_cache_keys(cls):
-        """
-        Return opaque object with information of which caches still are valid
-        and can be used without checking for invalidation.
-        """
-        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
-
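The invalidation scheme above boils down to a per-key 'active' flag: readers call test_and_set_valid and rebuild their cache when it returns False, while writers call set_invalidate after changing a repository. A minimal in-memory sketch of that flag protocol (no SQLAlchemy, illustrative key names only):

# cache_key -> bool ("is the cached value still valid?")
_cache_active = {}

def test_and_set_valid(cache_key):
    # Mark the key as cached; report whether it was already valid.
    was_valid = _cache_active.get(cache_key, False)
    _cache_active[cache_key] = True
    return was_valid

def set_invalidate(cache_key):
    # Mark the key as stale so the next reader refreshes its cache.
    _cache_active[cache_key] = False

assert test_and_set_valid('repo1_readme') is False  # first use: build the cache
assert test_and_set_valid('repo1_readme') is True   # still valid: reuse it
set_invalidate('repo1_readme')                      # e.g. after a push
assert test_and_set_valid('repo1_readme') is False  # must rebuild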
-
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns the users associated with this ChangesetComment, i.e. those
-        who actually commented
-
-        :param cls:
-        :param revision:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
-    @property
-    def org_ref_parts(self):
-        return self.org_ref.split(':')
-
-    @property
-    def other_ref_parts(self):
-        return self.other_ref.split(':')
-
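The revisions hybrid property stores the revision list as a single colon-joined string (hence the 20500-character column for up to 500 revisions), and org_ref_parts/other_ref_parts split the colon-separated ref strings the same way. A standalone round-trip sketch with made-up hashes (the exact ref format shown is only an assumption):

def pack_revisions(revs):
    # What the setter does before storing into the 'revisions' column.
    return ':'.join(revs)

def unpack_revisions(stored):
    # What the getter does when reading the column back.
    return stored.split(':')

stored = pack_revisions(['abc123', 'def456'])
assert stored == 'abc123:def456'
assert unpack_revisions(stored) == ['abc123', 'def456']

# Refs are colon-separated too, e.g. a 'branch:default' style value:
assert 'branch:default'.split(':') == ['branch', 'default']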
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    @property
-    def last_review_status(self):
-        return self.statuses[-1].status if self.statuses else ''
-
-    def __json__(self):
-        return dict(
-          revisions=self.revisions
-        )
-
-
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class Gist(Base, BaseModel):
-    __tablename__ = 'gists'
-    __table_args__ = (
-        Index('g_gist_access_id_idx', 'gist_access_id'),
-        Index('g_created_on_idx', 'created_on'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'}
-    )
-    GIST_PUBLIC = u'public'
-    GIST_PRIVATE = u'private'
-
-    gist_id = Column('gist_id', Integer(), primary_key=True)
-    gist_access_id = Column('gist_access_id', Unicode(250))
-    gist_description = Column('gist_description', UnicodeText(1024))
-    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
-    gist_expires = Column('gist_expires', Float(), nullable=False)
-    gist_type = Column('gist_type', Unicode(128), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    owner = relationship('User')
-
-    @classmethod
-    def get_or_404(cls, id_):
-        res = cls.query().filter(cls.gist_access_id == id_).scalar()
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def get_by_access_id(cls, gist_access_id):
-        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
-
-    def gist_url(self):
-        import kallithea
-        alias_url = kallithea.CONFIG.get('gist_alias_url')
-        if alias_url:
-            return alias_url.replace('{gistid}', self.gist_access_id)
-
-        import kallithea.lib.helpers as h
-        return h.canonical_url('gist', gist_id=self.gist_access_id)
-
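When gist_alias_url is configured, gist_url() just substitutes the access id into the '{gistid}' placeholder; a quick sketch with a made-up URL:

alias_url = 'https://gist.example.com/{gistid}'   # hypothetical configured value
gist_access_id = '5'
assert alias_url.replace('{gistid}', gist_access_id) == 'https://gist.example.com/5'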
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all gists are stored
-
-        :param cls:
-        """
-        from kallithea.model.gist import GIST_STORE_LOC
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == URL_SEP)
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
-
-    def get_api_data(self):
-        """
-        Common function for generating gist related data for API
-        """
-        gist = self
-        data = dict(
-            gist_id=gist.gist_id,
-            type=gist.gist_type,
-            access_id=gist.gist_access_id,
-            description=gist.gist_description,
-            url=gist.gist_url(),
-            expires=gist.gist_expires,
-            created_on=gist.created_on,
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-        )
-        data.update(self.get_api_data())
-        return data
-    ## SCM functions
-
-    @property
-    def scm_instance(self):
-        from kallithea.lib.vcs import get_repo
-        base_path = self.base_path()
-        return get_repo(os.path.join(*map(safe_str,
-                                          [base_path, self.gist_access_id])))
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8'},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
--- a/kallithea/lib/dbmigrate/schema/db_1_8_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2270 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_1_8_0
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import time
-import logging
-import datetime
-import traceback
-import hashlib
-import collections
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_prefix, time_to_datetime
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-from kallithea import DB_PREFIX
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return a dict with keys and values corresponding
-        to this model's data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
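A standalone sketch of what get_dict() produces: the mapped column values first, then any extra keys from __json__() merged on top. The toy class below is illustrative and not a real mapped model.

class FakeModel(object):
    _columns = ['user_id', 'username']   # stand-in for class_mapper(cls).c.keys()

    def __init__(self):
        self.user_id = 1
        self.username = 'demo'

    def __json__(self):
        return {'full_name': 'Demo User'}

    def get_dict(self):
        d = dict((k, getattr(self, k)) for k in self._columns)
        json_attr = getattr(self, '__json__', None)
        if callable(json_attr):
            d.update(json_attr())
        return d

assert FakeModel().get_dict() == {'user_id': 1, 'username': 'demo',
                                  'full_name': 'Demo User'}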
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        # deprecated and left for backward compatibility
-        return cls.get_all()
-
-    @classmethod
-    def get_all(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            return safe_str(self.__unicode__())
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX +  'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_type = Column("app_settings_type", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, key='', val='', type='unicode'):
-        self.app_settings_name = key
-        self.app_settings_value = val
-        self.app_settings_type = type
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        if self.app_settings_name in ["ldap_active",
-                                      "default_repo_enable_statistics",
-                                      "default_repo_enable_locking",
-                                      "default_repo_private",
-                                      "default_repo_enable_downloads"]:
-            v = str2bool(v)
-        return v
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
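The getter above coerces a handful of known boolean settings from their stored string form. A standalone sketch of that coercion; the str2bool below is a simplified stand-in for kallithea.lib.utils2.str2bool, and the accepted spellings are an assumption:

BOOL_SETTINGS = set(['ldap_active', 'default_repo_private'])

def str2bool(v):
    # Simplified stand-in, not the real kallithea.lib.utils2.str2bool.
    return str(v).strip().lower() in ('true', 'yes', 'on', '1', 't', 'y')

def read_setting(name, stored_value):
    # Settings are stored as strings; boolean-like keys are coerced on read.
    if name in BOOL_SETTINGS:
        return str2bool(stored_value)
    return stored_value

assert read_setting('ldap_active', 'False') is False
assert read_setting('title', 'Kallithea') == 'Kallithea'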
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key)
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_ldap_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('ldap_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_server_info(cls):
-        import pkg_resources
-        import platform
-        import kallithea
-        from kallithea.lib.utils import check_git_version
-        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
-        mods += [('git', str(check_git_version()))]
-        info = {
-            'modules': sorted(mods, key=lambda k: k[0].lower()),
-            'py_version': platform.python_version(),
-            'platform': platform.platform(),
-            'kallithea_version': kallithea.__version__
-        }
-        return info
-
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    # def __init__(self, section='', key='', value=''):
-    #     self.ui_section = section
-    #     self.ui_key = key
-    #     self.ui_value = value
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
-                                   self.ui_value)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    DEFAULT_USER = 'default'
-
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    # notifications assigned to this user
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications created by this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    # extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def ip_addresses(self):
-        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
-        return [x.ip_addr for x in ret]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    @property
-    def AuthUser(self):
-        """
-        Returns instance of AuthUser for this user
-        """
-        from kallithea.lib.auth import AuthUser
-        return AuthUser(user_id=self.user_id, api_key=self.api_key,
-                        username=self.username)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                     self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        return q.scalar()
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    @classmethod
-    def get_from_cs_author(cls, author):
-        """
-        Tries to get a User object out of a commit author string
-
-        :param author:
-        """
-        from kallithea.lib.helpers import email, author_name
-        # If the author string contains a valid email, see if it belongs to a user in the system
-        _email = email(author)
-        if _email:
-            user = cls.get_by_email(_email, case_insensitive=True)
-            if user:
-                return user
-        # Maybe we can match by username?
-        _author = author_name(author)
-        user = cls.get_by_username(_author, case_insensitive=True)
-        if user:
-            return user
-
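get_from_cs_author first tries the e-mail embedded in the author string and then falls back to the author name; a small illustration (the author string is hypothetical):

    # matched by e-mail if 'jane@example.com' is known, otherwise by username 'Jane Doe'
    user = User.get_from_cs_author('Jane Doe <jane@example.com>')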
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    @classmethod
-    def get_first_admin(cls):
-        user = User.query().filter(User.admin == True).first()
-        if user is None:
-            raise Exception('Missing administrative account!')
-        return user
-
-    @classmethod
-    def get_default_user(cls, cache=False):
-        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
-        if user is None:
-            raise Exception('Missing default account!')
-        return user
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            active=user.active,
-            admin=user.admin,
-            ldap_dn=user.ldap_dn,
-            last_login=user.last_login,
-            ip_addresses=user.ip_addresses
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check that this email is not already someone's main email
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is already present in the user table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
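The hybrid property above lowercases extra e-mail addresses on assignment, while the validator rejects addresses that already exist as a user's main e-mail. A sketch, assuming a configured Session and a hypothetical user object:

    m = UserEmailMap()
    m.user_id = user.user_id
    m.email = 'Extra.Address@Example.COM'   # stored as 'extra.address@example.com'
    Session().add(m)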
-
-
-class UserIpMap(Base, BaseModel):
-    __tablename__ = 'user_ip_map'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'ip_addr'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    user = relationship('User', lazy='joined')
-
-    @classmethod
-    def _get_ip_range(cls, ip_addr):
-        from kallithea.lib import ipaddr
-        net = ipaddr.IPNetwork(address=ip_addr)
-        return [str(net.network), str(net.broadcast)]
-
-    def __json__(self):
-        return dict(
-          ip_addr=self.ip_addr,
-          ip_range=self._get_ip_range(self.ip_addr)
-        )
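_get_ip_range uses the bundled ipaddr module to expand a single address or CIDR block into its [network, broadcast] bounds, which __json__ exposes alongside the raw value; illustrative calls:

    UserIpMap._get_ip_range('192.168.1.0/24')   # -> ['192.168.1.0', '192.168.1.255']
    UserIpMap._get_ip_range('10.0.0.1')         # a bare address gives ['10.0.0.1', '10.0.0.1']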
-
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.repository_name,
-                                      self.action)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
-    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
-
-    user = relationship('User')
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.users_group_id,
-                                      self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, user_group_id, cache=False):
-        user_group = cls.query()
-        if cache:
-            user_group = user_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % user_group_id))
-        return user_group.get(user_group_id)
-
-    def get_api_data(self, with_members=True):
-        user_group = self
-
-        data = dict(
-            users_group_id=user_group.users_group_id,
-            group_name=user_group.users_group_name,
-            active=user_group.users_group_active,
-            owner=user_group.user.username,
-        )
-        if with_members:
-            members = []
-            for user in user_group.members:
-                user = user.user
-                members.append(user.get_api_data())
-            data['members'] = members
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class RepositoryField(Base, BaseModel):
-    __tablename__ = 'repositories_fields'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'field_key'),  # only one value per field key per repository
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
-
-    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-    field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None))
-    field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_type = Column("field_type", String(256), nullable=False, unique=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repository = relationship('Repository')
-
-    @property
-    def field_key_prefixed(self):
-        return '%s%s' % (self.PREFIX, self.field_key)
-
-    @classmethod
-    def un_prefix_key(cls, key):
-        if key.startswith(cls.PREFIX):
-            return key[len(cls.PREFIX):]
-        return key
-
-    @classmethod
-    def get_by_key_name(cls, key, repo):
-        row = cls.query() \
-                .filter(cls.repository == repo) \
-                .filter(cls.field_key == key).scalar()
-        return row
-
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-    extra_fields = relationship('RepositoryField',
-                                cascade="all, delete, delete-orphan")
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
-
-    @hybrid_property
-    def locked(self):
-        # always returns a two-element list: [user_id, timelocked]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return [int(_lock_info[0]), _lock_info[1]]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
-    @hybrid_property
-    def changeset_cache(self):
-        from kallithea.lib.vcs.backends.base import EmptyChangeset
-        dummy = EmptyChangeset().__json__()
-        if not self._changeset_cache:
-            return dummy
-        try:
-            return json.loads(self._changeset_cache)
-        except TypeError:
-            return dummy
-
-    @changeset_cache.setter
-    def changeset_cache(self, val):
-        try:
-            self._changeset_cache = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def normalize_repo_name(cls, repo_name):
-        """
-        Normalizes an OS-specific repo_name to the format stored internally in
-        the database, using URL_SEP
-
-        :param cls:
-        :param repo_name:
-        """
-        return cls.url_sep().join(repo_name.split(os.sep))
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name, self.repo_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*map(safe_unicode, p))
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        Returns the new full repository name based on the assigned group and the new name
-
-        :param repo_name: new (unqualified) repository name
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        Returns True if the given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None,
-            enable_statistics=repo.enable_statistics,
-            enable_locking=repo.enable_locking,
-            enable_downloads=repo.enable_downloads,
-            last_changeset=repo.changeset_cache,
-            locked_by=User.get(self.locked[0]).get_api_data() \
-                if self.locked[0] else None,
-            locked_date=time_to_datetime(self.locked[1]) \
-                if self.locked[1] else None
-        )
-        rc_config = Setting.get_app_settings()
-        repository_fields = str2bool(rc_config.get('repository_fields'))
-        if repository_fields:
-            for f in self.extra_fields:
-                data[f.field_key_prefixed] = f.field_value
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id, lock_time=None):
-        if not lock_time:
-            lock_time = time.time()
-        repo.locked = [user_id, lock_time]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def getlock(cls, repo):
-        return repo.locked
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    def clone_url(self, **override):
-        import kallithea.lib.helpers as h
-        from urlparse import urlparse
-        import urllib
-        parsed_url = urlparse(h.canonical_url('home'))
-        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
-        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
-        args = {
-           'user': '',
-           'pass': '',
-           'scheme': parsed_url.scheme,
-           'netloc': parsed_url.netloc,
-           'prefix': decoded_path,
-           'path': self.repo_name
-        }
-
-        args.update(override)
-        return default_clone_uri % args
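clone_url derives the clone URI from the canonical home URL plus the repository name; overrides are substituted verbatim into the template, so the user/pass placeholders must carry their own separators. A hedged sketch (host is illustrative):

    # roughly: scheme + '://' + user + pass + netloc + prefix + repo_name
    repo.clone_url()                # e.g. 'https://example.com/group/myrepo'
    repo.clone_url(user='jane@')    # inject a username; note the '@' is not added automatically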
-
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns the landing changeset, or the tip if the landing changeset doesn't exist
-        """
-        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
-        return cs
-
-    def update_changeset_cache(self, cs_cache=None):
-        """
-        Update cache of last changeset for repository, keys should be::
-
-            short_id
-            raw_id
-            revision
-            message
-            date
-            author
-
-        :param cs_cache:
-        """
-        from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
-        if cs_cache is None:
-            cs_cache = EmptyChangeset()
-            # use no-cache version here
-            scm_repo = self.scm_instance_no_cache()
-            if scm_repo:
-                cs_cache = scm_repo.get_changeset()
-
-        if isinstance(cs_cache, BaseChangeset):
-            cs_cache = cs_cache.__json__()
-
-        if (cs_cache != self.changeset_cache or not self.changeset_cache):
-            _default = datetime.datetime.fromtimestamp(0)
-            last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
-            self.changeset_cache = cs_cache
-            Session().add(self)
-            Session().commit()
-        else:
-            log.debug('Skipping repo:%s already with latest changes',
-                      self.repo_name)
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = collections.defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have an open pull request without a status yet?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    def _repo_size(self):
-        from kallithea.lib import helpers as h
-        log.debug('calculating repository size...')
-        return h.format_byte_size(self.scm_instance.size)
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    def set_invalidate(self):
-        """
-        Mark caches of this repo as invalid.
-        """
-        CacheInvalidation.set_invalidate(self.repo_name)
-
-    def scm_instance_no_cache(self):
-        return self.__get_instance()
-
-    @property
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, valid_cache_keys=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-
-        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
-        if not valid:
-            log.debug('Cache for %s invalidated, getting new object', rn)
-            region_invalidate(_c, None, rn)
-        else:
-            log.debug('Getting obj for %s from cache', rn)
-        return _c(rn)
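scm_instance_cached couples a Beaker 'long_term' cache region with the CacheInvalidation table: when the database says the key was invalidated, the cached region entry is dropped and rebuilt. Callers iterating many repositories can prefetch the valid keys once; a sketch using the CacheInvalidation classmethods defined later in this module:

    valid = CacheInvalidation.get_valid_cache_keys()
    for repo in Repository.query().all():
        scm = repo.scm_instance_cached(valid_cache_keys=valid)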
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository from %s',
-                      alias, repo_full_path)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the database but not on '
-                      'the filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-    user = relationship('User')
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                      self.group_name)
-
-    @classmethod
-    def groups_choices(cls, groups=None, show_empty_group=True):
-        from webhelpers.html import literal as _literal
-        if not groups:
-            groups = cls.query().all()
-
-        repo_groups = []
-        if show_empty_group:
-            repo_groups = [('-1', u'-- %s --' % _('top level'))]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def _recursive_objects(self, include_repos=True):
-        all_ = []
-
-        def _get_members(root_gr):
-            if include_repos:
-                for r in root_gr.repositories:
-                    all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively returns this group and all child groups, with the repositories in those groups
-        """
-        return self._recursive_objects()
-
-    def recursive_groups(self):
-        """
-        Returns all child groups of this group, including children of children
-        """
-        return self._recursive_objects(include_repos=False)
-
-    def get_new_name(self, group_name):
-        """
-        Returns the new full group name based on the parent group and the new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-    def get_api_data(self):
-        """
-        Common function for generating api data
-
-        """
-        group = self
-        data = dict(
-            group_id=group.group_id,
-            group_name=group.group_name,
-            group_description=group.group_description,
-            parent_group=group.parent_group.group_name if group.parent_group else None,
-            repositories=[x.repo_name for x in group.repositories],
-            owner=group.user.username
-        )
-        return data
-
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PERMS = [
-        ('hg.admin', _('Kallithea Administrator')),
-
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository group no access')),
-        ('group.read', _('Repository group read access')),
-        ('group.write', _('Repository group write access')),
-        ('group.admin', _('Repository group admin access')),
-
-        ('usergroup.none', _('User group no access')),
-        ('usergroup.read', _('User group read access')),
-        ('usergroup.write', _('User group write access')),
-        ('usergroup.admin', _('User group admin access')),
-
-        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
-        ('hg.repogroup.create.true', _('Repository Group creation enabled')),
-
-        ('hg.usergroup.create.false', _('User Group creation disabled')),
-        ('hg.usergroup.create.true', _('User Group creation enabled')),
-
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-
-        ('hg.register.none', _('Registration disabled')),
-        ('hg.register.manual_activate', _('User Registration with manual account activation')),
-        ('hg.register.auto_activate', _('User Registration with automatic account activation')),
-
-        ('hg.extern_activate.manual', _('Manual activation of external account')),
-        ('hg.extern_activate.auto', _('Automatic activation of external account')),
-
-    ]
-
-    # definition of system default permissions for the DEFAULT user
-    DEFAULT_USER_PERMISSIONS = [
-        'repository.read',
-        'group.read',
-        'usergroup.read',
-        'hg.create.repository',
-        'hg.fork.repository',
-        'hg.register.manual_activate',
-        'hg.extern_activate.auto',
-    ]
-
-    # Weights define which permissions are more important:
-    # the higher the number, the more important the permission.
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'usergroup.none': 0,
-        'usergroup.read': 1,
-        'usergroup.write': 3,
-        'usergroup.admin': 4,
-        'hg.repogroup.create.false': 0,
-        'hg.repogroup.create.true': 1,
-
-        'hg.usergroup.create.false': 0,
-        'hg.usergroup.create.true': 1,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1
-    }
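A sketch (not the actual permission-resolution code elsewhere in Kallithea) of how such weights can pick the strongest of several granted permissions:

    granted = ['repository.read', 'repository.write']
    strongest = max(granted, key=Permission.PERM_WEIGHTS.get)   # -> 'repository.write'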
-
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_user_group_perms(cls, default_user_id):
-        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
-         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
-         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserUserGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.repository)
-
-
-class UserUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    user_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, user_group, permission):
-        n = cls()
-        n.user = user
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.user_group)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.permission)
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_group_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
-        CheckConstraint('target_user_group_id != user_group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
-    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, target_user_group, user_group, permission):
-        n = cls()
-        n.target_user_group = target_user_group
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    # cache_id, not used
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    # cache_key as created by _get_cache_key
-    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # cache_args is a repo_name
-    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # instance sets cache_active True when it is caching,
-    # other instances set cache_active to False to indicate that this cache is invalid
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, repo_name=''):
-        self.cache_key = cache_key
-        self.cache_args = repo_name
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (self.__class__.__name__,
-                            self.cache_id, self.cache_key, self.cache_active)
-
-    def _cache_key_partition(self):
-        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
-        return prefix, repo_name, suffix
-
-    def get_prefix(self):
-        """
-        get prefix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[0]
-
-    def get_suffix(self):
-        """
-        get suffix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[2]
-
-    @classmethod
-    def clear_cache(cls):
-        """
-        Delete all cache keys from database.
-        Should only be run when all instances are down and all entries thus stale.
-        """
-        cls.query().delete()
-        Session().commit()
-
-    @classmethod
-    def _get_cache_key(cls, key):
-        """
-        Wrapper for generating a unique cache key for this instance and "key".
-        key must (and will) start with a repo_name, which is stored in .cache_args.
-        """
-        import kallithea
-        prefix = kallithea.CONFIG.get('instance_id', '')
-        return "%s%s" % (prefix, key)
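_get_cache_key prefixes the key with the configured instance_id, so multiple instances sharing one database keep separate cache rows; for example (instance_id and key are illustrative):

    # with kallithea.CONFIG['instance_id'] = 'instance-1':
    CacheInvalidation._get_cache_key('group/myrepo_README')   # -> 'instance-1group/myrepo_README'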
-
-    @classmethod
-    def set_invalidate(cls, repo_name):
-        """
-        Mark all caches of a repo as invalid in the database.
-        """
-        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-
-        try:
-            for inv_obj in inv_objs:
-                log.debug('marking %s key for invalidation based on repo_name=%s',
-                          inv_obj, safe_str(repo_name))
-                inv_obj.cache_active = False
-                Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
-        """
-        Mark this cache key as active and currently cached.
-        Return True if the existing cache registration still was valid.
-        Return False to indicate that it had been invalidated and caches should be refreshed.
-        """
-
-        key = (repo_name + '_' + kind) if kind else repo_name
-        cache_key = cls._get_cache_key(key)
-
-        if valid_cache_keys and cache_key in valid_cache_keys:
-            return True
-
-        try:
-            inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
-            if not inv_obj:
-                inv_obj = CacheInvalidation(cache_key, repo_name)
-            was_valid = inv_obj.cache_active
-            inv_obj.cache_active = True
-            Session().add(inv_obj)
-            Session().commit()
-            return was_valid
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-            return False
-
-    @classmethod
-    def get_valid_cache_keys(cls):
-        """
-        Return an opaque object with information about which caches are still
-        valid and can be used without checking for invalidation.
-        """
-        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
-
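-
-# Illustrative sketch only: a hypothetical, unused helper showing how the
-# invalidation protocol above is meant to be used by a cache consumer.
-def _example_cache_invalidation_usage(repo_name):
-    valid = CacheInvalidation.test_and_set_valid(repo_name, kind='readme')
-    if not valid:
-        pass  # the cached data is stale and must be rebuilt by the caller
-    # after the repository changes, any instance marks its caches stale again:
-    CacheInvalidation.set_invalidate(repo_name)
-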
-
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns the users associated with this ChangesetComment, i.e. those
-        who actually commented.
-
-        :param revision:
-        :param pull_request_id:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    # values for .status
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
-    @property
-    def org_ref_parts(self):
-        return self.org_ref.split(':')
-
-    @property
-    def other_ref_parts(self):
-        return self.other_ref.split(':')
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    @property
-    def last_review_status(self):
-        return self.statuses[-1].status if self.statuses else ''
-
-    def __json__(self):
-        return dict(
-            revisions=self.revisions
-        )
-
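-
-# Illustrative sketch only: a hypothetical, unused helper showing how the
-# `revisions` hybrid property round-trips through the colon-joined text column.
-def _example_pull_request_revisions():
-    pr = PullRequest()
-    pr.revisions = ['aaa111', 'bbb222', 'ccc333']
-    assert pr._revisions == 'aaa111:bbb222:ccc333'          # stored form
-    assert pr.revisions == ['aaa111', 'bbb222', 'ccc333']   # split again on access
-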
-
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
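-
-# Illustrative sketch only: a hypothetical, unused helper showing how
-# Notification.create() wires recipients up through UserNotification rows.
-def _example_notification_create(sender, recipients):
-    notification = Notification.create(
-        created_by=sender,           # a User instance
-        subject=u'Example subject',
-        body=u'Example body',
-        recipients=recipients,       # a list of User instances
-        type_=Notification.TYPE_MESSAGE,
-    )
-    Session().commit()
-    return notification
-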
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class Gist(Base, BaseModel):
-    __tablename__ = 'gists'
-    __table_args__ = (
-        Index('g_gist_access_id_idx', 'gist_access_id'),
-        Index('g_created_on_idx', 'created_on'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    GIST_PUBLIC = u'public'
-    GIST_PRIVATE = u'private'
-
-    gist_id = Column('gist_id', Integer(), primary_key=True)
-    gist_access_id = Column('gist_access_id', Unicode(250))
-    gist_description = Column('gist_description', UnicodeText(1024))
-    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
-    gist_expires = Column('gist_expires', Float(53), nullable=False)
-    gist_type = Column('gist_type', Unicode(128), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    owner = relationship('User')
-
-    @classmethod
-    def get_or_404(cls, id_):
-        res = cls.query().filter(cls.gist_access_id == id_).scalar()
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def get_by_access_id(cls, gist_access_id):
-        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
-
-    def gist_url(self):
-        import kallithea
-        alias_url = kallithea.CONFIG.get('gist_alias_url')
-        if alias_url:
-            return alias_url.replace('{gistid}', self.gist_access_id)
-
-        import kallithea.lib.helpers as h
-        return h.canonical_url('gist', gist_id=self.gist_access_id)
-
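-    # Illustrative sketch only (URL is hypothetical): with gist_alias_url set to
-    # 'http://gist.example.com/{gistid}', gist_url() returns
-    # 'http://gist.example.com/<gist_access_id>'; without it, the canonical
-    # 'gist' route URL is used.
-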
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all gists are stored
-
-        :param cls:
-        """
-        from kallithea.model.gist import GIST_STORE_LOC
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == URL_SEP)
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
-
-    def get_api_data(self):
-        """
-        Common function for generating gist related data for API
-        """
-        gist = self
-        data = dict(
-            gist_id=gist.gist_id,
-            type=gist.gist_type,
-            access_id=gist.gist_access_id,
-            description=gist.gist_description,
-            url=gist.gist_url(),
-            expires=gist.gist_expires,
-            created_on=gist.created_on,
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-        )
-        data.update(self.get_api_data())
-        return data
-    ## SCM functions
-
-    @property
-    def scm_instance(self):
-        from kallithea.lib.vcs import get_repo
-        base_path = self.base_path()
-        return get_repo(os.path.join(*map(safe_str,
-                                          [base_path, self.gist_access_id])))
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
--- a/kallithea/lib/dbmigrate/schema/db_2_0_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2330 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_2_0_0
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import time
-import logging
-import datetime
-import traceback
-import hashlib
-import collections
-import functools
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_prefix, time_to_datetime, aslist, Optional, safe_int
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-from kallithea import DB_PREFIX
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        # deprecated and left for backward compatibility
-        return cls.get_all()
-
-    @classmethod
-    def get_all(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            return safe_str(self.__unicode__())
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
-class Setting(Base, BaseModel):
-    SETTINGS_TYPES = {
-        'str': safe_str,
-        'int': safe_int,
-        'unicode': safe_unicode,
-        'bool': str2bool,
-        'list': functools.partial(aslist, sep=',')
-    }
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(4096, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_type = Column("app_settings_type", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, key='', val='', type='unicode'):
-        self.app_settings_name = key
-        self.app_settings_value = val
-        self.app_settings_type = type
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        _type = self.app_settings_type
-        converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
-        return converter(v)
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    @hybrid_property
-    def app_settings_type(self):
-        return self._app_settings_type
-
-    @app_settings_type.setter
-    def app_settings_type(self, val):
-        if val not in self.SETTINGS_TYPES:
-            raise Exception('type must be one of %s got %s'
-                            % (self.SETTINGS_TYPES.keys(), val))
-        self._app_settings_type = val
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value, self.app_settings_type
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key, val='', type='unicode'):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key, val, type)
-        return res
-
-    @classmethod
-    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
-        """
-        Creates or updates a Kallithea setting. If an update is triggered, only
-        parameters that are explicitly set are updated; Optional instances are skipped.
-
-        :param key:
-        :param val:
-        :param type:
-        :return:
-        """
-        res = cls.get_by_name(key)
-        if not res:
-            val = Optional.extract(val)
-            type = Optional.extract(type)
-            res = cls(key, val, type)
-        else:
-            res.app_settings_name = key
-            if not isinstance(val, Optional):
-                # update if set
-                res.app_settings_value = val
-            if not isinstance(type, Optional):
-                # update if set
-                res.app_settings_type = type
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_auth_plugins(cls, cache=False):
-        auth_plugins = cls.get_by_name("auth_plugins").app_settings_value
-        return auth_plugins
-
-    @classmethod
-    def get_auth_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('auth_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_server_info(cls):
-        import pkg_resources
-        import platform
-        import kallithea
-        from kallithea.lib.utils import check_git_version
-        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
-        info = {
-            'modules': sorted(mods, key=lambda k: k[0].lower()),
-            'py_version': platform.python_version(),
-            'platform': platform.platform(),
-            'kallithea_version': kallithea.__version__,
-            'git_version': str(check_git_version()),
-            'git_path': kallithea.CONFIG.get('git_path')
-        }
-        return info
-
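-
-# Illustrative sketch only: a hypothetical, unused helper showing the Optional
-# semantics of Setting.create_or_update() (the setting name is made up).
-def _example_setting_create_or_update():
-    s = Setting.create_or_update('example_flag', u'True', 'bool')  # creates the row
-    Session().add(s)
-    s = Setting.create_or_update('example_flag', u'False')  # updates the value only;
-    Session().add(s)                                         # the type stays 'bool'
-    Session().commit()
-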
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    # def __init__(self, section='', key='', value=''):
-    #     self.ui_section = section
-    #     self.ui_key = key
-    #     self.ui_value = value
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
-                                   self.ui_value)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    DEFAULT_USER = 'default'
-
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    extern_type = Column("extern_type", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    extern_name = Column("extern_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # for migration reasons; this column will be deleted later
-    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    # notifications assigned to this user
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications created by this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    # extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def ip_addresses(self):
-        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
-        return [x.ip_addr for x in ret]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    @property
-    def AuthUser(self):
-        """
-        Returns instance of AuthUser for this user
-        """
-        from kallithea.lib.auth import AuthUser
-        return AuthUser(user_id=self.user_id, api_key=self.api_key,
-                        username=self.username)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                     self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        return q.scalar()
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    @classmethod
-    def get_from_cs_author(cls, author):
-        """
-        Tries to get a User object out of a commit author string
-
-        :param author:
-        """
-        from kallithea.lib.helpers import email, author_name
-        # Valid email in the attribute passed, see if they're in the system
-        _email = email(author)
-        if _email:
-            user = cls.get_by_email(_email, case_insensitive=True)
-            if user:
-                return user
-        # Maybe we can match by username?
-        _author = author_name(author)
-        user = cls.get_by_username(_author, case_insensitive=True)
-        if user:
-            return user
-
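-    # Illustrative sketch only (the author string is hypothetical): resolution
-    # tries the e-mail address first and falls back to the username, e.g.
-    #
-    #     User.get_from_cs_author('John Doe <john@example.com>')
-    #     # -> User matched by e-mail, else by username, else None
-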
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    @classmethod
-    def get_first_admin(cls):
-        user = User.query().filter(User.admin == True).first()
-        if user is None:
-            raise Exception('Missing administrative account!')
-        return user
-
-    @classmethod
-    def get_default_user(cls, cache=False):
-        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
-        if user is None:
-            raise Exception('Missing default account!')
-        return user
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            active=user.active,
-            admin=user.admin,
-            extern_type=user.extern_type,
-            extern_name=user.extern_name,
-            last_login=user.last_login,
-            ip_addresses=user.ip_addresses
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check if this email is not main one
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is already present in the user table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserIpMap(Base, BaseModel):
-    __tablename__ = 'user_ip_map'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'ip_addr'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    user = relationship('User', lazy='joined')
-
-    @classmethod
-    def _get_ip_range(cls, ip_addr):
-        from kallithea.lib import ipaddr
-        net = ipaddr.IPNetwork(address=ip_addr)
-        return [str(net.network), str(net.broadcast)]
-
-    def __json__(self):
-        return dict(
-          ip_addr=self.ip_addr,
-          ip_range=self._get_ip_range(self.ip_addr)
-        )
-
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.repository_name,
-                                      self.action)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    user_group_description = Column("user_group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    # don't trigger lazy load for migrations
-    #members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
-    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
-
-    user = relationship('User')
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.users_group_id,
-                                      self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, user_group_id, cache=False):
-        user_group = cls.query()
-        if cache:
-            user_group = user_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % user_group_id))
-        return user_group.get(user_group_id)
-
-    def get_api_data(self, with_members=True):
-        user_group = self
-
-        data = dict(
-            users_group_id=user_group.users_group_id,
-            group_name=user_group.users_group_name,
-            group_description=user_group.user_group_description,
-            active=user_group.users_group_active,
-            owner=user_group.user.username,
-        )
-        if with_members:
-            members = []
-            for user in user_group.members:
-                user = user.user
-                members.append(user.get_api_data())
-            data['members'] = members
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class RepositoryField(Base, BaseModel):
-    __tablename__ = 'repositories_fields'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'field_key'),  # at most one field per key per repository
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
-
-    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-    field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None))
-    field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_type = Column("field_type", String(256), nullable=False, unique=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repository = relationship('Repository')
-
-    @property
-    def field_key_prefixed(self):
-        return '%s%s' % (self.PREFIX, self.field_key)
-
-    @classmethod
-    def un_prefix_key(cls, key):
-        if key.startswith(cls.PREFIX):
-            return key[len(cls.PREFIX):]
-        return key
-
-    @classmethod
-    def get_by_key_name(cls, key, repo):
-        row = cls.query() \
-                .filter(cls.repository == repo) \
-                .filter(cls.field_key == key).scalar()
-        return row
-
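-
-# Illustrative sketch only: a hypothetical, unused helper showing the 'ex_'
-# prefixing used for custom repository fields in forms (the key is made up).
-def _example_repository_field_keys():
-    assert RepositoryField.un_prefix_key('ex_release') == 'release'
-    assert RepositoryField.un_prefix_key('release') == 'release'  # unprefixed keys pass through
-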
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-    extra_fields = relationship('RepositoryField',
-                                cascade="all, delete, delete-orphan")
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
-
-    @hybrid_property
-    def locked(self):
-        # should always return [user_id, timelocked]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
-    @hybrid_property
-    def changeset_cache(self):
-        from kallithea.lib.vcs.backends.base import EmptyChangeset
-        dummy = EmptyChangeset().__json__()
-        if not self._changeset_cache:
-            return dummy
-        try:
-            return json.loads(self._changeset_cache)
-        except TypeError:
-            return dummy
-
-    @changeset_cache.setter
-    def changeset_cache(self, val):
-        try:
-            self._changeset_cache = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def normalize_repo_name(cls, repo_name):
-        """
-        Normalizes an OS-specific repo_name to the format stored internally in
-        the database, using URL_SEP
-
-        :param cls:
-        :param repo_name:
-        """
-        return cls.url_sep().join(repo_name.split(os.sep))
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name, self.repo_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*map(safe_unicode, p))
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        returns the new full repository name based on the assigned group and the new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None,
-            enable_statistics=repo.enable_statistics,
-            enable_locking=repo.enable_locking,
-            enable_downloads=repo.enable_downloads,
-            last_changeset=repo.changeset_cache,
-            locked_by=User.get(self.locked[0]).get_api_data() \
-                if self.locked[0] else None,
-            locked_date=time_to_datetime(self.locked[1]) \
-                if self.locked[1] else None
-        )
-        rc_config = Setting.get_app_settings()
-        repository_fields = str2bool(rc_config.get('repository_fields'))
-        if repository_fields:
-            for f in self.extra_fields:
-                data[f.field_key_prefixed] = f.field_value
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id, lock_time=None):
-        if not lock_time:
-            lock_time = time.time()
-        repo.locked = [user_id, lock_time]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def getlock(cls, repo):
-        return repo.locked
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    def clone_url(self, **override):
-        import kallithea.lib.helpers as h
-        from urlparse import urlparse
-        import urllib
-        parsed_url = urlparse(h.canonical_url('home'))
-        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
-        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
-        args = {
-           'user': '',
-           'pass': '',
-           'scheme': parsed_url.scheme,
-           'netloc': parsed_url.netloc,
-           'prefix': decoded_path,
-           'path': self.repo_name
-        }
-
-        args.update(override)
-        return default_clone_uri % args
-
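
As an aside on the clone_url construction above: the template is plain Python %-formatting over a dict built from urlparse output, with overrides merged in last. A minimal standalone sketch of that substitution (Python 2, as in the surrounding code; build_clone_url and the example.com URL are invented for illustration, not Kallithea helpers):

    from urlparse import urlparse  # Python 2 stdlib, as used above
    import urllib

    def build_clone_url(canonical_url, repo_name, **override):
        # same template shape as default_clone_uri above
        tmpl = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
        parsed = urlparse(canonical_url)
        args = {
            'user': '',
            'pass': '',
            'scheme': parsed.scheme,
            'netloc': parsed.netloc,
            'prefix': urllib.unquote(parsed.path),
            'path': repo_name,
        }
        args.update(override)  # overrides win, e.g. to render credentials
        return tmpl % args

    # build_clone_url('https://example.com/', 'group/repo')
    # -> 'https://example.com/group/repo'
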
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns landing changeset, or if that doesn't exist returns the tip
-        """
-        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
-        return cs
-
-    def update_changeset_cache(self, cs_cache=None):
-        """
-        Update cache of last changeset for repository, keys should be::
-
-            short_id
-            raw_id
-            revision
-            message
-            date
-            author
-
-        :param cs_cache:
-        """
-        from kallithea.lib.vcs.backends.base import BaseChangeset
-        if cs_cache is None:
-            cs_cache = EmptyChangeset()
-            # use no-cache version here
-            scm_repo = self.scm_instance_no_cache()
-            if scm_repo:
-                cs_cache = scm_repo.get_changeset()
-
-        if isinstance(cs_cache, BaseChangeset):
-            cs_cache = cs_cache.__json__()
-
-        if (cs_cache != self.changeset_cache or not self.changeset_cache):
-            _default = datetime.datetime.fromtimestamp(0)
-            last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
-            self.changeset_cache = cs_cache
-            Session().add(self)
-            Session().commit()
-        else:
-            log.debug('Skipping repo:%s already with latest changes',
-                      self.repo_name)
-
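
For orientation, the value cached by update_changeset_cache above is just a plain dict with the keys listed in its docstring, and the write is skipped when the stored dict is unchanged. A rough sketch of that shape and check (all values invented for illustration):

    import datetime

    # hypothetical cache entry with the documented keys
    cs_cache = {
        'short_id': '1e6a2f3b4c5d',
        'raw_id': '1e6a2f3b4c5d' + '0' * 28,
        'revision': 42,
        'message': 'fix typo in docs',
        'date': datetime.datetime(2014, 7, 1, 12, 0),
        'author': 'Jane Doe <jane@example.com>',
    }

    def needs_update(old_cache, new_cache):
        # mirror the condition above: write only when the cached dict
        # changed or when there is no cache yet
        return new_cache != old_cache or not old_cache
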
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = collections.defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have an open pull request without a status yet?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    def _repo_size(self):
-        from kallithea.lib import helpers as h
-        log.debug('calculating repository size...')
-        return h.format_byte_size(self.scm_instance.size)
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    def set_invalidate(self):
-        """
-        Mark caches of this repo as invalid.
-        """
-        CacheInvalidation.set_invalidate(self.repo_name)
-
-    def scm_instance_no_cache(self):
-        return self.__get_instance()
-
-    @property
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, valid_cache_keys=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-
-        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
-        if not valid:
-            log.debug('Cache for %s invalidated, getting new object', rn)
-            region_invalidate(_c, None, rn)
-        else:
-            log.debug('Getting obj for %s from cache', rn)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository from %s',
-                      alias, repo_full_path)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the database but not on '
-                      'the filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-    user = relationship('User')
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                      self.group_name)
-
-    @classmethod
-    def groups_choices(cls, groups=None, show_empty_group=True):
-        from webhelpers.html import literal as _literal
-        if not groups:
-            groups = cls.query().all()
-
-        repo_groups = []
-        if show_empty_group:
-            repo_groups = [('-1', u'-- %s --' % _('top level'))]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def _recursive_objects(self, include_repos=True):
-        all_ = []
-
-        def _get_members(root_gr):
-            if include_repos:
-                for r in root_gr.repositories:
-                    all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively return all groups, with the repositories in those groups
-        """
-        return self._recursive_objects()
-
-    def recursive_groups(self):
-        """
-        Returns all child groups of this group, including children of children
-        """
-        return self._recursive_objects(include_repos=False)
-
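
The two recursive helpers above are a depth-first walk over the group tree; here is a standalone sketch of the same traversal, with a toy Group class standing in for the ORM-backed RepoGroup (names invented for illustration):

    class Group(object):
        # toy stand-in for RepoGroup, for illustration only
        def __init__(self, name, children=None, repositories=None):
            self.name = name
            self.children = children or []
            self.repositories = repositories or []

    def recursive_objects(group, include_repos=True):
        # depth-first walk with the same ordering idea as _recursive_objects
        collected = []

        def _get_members(root):
            if include_repos:
                collected.extend(root.repositories)
            for child in root.children:
                collected.append(child)
                _get_members(child)

        _get_members(group)
        return [group] + collected

    # leaf = Group('docs', repositories=['docs-repo'])
    # top = Group('projects', children=[leaf], repositories=['main-repo'])
    # recursive_objects(top) -> [top, 'main-repo', leaf, 'docs-repo']
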
-    def get_new_name(self, group_name):
-        """
-        Returns the new full group name based on the parent group and the new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-    def get_api_data(self):
-        """
-        Common function for generating api data
-
-        """
-        group = self
-        data = dict(
-            group_id=group.group_id,
-            group_name=group.group_name,
-            group_description=group.group_description,
-            parent_group=group.parent_group.group_name if group.parent_group else None,
-            repositories=[x.repo_name for x in group.repositories],
-            owner=group.user.username
-        )
-        return data
-
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PERMS = [
-        ('hg.admin', _('Kallithea Administrator')),
-
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository group no access')),
-        ('group.read', _('Repository group read access')),
-        ('group.write', _('Repository group write access')),
-        ('group.admin', _('Repository group admin access')),
-
-        ('usergroup.none', _('User group no access')),
-        ('usergroup.read', _('User group read access')),
-        ('usergroup.write', _('User group write access')),
-        ('usergroup.admin', _('User group admin access')),
-
-        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
-        ('hg.repogroup.create.true', _('Repository Group creation enabled')),
-
-        ('hg.usergroup.create.false', _('User Group creation disabled')),
-        ('hg.usergroup.create.true', _('User Group creation enabled')),
-
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-
-        ('hg.register.none', _('Registration disabled')),
-        ('hg.register.manual_activate', _('User Registration with manual account activation')),
-        ('hg.register.auto_activate', _('User Registration with automatic account activation')),
-
-        ('hg.extern_activate.manual', _('Manual activation of external account')),
-        ('hg.extern_activate.auto', _('Automatic activation of external account')),
-
-    ]
-
-    # definition of system default permissions for the DEFAULT user
-    DEFAULT_USER_PERMISSIONS = [
-        'repository.read',
-        'group.read',
-        'usergroup.read',
-        'hg.create.repository',
-        'hg.fork.repository',
-        'hg.register.manual_activate',
-        'hg.extern_activate.auto',
-    ]
-
-    # Weight defines which permissions are more important;
-    # the higher the number, the more important the permission.
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'usergroup.none': 0,
-        'usergroup.read': 1,
-        'usergroup.write': 3,
-        'usergroup.admin': 4,
-        'hg.repogroup.create.false': 0,
-        'hg.repogroup.create.true': 1,
-
-        'hg.usergroup.create.false': 0,
-        'hg.usergroup.create.true': 1,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1
-    }
-
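
The weight table above only matters for comparisons: given several granted permissions of the same kind, the most permissive one is the one with the highest weight. A small illustrative sketch (a subset of the table copied from above; strongest is an invented helper name, not a Kallithea API):

    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,
    }

    def strongest(perms):
        # pick the permission with the highest weight
        return max(perms, key=lambda p: PERM_WEIGHTS[p])

    # strongest(['repository.read', 'repository.write']) -> 'repository.write'
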
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_user_group_perms(cls, default_user_id):
-        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
-         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
-         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserUserGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.repository)
-
-
-class UserUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    user_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, user_group, permission):
-        n = cls()
-        n.user = user
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.user_group)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.permission)
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_group_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
-        CheckConstraint('target_user_group_id != user_group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
-    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, target_user_group, user_group, permission):
-        n = cls()
-        n.target_user_group = target_user_group
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    # cache_id, not used
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    # cache_key as created by _get_cache_key
-    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # cache_args is a repo_name
-    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # instance sets cache_active True when it is caching,
-    # other instances set cache_active to False to indicate that this cache is invalid
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, repo_name=''):
-        self.cache_key = cache_key
-        self.cache_args = repo_name
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (self.__class__.__name__,
-                            self.cache_id, self.cache_key, self.cache_active)
-
-    def _cache_key_partition(self):
-        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
-        return prefix, repo_name, suffix
-
-    def get_prefix(self):
-        """
-        get prefix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[0]
-
-    def get_suffix(self):
-        """
-        get suffix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[2]
-
-    @classmethod
-    def clear_cache(cls):
-        """
-        Delete all cache keys from database.
-        Should only be run when all instances are down and all entries thus stale.
-        """
-        cls.query().delete()
-        Session().commit()
-
-    @classmethod
-    def _get_cache_key(cls, key):
-        """
-        Wrapper for generating a unique cache key for this instance and "key".
-        key must / will start with a repo_name, which will be stored in .cache_args.
-        """
-        import kallithea
-        prefix = kallithea.CONFIG.get('instance_id', '')
-        return "%s%s" % (prefix, key)
-
-    @classmethod
-    def set_invalidate(cls, repo_name, delete=False):
-        """
-        Mark all caches of a repo as invalid in the database.
-        """
-        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-
-        try:
-            for inv_obj in inv_objs:
-                log.debug('marking %s key for invalidation based on repo_name=%s',
-                          inv_obj, safe_str(repo_name))
-                if delete:
-                    Session().delete(inv_obj)
-                else:
-                    inv_obj.cache_active = False
-                    Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
-        """
-        Mark this cache key as active and currently cached.
-        Return True if the existing cache registration still was valid.
-        Return False to indicate that it had been invalidated and caches should be refreshed.
-        """
-
-        key = (repo_name + '_' + kind) if kind else repo_name
-        cache_key = cls._get_cache_key(key)
-
-        if valid_cache_keys and cache_key in valid_cache_keys:
-            return True
-
-        try:
-            inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
-            if not inv_obj:
-                inv_obj = CacheInvalidation(cache_key, repo_name)
-            was_valid = inv_obj.cache_active
-            inv_obj.cache_active = True
-            Session().add(inv_obj)
-            Session().commit()
-            return was_valid
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-            return False
-
-    @classmethod
-    def get_valid_cache_keys(cls):
-        """
-        Return an opaque object with information about which caches are still
-        valid and can be used without checking for invalidation.
-        """
-        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
-
-
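
Stripped of the ORM and error handling, the invalidation protocol implemented by CacheInvalidation above is a per-key boolean flag: set_invalidate clears it, and test_and_set_valid reports the previous value while setting it back. An in-memory sketch of that protocol (an illustration, not the database-backed implementation):

    # one boolean "cache_active" flag per cache key
    _cache_active = {}

    def set_invalidate(cache_key):
        # mark the cache as stale; the next reader must refresh
        _cache_active[cache_key] = False

    def test_and_set_valid(cache_key):
        # report whether the cached value was still valid, and mark the
        # key as active/cached again either way
        was_valid = _cache_active.get(cache_key, False)
        _cache_active[cache_key] = True
        return was_valid

    # test_and_set_valid('repo1')  -> False  (first use: refresh)
    # test_and_set_valid('repo1')  -> True   (still valid)
    # set_invalidate('repo1')
    # test_and_set_valid('repo1')  -> False  (refresh again)
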
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns the users associated with this ChangesetComment, i.e. those
-        who actually commented
-
-        :param cls:
-        :param revision:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    # values for .status
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
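
The revisions hybrid property above stores the revision list in a single colon-joined string column; the round trip is just split and join. A plain-Python sketch of the same idea without SQLAlchemy (RevisionList is an invented name for illustration):

    class RevisionList(object):
        # plain-property version of the hybrid_property above
        def __init__(self):
            self._revisions = u''

        @property
        def revisions(self):
            return self._revisions.split(':')

        @revisions.setter
        def revisions(self, val):
            self._revisions = u':'.join(val)

    # pr = RevisionList()
    # pr.revisions = [u'abc123', u'def456']
    # pr._revisions  -> u'abc123:def456'
    # pr.revisions   -> [u'abc123', u'def456']
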
-    @property
-    def org_ref_parts(self):
-        return self.org_ref.split(':')
-
-    @property
-    def other_ref_parts(self):
-        return self.other_ref.split(':')
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    @property
-    def last_review_status(self):
-        return self.statuses[-1].status if self.statuses else ''
-
-    def __json__(self):
-        return dict(
-            revisions=self.revisions
-        )
-
-
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class Gist(Base, BaseModel):
-    __tablename__ = 'gists'
-    __table_args__ = (
-        Index('g_gist_access_id_idx', 'gist_access_id'),
-        Index('g_created_on_idx', 'created_on'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    GIST_PUBLIC = u'public'
-    GIST_PRIVATE = u'private'
-
-    gist_id = Column('gist_id', Integer(), primary_key=True)
-    gist_access_id = Column('gist_access_id', Unicode(250))
-    gist_description = Column('gist_description', UnicodeText(1024))
-    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
-    gist_expires = Column('gist_expires', Float(53), nullable=False)
-    gist_type = Column('gist_type', Unicode(128), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    owner = relationship('User')
-
-    @classmethod
-    def get_or_404(cls, id_):
-        res = cls.query().filter(cls.gist_access_id == id_).scalar()
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def get_by_access_id(cls, gist_access_id):
-        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
-
-    def gist_url(self):
-        import kallithea
-        alias_url = kallithea.CONFIG.get('gist_alias_url')
-        if alias_url:
-            return alias_url.replace('{gistid}', self.gist_access_id)
-
-        import kallithea.lib.helpers as h
-        return h.canonical_url('gist', gist_id=self.gist_access_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all gists are stored
-
-        :param cls:
-        """
-        from kallithea.model.gist import GIST_STORE_LOC
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == URL_SEP)
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
-
-    def get_api_data(self):
-        """
-        Common function for generating gist-related data for the API
-        """
-        gist = self
-        data = dict(
-            gist_id=gist.gist_id,
-            type=gist.gist_type,
-            access_id=gist.gist_access_id,
-            description=gist.gist_description,
-            url=gist.gist_url(),
-            expires=gist.gist_expires,
-            created_on=gist.created_on,
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-        )
-        data.update(self.get_api_data())
-        return data
-    ## SCM functions
-
-    @property
-    def scm_instance(self):
-        from kallithea.lib.vcs import get_repo
-        base_path = self.base_path()
-        return get_repo(os.path.join(*map(safe_str,
-                                          [base_path, self.gist_access_id])))
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
--- a/kallithea/lib/dbmigrate/schema/db_2_0_1.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2331 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_2_0_1
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import time
-import logging
-import datetime
-import traceback
-import hashlib
-import collections
-import functools
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_prefix, time_to_datetime, aslist, Optional, safe_int
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-from kallithea import DB_PREFIX
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        # deprecated and left for backward compatibility
-        return cls.get_all()
-
-    @classmethod
-    def get_all(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            try:
-                return safe_str(self.__unicode__())
-            except UnicodeDecodeError:
-                pass
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
-class Setting(Base, BaseModel):
-    SETTINGS_TYPES = {
-        'str': safe_str,
-        'int': safe_int,
-        'unicode': safe_unicode,
-        'bool': str2bool,
-        'list': functools.partial(aslist, sep=',')
-    }
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(4096, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_type = Column("app_settings_type", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, key='', val='', type='unicode'):
-        self.app_settings_name = key
-        self.app_settings_value = val
-        self.app_settings_type = type
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        _type = self.app_settings_type
-        converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
-        return converter(v)
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    @hybrid_property
-    def app_settings_type(self):
-        return self._app_settings_type
-
-    @app_settings_type.setter
-    def app_settings_type(self, val):
-        if val not in self.SETTINGS_TYPES:
-            raise Exception('type must be one of %s got %s'
-                            % (self.SETTINGS_TYPES.keys(), val))
-        self._app_settings_type = val
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value, self.app_settings_type
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key, val='', type='unicode'):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key, val, type)
-        return res
-
-    @classmethod
-    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
-        """
-        Creates or updates a Kallithea setting. If an update is triggered, only
-        parameters that are explicitly set are updated; Optional instances are skipped.
-
-        :param key:
-        :param val:
-        :param type:
-        :return:
-        """
-        res = cls.get_by_name(key)
-        if not res:
-            val = Optional.extract(val)
-            type = Optional.extract(type)
-            res = cls(key, val, type)
-        else:
-            res.app_settings_name = key
-            if not isinstance(val, Optional):
-                # update if set
-                res.app_settings_value = val
-            if not isinstance(type, Optional):
-                # update if set
-                res.app_settings_type = type
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings!')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_auth_plugins(cls, cache=False):
-        auth_plugins = cls.get_by_name("auth_plugins").app_settings_value
-        return auth_plugins
-
-    @classmethod
-    def get_auth_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('auth_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_server_info(cls):
-        import pkg_resources
-        import platform
-        import kallithea
-        from kallithea.lib.utils import check_git_version
-        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
-        info = {
-            'modules': sorted(mods, key=lambda k: k[0].lower()),
-            'py_version': platform.python_version(),
-            'platform': safe_unicode(platform.platform()),
-            'kallithea_version': kallithea.__version__,
-            'git_version': safe_unicode(check_git_version()),
-            'git_path': kallithea.CONFIG.get('git_path')
-        }
-        return info
-
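Setting.create_or_update above distinguishes "not passed" from "passed" via the Optional wrapper. A brief, hedged sketch of the intended call pattern (not part of the removed file; assumes the live kallithea.model.db.Setting, and 'title' is just an example key):

    from kallithea.model.db import Setting
    from kallithea.model.meta import Session

    # Existing key: only explicitly passed arguments are updated, so the stored
    # type is left untouched here. New key: the Optional defaults are extracted.
    s = Setting.create_or_update('title', val=u'My Kallithea')
    Session().add(s)
    Session().commit()

    settings = Setting.get_app_settings()  # {app_settings_name: type-converted value}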
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    # def __init__(self, section='', key='', value=''):
-    #     self.ui_section = section
-    #     self.ui_key = key
-    #     self.ui_value = value
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
-                                   self.ui_value)
-
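A short sketch of how the Ui hook helpers above are driven (not part of the removed file; assumes the live kallithea.model.db.Ui, and the hook name and handler path below are hypothetical):

    from kallithea.model.db import Ui
    from kallithea.model.meta import Session

    # create_or_update_hook adds the row to the session but does not commit.
    Ui.create_or_update_hook('changegroup.example_notify', 'python:example.hooks.notify')
    Session().commit()

    builtin = Ui.get_builtin_hooks()       # the six HOOK_* keys defined on the class
    custom = Ui.get_custom_hooks()         # everything else in the [hooks] section
    repos_root = Ui.get_repos_location()   # ui_value stored under the '/' key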
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    DEFAULT_USER = 'default'
-
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    extern_type = Column("extern_type", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    extern_name = Column("extern_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications created by this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    #extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def ip_addresses(self):
-        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
-        return [x.ip_addr for x in ret]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    @property
-    def AuthUser(self):
-        """
-        Returns instance of AuthUser for this user
-        """
-        from kallithea.lib.auth import AuthUser
-        return AuthUser(user_id=self.user_id, api_key=self.api_key,
-                        username=self.username)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        return q.scalar()
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    @classmethod
-    def get_from_cs_author(cls, author):
-        """
-        Tries to get a User object out of a commit author string
-
-        :param author:
-        """
-        from kallithea.lib.helpers import email, author_name
-        # If the author string contains a valid email, see if it's in the system
-        _email = email(author)
-        if _email:
-            user = cls.get_by_email(_email, case_insensitive=True)
-            if user:
-                return user
-        # Maybe we can match by username?
-        _author = author_name(author)
-        user = cls.get_by_username(_author, case_insensitive=True)
-        if user:
-            return user
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    @classmethod
-    def get_first_admin(cls):
-        user = User.query().filter(User.admin == True).first()
-        if user is None:
-            raise Exception('Missing administrative account!')
-        return user
-
-    @classmethod
-    def get_default_user(cls, cache=False):
-        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
-        if user is None:
-            raise Exception('Missing default account!')
-        return user
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            active=user.active,
-            admin=user.admin,
-            extern_type=user.extern_type,
-            extern_name=user.extern_name,
-            last_login=user.last_login,
-            ip_addresses=user.ip_addresses
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
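An illustrative lookup sketch tying together the User classmethods above (not part of the removed file; assumes the live kallithea.model.db.User, and the username, email, and author string are made up):

    from kallithea.model.db import User

    u = User.get_by_username('admin', case_insensitive=True)
    u = User.get_by_email('someone@example.com', case_insensitive=True)

    # Changeset author strings ("Full Name <email>") are resolved by email first,
    # then by username; None is returned when nothing matches.
    u = User.get_from_cs_author('Jane Doe <jane@example.com>')
    if u is not None:
        api_data = u.get_api_data()        # dict consumed by the JSON-RPC API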
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check that this email is not the user's main one
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is already present in the user table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserIpMap(Base, BaseModel):
-    __tablename__ = 'user_ip_map'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'ip_addr'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    user = relationship('User', lazy='joined')
-
-    @classmethod
-    def _get_ip_range(cls, ip_addr):
-        from kallithea.lib import ipaddr
-        net = ipaddr.IPNetwork(address=ip_addr)
-        return [str(net.network), str(net.broadcast)]
-
-    def __json__(self):
-        return dict(
-          ip_addr=self.ip_addr,
-          ip_range=self._get_ip_range(self.ip_addr)
-        )
-
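A small sketch of the internal IP-range helper above (not part of the removed file; assumes the bundled kallithea.lib.ipaddr module, and '10.0.0.0/24' is only an example network):

    from kallithea.model.db import UserIpMap

    # Returns [network, broadcast] as strings; the leading underscore marks it
    # as an internal helper.
    start, end = UserIpMap._get_ip_range('10.0.0.0/24')
    # start == '10.0.0.0', end == '10.0.0.255'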
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.repository_name,
-                                      self.action)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    user_group_description = Column("user_group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
-    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
-
-    user = relationship('User')
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.users_group_id,
-                                      self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, user_group_id, cache=False):
-        user_group = cls.query()
-        if cache:
-            user_group = user_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % user_group_id))
-        return user_group.get(user_group_id)
-
-    def get_api_data(self, with_members=True):
-        user_group = self
-
-        data = dict(
-            users_group_id=user_group.users_group_id,
-            group_name=user_group.users_group_name,
-            group_description=user_group.user_group_description,
-            active=user_group.users_group_active,
-            owner=user_group.user.username,
-        )
-        if with_members:
-            members = []
-            for user in user_group.members:
-                user = user.user
-                members.append(user.get_api_data())
-            data['members'] = members
-
-        return data
-
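A hedged sketch of the UserGroup lookups above (not part of the removed file; assumes the live kallithea.model.db.UserGroup, and 'developers' is a hypothetical group name):

    from kallithea.model.db import UserGroup

    ug = UserGroup.get_by_group_name('developers', case_insensitive=True)
    if ug is not None:
        data = ug.get_api_data(with_members=True)  # includes each member's API dict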
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class RepositoryField(Base, BaseModel):
-    __tablename__ = 'repositories_fields'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
-
-    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-    field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None))
-    field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_type = Column("field_type", String(256), nullable=False, unique=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repository = relationship('Repository')
-
-    @property
-    def field_key_prefixed(self):
-        return 'ex_%s' % self.field_key
-
-    @classmethod
-    def un_prefix_key(cls, key):
-        if key.startswith(cls.PREFIX):
-            return key[len(cls.PREFIX):]
-        return key
-
-    @classmethod
-    def get_by_key_name(cls, key, repo):
-        row = cls.query() \
-                .filter(cls.repository == repo) \
-                .filter(cls.field_key == key).scalar()
-        return row
-
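RepositoryField keys are prefixed in web forms so they cannot collide with built-in columns. A tiny sketch (not part of the removed file; assumes the live kallithea.model.db.RepositoryField, and 'issue_tracker' is a made-up key):

    from kallithea.model.db import RepositoryField

    RepositoryField.un_prefix_key('ex_issue_tracker')   # -> 'issue_tracker'
    RepositoryField.un_prefix_key('issue_tracker')      # unprefixed keys pass through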
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-    extra_fields = relationship('RepositoryField',
-                                cascade="all, delete, delete-orphan")
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
-
-    @hybrid_property
-    def locked(self):
-        # should always return [user_id, timelocked]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
-    @hybrid_property
-    def changeset_cache(self):
-        from kallithea.lib.vcs.backends.base import EmptyChangeset
-        dummy = EmptyChangeset().__json__()
-        if not self._changeset_cache:
-            return dummy
-        try:
-            return json.loads(self._changeset_cache)
-        except TypeError:
-            return dummy
-
-    @changeset_cache.setter
-    def changeset_cache(self, val):
-        try:
-            self._changeset_cache = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def normalize_repo_name(cls, repo_name):
-        """
-        Normalizes an OS-specific repo_name to the format stored internally in the
-        database, using URL_SEP
-
-        :param cls:
-        :param repo_name:
-        """
-        return cls.url_sep().join(repo_name.split(os.sep))
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name, self.repo_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*map(safe_unicode, p))
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        returns the new full repository name based on the assigned group and the new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None,
-            enable_statistics=repo.enable_statistics,
-            enable_locking=repo.enable_locking,
-            enable_downloads=repo.enable_downloads,
-            last_changeset=repo.changeset_cache,
-            locked_by=User.get(self.locked[0]).get_api_data() \
-                if self.locked[0] else None,
-            locked_date=time_to_datetime(self.locked[1]) \
-                if self.locked[1] else None
-        )
-        rc_config = Setting.get_app_settings()
-        repository_fields = str2bool(rc_config.get('repository_fields'))
-        if repository_fields:
-            for f in self.extra_fields:
-                data[f.field_key_prefixed] = f.field_value
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id, lock_time=None):
-        if not lock_time:
-            lock_time = time.time()
-        repo.locked = [user_id, lock_time]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def getlock(cls, repo):
-        return repo.locked
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    def clone_url(self, **override):
-        import kallithea.lib.helpers as h
-        from urlparse import urlparse
-        import urllib
-        parsed_url = urlparse(h.canonical_url('home'))
-        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
-        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
-        args = {
-           'user': '',
-           'pass': '',
-           'scheme': parsed_url.scheme,
-           'netloc': parsed_url.netloc,
-           'prefix': decoded_path,
-           'path': self.repo_name
-        }
-
-        args.update(override)
-        return default_clone_uri % args
-
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns the landing changeset, or the tip if that doesn't exist
-        """
-        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
-        return cs
-
-    def update_changeset_cache(self, cs_cache=None):
-        """
-        Update cache of last changeset for repository, keys should be::
-
-            short_id
-            raw_id
-            revision
-            message
-            date
-            author
-
-        :param cs_cache:
-        """
-        from kallithea.lib.vcs.backends.base import BaseChangeset
-        if cs_cache is None:
-            cs_cache = EmptyChangeset()
-            # use no-cache version here
-            scm_repo = self.scm_instance_no_cache()
-            if scm_repo:
-                cs_cache = scm_repo.get_changeset()
-
-        if isinstance(cs_cache, BaseChangeset):
-            cs_cache = cs_cache.__json__()
-
-        if (cs_cache != self.changeset_cache or not self.changeset_cache):
-            _default = datetime.datetime.fromtimestamp(0)
-            last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
-            self.changeset_cache = cs_cache
-            Session().add(self)
-            Session().commit()
-        else:
-            log.debug('Skipping repo:%s already with latest changes',
-                      self.repo_name)
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = collections.defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have an open pull request without a status?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    def _repo_size(self):
-        from kallithea.lib import helpers as h
-        log.debug('calculating repository size...')
-        return h.format_byte_size(self.scm_instance.size)
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    def set_invalidate(self):
-        """
-        Mark caches of this repo as invalid.
-        """
-        CacheInvalidation.set_invalidate(self.repo_name)
-
-    def scm_instance_no_cache(self):
-        return self.__get_instance()
-
-    @property
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, valid_cache_keys=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-
-        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
-        if not valid:
-            log.debug('Cache for %s invalidated, getting new object', rn)
-            region_invalidate(_c, None, rn)
-        else:
-            log.debug('Getting obj for %s from cache', rn)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository from %s',
-                      alias, repo_full_path)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the database but not on the '
-                      'filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
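An illustrative sketch of the Repository locking and cache-related helpers above (not part of the removed file; a sketch only, assuming the live kallithea.model.db.Repository and a repository that exists on disk, with 'group/my-repo' as a made-up name):

    from kallithea.model.db import Repository

    repo = Repository.get_by_repo_name('group/my-repo')
    if repo is not None:
        Repository.lock(repo, user_id=1)        # stores 'user_id:timestamp' and commits
        user_id, lock_time = Repository.getlock(repo)
        Repository.unlock(repo)

        repo.set_invalidate()                   # mark this repo's SCM caches stale
        scm = repo.scm_instance                 # cached or fresh, per vcs_full_cache
        tip = repo.get_changeset()              # defaults to the tip changeset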
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    #TODO: create this field in migrations
-    #created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-    user = relationship('User')
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                      self.group_name)
-
-    @classmethod
-    def groups_choices(cls, groups=None, show_empty_group=True):
-        from webhelpers.html import literal as _literal
-        if not groups:
-            groups = cls.query().all()
-
-        repo_groups = []
-        if show_empty_group:
-            repo_groups = [('-1', u'-- %s --' % _('top level'))]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def _recursive_objects(self, include_repos=True):
-        all_ = []
-
-        def _get_members(root_gr):
-            if include_repos:
-                for r in root_gr.repositories:
-                    all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively return all groups, with the repositories in those groups
-        """
-        return self._recursive_objects()
-
-    def recursive_groups(self):
-        """
-        Returns all child groups of this group, including children of children
-        """
-        return self._recursive_objects(include_repos=False)
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-    def get_api_data(self):
-        """
-        Common function for generating api data
-
-        """
-        group = self
-        data = dict(
-            group_id=group.group_id,
-            group_name=group.group_name,
-            group_description=group.group_description,
-            parent_group=group.parent_group.group_name if group.parent_group else None,
-            repositories=[x.repo_name for x in group.repositories],
-            owner=group.user.username
-        )
-        return data
-
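A small sketch of the RepoGroup path helpers above (not part of the removed file; assumes the live kallithea.model.db.RepoGroup, and the group names are examples):

    from kallithea.model.db import RepoGroup

    gr = RepoGroup.get_by_group_name('projects/web')
    if gr is not None:
        gr.full_path_splitted              # ['projects', 'web']
        gr.name                            # 'web'
        gr.parents                         # outermost-first; capped at 5 levels
        gr.get_new_name('site')            # 'projects/site' (same parent, new name)
        gr.repositories_recursive_count    # repos here plus in all child groups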
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PERMS = [
-        ('hg.admin', _('Kallithea Administrator')),
-
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository group no access')),
-        ('group.read', _('Repository group read access')),
-        ('group.write', _('Repository group write access')),
-        ('group.admin', _('Repository group admin access')),
-
-        ('usergroup.none', _('User group no access')),
-        ('usergroup.read', _('User group read access')),
-        ('usergroup.write', _('User group write access')),
-        ('usergroup.admin', _('User group admin access')),
-
-        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
-        ('hg.repogroup.create.true', _('Repository Group creation enabled')),
-
-        ('hg.usergroup.create.false', _('User Group creation disabled')),
-        ('hg.usergroup.create.true', _('User Group creation enabled')),
-
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-
-        ('hg.register.none', _('Registration disabled')),
-        ('hg.register.manual_activate', _('User Registration with manual account activation')),
-        ('hg.register.auto_activate', _('User Registration with automatic account activation')),
-
-        ('hg.extern_activate.manual', _('Manual activation of external account')),
-        ('hg.extern_activate.auto', _('Automatic activation of external account')),
-
-    ]
-
-    #definition of system default permissions for DEFAULT user
-    DEFAULT_USER_PERMISSIONS = [
-        'repository.read',
-        'group.read',
-        'usergroup.read',
-        'hg.create.repository',
-        'hg.fork.repository',
-        'hg.register.manual_activate',
-        'hg.extern_activate.auto',
-    ]
-
-    # Weight defines which permissions are more important;
-    # the higher the number, the more important the permission.
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'usergroup.none': 0,
-        'usergroup.read': 1,
-        'usergroup.write': 3,
-        'usergroup.admin': 4,
-        'hg.repogroup.create.false': 0,
-        'hg.repogroup.create.true': 1,
-
-        'hg.usergroup.create.false': 0,
-        'hg.usergroup.create.true': 1,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1
-    }
-
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_user_group_perms(cls, default_user_id):
-        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
-         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
-         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserUserGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
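A brief sketch of how PERM_WEIGHTS above is meant to be used when the stronger of two permissions has to win (illustrative only, not part of the removed file; assumes the live kallithea.model.db.Permission):

    from kallithea.model.db import Permission

    def strongest(perm_a, perm_b):
        # Higher weight wins, e.g. 'repository.write' (3) beats 'repository.read' (1).
        w = Permission.PERM_WEIGHTS
        return perm_a if w[perm_a] >= w[perm_b] else perm_b

    strongest('repository.read', 'repository.write')   # -> 'repository.write'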
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.repository)
-
-
-class UserUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    user_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, user_group, permission):
-        n = cls()
-        n.user = user
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.user_group)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.permission)
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_group_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
-        CheckConstraint('target_user_group_id != user_group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
-    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, target_user_group, user_group, permission):
-        n = cls()
-        n.target_user_group = target_user_group
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    # cache_id, not used
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    # cache_key as created by _get_cache_key
-    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # cache_args is a repo_name
-    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # an instance sets cache_active to True when it is caching,
-    # other instances set cache_active to False to indicate that this cache is invalid
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, repo_name=''):
-        self.cache_key = cache_key
-        self.cache_args = repo_name
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (self.__class__.__name__,
-                            self.cache_id, self.cache_key, self.cache_active)
-
-    def _cache_key_partition(self):
-        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
-        return prefix, repo_name, suffix
-
-    def get_prefix(self):
-        """
-        get prefix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[0]
-
-    def get_suffix(self):
-        """
-        get suffix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[2]
-
-    @classmethod
-    def clear_cache(cls):
-        """
-        Delete all cache keys from database.
-        Should only be run when all instances are down and all entries thus stale.
-        """
-        cls.query().delete()
-        Session().commit()
-
-    @classmethod
-    def _get_cache_key(cls, key):
-        """
-        Wrapper for generating a unique cache key for this instance and "key".
-        The key must (and will) start with a repo_name, which is stored in .cache_args.
-        """
-        import kallithea
-        prefix = kallithea.CONFIG.get('instance_id', '')
-        return "%s%s" % (prefix, key)
-
-    @classmethod
-    def set_invalidate(cls, repo_name, delete=False):
-        """
-        Mark all caches of a repo as invalid in the database.
-        """
-        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-
-        try:
-            for inv_obj in inv_objs:
-                log.debug('marking %s key for invalidation based on repo_name=%s',
-                          inv_obj, safe_str(repo_name))
-                if delete:
-                    Session().delete(inv_obj)
-                else:
-                    inv_obj.cache_active = False
-                    Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
-        """
-        Mark this cache key as active and currently cached.
-        Return True if the existing cache registration still was valid.
-        Return False to indicate that it had been invalidated and caches should be refreshed.
-        """
-
-        key = (repo_name + '_' + kind) if kind else repo_name
-        cache_key = cls._get_cache_key(key)
-
-        if valid_cache_keys and cache_key in valid_cache_keys:
-            return True
-
-        try:
-            inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
-            if not inv_obj:
-                inv_obj = CacheInvalidation(cache_key, repo_name)
-            was_valid = inv_obj.cache_active
-            inv_obj.cache_active = True
-            Session().add(inv_obj)
-            Session().commit()
-            return was_valid
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-            return False
-
-    @classmethod
-    def get_valid_cache_keys(cls):
-        """
-        Return an opaque object with information about which caches are still
-        valid and can be used without checking for invalidation.
-        """
-        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
-
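
Read as a whole, the CacheInvalidation class above implements a simple handshake: cache keys are the repo name (plus an optional kind suffix) prefixed with the configured instance_id, and cache_active records whether the cached data may still be trusted. The following standalone sketch reduces the database to a plain dict so the flow can be run directly; fake_db, INSTANCE_ID and the substring match in set_invalidate are simplifications, not Kallithea APIs:

# Standalone sketch of the invalidation handshake modelled by CacheInvalidation.
INSTANCE_ID = 'inst1-'     # stands in for kallithea.CONFIG.get('instance_id', '')
fake_db = {}               # cache_key -> cache_active flag

def _get_cache_key(key):
    return INSTANCE_ID + key

def test_and_set_valid(repo_name, kind=None):
    """Return True if the cache was still valid, and mark it active."""
    key = _get_cache_key(repo_name + '_' + kind if kind else repo_name)
    was_valid = fake_db.get(key, False)
    fake_db[key] = True
    return was_valid

def set_invalidate(repo_name):
    """Mark every cache entry belonging to repo_name as stale."""
    for key in fake_db:
        if repo_name in key:        # the real code filters on cache_args instead
            fake_db[key] = False

print(test_and_set_valid('myrepo'))   # False: first use, cache must be built
print(test_and_set_valid('myrepo'))   # True: registration still valid
set_invalidate('myrepo')
print(test_and_set_valid('myrepo'))   # False: invalidated, caches must be refreshed
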
-
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Return the users associated with this ChangesetComment, i.e. those
-        who actually commented.
-
-        :param revision:
-        :param pull_request_id:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
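
get_status_lbl above is nothing more than a lookup into the STATUSES pairs, with the lazy translation markers resolved at render time. A quick illustration using plain strings in place of the lazy_ugettext wrappers:

# Label lookup as done by ChangesetStatus.get_status_lbl (translations omitted).
STATUSES = [
    ('not_reviewed', 'Not Reviewed'),
    ('approved', 'Approved'),
    ('rejected', 'Rejected'),
    ('under_review', 'Under Review'),
]
print(dict(STATUSES).get('approved'))   # -> 'Approved'
print(dict(STATUSES).get('bogus'))      # -> None for unknown values
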
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    # values for .status
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
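
The hybrid property above packs the revision list into a single colon-separated text column; with UnicodeText(20500) and 40-character changeset hashes that is where the "500 revisions max" comment comes from. A quick round-trip illustration (the hash values are made up):

# Round trip of the colon-joined storage used for PullRequest.revisions.
revisions = ['a' * 40, 'b' * 40, 'c' * 40]   # illustrative changeset hashes
stored = ':'.join(revisions)                 # what ends up in the 'revisions' column
assert stored.split(':') == revisions
print(len(stored))                           # -> 122 characters for three revisions
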
-    @property
-    def org_ref_parts(self):
-        return self.org_ref.split(':')
-
-    @property
-    def other_ref_parts(self):
-        return self.other_ref.split(':')
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    @property
-    def last_review_status(self):
-        return self.statuses[-1].status if self.statuses else ''
-
-    def __json__(self):
-        return dict(
-            revisions=self.revisions
-        )
-
-
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class Gist(Base, BaseModel):
-    __tablename__ = 'gists'
-    __table_args__ = (
-        Index('g_gist_access_id_idx', 'gist_access_id'),
-        Index('g_created_on_idx', 'created_on'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    GIST_PUBLIC = u'public'
-    GIST_PRIVATE = u'private'
-
-    gist_id = Column('gist_id', Integer(), primary_key=True)
-    gist_access_id = Column('gist_access_id', Unicode(250))
-    gist_description = Column('gist_description', UnicodeText(1024))
-    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
-    gist_expires = Column('gist_expires', Float(53), nullable=False)
-    gist_type = Column('gist_type', Unicode(128), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    owner = relationship('User')
-
-    @classmethod
-    def get_or_404(cls, id_):
-        res = cls.query().filter(cls.gist_access_id == id_).scalar()
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def get_by_access_id(cls, gist_access_id):
-        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
-
-    def gist_url(self):
-        import kallithea
-        alias_url = kallithea.CONFIG.get('gist_alias_url')
-        if alias_url:
-            return alias_url.replace('{gistid}', self.gist_access_id)
-
-        import kallithea.lib.helpers as h
-        return h.canonical_url('gist', gist_id=self.gist_access_id)
-
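
gist_url above prefers a configured gist_alias_url, substituting the gist's access id for the literal {gistid} token, and otherwise falls back to the canonical application URL. The substitution itself is a plain string replace (the URL and id below are made up):

# The {gistid} substitution performed by Gist.gist_url() when an alias URL is set.
gist_alias_url = 'https://gist.example.com/{gistid}'   # hypothetical config value
gist_access_id = 'a1b2c3'
print(gist_alias_url.replace('{gistid}', gist_access_id))  # -> https://gist.example.com/a1b2c3
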
-    @classmethod
-    def base_path(cls):
-        """
-        Return the base path where all gists are stored.
-        """
-        from kallithea.model.gist import GIST_STORE_LOC
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == URL_SEP)
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
-
-    def get_api_data(self):
-        """
-        Common function for generating gist related data for API
-        """
-        gist = self
-        data = dict(
-            gist_id=gist.gist_id,
-            type=gist.gist_type,
-            access_id=gist.gist_access_id,
-            description=gist.gist_description,
-            url=gist.gist_url(),
-            expires=gist.gist_expires,
-            created_on=gist.created_on,
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-        )
-        data.update(self.get_api_data())
-        return data
-    ## SCM functions
-
-    @property
-    def scm_instance(self):
-        from kallithea.lib.vcs import get_repo
-        base_path = self.base_path()
-        return get_repo(os.path.join(*map(safe_str,
-                                          [base_path, self.gist_access_id])))
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
--- a/kallithea/lib/dbmigrate/schema/db_2_0_2.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2352 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_2_0_2
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import time
-import logging
-import datetime
-import traceback
-import hashlib
-import collections
-import functools
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_prefix, time_to_datetime, aslist, Optional, safe_int
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-from kallithea import DB_PREFIX
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
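
get_dict walks the mapped columns and then merges in whatever __json__ returns, which is how subclasses add computed fields to their API output. A toy version without SQLAlchemy, where the Example class and its _columns tuple are invented purely for the demonstration:

# Toy version of the get_dict() / __json__() merge, without SQLAlchemy.
class Example(object):
    _columns = ('user_id', 'username')   # stands in for the mapped column names

    def __init__(self, user_id, username):
        self.user_id = user_id
        self.username = username

    def __json__(self):
        return {'display_name': self.username.title()}

    def get_dict(self):
        d = dict((k, getattr(self, k)) for k in self._columns)
        json_attr = getattr(self, '__json__', None)
        if json_attr:
            d.update(json_attr() if callable(json_attr) else json_attr)
        return d

print(Example(1, 'marcin').get_dict())   # columns plus the extra display_name field
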
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        # deprecated and left for backward compatibility
-        return cls.get_all()
-
-    @classmethod
-    def get_all(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            try:
-                return safe_str(self.__unicode__())
-            except UnicodeDecodeError:
-                pass
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
-class Setting(Base, BaseModel):
-    SETTINGS_TYPES = {
-        'str': safe_str,
-        'int': safe_int,
-        'unicode': safe_unicode,
-        'bool': str2bool,
-        'list': functools.partial(aslist, sep=',')
-    }
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(4096, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_type = Column("app_settings_type", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, key='', val='', type='unicode'):
-        self.app_settings_name = key
-        self.app_settings_value = val
-        self.app_settings_type = type
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        _type = self.app_settings_type
-        converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
-        return converter(v)
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    @hybrid_property
-    def app_settings_type(self):
-        return self._app_settings_type
-
-    @app_settings_type.setter
-    def app_settings_type(self, val):
-        if val not in self.SETTINGS_TYPES:
-            raise Exception('type must be one of %s, got %s'
-                            % (self.SETTINGS_TYPES.keys(), val))
-        self._app_settings_type = val
-
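
The net effect of SETTINGS_TYPES and the two hybrid properties above is that every value is stored as text and converted back on read according to app_settings_type. A reduced sketch of that read path; str2bool and the list converter are simplified stand-ins for the helpers imported from kallithea.lib.utils2:

# Reduced model of the typed read path behind Setting.app_settings_value.
def str2bool(v):                    # simplified stand-in for kallithea.lib.utils2.str2bool
    return str(v).lower() in ('true', 'yes', 'on', '1')

SETTINGS_TYPES = {
    'str': str,
    'int': int,
    'unicode': str,                 # unicode on Python 2
    'bool': str2bool,
    'list': lambda v: [x.strip() for x in v.split(',') if x.strip()],
}

def read_setting(raw_value, type_):
    converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
    return converter(raw_value)

print(read_setting(u'10', 'int'))        # -> 10
print(read_setting(u'false', 'bool'))    # -> False
print(read_setting(u'a, b ,c', 'list'))  # -> ['a', 'b', 'c']
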
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value, self.app_settings_type
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key, val='', type='unicode'):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key, val, type)
-        return res
-
-    @classmethod
-    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
-        """
-        Create or update a Kallithea setting. If an update is triggered, only
-        parameters that are explicitly set are updated; any Optional instance is skipped.
-
-        :param key:
-        :param val:
-        :param type:
-        :return:
-        """
-        res = cls.get_by_name(key)
-        if not res:
-            val = Optional.extract(val)
-            type = Optional.extract(type)
-            res = cls(key, val, type)
-        else:
-            res.app_settings_name = key
-            if not isinstance(val, Optional):
-                # update if set
-                res.app_settings_value = val
-            if not isinstance(type, Optional):
-                # update if set
-                res.app_settings_type = type
-        return res
-
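
The Optional wrapper used by create_or_update is what lets callers say "only touch this field if I actually passed it". A self-contained sketch of that pattern, where Optional is a simplified stand-in for kallithea.lib.utils2.Optional and the settings "store" is just a dict:

# Simplified model of the Optional handling in Setting.create_or_update.
class Optional(object):
    def __init__(self, default):
        self.default = default

    @staticmethod
    def extract(val):
        return val.default if isinstance(val, Optional) else val

def create_or_update(store, key, val=Optional(''), type_=Optional('unicode')):
    if key not in store:
        # new setting: unwrap the defaults
        store[key] = [Optional.extract(val), Optional.extract(type_)]
    else:
        if not isinstance(val, Optional):
            store[key][0] = val       # update only if explicitly set
        if not isinstance(type_, Optional):
            store[key][1] = type_
    return store[key]

settings = {}
print(create_or_update(settings, 'title', u'My site'))  # -> ['My site', 'unicode']
print(create_or_update(settings, 'title'))              # unchanged: defaults were Optional
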
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_auth_plugins(cls, cache=False):
-        auth_plugins = cls.get_by_name("auth_plugins").app_settings_value
-        return auth_plugins
-
-    @classmethod
-    def get_auth_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('auth_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_server_info(cls):
-        import pkg_resources
-        import platform
-        import kallithea
-        from kallithea.lib.utils import check_git_version
-        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
-        info = {
-            'modules': sorted(mods, key=lambda k: k[0].lower()),
-            'py_version': platform.python_version(),
-            'platform': safe_unicode(platform.platform()),
-            'kallithea_version': kallithea.__version__,
-            'git_version': safe_unicode(check_git_version()),
-            'git_path': kallithea.CONFIG.get('git_path')
-        }
-        return info
-
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    # def __init__(self, section='', key='', value=''):
-    #     self.ui_section = section
-    #     self.ui_key = key
-    #     self.ui_value = value
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
-                                   self.ui_value)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    DEFAULT_USER = 'default'
-
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    extern_type = Column("extern_type", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    extern_name = Column("extern_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    # notifications assigned to this user
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications created by this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    # extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def ip_addresses(self):
-        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
-        return [x.ip_addr for x in ret]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    @property
-    def AuthUser(self):
-        """
-        Returns instance of AuthUser for this user
-        """
-        from kallithea.lib.auth import AuthUser
-        return AuthUser(user_id=self.user_id, api_key=self.api_key,
-                        username=self.username)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        return q.scalar()
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    @classmethod
-    def get_from_cs_author(cls, author):
-        """
-        Try to get a User object out of a commit author string.
-
-        :param author:
-        """
-        from kallithea.lib.helpers import email, author_name
-        # If the author string contains a valid email, see if that user is in the system
-        _email = email(author)
-        if _email:
-            user = cls.get_by_email(_email, case_insensitive=True)
-            if user:
-                return user
-        # Maybe we can match by username?
-        _author = author_name(author)
-        user = cls.get_by_username(_author, case_insensitive=True)
-        if user:
-            return user
-
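
get_from_cs_author first tries the email embedded in the commit author string and only then falls back to matching the bare author name as a username; both lookups go through the ORM. A toy version against an in-memory user list, where email_of and author_name are crude stand-ins for the kallithea.lib.helpers email and author_name helpers:

# Toy author matching mirroring the email-then-username fallback above.
users = [
    {'username': 'marcink', 'email': 'marcin@example.com'},
    {'username': 'guest', 'email': 'guest@example.com'},
]

def email_of(author):
    # crude stand-in for kallithea.lib.helpers.email()
    if '<' in author and '>' in author:
        return author[author.index('<') + 1:author.index('>')].lower()
    return None

def author_name(author):
    # crude stand-in for kallithea.lib.helpers.author_name()
    return author.split('<')[0].strip()

def get_from_cs_author(author):
    addr = email_of(author)
    if addr:
        for u in users:
            if u['email'] == addr:
                return u
    name = author_name(author).lower()
    for u in users:
        if u['username'].lower() == name:
            return u

print(get_from_cs_author('Marcin K <marcin@example.com>'))  # matched by email
print(get_from_cs_author('guest'))                          # matched by username
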
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    @classmethod
-    def get_first_admin(cls):
-        user = User.query().filter(User.admin == True).first()
-        if user is None:
-            raise Exception('Missing administrative account!')
-        return user
-
-    @classmethod
-    def get_default_user(cls, cache=False):
-        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
-        if user is None:
-            raise Exception('Missing default account!')
-        return user
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            active=user.active,
-            admin=user.admin,
-            extern_type=user.extern_type,
-            extern_name=user.extern_name,
-            last_login=user.last_login,
-            ip_addresses=user.ip_addresses
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check that this email is not already the user's main email
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is already present in the user table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserIpMap(Base, BaseModel):
-    __tablename__ = 'user_ip_map'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'ip_addr'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    user = relationship('User', lazy='joined')
-
-    @classmethod
-    def _get_ip_range(cls, ip_addr):
-        from kallithea.lib import ipaddr
-        net = ipaddr.IPNetwork(address=ip_addr)
-        return [str(net.network), str(net.broadcast)]
-
-    def __json__(self):
-        return dict(
-          ip_addr=self.ip_addr,
-          ip_range=self._get_ip_range(self.ip_addr)
-        )
-
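
_get_ip_range above expands an address or CIDR block into its network and broadcast addresses using the ipaddr module bundled with Kallithea. The Python 3 standard library ipaddress module exposes the same information, so a rough equivalent looks like this (a sketch, not the code the class actually runs):

# Rough stdlib equivalent of UserIpMap._get_ip_range (uses ipaddress, not kallithea.lib.ipaddr).
import ipaddress

def get_ip_range(ip_addr):
    net = ipaddress.ip_network(u'%s' % ip_addr, strict=False)
    return [str(net.network_address), str(net.broadcast_address)]

print(get_ip_range(u'192.168.1.0/24'))  # -> ['192.168.1.0', '192.168.1.255']
print(get_ip_range(u'10.0.0.1'))        # single host -> ['10.0.0.1', '10.0.0.1']
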
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.repository_name,
-                                      self.action)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    user_group_description = Column("user_group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
-    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
-
-    user = relationship('User')
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.users_group_id,
-                                      self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, user_group_id, cache=False):
-        user_group = cls.query()
-        if cache:
-            user_group = user_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % user_group_id))
-        return user_group.get(user_group_id)
-
-    def get_api_data(self, with_members=True):
-        user_group = self
-
-        data = dict(
-            users_group_id=user_group.users_group_id,
-            group_name=user_group.users_group_name,
-            group_description=user_group.user_group_description,
-            active=user_group.users_group_active,
-            owner=user_group.user.username,
-        )
-        if with_members:
-            members = []
-            for user in user_group.members:
-                user = user.user
-                members.append(user.get_api_data())
-            data['members'] = members
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class RepositoryField(Base, BaseModel):
-    __tablename__ = 'repositories_fields'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PREFIX = 'ex_'  # prefix used in forms to avoid conflicts with already existing fields
-
-    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-    field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None))
-    field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_type = Column("field_type", String(256), nullable=False, unique=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repository = relationship('Repository')
-
-    @property
-    def field_key_prefixed(self):
-        return '%s%s' % (self.PREFIX, self.field_key)
-
-    @classmethod
-    def un_prefix_key(cls, key):
-        if key.startswith(cls.PREFIX):
-            return key[len(cls.PREFIX):]
-        return key
-
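For illustration, the PREFIX handling above round-trips form keys like this (hypothetical key name, assuming the class definition above):

    form_key = RepositoryField.PREFIX + 'ticket_url'   # 'ex_ticket_url', as rendered in forms
    assert RepositoryField.un_prefix_key(form_key) == 'ticket_url'
    assert RepositoryField.un_prefix_key('ticket_url') == 'ticket_url'   # unprefixed keys pass through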
-    @classmethod
-    def get_by_key_name(cls, key, repo):
-        row = cls.query() \
-                .filter(cls.repository == repo) \
-                .filter(cls.field_key == key).scalar()
-        return row
-
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    _landing_revision = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-    extra_fields = relationship('RepositoryField',
-                                cascade="all, delete, delete-orphan")
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
-
-    @hybrid_property
-    def landing_rev(self):
-        # should always return [rev_type, rev]
-        if self._landing_revision:
-            _rev_info = self._landing_revision.split(':')
-            if len(_rev_info) < 2:
-                _rev_info.insert(0, 'rev')
-            return [_rev_info[0], _rev_info[1]]
-        return [None, None]
-
-    @landing_rev.setter
-    def landing_rev(self, val):
-        if ':' not in val:
-            raise ValueError('value must be delimited with `:` and consist '
-                             'of <rev_type>:<rev>, got %s instead' % val)
-        self._landing_revision = val
-
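As a usage sketch (hypothetical values, detached instance): the landing revision is stored as a single `<rev_type>:<rev>` string and read back as a two-element list.

    repo = Repository()
    repo.landing_rev = 'branch:default'    # stored verbatim in _landing_revision
    assert repo.landing_rev == ['branch', 'default']
    repo._landing_revision = 'tip'         # legacy value without a rev_type prefix
    assert repo.landing_rev == ['rev', 'tip']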
-    @hybrid_property
-    def locked(self):
-        # should always return [user_id, timelocked]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
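Similarly, the lock state is serialized as `<user_id>:<timestamp>`; a sketch with hypothetical values:

    repo = Repository()
    repo.locked = [2, 1463575200.0]    # serialized to '2:1463575200.0'
    user_id, lock_time = repo.locked   # (2, '1463575200.0') -- the timestamp comes back as a string
    repo.locked = None                 # clears the lock; repo.locked is then [None, None]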
-    @hybrid_property
-    def changeset_cache(self):
-        from kallithea.lib.vcs.backends.base import EmptyChangeset
-        dummy = EmptyChangeset().__json__()
-        if not self._changeset_cache:
-            return dummy
-        try:
-            return json.loads(self._changeset_cache)
-        except TypeError:
-            return dummy
-
-    @changeset_cache.setter
-    def changeset_cache(self, val):
-        try:
-            self._changeset_cache = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def normalize_repo_name(cls, repo_name):
-        """
-        Normalizes an OS-specific repo_name to the format stored internally in the
-        database, using URL_SEP
-
-        :param cls:
-        :param repo_name:
-        """
-        return cls.url_sep().join(repo_name.split(os.sep))
-
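For example (hypothetical name, assuming URL_SEP is '/' as defined earlier in the module):

    name = os.sep.join(['projects', 'backend', 'api'])    # e.g. 'projects\backend\api' on Windows
    stored = Repository.normalize_repo_name(name)         # -> 'projects/backend/api'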
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name, self.repo_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*map(safe_unicode, p))
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        returns the new full repository name based on the assigned group and the new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None,
-            enable_statistics=repo.enable_statistics,
-            enable_locking=repo.enable_locking,
-            enable_downloads=repo.enable_downloads,
-            last_changeset=repo.changeset_cache,
-            locked_by=User.get(self.locked[0]).get_api_data() \
-                if self.locked[0] else None,
-            locked_date=time_to_datetime(self.locked[1]) \
-                if self.locked[1] else None
-        )
-        rc_config = Setting.get_app_settings()
-        repository_fields = str2bool(rc_config.get('repository_fields'))
-        if repository_fields:
-            for f in self.extra_fields:
-                data[f.field_key_prefixed] = f.field_value
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id, lock_time=None):
-        if not lock_time:
-            lock_time = time.time()
-        repo.locked = [user_id, lock_time]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def getlock(cls, repo):
-        return repo.locked
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    def clone_url(self, **override):
-        import kallithea.lib.helpers as h
-        from urlparse import urlparse
-        import urllib
-        parsed_url = urlparse(h.canonical_url('home'))
-        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
-        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
-        args = {
-           'user': '',
-           'pass': '',
-           'scheme': parsed_url.scheme,
-           'netloc': parsed_url.netloc,
-           'prefix': decoded_path,
-           'path': self.repo_name
-        }
-
-        args.update(override)
-        return default_clone_uri % args
-
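A rough sketch of how the override mechanism above composes the clone URL (hypothetical values, with h.canonical_url left out):

    default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
    args = {'user': '', 'pass': '', 'scheme': 'https', 'netloc': 'example.com',
            'prefix': '/', 'path': 'group/repo'}
    args.update({'user': 'john', 'pass': '@'})   # as passed via **override
    # default_clone_uri % args == 'https://john@example.com/group/repo'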
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns the landing changeset, or the tip if the landing changeset doesn't exist
-        """
-        _rev_type, _rev = self.landing_rev
-        cs = self.get_changeset(_rev)
-        if isinstance(cs, EmptyChangeset):
-            return self.get_changeset()
-        return cs
-
-    def update_changeset_cache(self, cs_cache=None):
-        """
-        Update cache of last changeset for repository, keys should be::
-
-            short_id
-            raw_id
-            revision
-            message
-            date
-            author
-
-        :param cs_cache:
-        """
-        from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
-        if cs_cache is None:
-            cs_cache = EmptyChangeset()
-            # use no-cache version here
-            scm_repo = self.scm_instance_no_cache()
-            if scm_repo:
-                cs_cache = scm_repo.get_changeset()
-
-        if isinstance(cs_cache, BaseChangeset):
-            cs_cache = cs_cache.__json__()
-
-        if (cs_cache != self.changeset_cache or not self.changeset_cache):
-            _default = datetime.datetime.fromtimestamp(0)
-            last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
-            self.changeset_cache = cs_cache
-            Session().add(self)
-            Session().commit()
-        else:
-            log.debug('Skipping repo:%s already with latest changes',
-                      self.repo_name)
-
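As an illustration, cs_cache is a plain dict with the keys listed in the docstring; a sketch with hypothetical values (assumes an attached Repository instance `repo`, an active session, and that the module's json helper can serialize the datetime value, as real changeset metadata contains one):

    cs_cache = {
        'short_id': 'abcdef012345',
        'raw_id': 'abcdef0123456789abcdef0123456789abcdef01',
        'revision': 42,
        'message': 'fix login redirect',
        'date': datetime.datetime(2016, 5, 18, 14, 34),
        'author': 'Jane Doe <jane@example.com>',
    }
    repo.update_changeset_cache(cs_cache)   # persists the cache and sets updated_on to the 'date' value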
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = collections.defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have a new open pull request without a status?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    def _repo_size(self):
-        from kallithea.lib import helpers as h
-        log.debug('calculating repository size...')
-        return h.format_byte_size(self.scm_instance.size)
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    def set_invalidate(self):
-        """
-        Mark caches of this repo as invalid.
-        """
-        CacheInvalidation.set_invalidate(self.repo_name)
-
-    def scm_instance_no_cache(self):
-        return self.__get_instance()
-
-    @property
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, valid_cache_keys=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-
-        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
-        if not valid:
-            log.debug('Cache for %s invalidated, getting new object', rn)
-            region_invalidate(_c, None, rn)
-        else:
-            log.debug('Getting obj for %s from cache', rn)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository from %s',
-                      alias, repo_full_path)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the database but not on the '
-                      'filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-    def __json__(self):
-        return dict(landing_rev=self.landing_rev)
-
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-    user = relationship('User')
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                      self.group_name)
-
-    @classmethod
-    def groups_choices(cls, groups=None, show_empty_group=True):
-        from webhelpers.html import literal as _literal
-        if not groups:
-            groups = cls.query().all()
-
-        repo_groups = []
-        if show_empty_group:
-            repo_groups = [('-1', u'-- %s --' % _('top level'))]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s levels',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def _recursive_objects(self, include_repos=True):
-        all_ = []
-
-        def _get_members(root_gr):
-            if include_repos:
-                for r in root_gr.repositories:
-                    all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively returns all groups, with the repositories in those groups
-        """
-        return self._recursive_objects()
-
-    def recursive_groups(self):
-        """
-        Returns all child groups of this group, including children of children
-        """
-        return self._recursive_objects(include_repos=False)
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-    def get_api_data(self):
-        """
-        Common function for generating api data
-
-        """
-        group = self
-        data = dict(
-            group_id=group.group_id,
-            group_name=group.group_name,
-            group_description=group.group_description,
-            parent_group=group.parent_group.group_name if group.parent_group else None,
-            repositories=[x.repo_name for x in group.repositories],
-            owner=group.user.username
-        )
-        return data
-
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PERMS = [
-        ('hg.admin', _('Kallithea Administrator')),
-
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository group no access')),
-        ('group.read', _('Repository group read access')),
-        ('group.write', _('Repository group write access')),
-        ('group.admin', _('Repository group admin access')),
-
-        ('usergroup.none', _('User group no access')),
-        ('usergroup.read', _('User group read access')),
-        ('usergroup.write', _('User group write access')),
-        ('usergroup.admin', _('User group admin access')),
-
-        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
-        ('hg.repogroup.create.true', _('Repository Group creation enabled')),
-
-        ('hg.usergroup.create.false', _('User Group creation disabled')),
-        ('hg.usergroup.create.true', _('User Group creation enabled')),
-
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-
-        ('hg.register.none', _('Registration disabled')),
-        ('hg.register.manual_activate', _('User Registration with manual account activation')),
-        ('hg.register.auto_activate', _('User Registration with automatic account activation')),
-
-        ('hg.extern_activate.manual', _('Manual activation of external account')),
-        ('hg.extern_activate.auto', _('Automatic activation of external account')),
-
-    ]
-
-    # definition of system default permissions for the DEFAULT user
-    DEFAULT_USER_PERMISSIONS = [
-        'repository.read',
-        'group.read',
-        'usergroup.read',
-        'hg.create.repository',
-        'hg.fork.repository',
-        'hg.register.manual_activate',
-        'hg.extern_activate.auto',
-    ]
-
-    # Weight defines which permissions are more important;
-    # the higher the number, the more important the permission.
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'usergroup.none': 0,
-        'usergroup.read': 1,
-        'usergroup.write': 3,
-        'usergroup.admin': 4,
-        'hg.repogroup.create.false': 0,
-        'hg.repogroup.create.true': 1,
-
-        'hg.usergroup.create.false': 0,
-        'hg.usergroup.create.true': 1,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1
-    }
-
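The weights let permission resolution keep the strongest of several applicable permissions; roughly (a sketch with a hypothetical helper, not code from the removed module):

    def strongest(perms):
        # pick the permission with the highest weight, e.g. among user, group and default perms
        return max(perms, key=Permission.PERM_WEIGHTS.get)

    assert strongest(['repository.read', 'repository.write']) == 'repository.write'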
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_user_group_perms(cls, default_user_id):
-        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
-         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
-         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserUserGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.repository)
-
-
-class UserUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    user_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, user_group, permission):
-        n = cls()
-        n.user = user
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.user_group)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.permission)
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_group_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
-        CheckConstraint('target_user_group_id != user_group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
-    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, target_user_group, user_group, permission):
-        n = cls()
-        n.target_user_group = target_user_group
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupUserGroupToPerm:%s => %s >' % (self.target_user_group, self.user_group)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    # cache_id, not used
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    # cache_key as created by _get_cache_key
-    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # cache_args is a repo_name
-    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # instance sets cache_active True when it is caching,
-    # other instances set cache_active to False to indicate that this cache is invalid
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, repo_name=''):
-        self.cache_key = cache_key
-        self.cache_args = repo_name
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (self.__class__.__name__,
-                            self.cache_id, self.cache_key, self.cache_active)
-
-    def _cache_key_partition(self):
-        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
-        return prefix, repo_name, suffix
-
-    def get_prefix(self):
-        """
-        get prefix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[0]
-
-    def get_suffix(self):
-        """
-        get suffix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[2]
-
-    @classmethod
-    def clear_cache(cls):
-        """
-        Delete all cache keys from database.
-        Should only be run when all instances are down and all entries thus stale.
-        """
-        cls.query().delete()
-        Session().commit()
-
-    @classmethod
-    def _get_cache_key(cls, key):
-        """
-        Wrapper for generating a unique cache key for this instance and "key".
-        The key must (and will) start with a repo_name, which will be stored in .cache_args.
-        """
-        import kallithea
-        prefix = kallithea.CONFIG.get('instance_id', '')
-        return "%s%s" % (prefix, key)
-
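For example (hypothetical instance_id):

    # assuming kallithea.CONFIG['instance_id'] == 'web1-':
    CacheInvalidation._get_cache_key('group/repo_README')   # -> 'web1-group/repo_README'
    # with no instance_id configured, the key is returned unchanged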
-    @classmethod
-    def set_invalidate(cls, repo_name, delete=False):
-        """
-        Mark all caches of a repo as invalid in the database.
-        """
-        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-
-        try:
-            for inv_obj in inv_objs:
-                log.debug('marking %s key for invalidation based on repo_name=%s',
-                          inv_obj, safe_str(repo_name))
-                if delete:
-                    Session().delete(inv_obj)
-                else:
-                    inv_obj.cache_active = False
-                    Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
-        """
-        Mark this cache key as active and currently cached.
-        Return True if the existing cache registration still was valid.
-        Return False to indicate that it had been invalidated and caches should be refreshed.
-        """
-
-        key = (repo_name + '_' + kind) if kind else repo_name
-        cache_key = cls._get_cache_key(key)
-
-        if valid_cache_keys and cache_key in valid_cache_keys:
-            return True
-
-        try:
-            inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
-            if not inv_obj:
-                inv_obj = CacheInvalidation(cache_key, repo_name)
-            was_valid = inv_obj.cache_active
-            inv_obj.cache_active = True
-            Session().add(inv_obj)
-            Session().commit()
-            return was_valid
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-            return False
-
-    @classmethod
-    def get_valid_cache_keys(cls):
-        """
-        Return opaque object with information of which caches still are valid
-        and can be used without checking for invalidation.
-        """
-        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
-
-
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns users associated with this ChangesetComment, i.e. those
-        who actually commented
-
-        :param cls:
-        :param revision:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
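As an aside on the model above: a changeset's review state is stored as one of the STATUS_* string constants, and get_status_lbl resolves such a value to its display label by building a dict over the STATUSES pairs. A minimal standalone sketch of that lookup (labels are shown as plain strings here; the original wraps them in the lazy_ugettext helper, and no database access is involved):

    # Illustrative sketch only: mirrors ChangesetStatus.get_status_lbl()
    STATUSES = [
        ('not_reviewed', "Not Reviewed"),   # default, no icon
        ('approved', "Approved"),
        ('rejected', "Rejected"),
        ('under_review', "Under Review"),
    ]

    def get_status_lbl(value):
        # unknown values simply yield None
        return dict(STATUSES).get(value)

    assert get_status_lbl('approved') == "Approved"
    assert get_status_lbl('bogus') is None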
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    # values for .status
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
-    @property
-    def org_ref_parts(self):
-        return self.org_ref.split(':')
-
-    @property
-    def other_ref_parts(self):
-        return self.other_ref.split(':')
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    @property
-    def last_review_status(self):
-        return self.statuses[-1].status if self.statuses else ''
-
-    def __json__(self):
-        return dict(
-            revisions=self.revisions
-        )
-
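Worth noting about the model above: the list of revisions in a pull request is persisted as a single colon-joined text column (roughly 500 revisions fit in the column, per the source comment), and the revisions hybrid property joins and splits that string on access. A minimal sketch of the round trip, with made-up revision hashes and no ORM involved:

    # Illustrative sketch only: the colon-joined storage behind
    # PullRequest._revisions and the `revisions` hybrid property.
    def pack_revisions(revs):
        return ':'.join(revs)       # what the setter writes to the text column

    def unpack_revisions(raw):
        return raw.split(':')       # what the getter returns

    revs = ['deadbeefcafe', '0123456789ab']
    assert unpack_revisions(pack_revisions(revs)) == revs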
-
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class Gist(Base, BaseModel):
-    __tablename__ = 'gists'
-    __table_args__ = (
-        Index('g_gist_access_id_idx', 'gist_access_id'),
-        Index('g_created_on_idx', 'created_on'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    GIST_PUBLIC = u'public'
-    GIST_PRIVATE = u'private'
-
-    gist_id = Column('gist_id', Integer(), primary_key=True)
-    gist_access_id = Column('gist_access_id', Unicode(250))
-    gist_description = Column('gist_description', UnicodeText(1024))
-    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
-    gist_expires = Column('gist_expires', Float(53), nullable=False)
-    gist_type = Column('gist_type', Unicode(128), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    owner = relationship('User')
-
-    @classmethod
-    def get_or_404(cls, id_):
-        res = cls.query().filter(cls.gist_access_id == id_).scalar()
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def get_by_access_id(cls, gist_access_id):
-        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
-
-    def gist_url(self):
-        import kallithea
-        alias_url = kallithea.CONFIG.get('gist_alias_url')
-        if alias_url:
-            return alias_url.replace('{gistid}', self.gist_access_id)
-
-        import kallithea.lib.helpers as h
-        return h.canonical_url('gist', gist_id=self.gist_access_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all gists are stored
-
-        :param cls:
-        """
-        from kallithea.model.gist import GIST_STORE_LOC
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == URL_SEP)
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
-
-    def get_api_data(self):
-        """
-        Common function for generating gist related data for API
-        """
-        gist = self
-        data = dict(
-            gist_id=gist.gist_id,
-            type=gist.gist_type,
-            access_id=gist.gist_access_id,
-            description=gist.gist_description,
-            url=gist.gist_url(),
-            expires=gist.gist_expires,
-            created_on=gist.created_on,
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-        )
-        data.update(self.get_api_data())
-        return data
-    ## SCM functions
-
-    @property
-    def scm_instance(self):
-        from kallithea.lib.vcs import get_repo
-        base_path = self.base_path()
-        return get_repo(os.path.join(*map(safe_str,
-                                          [base_path, self.gist_access_id])))
-
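A note on Gist.gist_url above: when a gist_alias_url is configured, its '{gistid}' placeholder is replaced with the gist's access id; otherwise the canonical 'gist' route is used. A minimal sketch of just the substitution branch (the alias URL below is a made-up example value):

    # Illustrative sketch only: the alias substitution done in Gist.gist_url().
    def gist_url(gist_access_id, gist_alias_url=None):
        if gist_alias_url:
            return gist_alias_url.replace('{gistid}', gist_access_id)
        # the real method falls back to h.canonical_url('gist', ...) here
        return None

    assert gist_url('abc123', 'https://gist.example.com/{gistid}') \
        == 'https://gist.example.com/abc123'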
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
--- a/kallithea/lib/dbmigrate/schema/db_2_1_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2391 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_2_1_0
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import time
-import logging
-import datetime
-import traceback
-import hashlib
-import collections
-import functools
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_prefix, time_to_datetime, aslist, Optional, safe_int
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-from kallithea import DB_PREFIX
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        # deprecated and left for backward compatibility
-        return cls.get_all()
-
-    @classmethod
-    def get_all(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            try:
-                return safe_str(self.__unicode__())
-            except UnicodeDecodeError:
-                pass
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    SETTINGS_TYPES = {
-        'str': safe_str,
-        'int': safe_int,
-        'unicode': safe_unicode,
-        'bool': str2bool,
-        'list': functools.partial(aslist, sep=',')
-    }
-    DEFAULT_UPDATE_URL = ''
-
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(4096, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _app_settings_type = Column("app_settings_type", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, key='', val='', type='unicode'):
-        self.app_settings_name = key
-        self.app_settings_value = val
-        self.app_settings_type = type
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        _type = self.app_settings_type
-        converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
-        return converter(v)
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    @hybrid_property
-    def app_settings_type(self):
-        return self._app_settings_type
-
-    @app_settings_type.setter
-    def app_settings_type(self, val):
-        if val not in self.SETTINGS_TYPES:
-            raise Exception('type must be one of %s got %s'
-                            % (self.SETTINGS_TYPES.keys(), val))
-        self._app_settings_type = val
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value, self.app_settings_type
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key, val='', type='unicode'):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key, val, type)
-        return res
-
-    @classmethod
-    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
-        """
-        Creates or updates a Kallithea setting. If an update is triggered, only
-        parameters that are explicitly set are updated; Optional instances are skipped.
-
-        :param key:
-        :param val:
-        :param type:
-        :return:
-        """
-        res = cls.get_by_name(key)
-        if not res:
-            val = Optional.extract(val)
-            type = Optional.extract(type)
-            res = cls(key, val, type)
-        else:
-            res.app_settings_name = key
-            if not isinstance(val, Optional):
-                # update if set
-                res.app_settings_value = val
-            if not isinstance(type, Optional):
-                # update if set
-                res.app_settings_type = type
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_auth_plugins(cls, cache=False):
-        auth_plugins = cls.get_by_name("auth_plugins").app_settings_value
-        return auth_plugins
-
-    @classmethod
-    def get_auth_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('auth_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_server_info(cls):
-        import pkg_resources
-        import platform
-        import kallithea
-        from kallithea.lib.utils import check_git_version
-        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
-        info = {
-            'modules': sorted(mods, key=lambda k: k[0].lower()),
-            'py_version': platform.python_version(),
-            'platform': safe_unicode(platform.platform()),
-            'kallithea_version': kallithea.__version__,
-            'git_version': safe_unicode(check_git_version()),
-            'git_path': kallithea.CONFIG.get('git_path')
-        }
-        return info
-
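For context on the Setting model above: each row stores its value as text together with a type name, and the app_settings_value hybrid property converts the raw text on read using the matching entry in SETTINGS_TYPES (falling back to the 'unicode' converter). A minimal Python 2 sketch of that typed read-out, with plain callables standing in for Kallithea's safe_str/safe_unicode/str2bool/aslist helpers:

    # Illustrative sketch only: typed conversion as in Setting.app_settings_value.
    # The converters below are simplified stand-ins, not Kallithea's helpers.
    SETTINGS_TYPES = {
        'str': str,
        'int': int,
        'unicode': unicode,                    # Python 2, as in this schema
        'bool': lambda v: v.strip().lower() in ('true', 'yes', 'on', '1'),
        'list': lambda v: [x.strip() for x in v.split(',')],
    }

    def convert(raw_value, type_name):
        converter = SETTINGS_TYPES.get(type_name) or SETTINGS_TYPES['unicode']
        return converter(raw_value)

    assert convert(u'True', 'bool') is True
    assert convert(u'a, b', 'list') == [u'a', u'b']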
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    # def __init__(self, section='', key='', value=''):
-    #     self.ui_section = section
-    #     self.ui_key = key
-    #     self.ui_value = value
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
-                                    self.ui_key, self.ui_value)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    DEFAULT_USER = 'default'
-    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
-
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    extern_type = Column("extern_type", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    extern_name = Column("extern_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications assigned to this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    #extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def ip_addresses(self):
-        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
-        return [x.ip_addr for x in ret]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    @property
-    def AuthUser(self):
-        """
-        Returns instance of AuthUser for this user
-        """
-        from kallithea.lib.auth import AuthUser
-        return AuthUser(user_id=self.user_id, api_key=self.api_key,
-                        username=self.username)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False, fallback=True):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        res = q.scalar()
-
-        if fallback and not res:
-            #fallback to additional keys
-            _res = UserApiKeys.query() \
-                .filter(UserApiKeys.api_key == api_key) \
-                .filter(or_(UserApiKeys.expires == -1,
-                            UserApiKeys.expires >= time.time())) \
-                .first()
-            if _res:
-                res = _res.user
-        return res
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    @classmethod
-    def get_from_cs_author(cls, author):
-        """
-        Tries to get User objects out of commit author string
-
-        :param author:
-        """
-        from kallithea.lib.helpers import email, author_name
-        # If a valid email is in the passed attribute, see if it's in the system
-        _email = email(author)
-        if _email:
-            user = cls.get_by_email(_email, case_insensitive=True)
-            if user:
-                return user
-        # Maybe we can match by username?
-        _author = author_name(author)
-        user = cls.get_by_username(_author, case_insensitive=True)
-        if user:
-            return user
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    @classmethod
-    def get_first_admin(cls):
-        user = User.query().filter(User.admin == True).first()
-        if user is None:
-            raise Exception('Missing administrative account!')
-        return user
-
-    @classmethod
-    def get_default_user(cls, cache=False):
-        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
-        if user is None:
-            raise Exception('Missing default account!')
-        return user
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            active=user.active,
-            admin=user.admin,
-            extern_type=user.extern_type,
-            extern_name=user.extern_name,
-            last_login=user.last_login,
-            ip_addresses=user.ip_addresses
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
-
-class UserApiKeys(Base, BaseModel):
-    __tablename__ = 'user_api_keys'
-    __table_args__ = (
-        Index('uak_api_key_idx', 'api_key'),
-        UniqueConstraint('api_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True)
-    description = Column('description', UnicodeText(1024))
-    expires = Column('expires', Float(53), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    user = relationship('User', lazy='joined')
-
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check that this email is not the main one
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is present in the user table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserIpMap(Base, BaseModel):
-    __tablename__ = 'user_ip_map'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'ip_addr'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    user = relationship('User', lazy='joined')
-
-    @classmethod
-    def _get_ip_range(cls, ip_addr):
-        from kallithea.lib import ipaddr
-        net = ipaddr.IPNetwork(address=ip_addr)
-        return [str(net.network), str(net.broadcast)]
-
-    def __json__(self):
-        return dict(
-          ip_addr=self.ip_addr,
-          ip_range=self._get_ip_range(self.ip_addr)
-        )
-
-    def __unicode__(self):
-        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
-                                            self.user_id, self.ip_addr)
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.repository_name,
-                                      self.action)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    user_group_description = Column("user_group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    user_user_group_to_perm = relationship('UserUserGroupToPerm ', cascade='all')
-    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
-
-    user = relationship('User')
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.users_group_id,
-                                      self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, user_group_id, cache=False):
-        user_group = cls.query()
-        if cache:
-            user_group = user_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % user_group_id))
-        return user_group.get(user_group_id)
-
-    def get_api_data(self, with_members=True):
-        user_group = self
-
-        data = dict(
-            users_group_id=user_group.users_group_id,
-            group_name=user_group.users_group_name,
-            group_description=user_group.user_group_description,
-            active=user_group.users_group_active,
-            owner=user_group.user.username,
-        )
-        if with_members:
-            members = []
-            for user in user_group.members:
-                user = user.user
-                members.append(user.get_api_data())
-            data['members'] = members
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class RepositoryField(Base, BaseModel):
-    __tablename__ = 'repositories_fields'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
-
-    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-    field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None))
-    field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
-    field_type = Column("field_type", String(256), nullable=False, unique=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repository = relationship('Repository')
-
-    @property
-    def field_key_prefixed(self):
-        return 'ex_%s' % self.field_key
-
-    @classmethod
-    def un_prefix_key(cls, key):
-        if key.startswith(cls.PREFIX):
-            return key[len(cls.PREFIX):]
-        return key
-
-    @classmethod
-    def get_by_key_name(cls, key, repo):
-        row = cls.query() \
-                .filter(cls.repository == repo) \
-                .filter(cls.field_key == key).scalar()
-        return row
-
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    _landing_revision = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
-    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-    extra_fields = relationship('RepositoryField',
-                                cascade="all, delete, delete-orphan")
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
-
-    @hybrid_property
-    def landing_rev(self):
-        # always should return [rev_type, rev]
-        if self._landing_revision:
-            _rev_info = self._landing_revision.split(':')
-            if len(_rev_info) < 2:
-                _rev_info.insert(0, 'rev')
-            return [_rev_info[0], _rev_info[1]]
-        return [None, None]
-
-    @landing_rev.setter
-    def landing_rev(self, val):
-        if ':' not in val:
-            raise ValueError('value must be delimited with `:` and consist '
-                             'of <rev_type>:<rev>, got %s instead' % val)
-        self._landing_revision = val
-
-    @hybrid_property
-    def locked(self):
-        # always should return [user_id, timelocked]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
-    @hybrid_property
-    def changeset_cache(self):
-        from kallithea.lib.vcs.backends.base import EmptyChangeset
-        dummy = EmptyChangeset().__json__()
-        if not self._changeset_cache:
-            return dummy
-        try:
-            return json.loads(self._changeset_cache)
-        except TypeError:
-            return dummy
-
-    @changeset_cache.setter
-    def changeset_cache(self, val):
-        try:
-            self._changeset_cache = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def normalize_repo_name(cls, repo_name):
-        """
-        Normalizes an OS-specific repo_name to the format stored internally in the
-        database, using URL_SEP
-
-        :param cls:
-        :param repo_name:
-        """
-        return cls.url_sep().join(repo_name.split(os.sep))
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name, self.repo_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*map(safe_unicode, p))
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        returns the new full repository name based on the assigned group and the new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None,
-            enable_statistics=repo.enable_statistics,
-            enable_locking=repo.enable_locking,
-            enable_downloads=repo.enable_downloads,
-            last_changeset=repo.changeset_cache,
-            locked_by=User.get(self.locked[0]).get_api_data() \
-                if self.locked[0] else None,
-            locked_date=time_to_datetime(self.locked[1]) \
-                if self.locked[1] else None
-        )
-        rc_config = Setting.get_app_settings()
-        repository_fields = str2bool(rc_config.get('repository_fields'))
-        if repository_fields:
-            for f in self.extra_fields:
-                data[f.field_key_prefixed] = f.field_value
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id, lock_time=None):
-        if not lock_time:
-            lock_time = time.time()
-        repo.locked = [user_id, lock_time]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def getlock(cls, repo):
-        return repo.locked
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    def clone_url(self, **override):
-        import kallithea.lib.helpers as h
-        from urlparse import urlparse
-        import urllib
-        parsed_url = urlparse(h.canonical_url('home'))
-        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
-        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
-        args = {
-           'user': '',
-           'pass': '',
-           'scheme': parsed_url.scheme,
-           'netloc': parsed_url.netloc,
-           'prefix': decoded_path,
-           'path': self.repo_name
-        }
-
-        args.update(override)
-        return default_clone_uri % args
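
A minimal sketch of the substitution performed above, with hand-picked stand-ins for what urlparse(h.canonical_url('home')) would yield; the host, prefix and override values are invented for illustration only.

    # Hypothetical values; only the template substitution itself is shown.
    default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
    args = {
        'user': '',
        'pass': '',
        'scheme': 'https',
        'netloc': 'example.com',
        'prefix': '/',
        'path': 'group/myrepo',
    }
    args.update({'user': 'john', 'pass': ':secret@'})  # what **override could supply
    print(default_clone_uri % args)
    # https://john:secret@example.com/group/myrepo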
-
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns landing changeset, or if that doesn't exist returns the tip
-        """
-        _rev_type, _rev = self.landing_rev
-        cs = self.get_changeset(_rev)
-        if isinstance(cs, EmptyChangeset):
-            return self.get_changeset()
-        return cs
-
-    def update_changeset_cache(self, cs_cache=None):
-        """
-        Update cache of last changeset for repository, keys should be::
-
-            short_id
-            raw_id
-            revision
-            message
-            date
-            author
-
-        :param cs_cache:
-        """
-        from kallithea.lib.vcs.backends.base import BaseChangeset
-        if cs_cache is None:
-            cs_cache = EmptyChangeset()
-            # use no-cache version here
-            scm_repo = self.scm_instance_no_cache()
-            if scm_repo:
-                cs_cache = scm_repo.get_changeset()
-
-        if isinstance(cs_cache, BaseChangeset):
-            cs_cache = cs_cache.__json__()
-
-        if (cs_cache != self.changeset_cache or not self.changeset_cache):
-            _default = datetime.datetime.fromtimestamp(0)
-            last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
-            self.changeset_cache = cs_cache
-            Session().add(self)
-            Session().commit()
-        else:
-            log.debug('Skipping repo:%s already with latest changes',
-                      self.repo_name)
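
For reference, the cached value is a plain dict with the keys listed in the docstring (presumably as produced by BaseChangeset.__json__()); an invented example of what self.changeset_cache could hold after an update:

    import datetime

    # Hypothetical cached value; all field contents are made up.
    cs_cache = {
        'short_id': 'abcdef012345',
        'raw_id': 'abcdef0123456789abcdef0123456789abcdef01',
        'revision': 42,
        'message': u'fix off-by-one in pager',
        'date': datetime.datetime(2014, 7, 1, 12, 0),
        'author': u'John Doe <john@example.com>',
    }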
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = collections.defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have an open new pull request without a status?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    def _repo_size(self):
-        from kallithea.lib import helpers as h
-        log.debug('calculating repository size...')
-        return h.format_byte_size(self.scm_instance.size)
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    def set_invalidate(self):
-        """
-        Mark caches of this repo as invalid.
-        """
-        CacheInvalidation.set_invalidate(self.repo_name)
-
-    def scm_instance_no_cache(self):
-        return self.__get_instance()
-
-    @property
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, valid_cache_keys=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-
-        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
-        if not valid:
-            log.debug('Cache for %s invalidated, getting new object', rn)
-            region_invalidate(_c, None, rn)
-        else:
-            log.debug('Getting obj for %s from cache', rn)
-        return _c(rn)
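
The method above couples a Beaker cache region with the database-backed invalidation records. A stripped-down sketch of just the Beaker part, using the same 'long_term' region name; the memory backend, expiry and function are assumptions for the sketch (Kallithea configures regions from its .ini file).

    from beaker.cache import cache_regions, cache_region, region_invalidate

    # Configure a region by hand; backend and expiry are illustrative only.
    cache_regions.update({'long_term': {'type': 'memory', 'expire': 3600}})

    @cache_region('long_term')
    def _expensive(repo_name):
        print('computing for %s' % repo_name)
        return repo_name.upper()

    print(_expensive('myrepo'))            # computed and cached
    print(_expensive('myrepo'))            # served from the region cache
    region_invalidate(_expensive, None, 'myrepo')
    print(_expensive('myrepo'))            # recomputed after invalidation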
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository from %s',
-                      alias, repo_full_path)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the db and not on the '
-                      'filesystem. Run rescan repositories with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-    def __json__(self):
-        return dict(landing_rev = self.landing_rev)
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-    user = relationship('User')
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                      self.group_name)
-
-    @classmethod
-    def groups_choices(cls, groups=None, show_empty_group=True):
-        from webhelpers.html import literal as _literal
-        if not groups:
-            groups = cls.query().all()
-
-        repo_groups = []
-        if show_empty_group:
-            repo_groups = [('-1', u'-- %s --' % _('top level'))]
-        sep = ' &raquo; '
-        _name = lambda k: _literal(sep.join(k))
-
-        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
-                              for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def _recursive_objects(self, include_repos=True):
-        all_ = []
-
-        def _get_members(root_gr):
-            if include_repos:
-                for r in root_gr.repositories:
-                    all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively returns all groups, with repositories in those groups
-        """
-        return self._recursive_objects()
-
-    def recursive_groups(self):
-        """
-        Returns all children groups for this group including children of children
-        """
-        return self._recursive_objects(include_repos=False)
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-    def get_api_data(self):
-        """
-        Common function for generating api data
-
-        """
-        group = self
-        data = dict(
-            group_id=group.group_id,
-            group_name=group.group_name,
-            group_description=group.group_description,
-            parent_group=group.parent_group.group_name if group.parent_group else None,
-            repositories=[x.repo_name for x in group.repositories],
-            owner=group.user.username
-        )
-        return data
-
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PERMS = [
-        ('hg.admin', _('Kallithea Administrator')),
-
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository group no access')),
-        ('group.read', _('Repository group read access')),
-        ('group.write', _('Repository group write access')),
-        ('group.admin', _('Repository group admin access')),
-
-        ('usergroup.none', _('User group no access')),
-        ('usergroup.read', _('User group read access')),
-        ('usergroup.write', _('User group write access')),
-        ('usergroup.admin', _('User group admin access')),
-
-        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
-        ('hg.repogroup.create.true', _('Repository Group creation enabled')),
-
-        ('hg.usergroup.create.false', _('User Group creation disabled')),
-        ('hg.usergroup.create.true', _('User Group creation enabled')),
-
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-
-        ('hg.register.none', _('Registration disabled')),
-        ('hg.register.manual_activate', _('User Registration with manual account activation')),
-        ('hg.register.auto_activate', _('User Registration with automatic account activation')),
-
-        ('hg.extern_activate.manual', _('Manual activation of external account')),
-        ('hg.extern_activate.auto', _('Automatic activation of external account')),
-
-    ]
-
-    # definition of system default permissions for the DEFAULT user
-    DEFAULT_USER_PERMISSIONS = [
-        'repository.read',
-        'group.read',
-        'usergroup.read',
-        'hg.create.repository',
-        'hg.fork.repository',
-        'hg.register.manual_activate',
-        'hg.extern_activate.auto',
-    ]
-
-    # Weight defines which permissions are more important.
-    # The higher the number, the more important the permission.
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'usergroup.none': 0,
-        'usergroup.read': 1,
-        'usergroup.write': 3,
-        'usergroup.admin': 4,
-        'hg.repogroup.create.false': 0,
-        'hg.repogroup.create.true': 1,
-
-        'hg.usergroup.create.false': 0,
-        'hg.usergroup.create.true': 1,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1
-    }
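
A hedged sketch of how a weight table like this can be used to keep only the strongest of several granted permissions; the helper name and the input data are invented for illustration.

    # Subset of the weights above, plus a hypothetical helper.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,
    }

    def strongest(perms, weights=PERM_WEIGHTS):
        # pick the permission with the highest weight
        return max(perms, key=lambda p: weights.get(p, -1))

    print(strongest(['repository.read', 'repository.write']))  # repository.write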
-
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_user_group_perms(cls, default_user_id):
-        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
-         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
-         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserUserGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.repository)
-
-
-class UserUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    user_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, user_group, permission):
-        n = cls()
-        n.user = user
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.user_group)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.permission)
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_group_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
-        CheckConstraint('target_user_group_id != user_group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
-    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, target_user_group, user_group, permission):
-        n = cls()
-        n.target_user_group = target_user_group
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    # cache_id, not used
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    # cache_key as created by _get_cache_key
-    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # cache_args is a repo_name
-    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    # instance sets cache_active True when it is caching,
-    # other instances set cache_active to False to indicate that this cache is invalid
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, repo_name=''):
-        self.cache_key = cache_key
-        self.cache_args = repo_name
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (self.__class__.__name__,
-                            self.cache_id, self.cache_key, self.cache_active)
-
-    def _cache_key_partition(self):
-        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
-        return prefix, repo_name, suffix
-
-    def get_prefix(self):
-        """
-        get prefix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[0]
-
-    def get_suffix(self):
-        """
-        get suffix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[2]
-
-    @classmethod
-    def clear_cache(cls):
-        """
-        Delete all cache keys from database.
-        Should only be run when all instances are down and all entries thus stale.
-        """
-        cls.query().delete()
-        Session().commit()
-
-    @classmethod
-    def _get_cache_key(cls, key):
-        """
-        Wrapper for generating a unique cache key for this instance and "key".
-        key must / will start with a repo_name, which will be stored in .cache_args.
-        """
-        import kallithea
-        prefix = kallithea.CONFIG.get('instance_id', '')
-        return "%s%s" % (prefix, key)
-
-    @classmethod
-    def set_invalidate(cls, repo_name, delete=False):
-        """
-        Mark all caches of a repo as invalid in the database.
-        """
-        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-
-        try:
-            for inv_obj in inv_objs:
-                log.debug('marking %s key for invalidation based on repo_name=%s',
-                          inv_obj, safe_str(repo_name))
-                if delete:
-                    Session().delete(inv_obj)
-                else:
-                    inv_obj.cache_active = False
-                    Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
-        """
-        Mark this cache key as active and currently cached.
-        Return True if the existing cache registration still was valid.
-        Return False to indicate that it had been invalidated and caches should be refreshed.
-        """
-
-        key = (repo_name + '_' + kind) if kind else repo_name
-        cache_key = cls._get_cache_key(key)
-
-        if valid_cache_keys and cache_key in valid_cache_keys:
-            return True
-
-        try:
-            inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
-            if not inv_obj:
-                inv_obj = CacheInvalidation(cache_key, repo_name)
-            was_valid = inv_obj.cache_active
-            inv_obj.cache_active = True
-            Session().add(inv_obj)
-            Session().commit()
-            return was_valid
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-            return False
-
-    @classmethod
-    def get_valid_cache_keys(cls):
-        """
-        Return an opaque object with information about which caches are still valid
-        and can be used without checking for invalidation.
-        """
-        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
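
To make the invalidation protocol above concrete, here is a self-contained toy version that replaces the database table with a dict; only the control flow (test_and_set_valid reporting whether the registration was still active) is taken from the code, the storage and the prefix matching are simplifications.

    # Toy stand-in for the cache_invalidation table: cache_key -> cache_active.
    _records = {}

    def set_invalidate(repo_name):
        # mark every key belonging to this repo as no longer valid
        for key in _records:
            if key.startswith(repo_name):
                _records[key] = False

    def test_and_set_valid(cache_key):
        was_valid = _records.get(cache_key, False)
        _records[cache_key] = True          # this instance is caching again
        return was_valid

    print(test_and_set_valid('myrepo'))     # False -> cache must be (re)built
    print(test_and_set_valid('myrepo'))     # True  -> cached object can be reused
    set_invalidate('myrepo')
    print(test_and_set_valid('myrepo'))     # False again after invalidation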
-
-
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns users associated with this ChangesetComment, i.e. those
-        who actually commented
-
-        :param cls:
-        :param revision:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    # values for .status
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
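
The hybrid property stores the revision list as a single colon-separated string; a quick illustration of the round trip (plain Python, no ORM involved):

    revisions = ['aaa111', 'bbb222', 'ccc333']
    stored = ':'.join(revisions)           # what ends up in the 'revisions' column
    print(stored)                          # aaa111:bbb222:ccc333
    print(stored.split(':') == revisions)  # True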
-
-    @property
-    def org_ref_parts(self):
-        return self.org_ref.split(':')
-
-    @property
-    def other_ref_parts(self):
-        return self.other_ref.split(':')
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    @property
-    def last_review_status(self):
-        return self.statuses[-1].status if self.statuses else ''
-
-    def __json__(self):
-        return dict(
-            revisions=self.revisions
-        )
-
-
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class Gist(Base, BaseModel):
-    __tablename__ = 'gists'
-    __table_args__ = (
-        Index('g_gist_access_id_idx', 'gist_access_id'),
-        Index('g_created_on_idx', 'created_on'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    GIST_PUBLIC = u'public'
-    GIST_PRIVATE = u'private'
-
-    gist_id = Column('gist_id', Integer(), primary_key=True)
-    gist_access_id = Column('gist_access_id', Unicode(250))
-    gist_description = Column('gist_description', UnicodeText(1024))
-    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
-    gist_expires = Column('gist_expires', Float(53), nullable=False)
-    gist_type = Column('gist_type', Unicode(128), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    owner = relationship('User')
-
-    @classmethod
-    def get_or_404(cls, id_):
-        res = cls.query().filter(cls.gist_access_id == id_).scalar()
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def get_by_access_id(cls, gist_access_id):
-        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
-
-    def gist_url(self):
-        import kallithea
-        alias_url = kallithea.CONFIG.get('gist_alias_url')
-        if alias_url:
-            return alias_url.replace('{gistid}', self.gist_access_id)
-
-        import kallithea.lib.helpers as h
-        return h.canonical_url('gist', gist_id=self.gist_access_id)
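
When gist_alias_url is configured, the '{gistid}' placeholder is simply substituted; for example (config value and access id invented):

    alias_url = 'https://gist.example.com/{gistid}'   # hypothetical gist_alias_url
    gist_access_id = 'a1b2c3'
    print(alias_url.replace('{gistid}', gist_access_id))
    # https://gist.example.com/a1b2c3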
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns base path where all gists are stored
-
-        :param cls:
-        """
-        from kallithea.model.gist import GIST_STORE_LOC
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == URL_SEP)
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
-
-    def get_api_data(self):
-        """
-        Common function for generating gist related data for API
-        """
-        gist = self
-        data = dict(
-            gist_id=gist.gist_id,
-            type=gist.gist_type,
-            access_id=gist.gist_access_id,
-            description=gist.gist_description,
-            url=gist.gist_url(),
-            expires=gist.gist_expires,
-            created_on=gist.created_on,
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-        )
-        data.update(self.get_api_data())
-        return data
-    ## SCM functions
-
-    @property
-    def scm_instance(self):
-        from kallithea.lib.vcs import get_repo
-        base_path = self.base_path()
-        return get_repo(os.path.join(*map(safe_str,
-                                          [base_path, self.gist_access_id])))
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
--- a/kallithea/lib/dbmigrate/schema/db_2_2_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2448 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_2_2_0
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import time
-import logging
-import datetime
-import traceback
-import hashlib
-import collections
-import functools
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_prefix, time_to_datetime, aslist, Optional, safe_int, \
-    get_clone_url
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-from kallithea import DB_PREFIX
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return dict with keys and values corresponding
-        to this model data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
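
A small sketch of the __json__ merging behaviour in isolation, using a plain object rather than a mapped model; the class and attribute names are invented.

    class Demo(object):
        # 'name' plays the role of a column attribute
        def __init__(self, name):
            self.name = name

        def __json__(self):
            # extra computed fields that get_dict() merges into its result
            return dict(name_upper=self.name.upper())

    obj = Demo(u'demo')
    d = {'name': obj.name}          # what the column loop would produce
    d.update(obj.__json__())        # merged in because __json__ is callable
    print(d)                        # {'name': u'demo', 'name_upper': u'DEMO'}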
-
-    def get_appstruct(self):
-        """return list of (key, value) tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        # deprecated and left for backward compatibility
-        return cls.get_all()
-
-    @classmethod
-    def get_all(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            try:
-                return safe_str(self.__unicode__())
-            except UnicodeDecodeError:
-                pass
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    SETTINGS_TYPES = {
-        'str': safe_str,
-        'int': safe_int,
-        'unicode': safe_unicode,
-        'bool': str2bool,
-        'list': functools.partial(aslist, sep=',')
-    }
-    DEFAULT_UPDATE_URL = ''
-
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(4096, convert_unicode=False), nullable=True, unique=None, default=None)
-    _app_settings_type = Column("app_settings_type", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-
-    def __init__(self, key='', val='', type='unicode'):
-        self.app_settings_name = key
-        self.app_settings_value = val
-        self.app_settings_type = type
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        _type = self.app_settings_type
-        converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
-        return converter(v)
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
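
Values are stored as unicode and converted back on read according to app_settings_type; a standalone illustration of that conversion table, with the real converters (safe_str, safe_int, str2bool, ...) replaced by simplified stand-ins.

    # Simplified conversion table; converters here are assumptions for the sketch.
    SETTINGS_TYPES = {
        'int': int,
        'bool': lambda v: v.strip().lower() in ('true', 'yes', 'on', '1'),
        'unicode': lambda v: v,
    }

    def read_setting(raw_value, type_name):
        converter = SETTINGS_TYPES.get(type_name) or SETTINGS_TYPES['unicode']
        return converter(raw_value)

    print(read_setting(u'25', 'int'))     # 25
    print(read_setting(u'true', 'bool'))  # True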
-
-    @hybrid_property
-    def app_settings_type(self):
-        return self._app_settings_type
-
-    @app_settings_type.setter
-    def app_settings_type(self, val):
-        if val not in self.SETTINGS_TYPES:
-            raise Exception('type must be one of %s got %s'
-                            % (self.SETTINGS_TYPES.keys(), val))
-        self._app_settings_type = val
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value, self.app_settings_type
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key, val='', type='unicode'):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key, val, type)
-        return res
-
-    @classmethod
-    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
-        """
-        Creates or updates a Kallithea setting. If an update is triggered, only
-        parameters that are explicitly set are changed; parameters passed as
-        Optional instances are skipped.
-
-        :param key:
-        :param val:
-        :param type:
-        :return:
-        """
-        res = cls.get_by_name(key)
-        if not res:
-            val = Optional.extract(val)
-            type = Optional.extract(type)
-            res = cls(key, val, type)
-        else:
-            res.app_settings_name = key
-            if not isinstance(val, Optional):
-                # update if set
-                res.app_settings_value = val
-            if not isinstance(type, Optional):
-                # update if set
-                res.app_settings_type = type
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_auth_plugins(cls, cache=False):
-        auth_plugins = cls.get_by_name("auth_plugins").app_settings_value
-        return auth_plugins
-
-    @classmethod
-    def get_auth_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('auth_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_server_info(cls):
-        import pkg_resources
-        import platform
-        import kallithea
-        from kallithea.lib.utils import check_git_version
-        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
-        info = {
-            'modules': sorted(mods, key=lambda k: k[0].lower()),
-            'py_version': platform.python_version(),
-            'platform': safe_unicode(platform.platform()),
-            'kallithea_version': kallithea.__version__,
-            'git_version': safe_unicode(check_git_version()),
-            'git_path': kallithea.CONFIG.get('git_path')
-        }
-        return info
-
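# Editor's note (not part of the changeset): the deleted Setting class above
# stores every value as a string plus a type name and converts on read via
# SETTINGS_TYPES. A minimal standalone sketch of that conversion follows; the
# str2bool/aslist helpers are simplified stand-ins for the kallithea.lib ones.
import functools

def str2bool(v):
    return str(v).strip().lower() in ('true', 'yes', 'on', 'y', '1')

def aslist(v, sep=','):
    return [part.strip() for part in v.split(sep) if part.strip()]

SETTINGS_TYPES = {
    'str': str,
    'int': int,
    'unicode': lambda v: v,  # values are already text in this sketch
    'bool': str2bool,
    'list': functools.partial(aslist, sep=','),
}

def convert_setting(raw_value, type_name):
    # unknown type names fall back to 'unicode', as in app_settings_value above
    converter = SETTINGS_TYPES.get(type_name) or SETTINGS_TYPES['unicode']
    return converter(raw_value)

assert convert_setting(u'8080', 'int') == 8080
assert convert_setting(u'on', 'bool') is True
assert convert_setting(u'a, b,c', 'list') == ['a', 'b', 'c']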
-
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    # def __init__(self, section='', key='', value=''):
-    #     self.ui_section = section
-    #     self.ui_key = key
-    #     self.ui_value = value
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
-                                    self.ui_key, self.ui_value)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    DEFAULT_USER = 'default'
-    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
-
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    extern_type = Column("extern_type", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    extern_name = Column("extern_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    #_user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications assigned to this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    #extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-    #extra API keys
-    user_api_keys = relationship('UserApiKeys', cascade='all')
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def api_keys(self):
-        other = UserApiKeys.query().filter(UserApiKeys.user==self).all()
-        return [self.api_key] + [x.api_key for x in other]
-
-    @property
-    def ip_addresses(self):
-        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
-        return [x.ip_addr for x in ret]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    @property
-    def AuthUser(self):
-        """
-        Returns instance of AuthUser for this user
-        """
-        from kallithea.lib.auth import AuthUser
-        return AuthUser(user_id=self.user_id, api_key=self.api_key,
-                        username=self.username)
-
-    @hybrid_property
-    def user_data(self):
-        if not self._user_data:
-            return {}
-
-        try:
-            return json.loads(self._user_data)
-        except TypeError:
-            return {}
-
-    @user_data.setter
-    def user_data(self, val):
-        try:
-            self._user_data = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False, fallback=True):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        res = q.scalar()
-
-        if fallback and not res:
-            #fallback to additional keys
-            _res = UserApiKeys.query() \
-                .filter(UserApiKeys.api_key == api_key) \
-                .filter(or_(UserApiKeys.expires == -1,
-                            UserApiKeys.expires >= time.time())) \
-                .first()
-            if _res:
-                res = _res.user
-        return res
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    @classmethod
-    def get_from_cs_author(cls, author):
-        """
-        Tries to get a User object out of a commit author string
-
-        :param author:
-        """
-        from kallithea.lib.helpers import email, author_name
-        # If the author string contains a valid email, see if it belongs to a known user
-        _email = email(author)
-        if _email:
-            user = cls.get_by_email(_email, case_insensitive=True)
-            if user:
-                return user
-        # Maybe we can match by username?
-        _author = author_name(author)
-        user = cls.get_by_username(_author, case_insensitive=True)
-        if user:
-            return user
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    @classmethod
-    def get_first_admin(cls):
-        user = User.query().filter(User.admin == True).first()
-        if user is None:
-            raise Exception('Missing administrative account!')
-        return user
-
-    @classmethod
-    def get_default_user(cls, cache=False):
-        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
-        if user is None:
-            raise Exception('Missing default account!')
-        return user
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            api_keys=user.api_keys,
-            active=user.active,
-            admin=user.admin,
-            extern_type=user.extern_type,
-            extern_name=user.extern_name,
-            last_login=user.last_login,
-            ip_addresses=user.ip_addresses
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
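# Editor's note (not part of the changeset): DEFAULT_GRAVATAR_URL above is a
# format template with {md5email} and {size} placeholders. A hypothetical
# helper (not the actual Kallithea helper) showing how such a template could
# be filled in:
import hashlib

GRAVATAR_URL_TMPL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

def gravatar_url(email, size=30):
    md5email = hashlib.md5(email.strip().lower().encode('utf8')).hexdigest()
    return GRAVATAR_URL_TMPL.format(md5email=md5email, size=size)

assert gravatar_url('Someone@Example.com ').endswith('?d=identicon&s=30')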
-
-class UserApiKeys(Base, BaseModel):
-    __tablename__ = 'user_api_keys'
-    __table_args__ = (
-        Index('uak_api_key_idx', 'api_key'),
-        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
-        UniqueConstraint('api_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False), nullable=False, unique=True)
-    description = Column('description', UnicodeText(1024))
-    expires = Column('expires', Float(53), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    user = relationship('User', lazy='joined')
-
-    @property
-    def expired(self):
-        if self.expires == -1:
-            return False
-        return time.time() > self.expires
-
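# Editor's note (not part of the changeset): a standalone sketch of the expiry
# convention used by UserApiKeys.expired above; expires == -1 means the key
# never expires, otherwise it is a Unix timestamp compared against "now".
import time

def api_key_expired(expires, now=None):
    if expires == -1:
        return False
    if now is None:
        now = time.time()
    return now > expires

assert api_key_expired(-1) is False
assert api_key_expired(0.0, now=1.0) is True
assert api_key_expired(10.0, now=1.0) is False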
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check that this email is not already used as a user's main email
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is present in user table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserIpMap(Base, BaseModel):
-    __tablename__ = 'user_ip_map'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'ip_addr'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    ip_addr = Column("ip_addr", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    user = relationship('User', lazy='joined')
-
-    @classmethod
-    def _get_ip_range(cls, ip_addr):
-        from kallithea.lib import ipaddr
-        net = ipaddr.IPNetwork(address=ip_addr)
-        return [str(net.network), str(net.broadcast)]
-
-    def __json__(self):
-        return dict(
-          ip_addr=self.ip_addr,
-          ip_range=self._get_ip_range(self.ip_addr)
-        )
-
-    def __unicode__(self):
-        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
-                                            self.user_id, self.ip_addr)
-
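# Editor's note (not part of the changeset): a rough standalone equivalent of
# UserIpMap._get_ip_range above, using the stdlib ipaddress module (Python 3)
# in place of the bundled kallithea.lib.ipaddr helper.
import ipaddress

def get_ip_range(ip_addr):
    net = ipaddress.ip_network(u'%s' % ip_addr, strict=False)
    return [str(net.network_address), str(net.broadcast_address)]

assert get_ip_range(u'192.168.1.0/24') == ['192.168.1.0', '192.168.1.255']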
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.repository_name,
-                                      self.action)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False), nullable=False, unique=True, default=None)
-    user_group_description = Column("user_group_description", String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    user_user_group_to_perm = relationship('UserUserGroupToPerm ', cascade='all')
-    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
-
-    user = relationship('User')
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.users_group_id,
-                                      self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, user_group_id, cache=False):
-        user_group = cls.query()
-        if cache:
-            user_group = user_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % user_group_id))
-        return user_group.get(user_group_id)
-
-    def get_api_data(self, with_members=True):
-        user_group = self
-
-        data = dict(
-            users_group_id=user_group.users_group_id,
-            group_name=user_group.users_group_name,
-            group_description=user_group.user_group_description,
-            active=user_group.users_group_active,
-            owner=user_group.user.username,
-        )
-        if with_members:
-            members = []
-            for user in user_group.members:
-                user = user.user
-                members.append(user.get_api_data())
-            data['members'] = members
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class RepositoryField(Base, BaseModel):
-    __tablename__ = 'repositories_fields'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'field_key'),  # only one field per key per repository
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
-
-    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-    field_key = Column("field_key", String(250, convert_unicode=False))
-    field_label = Column("field_label", String(1024, convert_unicode=False), nullable=False)
-    field_value = Column("field_value", String(10000, convert_unicode=False), nullable=False)
-    field_desc = Column("field_desc", String(1024, convert_unicode=False), nullable=False)
-    field_type = Column("field_type", String(256), nullable=False, unique=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repository = relationship('Repository')
-
-    @property
-    def field_key_prefixed(self):
-        return 'ex_%s' % self.field_key
-
-    @classmethod
-    def un_prefix_key(cls, key):
-        if key.startswith(cls.PREFIX):
-            return key[len(cls.PREFIX):]
-        return key
-
-    @classmethod
-    def get_by_key_name(cls, key, repo):
-        row = cls.query() \
-                .filter(cls.repository == repo) \
-                .filter(cls.field_key == key).scalar()
-        return row
-
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
-    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
-
-    STATE_CREATED = 'repo_state_created'
-    STATE_PENDING = 'repo_state_pending'
-    STATE_ERROR = 'repo_state_error'
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False), nullable=False, unique=True, default=None)
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    _landing_revision = Column("landing_revision", String(255, convert_unicode=False), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
-    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-    extra_fields = relationship('RepositoryField',
-                                cascade="all, delete, delete-orphan")
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
-
-    @hybrid_property
-    def landing_rev(self):
-        # should always return [rev_type, rev]
-        if self._landing_revision:
-            _rev_info = self._landing_revision.split(':')
-            if len(_rev_info) < 2:
-                _rev_info.insert(0, 'rev')
-            return [_rev_info[0], _rev_info[1]]
-        return [None, None]
-
-    @landing_rev.setter
-    def landing_rev(self, val):
-        if ':' not in val:
-            raise ValueError('value must be delimited with `:` and consist '
-                             'of <rev_type>:<rev>, got %s instead' % val)
-        self._landing_revision = val
-
-    @hybrid_property
-    def locked(self):
-        # should always return [user_id, lock_time]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
-
-    @hybrid_property
-    def changeset_cache(self):
-        from kallithea.lib.vcs.backends.base import EmptyChangeset
-        dummy = EmptyChangeset().__json__()
-        if not self._changeset_cache:
-            return dummy
-        try:
-            return json.loads(self._changeset_cache)
-        except TypeError:
-            return dummy
-
-    @changeset_cache.setter
-    def changeset_cache(self, val):
-        try:
-            self._changeset_cache = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def normalize_repo_name(cls, repo_name):
-        """
-        Normalizes an OS-specific repo_name to the format stored internally in
-        the database, using URL_SEP
-
-        :param cls:
-        :param repo_name:
-        """
-        return cls.url_sep().join(repo_name.split(os.sep))
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name, self.repo_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*map(safe_unicode, p))
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        returns the new full repository name based on the assigned group and the
-        new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        returns True if given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None,
-            enable_statistics=repo.enable_statistics,
-            enable_locking=repo.enable_locking,
-            enable_downloads=repo.enable_downloads,
-            last_changeset=repo.changeset_cache,
-            locked_by=User.get(self.locked[0]).get_api_data() \
-                if self.locked[0] else None,
-            locked_date=time_to_datetime(self.locked[1]) \
-                if self.locked[1] else None
-        )
-        rc_config = Setting.get_app_settings()
-        repository_fields = str2bool(rc_config.get('repository_fields'))
-        if repository_fields:
-            for f in self.extra_fields:
-                data[f.field_key_prefixed] = f.field_value
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id, lock_time=None):
-        if not lock_time:
-            lock_time = time.time()
-        repo.locked = [user_id, lock_time]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def getlock(cls, repo):
-        return repo.locked
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    def clone_url(self, **override):
-        import kallithea.lib.helpers as h
-        qualified_home_url = h.canonical_url('home')
-
-        uri_tmpl = None
-        if 'uri_tmpl' in override:
-            uri_tmpl = override['uri_tmpl']
-            del override['uri_tmpl']
-
-        # the clone URI template was not overridden via **override
-        if not uri_tmpl:
-            uri_tmpl = self.DEFAULT_CLONE_URI
-            try:
-                from pylons import tmpl_context as c
-                uri_tmpl = c.clone_uri_tmpl
-            except Exception:
-                # this may be called outside of the request context,
-                # i.e. without tmpl_context set up
-                pass
-
-        return get_clone_url(uri_tmpl=uri_tmpl,
-                             qualified_home_url=qualified_home_url,
-                             repo_name=self.repo_name,
-                             repo_id=self.repo_id, **override)
-
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns the landing changeset, or the tip if that doesn't exist
-        """
-        _rev_type, _rev = self.landing_rev
-        cs = self.get_changeset(_rev)
-        if isinstance(cs, EmptyChangeset):
-            return self.get_changeset()
-        return cs
-
-    def update_changeset_cache(self, cs_cache=None):
-        """
-        Update the cached last changeset for this repository; keys should be::
-
-            short_id
-            raw_id
-            revision
-            message
-            date
-            author
-
-        :param cs_cache:
-        """
-        from kallithea.lib.vcs.backends.base import BaseChangeset
-        if cs_cache is None:
-            cs_cache = EmptyChangeset()
-            # use no-cache version here
-            scm_repo = self.scm_instance_no_cache()
-            if scm_repo:
-                cs_cache = scm_repo.get_changeset()
-
-        if isinstance(cs_cache, BaseChangeset):
-            cs_cache = cs_cache.__json__()
-
-        if (cs_cache != self.changeset_cache or not self.changeset_cache):
-            _default = datetime.datetime.fromtimestamp(0)
-            last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
-            self.changeset_cache = cs_cache
-            Session().add(self)
-            Session().commit()
-        else:
-            log.debug('Skipping repo:%s already with latest changes',
-                      self.repo_name)
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = collections.defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have an open pull request without a status yet?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    def _repo_size(self):
-        from kallithea.lib import helpers as h
-        log.debug('calculating repository size...')
-        return h.format_byte_size(self.scm_instance.size)
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    def set_invalidate(self):
-        """
-        Mark caches of this repo as invalid.
-        """
-        CacheInvalidation.set_invalidate(self.repo_name)
-
-    def scm_instance_no_cache(self):
-        return self.__get_instance()
-
-    @property
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, valid_cache_keys=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-
-        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
-        if not valid:
-            log.debug('Cache for %s invalidated, getting new object', rn)
-            region_invalidate(_c, None, rn)
-        else:
-            log.debug('Getting obj for %s from cache', rn)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository from %s',
-                      alias, repo_full_path)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the database but not on the '
-                      'filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-    def __json__(self):
-        return dict(landing_rev = self.landing_rev)
-
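# Editor's note (not part of the changeset): Repository._landing_revision above
# is stored as a "<rev_type>:<rev>" string. A standalone sketch mirroring the
# landing_rev getter's decoding rules:
def decode_landing_rev(landing_revision):
    # always return [rev_type, rev]; bare revisions default to type 'rev'
    if landing_revision:
        rev_info = landing_revision.split(':')
        if len(rev_info) < 2:
            rev_info.insert(0, 'rev')
        return [rev_info[0], rev_info[1]]
    return [None, None]

assert decode_landing_rev('branch:default') == ['branch', 'default']
assert decode_landing_rev('tip') == ['rev', 'tip']
assert decode_landing_rev(None) == [None, None]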
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    SEP = ' &raquo; '
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-    user = relationship('User')
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                      self.group_name)
-
-    @classmethod
-    def _generate_choice(cls, repo_group):
-        from webhelpers.html import literal as _literal
-        _name = lambda k: _literal(cls.SEP.join(k))
-        return repo_group.group_id, _name(repo_group.full_path_splitted)
-
-    @classmethod
-    def groups_choices(cls, groups=None, show_empty_group=True):
-        if not groups:
-            groups = cls.query().all()
-
-        repo_groups = []
-        if show_empty_group:
-            repo_groups = [('-1', u'-- %s --' % _('top level'))]
-
-        repo_groups.extend([cls._generate_choice(x) for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(cls.SEP)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def _recursive_objects(self, include_repos=True):
-        all_ = []
-
-        def _get_members(root_gr):
-            if include_repos:
-                for r in root_gr.repositories:
-                    all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively returns all groups, with the repositories in those groups
-        """
-        return self._recursive_objects()
-
-    def recursive_groups(self):
-        """
-        Returns all child groups of this group, including children of children
-        """
-        return self._recursive_objects(include_repos=False)
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-    def get_api_data(self):
-        """
-        Common function for generating api data
-
-        """
-        group = self
-        data = dict(
-            group_id=group.group_id,
-            group_name=group.group_name,
-            group_description=group.group_description,
-            parent_group=group.parent_group.group_name if group.parent_group else None,
-            repositories=[x.repo_name for x in group.repositories],
-            owner=group.user.username
-        )
-        return data
-
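# Editor's note (not part of the changeset): RepoGroup.group_name above stores
# the full path of the group, joined with URL_SEP (assumed to be '/', as used
# throughout this module). A standalone sketch of how name and
# full_path_splitted derive from it:
URL_SEP = '/'

def split_group_name(group_name):
    parts = group_name.split(URL_SEP)
    return {'name': parts[-1], 'full_path_splitted': parts}

assert split_group_name('projects/web/frontend') == {
    'name': 'frontend',
    'full_path_splitted': ['projects', 'web', 'frontend'],
}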
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PERMS = [
-        ('hg.admin', _('Kallithea Administrator')),
-
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository group no access')),
-        ('group.read', _('Repository group read access')),
-        ('group.write', _('Repository group write access')),
-        ('group.admin', _('Repository group admin access')),
-
-        ('usergroup.none', _('User group no access')),
-        ('usergroup.read', _('User group read access')),
-        ('usergroup.write', _('User group write access')),
-        ('usergroup.admin', _('User group admin access')),
-
-        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
-        ('hg.repogroup.create.true', _('Repository Group creation enabled')),
-
-        ('hg.usergroup.create.false', _('User Group creation disabled')),
-        ('hg.usergroup.create.true', _('User Group creation enabled')),
-
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
-        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
-
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-
-        ('hg.register.none', _('Registration disabled')),
-        ('hg.register.manual_activate', _('User Registration with manual account activation')),
-        ('hg.register.auto_activate', _('User Registration with automatic account activation')),
-
-        ('hg.extern_activate.manual', _('Manual activation of external account')),
-        ('hg.extern_activate.auto', _('Automatic activation of external account')),
-
-    ]
-
-    #definition of system default permissions for DEFAULT user
-    DEFAULT_USER_PERMISSIONS = [
-        'repository.read',
-        'group.read',
-        'usergroup.read',
-        'hg.create.repository',
-        'hg.create.write_on_repogroup.true',
-        'hg.fork.repository',
-        'hg.register.manual_activate',
-        'hg.extern_activate.auto',
-    ]
-
-    # Weight defines which permissions are more important;
-    # the higher the number, the more important the permission.
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'usergroup.none': 0,
-        'usergroup.read': 1,
-        'usergroup.write': 3,
-        'usergroup.admin': 4,
-        'hg.repogroup.create.false': 0,
-        'hg.repogroup.create.true': 1,
-
-        'hg.usergroup.create.false': 0,
-        'hg.usergroup.create.true': 1,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1
-    }
-
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_user_group_perms(cls, default_user_id):
-        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
-         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
-         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserUserGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-
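# Editor's illustrative sketch (not part of the removed schema file): a weight
# table like Permission.PERM_WEIGHTS above is a natural way to pick the
# strongest of several permission names, e.g. when permissions are granted both
# directly and via user groups. The permission names are copied from the table
# above; the helper function itself is hypothetical.
PERM_WEIGHTS = {
    'repository.none': 0,
    'repository.read': 1,
    'repository.write': 3,
    'repository.admin': 4,
}

def strongest_permission(perm_names):
    # unknown permission names sort below everything else
    return max(perm_names, key=lambda p: PERM_WEIGHTS.get(p, -1))

print(strongest_permission(['repository.read', 'repository.write']))  # repository.write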
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.repository)
-
-
-class UserUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    user_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, user_group, permission):
-        n = cls()
-        n.user = user
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.user_group)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.permission)
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_group_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
-        CheckConstraint('target_user_group_id != user_group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
-    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, target_user_group, user_group, permission):
-        n = cls()
-        n.target_user_group = target_user_group
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    # cache_id, not used
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    # cache_key as created by _get_cache_key
-    cache_key = Column("cache_key", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    # cache_args is a repo_name
-    cache_args = Column("cache_args", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    # an instance sets cache_active to True when it is caching;
-    # other instances set cache_active to False to indicate that this cache entry is invalid
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, repo_name=''):
-        self.cache_key = cache_key
-        self.cache_args = repo_name
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (self.__class__.__name__,
-                            self.cache_id, self.cache_key, self.cache_active)
-
-    def _cache_key_partition(self):
-        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
-        return prefix, repo_name, suffix
-
-    def get_prefix(self):
-        """
-        get prefix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[0]
-
-    def get_suffix(self):
-        """
-        get suffix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[2]
-
-    @classmethod
-    def clear_cache(cls):
-        """
-        Delete all cache keys from database.
-        Should only be run when all instances are down and all entries thus stale.
-        """
-        cls.query().delete()
-        Session().commit()
-
-    @classmethod
-    def _get_cache_key(cls, key):
-        """
-        Wrapper for generating a unique cache key for this instance and "key".
-        The key must (and will) start with a repo_name, which is stored in .cache_args.
-        """
-        import kallithea
-        prefix = kallithea.CONFIG.get('instance_id', '')
-        return "%s%s" % (prefix, key)
-
-    @classmethod
-    def set_invalidate(cls, repo_name, delete=False):
-        """
-        Mark all caches of a repo as invalid in the database.
-        """
-        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-        log.debug('for repo %s got %s invalidation objects', repo_name, inv_objs)
-        try:
-            for inv_obj in inv_objs:
-                log.debug('marking %s key for invalidation based on repo_name=%s',
-                          inv_obj, safe_str(repo_name))
-                if delete:
-                    Session().delete(inv_obj)
-                else:
-                    inv_obj.cache_active = False
-                    Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
-        """
-        Mark this cache key as active and currently cached.
-        Return True if the existing cache registration was still valid.
-        Return False to indicate that it had been invalidated and caches should be refreshed.
-        """
-
-        key = (repo_name + '_' + kind) if kind else repo_name
-        cache_key = cls._get_cache_key(key)
-
-        if valid_cache_keys and cache_key in valid_cache_keys:
-            return True
-
-        try:
-            inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
-            if not inv_obj:
-                inv_obj = CacheInvalidation(cache_key, repo_name)
-            was_valid = inv_obj.cache_active
-            inv_obj.cache_active = True
-            Session().add(inv_obj)
-            Session().commit()
-            return was_valid
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-            return False
-
-    @classmethod
-    def get_valid_cache_keys(cls):
-        """
-        Return an opaque object with information about which caches are still valid
-        and can be used without checking for invalidation.
-        """
-        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
-
-
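# Editor's illustrative sketch (not part of the removed schema file): the
# CacheInvalidation table above implements a simple test-and-set protocol
# between instances. A dict stands in for the database table here; the function
# names mirror the classmethods above but this is not the real implementation.
_cache_active = {}  # cache_key -> cache_active flag

def test_and_set_valid(cache_key):
    # True: cached data may still be used; False: it must be regenerated
    was_valid = _cache_active.get(cache_key, False)
    _cache_active[cache_key] = True  # mark as actively cached either way
    return was_valid

def set_invalidate(cache_key):
    # called by another instance after it has changed the repository
    _cache_active[cache_key] = False

assert test_and_set_valid('repo1_README') is False  # first use: populate the cache
assert test_and_set_valid('repo1_README') is True   # registration still valid
set_invalidate('repo1_README')
assert test_and_set_valid('repo1_README') is False  # refresh after invalidation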
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns the users associated with this ChangesetComment, i.e. those
-        who actually commented
-
-        :param cls:
-        :param revision:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    # values for .status
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
-    @property
-    def org_ref_parts(self):
-        return self.org_ref.split(':')
-
-    @property
-    def other_ref_parts(self):
-        return self.other_ref.split(':')
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    @property
-    def last_review_status(self):
-        return self.statuses[-1].status if self.statuses else ''
-
-    def __json__(self):
-        return dict(
-            revisions=self.revisions
-        )
-
-
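# Editor's illustrative sketch (not part of the removed schema file): the
# PullRequest.revisions hybrid_property above stores a list of revision hashes
# as a single colon-joined text column. A minimal self-contained version of
# that pattern, using an in-memory SQLite database:
from sqlalchemy import Column, Integer, UnicodeText, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import sessionmaker

DemoBase = declarative_base()

class DemoPullRequest(DemoBase):
    __tablename__ = 'demo_pull_requests'
    id = Column(Integer, primary_key=True)
    _revisions = Column('revisions', UnicodeText)

    @hybrid_property
    def revisions(self):
        return self._revisions.split(':')

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

engine = create_engine('sqlite://')
DemoBase.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

pr = DemoPullRequest()
pr.revisions = [u'abc123', u'def456']   # stored as u'abc123:def456'
session.add(pr)
session.commit()
print(session.query(DemoPullRequest).one().revisions)  # [u'abc123', u'def456']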
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class Gist(Base, BaseModel):
-    __tablename__ = 'gists'
-    __table_args__ = (
-        Index('g_gist_access_id_idx', 'gist_access_id'),
-        Index('g_created_on_idx', 'created_on'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    GIST_PUBLIC = u'public'
-    GIST_PRIVATE = u'private'
-    DEFAULT_FILENAME = u'gistfile1.txt'
-
-    gist_id = Column('gist_id', Integer(), primary_key=True)
-    gist_access_id = Column('gist_access_id', Unicode(250))
-    gist_description = Column('gist_description', UnicodeText(1024))
-    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
-    gist_expires = Column('gist_expires', Float(53), nullable=False)
-    gist_type = Column('gist_type', Unicode(128), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    owner = relationship('User')
-
-    def __repr__(self):
-        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        res = cls.query().filter(cls.gist_access_id == id_).scalar()
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def get_by_access_id(cls, gist_access_id):
-        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
-
-    def gist_url(self):
-        import kallithea
-        alias_url = kallithea.CONFIG.get('gist_alias_url')
-        if alias_url:
-            return alias_url.replace('{gistid}', self.gist_access_id)
-
-        import kallithea.lib.helpers as h
-        return h.canonical_url('gist', gist_id=self.gist_access_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all gists are stored
-
-        :param cls:
-        """
-        from kallithea.model.gist import GIST_STORE_LOC
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == URL_SEP)
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
-
-    def get_api_data(self):
-        """
-        Common function for generating gist related data for API
-        """
-        gist = self
-        data = dict(
-            gist_id=gist.gist_id,
-            type=gist.gist_type,
-            access_id=gist.gist_access_id,
-            description=gist.gist_description,
-            url=gist.gist_url(),
-            expires=gist.gist_expires,
-            created_on=gist.created_on,
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-        )
-        data.update(self.get_api_data())
-        return data
-    ## SCM functions
-
-    @property
-    def scm_instance(self):
-        from kallithea.lib.vcs import get_repo
-        base_path = self.base_path()
-        return get_repo(os.path.join(*map(safe_str,
-                                          [base_path, self.gist_access_id])))
-
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
--- a/kallithea/lib/dbmigrate/schema/db_2_2_3.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2494 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.schema.db_2_2_3
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Database Models for Kallithea
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Apr 08, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-import os
-import time
-import logging
-import datetime
-import traceback
-import hashlib
-import collections
-import functools
-
-from sqlalchemy import *
-from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
-from beaker.cache import cache_region, region_invalidate
-from webob.exc import HTTPNotFound
-
-from pylons.i18n.translation import lazy_ugettext as _
-
-from kallithea.lib.vcs import get_backend
-from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.lib.vcs.backends.base import EmptyChangeset
-
-from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
-    safe_unicode, remove_prefix, time_to_datetime, aslist, Optional, safe_int, \
-    get_clone_url
-from kallithea.lib.compat import json
-from kallithea.lib.caching_query import FromCache
-
-from kallithea.model.meta import Base, Session
-
-URL_SEP = '/'
-log = logging.getLogger(__name__)
-
-from kallithea import DB_PREFIX
-
-#==============================================================================
-# BASE CLASSES
-#==============================================================================
-
-_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
-
-
-class BaseModel(object):
-    """
-    Base Model for all classes
-    """
-
-    @classmethod
-    def _get_keys(cls):
-        """return column names for this model """
-        return class_mapper(cls).c.keys()
-
-    def get_dict(self):
-        """
-        return a dict with keys and values corresponding
-        to this model's data """
-
-        d = {}
-        for k in self._get_keys():
-            d[k] = getattr(self, k)
-
-        # also use __json__() if present to get additional fields
-        _json_attr = getattr(self, '__json__', None)
-        if _json_attr:
-            # update with attributes from __json__
-            if callable(_json_attr):
-                _json_attr = _json_attr()
-            for k, val in _json_attr.iteritems():
-                d[k] = val
-        return d
-
-    def get_appstruct(self):
-        """return list with keys and values tuples corresponding
-        to this model data """
-
-        l = []
-        for k in self._get_keys():
-            l.append((k, getattr(self, k),))
-        return l
-
-    def populate_obj(self, populate_dict):
-        """populate model with data from given populate_dict"""
-
-        for k in self._get_keys():
-            if k in populate_dict:
-                setattr(self, k, populate_dict[k])
-
-    @classmethod
-    def query(cls):
-        return Session().query(cls)
-
-    @classmethod
-    def get(cls, id_):
-        if id_:
-            return cls.query().get(id_)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        try:
-            id_ = int(id_)
-        except (TypeError, ValueError):
-            raise HTTPNotFound
-
-        res = cls.query().get(id_)
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def getAll(cls):
-        # deprecated and left for backward compatibility
-        return cls.get_all()
-
-    @classmethod
-    def get_all(cls):
-        return cls.query().all()
-
-    @classmethod
-    def delete(cls, id_):
-        obj = cls.query().get(id_)
-        Session().delete(obj)
-
-    def __repr__(self):
-        if hasattr(self, '__unicode__'):
-            # python repr needs to return str
-            try:
-                return safe_str(self.__unicode__())
-            except UnicodeDecodeError:
-                pass
-        return '<DB:%s>' % (self.__class__.__name__)
-
-
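# Editor's illustrative sketch (not part of the removed schema file): how
# BaseModel.get_dict above merges mapped column values with the extra fields
# contributed by an optional __json__() hook. The FakeModel class is made up
# for illustration and is not backed by a database.
class FakeModel(object):
    name = 'example'
    active = True

    def _get_keys(self):
        return ['name', 'active']

    def __json__(self):
        return {'display_name': 'Example (active)'}

    def get_dict(self):
        d = dict((k, getattr(self, k)) for k in self._get_keys())
        extra = getattr(self, '__json__', None)
        if extra is not None:
            d.update(extra() if callable(extra) else extra)
        return d

print(FakeModel().get_dict())
# -> contains 'name', 'active' and the extra 'display_name' key (order may vary)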
-class Setting(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (
-        UniqueConstraint('app_settings_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    SETTINGS_TYPES = {
-        'str': safe_str,
-        'int': safe_int,
-        'unicode': safe_unicode,
-        'bool': str2bool,
-        'list': functools.partial(aslist, sep=',')
-    }
-    DEFAULT_UPDATE_URL = ''
-
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    _app_settings_value = Column("app_settings_value", String(4096, convert_unicode=False), nullable=True, unique=None, default=None)
-    _app_settings_type = Column("app_settings_type", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-
-    def __init__(self, key='', val='', type='unicode'):
-        self.app_settings_name = key
-        self.app_settings_value = val
-        self.app_settings_type = type
-
-    @validates('_app_settings_value')
-    def validate_settings_value(self, key, val):
-        assert type(val) == unicode
-        return val
-
-    @hybrid_property
-    def app_settings_value(self):
-        v = self._app_settings_value
-        _type = self.app_settings_type
-        converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
-        return converter(v)
-
-    @app_settings_value.setter
-    def app_settings_value(self, val):
-        """
-        Setter that will always make sure we use unicode in app_settings_value
-
-        :param val:
-        """
-        self._app_settings_value = safe_unicode(val)
-
-    @hybrid_property
-    def app_settings_type(self):
-        return self._app_settings_type
-
-    @app_settings_type.setter
-    def app_settings_type(self, val):
-        if val not in self.SETTINGS_TYPES:
-            raise Exception('type must be one of %s got %s'
-                            % (self.SETTINGS_TYPES.keys(), val))
-        self._app_settings_type = val
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (
-            self.__class__.__name__,
-            self.app_settings_name, self.app_settings_value, self.app_settings_type
-        )
-
-    @classmethod
-    def get_by_name(cls, key):
-        return cls.query() \
-            .filter(cls.app_settings_name == key).scalar()
-
-    @classmethod
-    def get_by_name_or_create(cls, key, val='', type='unicode'):
-        res = cls.get_by_name(key)
-        if not res:
-            res = cls(key, val, type)
-        return res
-
-    @classmethod
-    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
-        """
-        Creates or updates a Kallithea setting. If an update is triggered, only
-        parameters that are explicitly set are updated; Optional instances are skipped.
-
-        :param key:
-        :param val:
-        :param type:
-        :return:
-        """
-        res = cls.get_by_name(key)
-        if not res:
-            val = Optional.extract(val)
-            type = Optional.extract(type)
-            res = cls(key, val, type)
-        else:
-            res.app_settings_name = key
-            if not isinstance(val, Optional):
-                # update if set
-                res.app_settings_value = val
-            if not isinstance(type, Optional):
-                # update if set
-                res.app_settings_type = type
-        return res
-
-    @classmethod
-    def get_app_settings(cls, cache=False):
-
-        ret = cls.query()
-
-        if cache:
-            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
-
-        if not ret:
-            raise Exception('Could not get application settings !')
-        settings = {}
-        for each in ret:
-            settings[each.app_settings_name] = \
-                each.app_settings_value
-
-        return settings
-
-    @classmethod
-    def get_auth_plugins(cls, cache=False):
-        auth_plugins = cls.get_by_name("auth_plugins").app_settings_value
-        return auth_plugins
-
-    @classmethod
-    def get_auth_settings(cls, cache=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('auth_')).all()
-        fd = {}
-        for row in ret:
-            fd.update({row.app_settings_name: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
-        ret = cls.query() \
-                .filter(cls.app_settings_name.startswith('default_')).all()
-        fd = {}
-        for row in ret:
-            key = row.app_settings_name
-            if strip_prefix:
-                key = remove_prefix(key, prefix='default_')
-            fd.update({key: row.app_settings_value})
-
-        return fd
-
-    @classmethod
-    def get_server_info(cls):
-        import pkg_resources
-        import platform
-        import kallithea
-        from kallithea.lib.utils import check_git_version
-        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
-        info = {
-            'modules': sorted(mods, key=lambda k: k[0].lower()),
-            'py_version': platform.python_version(),
-            'platform': safe_unicode(platform.platform()),
-            'kallithea_version': kallithea.__version__,
-            'git_version': safe_unicode(check_git_version()),
-            'git_path': kallithea.CONFIG.get('git_path')
-        }
-        return info
-
-
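# Editor's illustrative sketch (not part of the removed schema file): the
# Setting class above stores every value as text together with a type name and
# converts on read through a table of converter callables. The converters below
# only approximate Kallithea's str2bool/aslist helpers.
SETTINGS_TYPES = {
    'unicode': lambda v: v,
    'int': int,
    'bool': lambda v: str(v).strip().lower() in ('true', 'yes', 'on', 'y', 't', '1'),
    'list': lambda v: [x.strip() for x in v.split(',') if x.strip()],
}

def read_setting(raw_value, type_name):
    converter = SETTINGS_TYPES.get(type_name) or SETTINGS_TYPES['unicode']
    return converter(raw_value)

print(read_setting('8080', 'int'))      # 8080
print(read_setting('true', 'bool'))     # True
print(read_setting('a, b ,c', 'list'))  # ['a', 'b', 'c']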
-class Ui(Base, BaseModel):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = (
-        UniqueConstraint('ui_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    HOOK_UPDATE = 'changegroup.update'
-    HOOK_REPO_SIZE = 'changegroup.repo_size'
-    HOOK_PUSH = 'changegroup.push_logger'
-    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
-    HOOK_PULL = 'outgoing.pull_logger'
-    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
-
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-    # def __init__(self, section='', key='', value=''):
-    #     self.ui_section = section
-    #     self.ui_key = key
-    #     self.ui_value = value
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.ui_key == key).scalar()
-
-    @classmethod
-    def get_builtin_hooks(cls):
-        q = cls.query()
-        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        return q.all()
-
-    @classmethod
-    def get_custom_hooks(cls):
-        q = cls.query()
-        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
-                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
-                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
-        q = q.filter(cls.ui_section == 'hooks')
-        return q.all()
-
-    @classmethod
-    def get_repos_location(cls):
-        return cls.get_by_key('/').ui_value
-
-    @classmethod
-    def create_or_update_hook(cls, key, val):
-        new_ui = cls.get_by_key(key) or cls()
-        new_ui.ui_section = 'hooks'
-        new_ui.ui_active = True
-        new_ui.ui_key = key
-        new_ui.ui_value = val
-
-        Session().add(new_ui)
-
-    def __repr__(self):
-        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
-                                    self.ui_key, self.ui_value)
-
-
-class User(Base, BaseModel):
-    __tablename__ = 'users'
-    __table_args__ = (
-        UniqueConstraint('username'), UniqueConstraint('email'),
-        Index('u_username_idx', 'username'),
-        Index('u_email_idx', 'email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    DEFAULT_USER = 'default'
-    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
-
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    password = Column("password", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("firstname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    extern_type = Column("extern_type", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    extern_name = Column("extern_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data
-
-    user_log = relationship('UserLog')
-    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relationship('Repository')
-    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
-
-    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
-
-    group_member = relationship('UserGroupMember', cascade='all')
-
-    # notifications assigned to this user
-    notifications = relationship('UserNotification', cascade='all')
-    # notifications created by this user
-    user_created_notifications = relationship('Notification', cascade='all')
-    # comments created by this user
-    user_comments = relationship('ChangesetComment', cascade='all')
-    #extra emails for this user
-    user_emails = relationship('UserEmailMap', cascade='all')
-    #extra API keys
-    user_api_keys = relationship('UserApiKeys', cascade='all')
-
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-    @property
-    def firstname(self):
-        # alias for future
-        return self.name
-
-    @property
-    def emails(self):
-        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
-        return [self.email] + [x.email for x in other]
-
-    @property
-    def api_keys(self):
-        other = UserApiKeys.query().filter(UserApiKeys.user==self).all()
-        return [self.api_key] + [x.api_key for x in other]
-
-    @property
-    def ip_addresses(self):
-        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
-        return [x.ip_addr for x in ret]
-
-    @property
-    def username_and_name(self):
-        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
-
-    @property
-    def full_name(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def full_name_or_username(self):
-        return ('%s %s' % (self.firstname, self.lastname)
-                if (self.firstname and self.lastname) else self.username)
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
-
-    @property
-    def short_contact(self):
-        return '%s %s' % (self.firstname, self.lastname)
-
-    @property
-    def is_admin(self):
-        return self.admin
-
-    @property
-    def AuthUser(self):
-        """
-        Returns instance of AuthUser for this user
-        """
-        from kallithea.lib.auth import AuthUser
-        return AuthUser(user_id=self.user_id, api_key=self.api_key,
-                        username=self.username)
-
-    @hybrid_property
-    def user_data(self):
-        if not self._user_data:
-            return {}
-
-        try:
-            return json.loads(self._user_data)
-        except TypeError:
-            return {}
-
-    @user_data.setter
-    def user_data(self, val):
-        try:
-            self._user_data = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.user_id, self.username)
-
-    @classmethod
-    def get_by_username(cls, username, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.username.ilike(username))
-        else:
-            q = cls.query().filter(cls.username == username)
-
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(username)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get_by_api_key(cls, api_key, cache=False, fallback=True):
-        q = cls.query().filter(cls.api_key == api_key)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_api_key_%s" % api_key))
-        res = q.scalar()
-
-        if fallback and not res:
-            #fallback to additional keys
-            _res = UserApiKeys.query() \
-                .filter(UserApiKeys.api_key == api_key) \
-                .filter(or_(UserApiKeys.expires == -1,
-                            UserApiKeys.expires >= time.time())) \
-                .first()
-            if _res:
-                res = _res.user
-        return res
-
-    @classmethod
-    def get_by_email(cls, email, case_insensitive=False, cache=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.email.ilike(email))
-        else:
-            q = cls.query().filter(cls.email == email)
-
-        if cache:
-            q = q.options(FromCache("sql_cache_short",
-                                    "get_email_key_%s" % email))
-
-        ret = q.scalar()
-        if ret is None:
-            q = UserEmailMap.query()
-            # try fetching in alternate email map
-            if case_insensitive:
-                q = q.filter(UserEmailMap.email.ilike(email))
-            else:
-                q = q.filter(UserEmailMap.email == email)
-            q = q.options(joinedload(UserEmailMap.user))
-            if cache:
-                q = q.options(FromCache("sql_cache_short",
-                                        "get_email_map_key_%s" % email))
-            ret = getattr(q.scalar(), 'user', None)
-
-        return ret
-
-    @classmethod
-    def get_from_cs_author(cls, author):
-        """
-        Tries to get a User object out of a commit author string
-
-        :param author:
-        """
-        from kallithea.lib.helpers import email, author_name
-        # If the passed author string contains a valid email, see if it belongs to a user in the system
-        _email = email(author)
-        if _email:
-            user = cls.get_by_email(_email, case_insensitive=True)
-            if user:
-                return user
-        # Maybe we can match by username?
-        _author = author_name(author)
-        user = cls.get_by_username(_author, case_insensitive=True)
-        if user:
-            return user
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-        self.last_login = datetime.datetime.now()
-        Session().add(self)
-        log.debug('updated user %s lastlogin', self.username)
-
-    @classmethod
-    def get_first_admin(cls):
-        user = User.query().filter(User.admin == True).first()
-        if user is None:
-            raise Exception('Missing administrative account!')
-        return user
-
-    @classmethod
-    def get_default_user(cls, cache=False):
-        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
-        if user is None:
-            raise Exception('Missing default account!')
-        return user
-
-    def get_api_data(self):
-        """
-        Common function for generating user related data for API
-        """
-        user = self
-        data = dict(
-            user_id=user.user_id,
-            username=user.username,
-            firstname=user.name,
-            lastname=user.lastname,
-            email=user.email,
-            emails=user.emails,
-            api_key=user.api_key,
-            api_keys=user.api_keys,
-            active=user.active,
-            admin=user.admin,
-            extern_type=user.extern_type,
-            extern_name=user.extern_name,
-            last_login=user.last_login,
-            ip_addresses=user.ip_addresses
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-            full_name=self.full_name,
-            full_name_or_username=self.full_name_or_username,
-            short_contact=self.short_contact,
-            full_contact=self.full_contact
-        )
-        data.update(self.get_api_data())
-        return data
-
-
-class UserApiKeys(Base, BaseModel):
-    __tablename__ = 'user_api_keys'
-    __table_args__ = (
-        Index('uak_api_key_idx', 'api_key'),
-        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
-        UniqueConstraint('api_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    api_key = Column("api_key", String(255, convert_unicode=False), nullable=False, unique=True)
-    description = Column('description', UnicodeText(1024))
-    expires = Column('expires', Float(53), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    user = relationship('User', lazy='joined')
-
-    @property
-    def expired(self):
-        if self.expires == -1:
-            return False
-        return time.time() > self.expires
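-        # Note: expires == -1 means the key never expires; any other value is a
-        # Unix timestamp compared against time.time(), e.g. an (illustrative)
-        # one-hour key would use expires=time.time() + 3600.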
-
-
-class UserEmailMap(Base, BaseModel):
-    __tablename__ = 'user_email_map'
-    __table_args__ = (
-        Index('uem_email_idx', 'email'),
-        UniqueConstraint('email'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    _email = Column("email", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
-    user = relationship('User', lazy='joined')
-
-    @validates('_email')
-    def validate_email(self, key, email):
-        # check that this email is not already some user's main email
-        main_email = Session().query(User).filter(User.email == email).scalar()
-        if main_email is not None:
-            raise AttributeError('email %s is already present in the users table' % email)
-        return email
-
-    @hybrid_property
-    def email(self):
-        return self._email
-
-    @email.setter
-    def email(self, val):
-        self._email = val.lower() if val else None
-
-
-class UserIpMap(Base, BaseModel):
-    __tablename__ = 'user_ip_map'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'ip_addr'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    __mapper_args__ = {}
-
-    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    ip_addr = Column("ip_addr", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
-    user = relationship('User', lazy='joined')
-
-    @classmethod
-    def _get_ip_range(cls, ip_addr):
-        from kallithea.lib import ipaddr
-        net = ipaddr.IPNetwork(address=ip_addr)
-        return [str(net.network), str(net.broadcast)]
-
-    def __json__(self):
-        return dict(
-          ip_addr=self.ip_addr,
-          ip_range=self._get_ip_range(self.ip_addr)
-        )
-
-    def __unicode__(self):
-        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
-                                            self.user_id, self.ip_addr)
-
-class UserLog(Base, BaseModel):
-    __tablename__ = 'user_logs'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    username = Column("username", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
-    repository_name = Column("repository_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    action = Column("action", UnicodeText(1200000, convert_unicode=False), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.repository_name,
-                                      self.action)
-
-    @property
-    def action_as_day(self):
-        return datetime.date(*self.action_date.timetuple()[:3])
-
-    user = relationship('User')
-    repository = relationship('Repository', cascade='')
-
-
-class UserGroup(Base, BaseModel):
-    __tablename__ = 'users_groups'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_name = Column("users_group_name", String(255, convert_unicode=False), nullable=False, unique=True, default=None)
-    user_group_description = Column("user_group_description", String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
-    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
-    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data
-
-    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
-    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
-    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
-    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
-
-    user = relationship('User')
-
-    @hybrid_property
-    def group_data(self):
-        if not self._group_data:
-            return {}
-
-        try:
-            return json.loads(self._group_data)
-        except TypeError:
-            return {}
-
-    @group_data.setter
-    def group_data(self, val):
-        try:
-            self._group_data = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
-                                      self.users_group_id,
-                                      self.users_group_name)
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False,
-                          case_insensitive=False):
-        if case_insensitive:
-            q = cls.query().filter(cls.users_group_name.ilike(group_name))
-        else:
-            q = cls.query().filter(cls.users_group_name == group_name)
-        if cache:
-            q = q.options(FromCache(
-                            "sql_cache_short",
-                            "get_user_%s" % _hash_key(group_name)
-                          )
-            )
-        return q.scalar()
-
-    @classmethod
-    def get(cls, user_group_id, cache=False):
-        user_group = cls.query()
-        if cache:
-            user_group = user_group.options(FromCache("sql_cache_short",
-                                    "get_users_group_%s" % user_group_id))
-        return user_group.get(user_group_id)
-
-    def get_api_data(self, with_members=True):
-        user_group = self
-
-        data = dict(
-            users_group_id=user_group.users_group_id,
-            group_name=user_group.users_group_name,
-            group_description=user_group.user_group_description,
-            active=user_group.users_group_active,
-            owner=user_group.user.username,
-        )
-        if with_members:
-            members = []
-            for user in user_group.members:
-                user = user.user
-                members.append(user.get_api_data())
-            data['members'] = members
-
-        return data
-
-
-class UserGroupMember(Base, BaseModel):
-    __tablename__ = 'users_groups_members'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User', lazy='joined')
-    users_group = relationship('UserGroup')
-
-    def __init__(self, gr_id='', u_id=''):
-        self.users_group_id = gr_id
-        self.user_id = u_id
-
-
-class RepositoryField(Base, BaseModel):
-    __tablename__ = 'repositories_fields'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
-
-    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-    field_key = Column("field_key", String(250, convert_unicode=False))
-    field_label = Column("field_label", String(1024, convert_unicode=False), nullable=False)
-    field_value = Column("field_value", String(10000, convert_unicode=False), nullable=False)
-    field_desc = Column("field_desc", String(1024, convert_unicode=False), nullable=False)
-    field_type = Column("field_type", String(256), nullable=False, unique=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repository = relationship('Repository')
-
-    @property
-    def field_key_prefixed(self):
-        return 'ex_%s' % self.field_key
-
-    @classmethod
-    def un_prefix_key(cls, key):
-        if key.startswith(cls.PREFIX):
-            return key[len(cls.PREFIX):]
-        return key
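-        # Illustrative round trip with the PREFIX defined above ('ex_'):
-        #   RepositoryField.un_prefix_key('ex_sponsor') -> 'sponsor'
-        #   RepositoryField.un_prefix_key('sponsor')    -> 'sponsor'  (unchanged)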
-
-    @classmethod
-    def get_by_key_name(cls, key, repo):
-        row = cls.query() \
-                .filter(cls.repository == repo) \
-                .filter(cls.field_key == key).scalar()
-        return row
-
-
-class Repository(Base, BaseModel):
-    __tablename__ = 'repositories'
-    __table_args__ = (
-        UniqueConstraint('repo_name'),
-        Index('r_repo_name_idx', 'repo_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
-    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
-
-    STATE_CREATED = 'repo_state_created'
-    STATE_PENDING = 'repo_state_pending'
-    STATE_ERROR = 'repo_state_error'
-
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(255, convert_unicode=False), nullable=False, unique=True, default=None)
-    repo_state = Column("repo_state", String(255), nullable=True)
-
-    clone_uri = Column("clone_uri", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
-    repo_type = Column("repo_type", String(255, convert_unicode=False), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-    _landing_revision = Column("landing_revision", String(255, convert_unicode=False), nullable=False, unique=False, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    _locked = Column("locked", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
-    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data
-
-    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
-
-    user = relationship('User')
-    fork = relationship('Repository', remote_side=repo_id)
-    group = relationship('RepoGroup')
-    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
-    stats = relationship('Statistics', cascade='all', uselist=False)
-
-    followers = relationship('UserFollowing',
-                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
-                             cascade='all')
-    extra_fields = relationship('RepositoryField',
-                                cascade="all, delete, delete-orphan")
-
-    logs = relationship('UserLog')
-    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
-
-    pull_requests_org = relationship('PullRequest',
-                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    pull_requests_other = relationship('PullRequest',
-                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
-                    cascade="all, delete, delete-orphan")
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
-                                   safe_unicode(self.repo_name))
-
-    @hybrid_property
-    def landing_rev(self):
-        # should always return [rev_type, rev]
-        if self._landing_revision:
-            _rev_info = self._landing_revision.split(':')
-            if len(_rev_info) < 2:
-                _rev_info.insert(0, 'rev')
-            return [_rev_info[0], _rev_info[1]]
-        return [None, None]
-
-    @landing_rev.setter
-    def landing_rev(self, val):
-        if ':' not in val:
-            raise ValueError('value must be delimited with `:` and consist '
-                             'of <rev_type>:<rev>, got %s instead' % val)
-        self._landing_revision = val
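-        # Illustrative assignments accepted by this setter:
-        #   repo.landing_rev = 'rev:tip'         # <rev_type>:<rev>
-        #   repo.landing_rev = 'branch:default'
-        # whereas a bare 'tip' (no ':') would raise ValueError.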
-
-    @hybrid_property
-    def locked(self):
-        # should always return [user_id, timelocked]
-        if self._locked:
-            _lock_info = self._locked.split(':')
-            return int(_lock_info[0]), _lock_info[1]
-        return [None, None]
-
-    @locked.setter
-    def locked(self, val):
-        if val and isinstance(val, (list, tuple)):
-            self._locked = ':'.join(map(str, val))
-        else:
-            self._locked = None
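-        # Illustrative usage: repo.locked = [user_id, time.time()] stores
-        # 'user_id:timestamp' in _locked; repo.locked = None clears the lock.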
-
-    @hybrid_property
-    def changeset_cache(self):
-        from kallithea.lib.vcs.backends.base import EmptyChangeset
-        dummy = EmptyChangeset().__json__()
-        if not self._changeset_cache:
-            return dummy
-        try:
-            return json.loads(self._changeset_cache)
-        except TypeError:
-            return dummy
-
-    @changeset_cache.setter
-    def changeset_cache(self, val):
-        try:
-            self._changeset_cache = json.dumps(val)
-        except Exception:
-            log.error(traceback.format_exc())
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def normalize_repo_name(cls, repo_name):
-        """
-        Normalizes an OS-specific repo_name to the format stored internally in the
-        database, using URL_SEP
-
-        :param cls:
-        :param repo_name:
-        """
-        return cls.url_sep().join(repo_name.split(os.sep))
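-        # e.g. on Windows (illustrative), assuming URL_SEP is '/':
-        #   Repository.normalize_repo_name('web\\kallithea') -> 'web/kallithea'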
-
-    @classmethod
-    def get_by_repo_name(cls, repo_name):
-        q = Session().query(cls).filter(cls.repo_name == repo_name)
-        q = q.options(joinedload(Repository.fork)) \
-                .options(joinedload(Repository.user)) \
-                .options(joinedload(Repository.group))
-        return q.scalar()
-
-    @classmethod
-    def get_by_full_path(cls, repo_full_path):
-        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
-        repo_name = cls.normalize_repo_name(repo_name)
-        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
-
-    @classmethod
-    def get_repo_forks(cls, repo_id):
-        return cls.query().filter(Repository.fork_id == repo_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all repos are stored
-
-        :param cls:
-        """
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == cls.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def forks(self):
-        """
-        Return forks of this repo
-        """
-        return Repository.get_repo_forks(self.repo_id)
-
-    @property
-    def parent(self):
-        """
-        Returns fork parent
-        """
-        return self.fork
-
-    @property
-    def just_name(self):
-        return self.repo_name.split(Repository.url_sep())[-1]
-
-    @property
-    def groups_with_parents(self):
-        groups = []
-        if self.group is None:
-            return groups
-
-        cur_gr = self.group
-        groups.insert(0, cur_gr)
-        while 1:
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            groups.insert(0, gr)
-
-        return groups
-
-    @property
-    def groups_and_repo(self):
-        return self.groups_with_parents, self.just_name, self.repo_name
-
-    @LazyProperty
-    def repo_path(self):
-        """
-        Returns the full base path for this repository, i.e. where it actually
-        exists on the filesystem
-        """
-        q = Session().query(Ui).filter(Ui.ui_key ==
-                                              Repository.url_sep())
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return q.one().ui_value
-
-    @property
-    def repo_full_path(self):
-        p = [self.repo_path]
-        # we need to split the name by / since this is how we store the
-        # names in the database, but that eventually needs to be converted
-        # into a valid system path
-        p += self.repo_name.split(Repository.url_sep())
-        return os.path.join(*map(safe_unicode, p))
-
-    @property
-    def cache_keys(self):
-        """
-        Returns associated cache keys for that repo
-        """
-        return CacheInvalidation.query() \
-            .filter(CacheInvalidation.cache_args == self.repo_name) \
-            .order_by(CacheInvalidation.cache_key) \
-            .all()
-
-    def get_new_name(self, repo_name):
-        """
-        Returns the new full repository name based on the assigned group and the new name
-
-        :param repo_name:
-        """
-        path_prefix = self.group.full_path_splitted if self.group else []
-        return Repository.url_sep().join(path_prefix + [repo_name])
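-        # e.g. (illustrative): a repo in group 'web/tools' renamed to 'scanner'
-        # yields 'web/tools/scanner'; without a group it is simply 'scanner'.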
-
-    @property
-    def _ui(self):
-        """
-        Creates a db-based ui object for this repository
-        """
-        from kallithea.lib.utils import make_ui
-        return make_ui('db', clear_session=False)
-
-    @classmethod
-    def is_valid(cls, repo_name):
-        """
-        Returns True if the given repo name is a valid filesystem repository
-
-        :param cls:
-        :param repo_name:
-        """
-        from kallithea.lib.utils import is_valid_repo
-
-        return is_valid_repo(repo_name, cls.base_path())
-
-    def get_api_data(self):
-        """
-        Common function for generating repo api data
-
-        """
-        repo = self
-        data = dict(
-            repo_id=repo.repo_id,
-            repo_name=repo.repo_name,
-            repo_type=repo.repo_type,
-            clone_uri=repo.clone_uri,
-            private=repo.private,
-            created_on=repo.created_on,
-            description=repo.description,
-            landing_rev=repo.landing_rev,
-            owner=repo.user.username,
-            fork_of=repo.fork.repo_name if repo.fork else None,
-            enable_statistics=repo.enable_statistics,
-            enable_locking=repo.enable_locking,
-            enable_downloads=repo.enable_downloads,
-            last_changeset=repo.changeset_cache,
-            locked_by=User.get(self.locked[0]).get_api_data() \
-                if self.locked[0] else None,
-            locked_date=time_to_datetime(self.locked[1]) \
-                if self.locked[1] else None
-        )
-        rc_config = Setting.get_app_settings()
-        repository_fields = str2bool(rc_config.get('repository_fields'))
-        if repository_fields:
-            for f in self.extra_fields:
-                data[f.field_key_prefixed] = f.field_value
-
-        return data
-
-    @classmethod
-    def lock(cls, repo, user_id, lock_time=None):
-        if not lock_time:
-            lock_time = time.time()
-        repo.locked = [user_id, lock_time]
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def unlock(cls, repo):
-        repo.locked = None
-        Session().add(repo)
-        Session().commit()
-
-    @classmethod
-    def getlock(cls, repo):
-        return repo.locked
-
-    @property
-    def last_db_change(self):
-        return self.updated_on
-
-    def clone_url(self, **override):
-        import kallithea.lib.helpers as h
-        qualified_home_url = h.canonical_url('home')
-
-        uri_tmpl = None
-        if 'with_id' in override:
-            uri_tmpl = self.DEFAULT_CLONE_URI_ID
-            del override['with_id']
-
-        if 'uri_tmpl' in override:
-            uri_tmpl = override['uri_tmpl']
-            del override['uri_tmpl']
-
-        # the template was not overridden via **override
-        if not uri_tmpl:
-            uri_tmpl = self.DEFAULT_CLONE_URI
-            try:
-                from pylons import tmpl_context as c
-                uri_tmpl = c.clone_uri_tmpl
-            except Exception:
-                # in case this is called outside of a request context,
-                # i.e. without tmpl_context set up
-                pass
-
-        return get_clone_url(uri_tmpl=uri_tmpl,
-                             qualified_home_url=qualified_home_url,
-                             repo_name=self.repo_name,
-                             repo_id=self.repo_id, **override)
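-        # Illustrative calls, using only the names defined above:
-        #   repo.clone_url()                 # DEFAULT_CLONE_URI or c.clone_uri_tmpl
-        #   repo.clone_url(with_id=True)     # DEFAULT_CLONE_URI_ID ('.../_{repoid}')
-        #   repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')  # explicit template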
-
-    def set_state(self, state):
-        self.repo_state = state
-        Session().add(self)
-    #==========================================================================
-    # SCM PROPERTIES
-    #==========================================================================
-
-    def get_changeset(self, rev=None):
-        return get_changeset_safe(self.scm_instance, rev)
-
-    def get_landing_changeset(self):
-        """
-        Returns landing changeset, or if that doesn't exist returns the tip
-        """
-        _rev_type, _rev = self.landing_rev
-        cs = self.get_changeset(_rev)
-        if isinstance(cs, EmptyChangeset):
-            return self.get_changeset()
-        return cs
-
-    def update_changeset_cache(self, cs_cache=None):
-        """
-        Update cache of last changeset for repository, keys should be::
-
-            short_id
-            raw_id
-            revision
-            message
-            date
-            author
-
-        :param cs_cache:
-        """
-        from kallithea.lib.vcs.backends.base import BaseChangeset
-        if cs_cache is None:
-            cs_cache = EmptyChangeset()
-            # use no-cache version here
-            scm_repo = self.scm_instance_no_cache()
-            if scm_repo:
-                cs_cache = scm_repo.get_changeset()
-
-        if isinstance(cs_cache, BaseChangeset):
-            cs_cache = cs_cache.__json__()
-
-        if (cs_cache != self.changeset_cache or not self.changeset_cache):
-            _default = datetime.datetime.fromtimestamp(0)
-            last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
-            self.changeset_cache = cs_cache
-            Session().add(self)
-            Session().commit()
-        else:
-            log.debug('Skipping repo:%s already with latest changes',
-                      self.repo_name)
-
-    @property
-    def tip(self):
-        return self.get_changeset('tip')
-
-    @property
-    def author(self):
-        return self.tip.author
-
-    @property
-    def last_change(self):
-        return self.scm_instance.last_change
-
-    def get_comments(self, revisions=None):
-        """
-        Returns comments for this repository grouped by revisions
-
-        :param revisions: filter query by revisions only
-        """
-        cmts = ChangesetComment.query() \
-            .filter(ChangesetComment.repo == self)
-        if revisions:
-            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
-        grouped = collections.defaultdict(list)
-        for cmt in cmts.all():
-            grouped[cmt.revision].append(cmt)
-        return grouped
-
-    def statuses(self, revisions=None):
-        """
-        Returns statuses for this repository
-
-        :param revisions: list of revisions to get statuses for
-        """
-
-        statuses = ChangesetStatus.query() \
-            .filter(ChangesetStatus.repo == self) \
-            .filter(ChangesetStatus.version == 0)
-        if revisions:
-            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
-        grouped = {}
-
-        # maybe we have an open pull request without a status yet?
-        stat = ChangesetStatus.STATUS_UNDER_REVIEW
-        status_lbl = ChangesetStatus.get_status_lbl(stat)
-        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
-            for rev in pr.revisions:
-                pr_id = pr.pull_request_id
-                pr_repo = pr.other_repo.repo_name
-                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
-
-        for stat in statuses.all():
-            pr_id = pr_repo = None
-            if stat.pull_request:
-                pr_id = stat.pull_request.pull_request_id
-                pr_repo = stat.pull_request.other_repo.repo_name
-            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
-                                      pr_id, pr_repo]
-        return grouped
-
-    def _repo_size(self):
-        from kallithea.lib import helpers as h
-        log.debug('calculating repository size...')
-        return h.format_byte_size(self.scm_instance.size)
-
-    #==========================================================================
-    # SCM CACHE INSTANCE
-    #==========================================================================
-
-    def set_invalidate(self):
-        """
-        Mark caches of this repo as invalid.
-        """
-        CacheInvalidation.set_invalidate(self.repo_name)
-
-    def scm_instance_no_cache(self):
-        return self.__get_instance()
-
-    @property
-    def scm_instance(self):
-        import kallithea
-        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
-        if full_cache:
-            return self.scm_instance_cached()
-        return self.__get_instance()
-
-    def scm_instance_cached(self, valid_cache_keys=None):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-
-        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
-        if not valid:
-            log.debug('Cache for %s invalidated, getting new object', rn)
-            region_invalidate(_c, None, rn)
-        else:
-            log.debug('Getting obj for %s from cache', rn)
-        return _c(rn)
-
-    def __get_instance(self):
-        repo_full_path = self.repo_full_path
-        try:
-            alias = get_scm(repo_full_path)[0]
-            log.debug('Creating instance of %s repository from %s',
-                      alias, repo_full_path)
-            backend = get_backend(alias)
-        except VCSError:
-            log.error(traceback.format_exc())
-            log.error('Perhaps this repository is in the database but not on the '
-                      'filesystem; run "rescan repositories" with the '
-                      '"destroy old data" option from the admin panel')
-            return
-
-        if alias == 'hg':
-
-            repo = backend(safe_str(repo_full_path), create=False,
-                           baseui=self._ui)
-        else:
-            repo = backend(repo_full_path, create=False)
-
-        return repo
-
-    def __json__(self):
-        return dict(landing_rev=self.landing_rev)
-
-class RepoGroup(Base, BaseModel):
-    __tablename__ = 'groups'
-    __table_args__ = (
-        UniqueConstraint('group_name', 'group_parent_id'),
-        CheckConstraint('group_id != group_parent_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    __mapper_args__ = {'order_by': 'group_name'}
-
-    SEP = ' &raquo; '
-
-    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    group_name = Column("group_name", String(255, convert_unicode=False), nullable=False, unique=True, default=None)
-    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
-    group_description = Column("group_description", String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
-    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
-    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
-    parent_group = relationship('RepoGroup', remote_side=group_id)
-    user = relationship('User')
-
-    def __init__(self, group_name='', parent_group=None):
-        self.group_name = group_name
-        self.parent_group = parent_group
-
-    def __unicode__(self):
-        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
-                                      self.group_name)
-
-    @classmethod
-    def _generate_choice(cls, repo_group):
-        from webhelpers.html import literal as _literal
-        _name = lambda k: _literal(cls.SEP.join(k))
-        return repo_group.group_id, _name(repo_group.full_path_splitted)
-
-    @classmethod
-    def groups_choices(cls, groups=None, show_empty_group=True):
-        if not groups:
-            groups = cls.query().all()
-
-        repo_groups = []
-        if show_empty_group:
-            repo_groups = [('-1', u'-- %s --' % _('top level'))]
-
-        repo_groups.extend([cls._generate_choice(x) for x in groups])
-
-        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(cls.SEP)[0])
-        return repo_groups
-
-    @classmethod
-    def url_sep(cls):
-        return URL_SEP
-
-    @classmethod
-    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
-        if case_insensitive:
-            gr = cls.query() \
-                .filter(cls.group_name.ilike(group_name))
-        else:
-            gr = cls.query() \
-                .filter(cls.group_name == group_name)
-        if cache:
-            gr = gr.options(FromCache(
-                            "sql_cache_short",
-                            "get_group_%s" % _hash_key(group_name)
-                            )
-            )
-        return gr.scalar()
-
-    @property
-    def parents(self):
-        parents_recursion_limit = 5
-        groups = []
-        if self.parent_group is None:
-            return groups
-        cur_gr = self.parent_group
-        groups.insert(0, cur_gr)
-        cnt = 0
-        while 1:
-            cnt += 1
-            gr = getattr(cur_gr, 'parent_group', None)
-            cur_gr = cur_gr.parent_group
-            if gr is None:
-                break
-            if cnt == parents_recursion_limit:
-                # this will prevent accidental infinite loops
-                log.error('group nested more than %s',
-                          parents_recursion_limit)
-                break
-
-            groups.insert(0, gr)
-        return groups
-
-    @property
-    def children(self):
-        return RepoGroup.query().filter(RepoGroup.parent_group == self)
-
-    @property
-    def name(self):
-        return self.group_name.split(RepoGroup.url_sep())[-1]
-
-    @property
-    def full_path(self):
-        return self.group_name
-
-    @property
-    def full_path_splitted(self):
-        return self.group_name.split(RepoGroup.url_sep())
-
-    @property
-    def repositories(self):
-        return Repository.query() \
-                .filter(Repository.group == self) \
-                .order_by(Repository.repo_name)
-
-    @property
-    def repositories_recursive_count(self):
-        cnt = self.repositories.count()
-
-        def children_count(group):
-            cnt = 0
-            for child in group.children:
-                cnt += child.repositories.count()
-                cnt += children_count(child)
-            return cnt
-
-        return cnt + children_count(self)
-
-    def _recursive_objects(self, include_repos=True):
-        all_ = []
-
-        def _get_members(root_gr):
-            if include_repos:
-                for r in root_gr.repositories:
-                    all_.append(r)
-            childs = root_gr.children.all()
-            if childs:
-                for gr in childs:
-                    all_.append(gr)
-                    _get_members(gr)
-
-        _get_members(self)
-        return [self] + all_
-
-    def recursive_groups_and_repos(self):
-        """
-        Recursively returns all groups, with the repositories in those groups
-        """
-        return self._recursive_objects()
-
-    def recursive_groups(self):
-        """
-        Returns all child groups of this group, including children of children
-        """
-        return self._recursive_objects(include_repos=False)
-
-    def get_new_name(self, group_name):
-        """
-        returns new full group name based on parent and new name
-
-        :param group_name:
-        """
-        path_prefix = (self.parent_group.full_path_splitted if
-                       self.parent_group else [])
-        return RepoGroup.url_sep().join(path_prefix + [group_name])
-
-    def get_api_data(self):
-        """
-        Common function for generating api data
-
-        """
-        group = self
-        data = dict(
-            group_id=group.group_id,
-            group_name=group.group_name,
-            group_description=group.group_description,
-            parent_group=group.parent_group.group_name if group.parent_group else None,
-            repositories=[x.repo_name for x in group.repositories],
-            owner=group.user.username
-        )
-        return data
-
-
-class Permission(Base, BaseModel):
-    __tablename__ = 'permissions'
-    __table_args__ = (
-        Index('p_perm_name_idx', 'permission_name'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    PERMS = [
-        ('hg.admin', _('Kallithea Administrator')),
-
-        ('repository.none', _('Repository no access')),
-        ('repository.read', _('Repository read access')),
-        ('repository.write', _('Repository write access')),
-        ('repository.admin', _('Repository admin access')),
-
-        ('group.none', _('Repository group no access')),
-        ('group.read', _('Repository group read access')),
-        ('group.write', _('Repository group write access')),
-        ('group.admin', _('Repository group admin access')),
-
-        ('usergroup.none', _('User group no access')),
-        ('usergroup.read', _('User group read access')),
-        ('usergroup.write', _('User group write access')),
-        ('usergroup.admin', _('User group admin access')),
-
-        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
-        ('hg.repogroup.create.true', _('Repository Group creation enabled')),
-
-        ('hg.usergroup.create.false', _('User Group creation disabled')),
-        ('hg.usergroup.create.true', _('User Group creation enabled')),
-
-        ('hg.create.none', _('Repository creation disabled')),
-        ('hg.create.repository', _('Repository creation enabled')),
-        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
-        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
-
-        ('hg.fork.none', _('Repository forking disabled')),
-        ('hg.fork.repository', _('Repository forking enabled')),
-
-        ('hg.register.none', _('Registration disabled')),
-        ('hg.register.manual_activate', _('User Registration with manual account activation')),
-        ('hg.register.auto_activate', _('User Registration with automatic account activation')),
-
-        ('hg.extern_activate.manual', _('Manual activation of external account')),
-        ('hg.extern_activate.auto', _('Automatic activation of external account')),
-
-    ]
-
-    # definition of system default permissions for the DEFAULT user
-    DEFAULT_USER_PERMISSIONS = [
-        'repository.read',
-        'group.read',
-        'usergroup.read',
-        'hg.create.repository',
-        'hg.create.write_on_repogroup.true',
-        'hg.fork.repository',
-        'hg.register.manual_activate',
-        'hg.extern_activate.auto',
-    ]
-
-    # Weight defines which permissions are more important;
-    # the higher the number, the more important the permission.
-    PERM_WEIGHTS = {
-        'repository.none': 0,
-        'repository.read': 1,
-        'repository.write': 3,
-        'repository.admin': 4,
-
-        'group.none': 0,
-        'group.read': 1,
-        'group.write': 3,
-        'group.admin': 4,
-
-        'usergroup.none': 0,
-        'usergroup.read': 1,
-        'usergroup.write': 3,
-        'usergroup.admin': 4,
-        'hg.repogroup.create.false': 0,
-        'hg.repogroup.create.true': 1,
-
-        'hg.usergroup.create.false': 0,
-        'hg.usergroup.create.true': 1,
-
-        'hg.fork.none': 0,
-        'hg.fork.repository': 1,
-        'hg.create.none': 0,
-        'hg.create.repository': 1
-    }
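-    # Illustrative comparison using the weights above (a sketch of how the
-    # strongest of several permission names could be picked):
-    #   Permission.PERM_WEIGHTS['repository.write'] > Permission.PERM_WEIGHTS['repository.read']  # True
-    #   max(('repository.read', 'repository.admin'), key=Permission.PERM_WEIGHTS.get)  # 'repository.admin'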
-
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__, self.permission_id, self.permission_name
-        )
-
-    @classmethod
-    def get_by_key(cls, key):
-        return cls.query().filter(cls.permission_name == key).scalar()
-
-    @classmethod
-    def get_default_perms(cls, default_user_id):
-        q = Session().query(UserRepoToPerm, Repository, cls) \
-         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
-         .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_group_perms(cls, default_user_id):
-        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
-         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
-         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserRepoGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-    @classmethod
-    def get_default_user_group_perms(cls, default_user_id):
-        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
-         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
-         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
-         .filter(UserUserGroupToPerm.user_id == default_user_id)
-
-        return q.all()
-
-
-class UserRepoToPerm(Base, BaseModel):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    repository = relationship('Repository')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository, permission):
-        n = cls()
-        n.user = user
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.repository)
-
-
-class UserUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    user_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, user_group, permission):
-        n = cls()
-        n.user = user
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.user_group)
-
-
-class UserToPerm(Base, BaseModel):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    permission = relationship('Permission', lazy='joined')
-
-    def __unicode__(self):
-        return u'<%s => %s >' % (self.user, self.permission)
-
-
-class UserGroupRepoToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_to_perm'
-    __table_args__ = (
-        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    repository = relationship('Repository')
-
-    @classmethod
-    def create(cls, users_group, repository, permission):
-        n = cls()
-        n.users_group = users_group
-        n.repository = repository
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
-
-
-class UserGroupUserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_group_user_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
-        CheckConstraint('target_user_group_id != user_group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-
-    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
-    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, target_user_group, user_group, permission):
-        n = cls()
-        n.target_user_group = target_user_group
-        n.user_group = user_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-    def __unicode__(self):
-        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
-
-
-class UserGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'permission_id',),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-
-
-class UserRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'user_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'group_id', 'permission_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relationship('User')
-    group = relationship('RepoGroup')
-    permission = relationship('Permission')
-
-    @classmethod
-    def create(cls, user, repository_group, permission):
-        n = cls()
-        n.user = user
-        n.group = repository_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-
-class UserGroupRepoGroupToPerm(Base, BaseModel):
-    __tablename__ = 'users_group_repo_group_to_perm'
-    __table_args__ = (
-        UniqueConstraint('users_group_id', 'group_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    users_group = relationship('UserGroup')
-    permission = relationship('Permission')
-    group = relationship('RepoGroup')
-
-    @classmethod
-    def create(cls, user_group, repository_group, permission):
-        n = cls()
-        n.users_group = user_group
-        n.group = repository_group
-        n.permission = permission
-        Session().add(n)
-        return n
-
-
-class Statistics(Base, BaseModel):
-    __tablename__ = 'statistics'
-    __table_args__ = (
-         UniqueConstraint('repository_id'),
-         {'extend_existing': True, 'mysql_engine': 'InnoDB',
-          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
-    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
-
-    repository = relationship('Repository', single_parent=True)
-
-
-class UserFollowing(Base, BaseModel):
-    __tablename__ = 'user_followings'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'follows_repository_id'),
-        UniqueConstraint('user_id', 'follows_user_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
-    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
-
-    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relationship('Repository', order_by='Repository.repo_name')
-
-    @classmethod
-    def get_repo_followers(cls, repo_id):
-        return cls.query().filter(cls.follows_repo_id == repo_id)
-
-
-class CacheInvalidation(Base, BaseModel):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (
-        UniqueConstraint('cache_key'),
-        Index('key_idx', 'cache_key'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    # cache_id, not used
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    # cache_key as created by _get_cache_key
-    cache_key = Column("cache_key", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    # cache_args is a repo_name
-    cache_args = Column("cache_args", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
-    # instance sets cache_active True when it is caching,
-    # other instances set cache_active to False to indicate that this cache is invalid
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-    def __init__(self, cache_key, repo_name=''):
-        self.cache_key = cache_key
-        self.cache_args = repo_name
-        self.cache_active = False
-
-    def __unicode__(self):
-        return u"<%s('%s:%s[%s]')>" % (self.__class__.__name__,
-                            self.cache_id, self.cache_key, self.cache_active)
-
-    def _cache_key_partition(self):
-        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
-        return prefix, repo_name, suffix
-
-    def get_prefix(self):
-        """
-        get prefix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[0]
-
-    def get_suffix(self):
-        """
-        get suffix that might have been used in _get_cache_key to
-        generate self.cache_key. Only used for informational purposes
-        in repo_edit.html.
-        """
-        # prefix, repo_name, suffix
-        return self._cache_key_partition()[2]
-
-    @classmethod
-    def clear_cache(cls):
-        """
-        Delete all cache keys from database.
-        Should only be run when all instances are down and all entries thus stale.
-        """
-        cls.query().delete()
-        Session().commit()
-
-    @classmethod
-    def _get_cache_key(cls, key):
-        """
-        Wrapper for generating a unique cache key for this instance and "key".
-        The key must (and will) start with a repo_name, which is stored in .cache_args.
-        """
-        import kallithea
-        prefix = kallithea.CONFIG.get('instance_id', '')
-        return "%s%s" % (prefix, key)
-
-    @classmethod
-    def set_invalidate(cls, repo_name, delete=False):
-        """
-        Mark all caches of a repo as invalid in the database.
-        """
-        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
-        log.debug('for repo %s got %s invalidation objects',
-                  safe_str(repo_name), inv_objs)
-        try:
-            for inv_obj in inv_objs:
-                log.debug('marking %s key for invalidation based on repo_name=%s',
-                          inv_obj, safe_str(repo_name))
-                if delete:
-                    Session().delete(inv_obj)
-                else:
-                    inv_obj.cache_active = False
-                    Session().add(inv_obj)
-            Session().commit()
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-
-    @classmethod
-    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
-        """
-        Mark this cache key as active and currently cached.
-        Return True if the existing cache registration was still valid.
-        Return False to indicate that it had been invalidated and caches should be refreshed.
-        """
-
-        key = (repo_name + '_' + kind) if kind else repo_name
-        cache_key = cls._get_cache_key(key)
-
-        if valid_cache_keys and cache_key in valid_cache_keys:
-            return True
-
-        try:
-            inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
-            if not inv_obj:
-                inv_obj = CacheInvalidation(cache_key, repo_name)
-            was_valid = inv_obj.cache_active
-            inv_obj.cache_active = True
-            Session().add(inv_obj)
-            Session().commit()
-            return was_valid
-        except Exception:
-            log.error(traceback.format_exc())
-            Session().rollback()
-            return False
-
-    @classmethod
-    def get_valid_cache_keys(cls):
-        """
-        Return an opaque object describing which caches are still valid
-        and can be used without checking for invalidation.
-        """
-        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
-
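For reference, the invalidation protocol implemented above is driven from two sides: readers call test_and_set_valid() before trusting their cache, and writers call set_invalidate() after changing a repository. A minimal usage sketch; 'myrepo' and the cached_payload()/rebuild_payload() helpers are purely illustrative:

# Hypothetical caller of the CacheInvalidation protocol defined above.
valid_keys = CacheInvalidation.get_valid_cache_keys()   # optional bulk pre-check

if CacheInvalidation.test_and_set_valid('myrepo', None, valid_cache_keys=valid_keys):
    data = cached_payload()     # registration was still valid: reuse the cached data
else:
    data = rebuild_payload()    # it had been invalidated: rebuild, the key is active again

# A writer (e.g. after a push) marks every cache entry of the repo as stale:
CacheInvalidation.set_invalidate('myrepo')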
-
-class ChangesetComment(Base, BaseModel):
-    __tablename__ = 'changeset_comments'
-    __table_args__ = (
-        Index('cc_revision_idx', 'revision'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    revision = Column('revision', String(40), nullable=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-    line_no = Column('line_no', Unicode(10), nullable=True)
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    f_path = Column('f_path', Unicode(1000), nullable=True)
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
-    text = Column('text', UnicodeText(25000), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    @classmethod
-    def get_users(cls, revision=None, pull_request_id=None):
-        """
-        Returns the users associated with this ChangesetComment, i.e. those
-        who actually commented.
-
-        :param revision:
-        :param pull_request_id:
-        """
-        q = Session().query(User) \
-                .join(ChangesetComment.author)
-        if revision:
-            q = q.filter(cls.revision == revision)
-        elif pull_request_id:
-            q = q.filter(cls.pull_request_id == pull_request_id)
-        return q.all()
-
-
-class ChangesetStatus(Base, BaseModel):
-    __tablename__ = 'changeset_statuses'
-    __table_args__ = (
-        Index('cs_revision_idx', 'revision'),
-        Index('cs_version_idx', 'version'),
-        UniqueConstraint('repo_id', 'revision', 'version'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
-    STATUS_APPROVED = 'approved'
-    STATUS_REJECTED = 'rejected'
-    STATUS_UNDER_REVIEW = 'under_review'
-
-    STATUSES = [
-        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
-        (STATUS_APPROVED, _("Approved")),
-        (STATUS_REJECTED, _("Rejected")),
-        (STATUS_UNDER_REVIEW, _("Under Review")),
-    ]
-
-    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
-    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    revision = Column('revision', String(40), nullable=False)
-    status = Column('status', String(128), nullable=False, default=DEFAULT)
-    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
-    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
-    version = Column('version', Integer(), nullable=False, default=0)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
-
-    author = relationship('User', lazy='joined')
-    repo = relationship('Repository')
-    comment = relationship('ChangesetComment', lazy='joined')
-    pull_request = relationship('PullRequest', lazy='joined')
-
-    def __unicode__(self):
-        return u"<%s('%s:%s')>" % (
-            self.__class__.__name__,
-            self.status, self.author
-        )
-
-    @classmethod
-    def get_status_lbl(cls, value):
-        return dict(cls.STATUSES).get(value)
-
-    @property
-    def status_lbl(self):
-        return ChangesetStatus.get_status_lbl(self.status)
-
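get_status_lbl() above is a plain dictionary lookup over STATUSES, so it returns the translated label for a known status and None for anything else, for example:

# Sketch: resolving display labels for stored status values.
print ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_APPROVED)   # u'Approved' (translated)
print ChangesetStatus.get_status_lbl('no-such-status')                  # None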
-
-class PullRequest(Base, BaseModel):
-    __tablename__ = 'pull_requests'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    # values for .status
-    STATUS_NEW = u'new'
-    STATUS_OPEN = u'open'
-    STATUS_CLOSED = u'closed'
-
-    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
-    title = Column('title', Unicode(256), nullable=True)
-    description = Column('description', UnicodeText(10240), nullable=True)
-    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
-    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
-    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    org_ref = Column('org_ref', Unicode(256), nullable=False)
-    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
-    other_ref = Column('other_ref', Unicode(256), nullable=False)
-
-    @hybrid_property
-    def revisions(self):
-        return self._revisions.split(':')
-
-    @revisions.setter
-    def revisions(self, val):
-        self._revisions = ':'.join(val)
-
-    @property
-    def org_ref_parts(self):
-        return self.org_ref.split(':')
-
-    @property
-    def other_ref_parts(self):
-        return self.other_ref.split(':')
-
-    author = relationship('User', lazy='joined')
-    reviewers = relationship('PullRequestReviewers',
-                             cascade="all, delete, delete-orphan")
-    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
-    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
-    statuses = relationship('ChangesetStatus')
-    comments = relationship('ChangesetComment',
-                             cascade="all, delete, delete-orphan")
-
-    def is_closed(self):
-        return self.status == self.STATUS_CLOSED
-
-    @property
-    def last_review_status(self):
-        return self.statuses[-1].status if self.statuses else ''
-
-    def __json__(self):
-        return dict(
-            revisions=self.revisions
-        )
-
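The revisions hybrid property above round-trips a list of changeset hashes through the single colon-joined _revisions column, and org_ref/other_ref follow the same colon-separated convention unpacked by the *_ref_parts properties. A small sketch of that round trip; the hash and ref values are made up:

# Sketch: list <-> string round trip performed by PullRequest.revisions.
pr = PullRequest()
pr.revisions = ['deadbeefcafe', 'feedfacebeef']            # setter joins with ':'
assert pr._revisions == 'deadbeefcafe:feedfacebeef'
assert pr.revisions == ['deadbeefcafe', 'feedfacebeef']    # getter splits again

pr.org_ref = u'branch:default:deadbeefcafe'
print pr.org_ref_parts                                     # ['branch', 'default', 'deadbeefcafe']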
-
-class PullRequestReviewers(Base, BaseModel):
-    __tablename__ = 'pull_request_reviewers'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    def __init__(self, user=None, pull_request=None):
-        self.user = user
-        self.pull_request = pull_request
-
-    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
-    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
-
-    user = relationship('User')
-    pull_request = relationship('PullRequest')
-
-
-class Notification(Base, BaseModel):
-    __tablename__ = 'notifications'
-    __table_args__ = (
-        Index('notification_type_idx', 'type'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-
-    TYPE_CHANGESET_COMMENT = u'cs_comment'
-    TYPE_MESSAGE = u'message'
-    TYPE_MENTION = u'mention'
-    TYPE_REGISTRATION = u'registration'
-    TYPE_PULL_REQUEST = u'pull_request'
-    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
-
-    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
-    subject = Column('subject', Unicode(512), nullable=True)
-    body = Column('body', UnicodeText(50000), nullable=True)
-    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    type_ = Column('type', Unicode(256))
-
-    created_by_user = relationship('User')
-    notifications_to_users = relationship('UserNotification', lazy='joined',
-                                          cascade="all, delete, delete-orphan")
-
-    @property
-    def recipients(self):
-        return [x.user for x in UserNotification.query() \
-                .filter(UserNotification.notification == self) \
-                .order_by(UserNotification.user_id.asc()).all()]
-
-    @classmethod
-    def create(cls, created_by, subject, body, recipients, type_=None):
-        if type_ is None:
-            type_ = Notification.TYPE_MESSAGE
-
-        notification = cls()
-        notification.created_by_user = created_by
-        notification.subject = subject
-        notification.body = body
-        notification.type_ = type_
-        notification.created_on = datetime.datetime.now()
-
-        for u in recipients:
-            assoc = UserNotification()
-            assoc.notification = notification
-            u.notifications.append(assoc)
-        Session().add(notification)
-        return notification
-
-    @property
-    def description(self):
-        from kallithea.model.notification import NotificationModel
-        return NotificationModel().make_description(self)
-
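Notification.create() above does the association bookkeeping itself: it appends a UserNotification to each recipient's notifications collection and only add()s the Notification, leaving the commit to the caller. A hedged usage sketch in which admin and alice stand for User objects loaded elsewhere:

# Hypothetical sender; 'admin' and 'alice' are existing User instances.
notif = Notification.create(created_by=admin,
                            subject=u'Repository created',
                            body=u'The repository "demo" is now available.',
                            recipients=[admin, alice],
                            type_=Notification.TYPE_MESSAGE)
Session().commit()    # create() only add()s the notification; committing is up to the caller

for assoc in notif.notifications_to_users:
    print assoc.user.username, assoc.read    # each recipient starts with read == False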
-
-class UserNotification(Base, BaseModel):
-    __tablename__ = 'user_to_notification'
-    __table_args__ = (
-        UniqueConstraint('user_id', 'notification_id'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
-    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
-    read = Column('read', Boolean, default=False)
-    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
-
-    user = relationship('User', lazy="joined")
-    notification = relationship('Notification', lazy="joined",
-                                order_by=lambda: Notification.created_on.desc(),)
-
-    def mark_as_read(self):
-        self.read = True
-        Session().add(self)
-
-
-class Gist(Base, BaseModel):
-    __tablename__ = 'gists'
-    __table_args__ = (
-        Index('g_gist_access_id_idx', 'gist_access_id'),
-        Index('g_created_on_idx', 'created_on'),
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
-    )
-    GIST_PUBLIC = u'public'
-    GIST_PRIVATE = u'private'
-    DEFAULT_FILENAME = u'gistfile1.txt'
-
-    gist_id = Column('gist_id', Integer(), primary_key=True)
-    gist_access_id = Column('gist_access_id', Unicode(250))
-    gist_description = Column('gist_description', UnicodeText(1024))
-    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
-    gist_expires = Column('gist_expires', Float(53), nullable=False)
-    gist_type = Column('gist_type', Unicode(128), nullable=False)
-    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
-
-    owner = relationship('User')
-
-    def __repr__(self):
-        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
-
-    @classmethod
-    def get_or_404(cls, id_):
-        res = cls.query().filter(cls.gist_access_id == id_).scalar()
-        if not res:
-            raise HTTPNotFound
-        return res
-
-    @classmethod
-    def get_by_access_id(cls, gist_access_id):
-        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
-
-    def gist_url(self):
-        import kallithea
-        alias_url = kallithea.CONFIG.get('gist_alias_url')
-        if alias_url:
-            return alias_url.replace('{gistid}', self.gist_access_id)
-
-        import kallithea.lib.helpers as h
-        return h.canonical_url('gist', gist_id=self.gist_access_id)
-
-    @classmethod
-    def base_path(cls):
-        """
-        Returns the base path where all gists are stored.
-        """
-        from kallithea.model.gist import GIST_STORE_LOC
-        q = Session().query(Ui) \
-            .filter(Ui.ui_key == URL_SEP)
-        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
-        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
-
-    def get_api_data(self):
-        """
-        Common function for generating gist related data for API
-        """
-        gist = self
-        data = dict(
-            gist_id=gist.gist_id,
-            type=gist.gist_type,
-            access_id=gist.gist_access_id,
-            description=gist.gist_description,
-            url=gist.gist_url(),
-            expires=gist.gist_expires,
-            created_on=gist.created_on,
-        )
-        return data
-
-    def __json__(self):
-        data = dict(
-        )
-        data.update(self.get_api_data())
-        return data
-    ## SCM functions
-
-    @property
-    def scm_instance(self):
-        from kallithea.lib.vcs import get_repo
-        base_path = self.base_path()
-        return get_repo(os.path.join(*map(safe_str,
-                                          [base_path, self.gist_access_id])))
-
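For context, gists are looked up by their access id rather than the integer primary key, and gist_url() prefers the optional gist_alias_url setting (substituting the '{gistid}' placeholder) over the canonical route. A short sketch; the access id u'7' is made up:

# Sketch: fetching a gist and resolving its public URL.
gist = Gist.get_or_404(u'7')        # raises HTTPNotFound for unknown access ids
print gist                          # e.g. <Gist:[public]7>
print gist.gist_url()               # alias URL if gist_alias_url is configured, else the canonical URL
data = gist.get_api_data()          # gist_id, type, access_id, description, url, expires, created_on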
-
-class DbMigrateVersion(Base, BaseModel):
-    __tablename__ = 'db_migrate_version'
-    __table_args__ = (
-        {'extend_existing': True, 'mysql_engine': 'InnoDB',
-         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
-    )
-    repository_id = Column('repository_id', String(250), primary_key=True)
-    repository_path = Column('repository_path', Text)
-    version = Column('version', Integer)
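DbMigrateVersion mirrors the bookkeeping table that the removed SQLAlchemy Migrate machinery kept: the migrate repository id, its path, and the schema version the database is currently at. A minimal way to inspect it through the model, kept here only as a sketch since the row contents depend on the installation:

# Sketch: reading the schema version previously tracked by SQLAlchemy Migrate.
ver = DbMigrateVersion.query().first()
if ver is not None:
    print ver.repository_id, ver.repository_path, ver.version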
--- a/kallithea/lib/dbmigrate/versions/001_initial_release.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,207 +0,0 @@
-#==============================================================================
-# DB INITIAL MODEL
-#==============================================================================
-import logging
-import datetime
-
-from sqlalchemy import *
-from sqlalchemy.exc import DatabaseError
-from sqlalchemy.orm import relation
-from sqlalchemy.orm.session import Session
-from kallithea.model.meta import Base
-
-from kallithea.lib.dbmigrate.migrate import *
-
-from kallithea import DB_PREFIX
-
-log = logging.getLogger(__name__)
-
-class Setting(Base):
-    __tablename__ = DB_PREFIX + 'settings'
-    __table_args__ = (UniqueConstraint('app_settings_name'), {'useexisting':True})
-    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    app_settings_name = Column("app_settings_name", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    app_settings_value = Column("app_settings_value", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __init__(self, k, v):
-        self.app_settings_name = k
-        self.app_settings_value = v
-
-    def __repr__(self):
-        return "<Setting('%s:%s')>" % (self.app_settings_name,
-                                                self.app_settings_value)
-
-class Ui(Base):
-    __tablename__ = DB_PREFIX + 'ui'
-    __table_args__ = {'useexisting':True}
-    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    ui_section = Column("ui_section", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_key = Column("ui_key", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_value = Column("ui_value", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
-
-
-class User(Base):
-    __tablename__ = 'users'
-    __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'useexisting':True})
-    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    username = Column("username", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    password = Column("password", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    active = Column("active", Boolean(), nullable=True, unique=None, default=None)
-    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
-    name = Column("name", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    lastname = Column("lastname", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    email = Column("email", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
-    is_ldap = Column("is_ldap", Boolean(), nullable=False, unique=None, default=False)
-
-    user_log = relation('UserLog', cascade='all')
-    user_perms = relation('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
-
-    repositories = relation('Repository')
-    user_followers = relation('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
-
-    @property
-    def full_contact(self):
-        return '%s %s <%s>' % (self.name, self.lastname, self.email)
-
-    def __repr__(self):
-        return "<User('id:%s:%s')>" % (self.user_id, self.username)
-
-    def update_lastlogin(self):
-        """Update user lastlogin"""
-
-        try:
-            session = Session.object_session(self)
-            self.last_login = datetime.datetime.now()
-            session.add(self)
-            session.commit()
-            log.debug('updated user %s lastlogin', self.username)
-        except DatabaseError:
-            session.rollback()
-
-
-class UserLog(Base):
-    __tablename__ = 'user_logs'
-    __table_args__ = {'useexisting':True}
-    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(length=None, convert_unicode=False, assert_unicode=None), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
-    repository_name = Column("repository_name", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action = Column("action", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
-
-    user = relation('User')
-    repository = relation('Repository')
-
-class Repository(Base):
-    __tablename__ = 'repositories'
-    __table_args__ = (UniqueConstraint('repo_name'), {'useexisting':True},)
-    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repo_name = Column("repo_name", String(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
-    repo_type = Column("repo_type", String(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
-    user_id = Column("user_id", Integer(), ForeignKey(u'users.user_id'), nullable=False, unique=False, default=None)
-    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
-    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
-    description = Column("description", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    fork_id = Column("fork_id", Integer(), ForeignKey(u'repositories.repo_id'), nullable=True, unique=False, default=None)
-
-    user = relation('User')
-    fork = relation('Repository', remote_side=repo_id)
-    repo_to_perm = relation('UserRepoToPerm', cascade='all')
-    stats = relation('Statistics', cascade='all', uselist=False)
-
-    repo_followers = relation('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')
-
-
-    def __repr__(self):
-        return "<Repository('%s:%s')>" % (self.repo_id, self.repo_name)
-
-class Permission(Base):
-    __tablename__ = 'permissions'
-    __table_args__ = {'useexisting':True}
-    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    permission_name = Column("permission_name", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    permission_longname = Column("permission_longname", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
-    def __repr__(self):
-        return "<Permission('%s:%s')>" % (self.permission_id, self.permission_name)
-
-class UserRepoToPerm(Base):
-    __tablename__ = 'repo_to_perm'
-    __table_args__ = (UniqueConstraint('user_id', 'repository_id'), {'useexisting':True})
-    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", Integer(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
-
-    user = relation('User')
-    permission = relation('Permission')
-    repository = relation('Repository')
-
-class UserToPerm(Base):
-    __tablename__ = 'user_to_perm'
-    __table_args__ = (UniqueConstraint('user_id', 'permission_id'), {'useexisting':True})
-    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
-    permission_id = Column("permission_id", Integer(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
-
-    user = relation('User')
-    permission = relation('Permission')
-
-class Statistics(Base):
-    __tablename__ = 'statistics'
-    __table_args__ = (UniqueConstraint('repository_id'), {'useexisting':True})
-    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    repository_id = Column("repository_id", Integer(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=True, default=None)
-    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
-    commit_activity = Column("commit_activity", LargeBinary(), nullable=False)#JSON data
-    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
-    languages = Column("languages", LargeBinary(), nullable=False)#JSON data
-
-    repository = relation('Repository', single_parent=True)
-
-class UserFollowing(Base):
-    __tablename__ = 'user_followings'
-    __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'),
-                      UniqueConstraint('user_id', 'follows_user_id')
-                      , {'useexisting':True})
-
-    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    user_id = Column("user_id", Integer(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
-    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey(u'repositories.repo_id'), nullable=True, unique=None, default=None)
-    follows_user_id = Column("follows_user_id", Integer(), ForeignKey(u'users.user_id'), nullable=True, unique=None, default=None)
-
-    user = relation('User', primaryjoin='User.user_id==UserFollowing.user_id')
-
-    follows_user = relation('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
-    follows_repository = relation('Repository')
-
-
-class CacheInvalidation(Base):
-    __tablename__ = 'cache_invalidation'
-    __table_args__ = (UniqueConstraint('cache_key'), {'useexisting':True})
-    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
-    cache_key = Column("cache_key", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_args = Column("cache_args", String(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
-
-
-    def __init__(self, cache_key, cache_args=''):
-        self.cache_key = cache_key
-        self.cache_args = cache_args
-        self.cache_active = False
-
-    def __repr__(self):
-        return "<CacheInvalidation('%s:%s')>" % (self.cache_id, self.cache_key)
-
-
-def upgrade(migrate_engine):
-    # Upgrade operations go here. Don't create your own engine; bind migrate_engine
-    # to your metadata
-    Base.metadata.create_all(bind=migrate_engine, checkfirst=False)
-
-def downgrade(migrate_engine):
-    # Operations to reverse the above upgrade go here.
-    Base.metadata.drop_all(bind=migrate_engine, checkfirst=False)
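Every script in this directory exposed the same upgrade(migrate_engine)/downgrade(migrate_engine) contract; the removed Migrate versioning API imported the script for each pending version and called upgrade() with an engine bound to the target database. Roughly, and only as an illustration rather than the actual Migrate internals:

# Simplified sketch of a runner driving these version scripts in order.
import importlib
from sqlalchemy import create_engine

def run_upgrades(db_url, script_names):
    # script_names e.g. ['001_initial_release', '002_version_1_1_0', ...]
    engine = create_engine(db_url)
    for name in script_names:
        module = importlib.import_module('kallithea.lib.dbmigrate.versions.' + name)
        module.upgrade(engine)   # each script binds the engine to its own metadata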
--- a/kallithea/lib/dbmigrate/versions/002_version_1_1_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,80 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """ Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-
-    #==========================================================================
-    # Upgrade of `users` table
-    #==========================================================================
-    tblname = 'users'
-    tbl = Table(tblname, MetaData(bind=migrate_engine), autoload=True,
-                    autoload_with=migrate_engine)
-
-    #ADD is_ldap column
-    is_ldap = Column("is_ldap", Boolean(), nullable=True,
-                     unique=None, default=False)
-    is_ldap.create(tbl, populate_default=True)
-    is_ldap.alter(nullable=False)
-
-    #==========================================================================
-    # Upgrade of `user_logs` table
-    #==========================================================================
-
-    tblname = 'users'
-    tbl = Table(tblname, MetaData(bind=migrate_engine), autoload=True,
-                    autoload_with=migrate_engine)
-
-    #ADD revision column
-    revision = Column('revision', TEXT(length=None, convert_unicode=False,
-                                       assert_unicode=None),
-                      nullable=True, unique=None, default=None)
-    revision.create(tbl)
-
-    #==========================================================================
-    # Upgrade of `repositories` table
-    #==========================================================================
-    tblname = 'repositories'
-    tbl = Table(tblname, MetaData(bind=migrate_engine), autoload=True,
-                    autoload_with=migrate_engine)
-
-    #ADD repo_type column#
-    repo_type = Column("repo_type", String(length=None, convert_unicode=False,
-                                           assert_unicode=None),
-                       nullable=True, unique=False, default='hg')
-
-    repo_type.create(tbl, populate_default=True)
-    #repo_type.alter(nullable=False)
-
-    #ADD statistics column#
-    enable_statistics = Column("statistics", Boolean(), nullable=True,
-                               unique=None, default=True)
-    enable_statistics.create(tbl)
-
-    #==========================================================================
-    # Add table `user_followings`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_1_0 import UserFollowing
-    UserFollowing().__table__.create()
-
-    #==========================================================================
-    # Add table `cache_invalidation`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_1_0 import CacheInvalidation
-    CacheInvalidation().__table__.create()
-
-    return
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
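Migration 002 introduces the two idioms the later scripts keep reusing: reflect the existing table with autoload, then evolve it in place through the Migrate changeset extensions, whose import adds create()/alter()/drop() methods to SQLAlchemy columns. A condensed sketch of that pattern, under the same imports the script above relies on:

# Sketch of the reflect-then-alter idiom used throughout these version scripts.
from sqlalchemy import MetaData, Table, Column, Boolean
from kallithea.lib.dbmigrate.migrate.changeset import *   # patches create()/alter() onto Column

def add_boolean_flag(migrate_engine, table_name, column_name):
    tbl = Table(table_name, MetaData(bind=migrate_engine), autoload=True,
                autoload_with=migrate_engine)
    col = Column(column_name, Boolean(), nullable=True, default=False)
    col.create(tbl, populate_default=True)   # add the column and backfill the default
    col.alter(nullable=False)                # then tighten it to NOT NULL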
--- a/kallithea/lib/dbmigrate/versions/003_version_1_2_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,114 +0,0 @@
-import logging
-import datetime
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """ Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-
-    #==========================================================================
-    # Add table `groups`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_2_0 import Group
-    Group().__table__.create()
-
-    #==========================================================================
-    # Add table `group_to_perm`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_2_0 import UserRepoGroupToPerm
-    UserRepoGroupToPerm().__table__.create()
-
-    #==========================================================================
-    # Add table `users_groups`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_2_0 import UserGroup
-    UserGroup().__table__.create()
-
-    #==========================================================================
-    # Add table `users_groups_members`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_2_0 import UserGroupMember
-    UserGroupMember().__table__.create()
-
-    #==========================================================================
-    # Add table `users_group_repo_to_perm`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_2_0 import UserGroupRepoToPerm
-    UserGroupRepoToPerm().__table__.create()
-
-    #==========================================================================
-    # Add table `users_group_to_perm`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_2_0 import UserGroupToPerm
-    UserGroupToPerm().__table__.create()
-
-    #==========================================================================
-    # Upgrade of `users` table
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_2_0 import User
-
-    #add column
-    ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    ldap_dn.create(User().__table__)
-
-    api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    api_key.create(User().__table__)
-
-    #remove old column
-    is_ldap = Column("is_ldap", Boolean(), nullable=False, unique=None, default=False)
-    is_ldap.drop(User().__table__)
-
-    #==========================================================================
-    # Upgrade of `repositories` table
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_2_0 import Repository
-
-    #ADD clone_uri column#
-
-    clone_uri = Column("clone_uri", String(length=255, convert_unicode=False,
-                                           assert_unicode=None),
-                        nullable=True, unique=False, default=None)
-
-    clone_uri.create(Repository().__table__)
-
-    #ADD downloads column#
-    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
-    enable_downloads.create(Repository().__table__)
-
-    #ADD column created_on
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True,
-                        unique=None, default=datetime.datetime.now)
-    created_on.create(Repository().__table__)
-
-    #ADD group_id column#
-    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'),
-                  nullable=True, unique=False, default=None)
-
-    group_id.create(Repository().__table__)
-
-    #==========================================================================
-    # Upgrade of `user_followings` table
-    #==========================================================================
-
-    from kallithea.lib.dbmigrate.schema.db_1_2_0 import UserFollowing
-
-    follows_from = Column('follows_from', DateTime(timezone=False),
-                          nullable=True, unique=None,
-                          default=datetime.datetime.now)
-    follows_from.create(UserFollowing().__table__)
-
-    return
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
--- a/kallithea/lib/dbmigrate/versions/004_version_1_3_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,69 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """ Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    #==========================================================================
-    # Add table `users_group_repo_group_to_perm`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import UserGroupRepoGroupToPerm
-    UserGroupRepoGroupToPerm().__table__.create()
-
-    #==========================================================================
-    # Add table `changeset_comments`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import ChangesetComment
-    ChangesetComment().__table__.create()
-
-    #==========================================================================
-    # Add table `notifications`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import Notification
-    Notification().__table__.create()
-
-    #==========================================================================
-    # Add table `user_to_notification`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import UserNotification
-    UserNotification().__table__.create()
-
-    #==========================================================================
-    # Add unique to table `users_group_to_perm`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import UserGroupToPerm
-    tbl = UserGroupToPerm().__table__
-    cons = UniqueConstraint('users_group_id', 'permission_id', table=tbl)
-    cons.create()
-
-    #==========================================================================
-    # Fix unique constraint on table `user_logs`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import UserLog
-    tbl = UserLog().__table__
-    col = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'),
-                 nullable=False, unique=None, default=None)
-    col.alter(nullable=True, table=tbl)
-
-    #==========================================================================
-    # Rename table `group_to_perm` to `user_repo_group_to_perm`
-    #==========================================================================
-    tbl = Table('group_to_perm', MetaData(bind=migrate_engine), autoload=True,
-                    autoload_with=migrate_engine)
-    tbl.rename('user_repo_group_to_perm')
-
-    return
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
--- a/kallithea/lib/dbmigrate/versions/005_version_1_3_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,74 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """ Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-
-    #==========================================================================
-    # Change unique constraints of table `repo_to_perm`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import UserRepoToPerm
-    tbl = UserRepoToPerm().__table__
-    new_cons = UniqueConstraint('user_id', 'repository_id', 'permission_id', table=tbl)
-    new_cons.create()
-    old_cons = None
-    if migrate_engine.name in ['mysql']:
-        old_cons = UniqueConstraint('user_id', 'repository_id', table=tbl, name="user_id")
-    elif migrate_engine.name in ['postgresql']:
-        old_cons = UniqueConstraint('user_id', 'repository_id', table=tbl)
-    else:
-        # sqlite doesn't support dropping constraints...
-        print """Please manually drop UniqueConstraint('user_id', 'repository_id')"""
-
-    if old_cons:
-        try:
-            old_cons.drop()
-        except Exception as e:
-            # we don't really care if this fails; better to complete the migration
-            # than leave the schema in an intermediate state
-            print 'Failed to remove unique constraint on (user_id, repository_id): %s' % e
-
-
-    #==========================================================================
-    # fix uniques of table `user_repo_group_to_perm`
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import UserRepoGroupToPerm
-    tbl = UserRepoGroupToPerm().__table__
-    new_cons = UniqueConstraint('group_id', 'permission_id', 'user_id', table=tbl)
-    new_cons.create()
-    old_cons = None
-
-    # fix uniqueConstraints
-    if migrate_engine.name in ['mysql']:
-        #mysql is giving troubles here...
-        old_cons = UniqueConstraint('group_id', 'permission_id', table=tbl, name="group_id")
-    elif migrate_engine.name in ['postgresql']:
-        old_cons = UniqueConstraint('group_id', 'permission_id', table=tbl, name='group_to_perm_group_id_permission_id_key')
-    else:
-        # sqlite doesn't support dropping constraints...
-        print """Please manually drop UniqueConstraint('group_id', 'permission_id')"""
-
-    if old_cons:
-        try:
-            old_cons.drop()
-        except Exception as e:
-            # we don't really care if this fails; better to complete the migration
-            # than leave the schema in an intermediate state
-            print 'Failed to remove unique constraint on (group_id, permission_id): %s' % e
-
-    return
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
--- a/kallithea/lib/dbmigrate/versions/006_version_1_4_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,171 +0,0 @@
-import logging
-import datetime
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-
-    #==========================================================================
-    # USEREMAILMAP
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_4_0 import UserEmailMap
-    tbl = UserEmailMap.__table__
-    tbl.create()
-    #==========================================================================
-    # PULL REQUEST
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_4_0 import PullRequest
-    tbl = PullRequest.__table__
-    tbl.create()
-
-    #==========================================================================
-    # PULL REQUEST REVIEWERS
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_4_0 import PullRequestReviewers
-    tbl = PullRequestReviewers.__table__
-    tbl.create()
-
-    #==========================================================================
-    # CHANGESET STATUS
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_4_0 import ChangesetStatus
-    tbl = ChangesetStatus.__table__
-    tbl.create()
-
-    _reset_base(migrate_engine)
-
-    #==========================================================================
-    # USERS TABLE
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import User
-    tbl = User.__table__
-
-    # change column name -> firstname
-    col = User.__table__.columns.name
-    col.alter(index=Index('u_username_idx', 'username'))
-    col.alter(index=Index('u_email_idx', 'email'))
-    col.alter(name="firstname", table=tbl)
-
-    # add inherit_default_permission column
-    inherit_default_permissions = Column("inherit_default_permissions",
-                                         Boolean(), nullable=True, unique=None,
-                                         default=True)
-    inherit_default_permissions.create(table=tbl)
-    inherit_default_permissions.alter(nullable=False, default=True, table=tbl)
-
-    #==========================================================================
-    # USERS GROUP TABLE
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import UserGroup
-    tbl = UserGroup.__table__
-    # add inherit_default_permission column
-    gr_inherit_default_permissions = Column(
-                                    "users_group_inherit_default_permissions",
-                                    Boolean(), nullable=True, unique=None,
-                                    default=True)
-    gr_inherit_default_permissions.create(table=tbl)
-    gr_inherit_default_permissions.alter(nullable=False, default=True, table=tbl)
-
-    #==========================================================================
-    # REPOSITORIES
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import Repository
-    tbl = Repository.__table__
-
-    # add enable locking column
-    enable_locking = Column("enable_locking", Boolean(), nullable=True,
-                            unique=None, default=False)
-    enable_locking.create(table=tbl)
-    enable_locking.alter(nullable=False, default=False, table=tbl)
-
-    # add locked column
-    _locked = Column("locked", String(255), nullable=True, unique=False,
-                     default=None)
-    _locked.create(table=tbl)
-
-    #add landing revision column
-    landing_rev = Column("landing_revision", String(255), nullable=True,
-                         unique=False, default='tip')
-    landing_rev.create(table=tbl)
-    landing_rev.alter(nullable=False, default='tip', table=tbl)
-
-    #==========================================================================
-    # GROUPS
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import RepoGroup
-    tbl = RepoGroup.__table__
-
-    # add enable locking column
-    enable_locking = Column("enable_locking", Boolean(), nullable=True,
-                            unique=None, default=False)
-    enable_locking.create(table=tbl)
-    enable_locking.alter(nullable=False, default=False)
-
-    #==========================================================================
-    # CACHE INVALIDATION
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import CacheInvalidation
-    tbl = CacheInvalidation.__table__
-
-    # add INDEX for cache keys
-    col = CacheInvalidation.__table__.columns.cache_key
-    col.alter(index=Index('key_idx', 'cache_key'))
-
-    #==========================================================================
-    # NOTIFICATION
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import Notification
-    tbl = Notification.__table__
-
-    # add index for notification type
-    col = Notification.__table__.columns.type
-    col.alter(index=Index('notification_type_idx', 'type'),)
-
-    #==========================================================================
-    # CHANGESET_COMMENTS
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_3_0 import ChangesetComment
-
-    tbl = ChangesetComment.__table__
-    col = ChangesetComment.__table__.columns.revision
-
-    # add index for revisions
-    col.alter(index=Index('cc_revision_idx', 'revision'),)
-
-    # add hl_lines column
-    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
-    hl_lines.create(table=tbl)
-
-    # add created_on column
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True,
-                        default=datetime.datetime.now)
-    created_on.create(table=tbl)
-    created_on.alter(nullable=False, default=datetime.datetime.now)
-
-    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False,
-                         default=datetime.datetime.now)
-    modified_at.alter(type=DateTime(timezone=False), table=tbl)
-
-    # add FK to pull_request
-    pull_request_id = Column("pull_request_id", Integer(),
-                             ForeignKey('pull_requests.pull_request_id'),
-                             nullable=True)
-    pull_request_id.create(table=tbl)
-    _reset_base(migrate_engine)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
--- a/kallithea/lib/dbmigrate/versions/007_version_1_4_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,44 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-
-    #==========================================================================
-    # CHANGESET_COMMENTS
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_4_0 import ChangesetComment
-    tbl_name = ChangesetComment.__tablename__
-    tbl = Table(tbl_name,
-                MetaData(bind=migrate_engine), autoload=True,
-                autoload_with=migrate_engine)
-    col = tbl.columns.revision
-
-    # make the revision field nullable
-    col.alter(nullable=True)
-
-    #==========================================================================
-    # REPOSITORY
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_4_0 import Repository
-    tbl = Repository.__table__
-    updated_on = Column('updated_on', DateTime(timezone=False),
-                        nullable=True, unique=None)
-    # create updated_on column for future lightweight main page
-    updated_on.create(table=tbl)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
--- a/kallithea/lib/dbmigrate/versions/008_version_1_5_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,123 +0,0 @@
-import logging
-
-from sqlalchemy import *
-from sqlalchemy.orm import joinedload
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_1_5_0
-    #==========================================================================
-    # USER LOGS
-    #==========================================================================
-
-    tbl = db_1_5_0.UserLog.__table__
-    username = Column("username", String(255, convert_unicode=False,
-                                         assert_unicode=None), nullable=True,
-                      unique=None, default=None)
-    # create username column
-    username.create(table=tbl)
-
-    _Session = meta.Session()
-    ## after adding that column fix all usernames
-    users_log = _Session.query(db_1_5_0.UserLog) \
-            .options(joinedload(db_1_5_0.UserLog.user)) \
-            .options(joinedload(db_1_5_0.UserLog.repository)).all()
-
-    for entry in users_log:
-        entry.username = entry.user.username
-        _Session.add(entry)
-    _Session.commit()
-
-    #alter username to not null
-    tbl_name = db_1_5_0.UserLog.__tablename__
-    tbl = Table(tbl_name,
-                MetaData(bind=migrate_engine), autoload=True,
-                autoload_with=migrate_engine)
-    col = tbl.columns.username
-
-    # make the username column NOT NULL
-    col.alter(nullable=False)
-
-    # issue fixups
-    fixups(db_1_5_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    # ** create default permissions ** #
-    #=====================================
-    for p in models.Permission.PERMS:
-        if not models.Permission.get_by_key(p[0]):
-            new_perm = models.Permission()
-            new_perm.permission_name = p[0]
-            new_perm.permission_longname = p[0]  #translation err with p[1]
-            print 'Creating new permission %s' % p[0]
-            _SESSION().add(new_perm)
-
-    _SESSION().commit()
-
-    # ** populate default permissions ** #
-    #=====================================
-
-    user = models.User.query().filter(models.User.username == 'default').scalar()
-
-    def _make_perm(perm):
-        new_perm = models.UserToPerm()
-        new_perm.user = user
-        new_perm.permission = models.Permission.get_by_key(perm)
-        return new_perm
-
-    def _get_group(perm_name):
-        return '.'.join(perm_name.split('.')[:1])
-
-    perms = models.UserToPerm.query().filter(models.UserToPerm.user == user).all()
-    defined_perms_groups = map(_get_group,
-                              (x.permission.permission_name for x in perms))
-    log.debug('GOT ALREADY DEFINED:%s', perms)
-    DEFAULT_PERMS = models.Permission.DEFAULT_USER_PERMISSIONS
-
-    # for every default permission that needs to be created, check whether
-    # its group is already defined; if not, create the default permission
-    for perm_name in DEFAULT_PERMS:
-        gr = _get_group(perm_name)
-        if gr not in defined_perms_groups:
-            log.debug('GR:%s not found, creating permission %s',
-                      gr, perm_name)
-            new_perm = _make_perm(perm_name)
-            _SESSION().add(new_perm)
-    _SESSION().commit()
-
-    # ** create default options ** #
-    #===============================
-    skip_existing = True
-    for k, v in [
-        ('default_repo_enable_locking',  False),
-        ('default_repo_enable_downloads', False),
-        ('default_repo_enable_statistics', False),
-        ('default_repo_private', False),
-        ('default_repo_type', 'hg')]:
-
-        if skip_existing and models.Setting.get_by_name(k) is not None:
-            log.debug('Skipping option %s', k)
-            continue
-        setting = models.Setting(k, v)
-        _SESSION().add(setting)
-
-    _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/009_version_1_5_1.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,21 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    pass
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
--- a/kallithea/lib/dbmigrate/versions/010_version_1_5_2.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,50 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_1_5_2
-    #==========================================================================
-    # USER IP MAP
-    #==========================================================================
-    tbl = db_1_5_2.UserIpMap.__table__
-    tbl.create()
-
-    #==========================================================================
-    # REPOSITORIES
-    #==========================================================================
-    tbl = db_1_5_2.Repository.__table__
-    changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)
-    # create changeset_cache column
-    changeset_cache.create(table=tbl)
-
-    # issue fixups
-    fixups(db_1_5_2, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('Upgrading repositories Caches')
-    repositories = models.Repository.getAll()
-    for repo in repositories:
-        print repo
-        repo.update_changeset_cache()
-        _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/011_version_1_6_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,43 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_1_6_0
-
-    #==========================================================================
-    # REPOSITORY FIELDS
-    #==========================================================================
-    tbl = db_1_6_0.RepositoryField.__table__
-    tbl.create()
-
-    # issue fixups
-    fixups(db_1_6_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('Upgrading repositories Caches')
-    repositories = models.Repository.getAll()
-    for repo in repositories:
-        print repo
-        repo.update_changeset_cache()
-        _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/012_version_1_7_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,144 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_1_7_0
-
-    #==========================================================================
-    # UserUserGroupToPerm
-    #==========================================================================
-    tbl = db_1_7_0.UserUserGroupToPerm.__table__
-    tbl.create()
-
-    #==========================================================================
-    # UserGroupUserGroupToPerm
-    #==========================================================================
-    tbl = db_1_7_0.UserGroupUserGroupToPerm.__table__
-    tbl.create()
-
-    #==========================================================================
-    # Gist
-    #==========================================================================
-    tbl = db_1_7_0.Gist.__table__
-    tbl.create()
-
-    #==========================================================================
-    # UserGroup
-    #==========================================================================
-    tbl = db_1_7_0.UserGroup.__table__
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'),
-                     nullable=True, unique=False, default=None)
-    # create user_id column
-    user_id.create(table=tbl)
-
-    #==========================================================================
-    # RepoGroup
-    #==========================================================================
-    tbl = db_1_7_0.RepoGroup.__table__
-    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'),
-                     nullable=True, unique=False, default=None)
-    # create user_id column
-    user_id.create(table=tbl)
-
-    # issue fixups
-    fixups(db_1_7_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    # ** create default permissions ** #
-    #=====================================
-    for p in models.Permission.PERMS:
-        if not models.Permission.get_by_key(p[0]):
-            new_perm = models.Permission()
-            new_perm.permission_name = p[0]
-            new_perm.permission_longname = p[0]  #translation err with p[1]
-            _SESSION().add(new_perm)
-
-    _SESSION().commit()
-
-    # ** populate default permissions ** #
-    #=====================================
-
-    user = models.User.query().filter(models.User.username == 'default').scalar()
-
-    def _make_perm(perm):
-        new_perm = models.UserToPerm()
-        new_perm.user = user
-        new_perm.permission = models.Permission.get_by_key(perm)
-        return new_perm
-
-    def _get_group(perm_name):
-        return '.'.join(perm_name.split('.')[:1])
-
-    perms = models.UserToPerm.query().filter(models.UserToPerm.user == user).all()
-    defined_perms_groups = map(_get_group,
-                              (x.permission.permission_name for x in perms))
-    log.debug('GOT ALREADY DEFINED:%s', perms)
-    DEFAULT_PERMS = models.Permission.DEFAULT_USER_PERMISSIONS
-
-    # for every default permission that needs to be created, we check if
-    # its group is already defined; if it is not, we create the default permission
-    for perm_name in DEFAULT_PERMS:
-        gr = _get_group(perm_name)
-        if gr not in defined_perms_groups:
-            log.debug('GR:%s not found, creating permission %s',
-                      gr, perm_name)
-            new_perm = _make_perm(perm_name)
-            _SESSION().add(new_perm)
-    _SESSION().commit()
-
-    #fix all usergroups
-
-    def _create_default_perms(user_group):
-        # create default permission
-        default_perm = 'usergroup.read'
-        def_user = models.User.get_default_user()
-        for p in def_user.user_perms:
-            if p.permission.permission_name.startswith('usergroup.'):
-                default_perm = p.permission.permission_name
-                break
-
-        user_group_to_perm = models.UserUserGroupToPerm()
-        user_group_to_perm.permission = models.Permission.get_by_key(default_perm)
-
-        user_group_to_perm.user_group = user_group
-        user_group_to_perm.user_id = def_user.user_id
-        return user_group_to_perm
-
-    for ug in models.UserGroup.get_all():
-        perm_obj = _create_default_perms(ug)
-        _SESSION().add(perm_obj)
-    _SESSION().commit()
-
-    adm = models.User.get_first_admin()
-    # fix owners of UserGroup
-    for ug in _SESSION().query(models.UserGroup).all():
-        ug.user_id = adm.user_id
-        _SESSION().add(ug)
-    _SESSION().commit()
-
-    # fix owners of RepoGroup
-    for ug in _SESSION().query(models.RepoGroup).all():
-        ug.user_id = adm.user_id
-        _SESSION().add(ug)
-    _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/013_version_1_7_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,40 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-
-
-    #==========================================================================
-    # UserGroup
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_7_0 import UserGroup
-    tbl = UserGroup.__table__
-    user_id = tbl.columns.user_id
-    user_id.alter(nullable=False)
-
-    #==========================================================================
-    # RepoGroup
-    #==========================================================================
-    from kallithea.lib.dbmigrate.schema.db_1_7_0 import RepoGroup
-    tbl = RepoGroup.__table__
-    user_id = tbl.columns.user_id
-    user_id.alter(nullable=False)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
--- a/kallithea/lib/dbmigrate/versions/014_version_1_7_1.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-import logging
-import datetime
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_1_7_0
-
-    #==========================================================================
-    # Gist
-    #==========================================================================
-    tbl = db_1_7_0.Gist.__table__
-    gist_expires = tbl.columns.gist_expires
-    gist_expires.alter(type=Float(53))
-
-    # issue fixups
-    fixups(db_1_7_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    # fix null values in the updated_on column
-    for r in models.Repository().get_all():
-        if r.updated_on is None:
-            r.updated_on = datetime.datetime.fromtimestamp(0)
-            _SESSION().add(r)
-    _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/015_version_1_8_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,76 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_1_8_0
-    tbl = db_1_8_0.Setting.__table__
-    app_settings_type = Column("app_settings_type",
-                               String(255, convert_unicode=False, assert_unicode=None),
-                               nullable=True, unique=None, default=None)
-    app_settings_type.create(table=tbl)
-
-    # issue fixups
-    fixups(db_1_8_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('Fixing default options now...')
-
-    settings = [
-        #general
-        ('realm', '', 'unicode'),
-        ('title', '', 'unicode'),
-        ('ga_code', '', 'unicode'),
-        ('show_public_icon', False, 'bool'),
-        ('show_private_icon', True, 'bool'),
-        ('stylify_metatags', True, 'bool'),
-
-        # defaults
-        ('default_repo_enable_locking',  False, 'bool'),
-        ('default_repo_enable_downloads', False, 'bool'),
-        ('default_repo_enable_statistics', False, 'bool'),
-        ('default_repo_private', False, 'bool'),
-        ('default_repo_type', 'hg', 'unicode'),
-
-        #other
-        ('dashboard_items', 100, 'int'),
-        ('show_version', True, 'bool')
-    ]
-
-    for name, default, type_ in settings:
-        setting = models.Setting.get_by_name(name)
-        if not setting:
-            # if we don't have this option create it
-            setting = models.Setting(name, default, type_)
-
-        # fix certain key to new defaults
-        if name in ['title', 'show_public_icon']:
-            # change title if it's only the default
-            if name == 'title' and setting.app_settings_value == 'Kallithea':
-                setting.app_settings_value = default
-            else:
-                setting.app_settings_value = default
-
-        setting._app_settings_type = type_
-        _SESSION().add(setting)
-        _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/016_version_2_0_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,65 +0,0 @@
-import logging
-import datetime
-
-from sqlalchemy import *
-
-from kallithea import EXTERN_TYPE_INTERNAL
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_0_0
-    tbl = db_2_0_0.User.__table__
-
-    extern_type = Column("extern_type",
-                         String(255, convert_unicode=False, assert_unicode=None),
-                         nullable=True, unique=None, default=None)
-    extern_type.create(table=tbl)
-
-    extern_name = Column("extern_name", String(255, convert_unicode=False, assert_unicode=None),
-                         nullable=True, unique=None, default=None)
-    extern_name.create(table=tbl)
-
-    created_on = Column('created_on', DateTime(timezone=False),
-                        nullable=True, default=datetime.datetime.now)
-    created_on.create(table=tbl)
-
-    # issue fixups
-    fixups(db_2_0_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('Fixing default created on')
-
-    for usr in models.User.get_all():
-        usr.created_on = datetime.datetime.now()
-        _SESSION().add(usr)
-        _SESSION().commit()
-
-    notify('Migrating LDAP attribute to extern')
-    for usr in models.User.get_all():
-        ldap_dn = usr.ldap_dn
-        if ldap_dn:
-            usr.extern_name = ldap_dn
-            usr.extern_type = 'ldap'
-        else:
-            usr.extern_name = EXTERN_TYPE_INTERNAL
-            usr.extern_type = EXTERN_TYPE_INTERNAL
-        _SESSION().add(usr)
-        _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/017_version_2_0_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,48 +0,0 @@
-import logging
-import datetime
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_0_0
-    tbl = db_2_0_0.UserGroup.__table__
-
-    user_group_description = Column("user_group_description",
-                                    String(10000, convert_unicode=False,
-                                           assert_unicode=None), nullable=True,
-                                    unique=None, default=None)
-    user_group_description.create(table=tbl)
-
-    created_on = Column('created_on', DateTime(timezone=False),
-                        nullable=True, default=datetime.datetime.now)
-    created_on.create(table=tbl)
-
-    # issue fixups
-    fixups(db_2_0_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('Fixing default created on')
-
-    for gr in models.UserGroup.get_all():
-        gr.created_on = datetime.datetime.now()
-        _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/018_version_2_0_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,74 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-from kallithea.lib.utils2 import str2bool
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_0_0
-
-    # issue fixups
-    fixups(db_2_0_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('Fixing default auth modules')
-    plugins = 'kallithea.lib.auth_modules.auth_internal'
-    opts = []
-    ldap_enabled = str2bool(getattr(
-        models.Setting.get_by_name('ldap_active'),
-        'app_settings_value', False))
-    if ldap_enabled:
-        plugins += ',kallithea.lib.auth_modules.auth_ldap'
-        opts.append(('auth_ldap_enabled', 'True', 'bool'))
-
-    opts.append(('auth_plugins', plugins, 'list'),)
-    opts.append(('auth_internal_enabled', 'True', 'bool'))
-
-    for name, default, type_ in opts:
-        setting = models.Setting.get_by_name(name)
-        if not setting:
-            # if we don't have this option create it
-            setting = models.Setting(name, default, type_)
-
-        _SESSION().add(setting)
-        _SESSION().commit()
-
-    #copy over the LDAP settings
-    old_ldap = [('ldap_active', 'false', 'bool'), ('ldap_host', '', 'unicode'),
-                ('ldap_port', '389', 'int'), ('ldap_tls_kind', 'PLAIN', 'unicode'),
-                ('ldap_tls_reqcert', '', 'unicode'), ('ldap_dn_user', '', 'unicode'),
-                ('ldap_dn_pass', '', 'unicode'), ('ldap_base_dn', '', 'unicode'),
-                ('ldap_filter', '', 'unicode'), ('ldap_search_scope', '', 'unicode'),
-                ('ldap_attr_login', '', 'unicode'), ('ldap_attr_firstname', '', 'unicode'),
-                ('ldap_attr_lastname', '', 'unicode'), ('ldap_attr_email', '', 'unicode')]
-    for k, v, t in old_ldap:
-        old_setting = models.Setting.get_by_name(k)
-        name = 'auth_%s' % k
-        setting = models.Setting.get_by_name(name)
-        if setting is None:
-            # if we don't have this option create it
-            if old_setting is not None:
-                v = old_setting.app_settings_value
-            setting = models.Setting(name, v, t)
-
-        _SESSION().add(setting)
-        _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/019_version_2_0_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,35 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_0_0
-    tbl = db_2_0_0.Setting.__table__
-    settings_value = tbl.columns.app_settings_value
-    settings_value.alter(type=String(4096))
-
-    # issue fixups
-    fixups(db_2_0_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    return
--- a/kallithea/lib/dbmigrate/versions/020_version_2_0_1.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea import EXTERN_TYPE_INTERNAL
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_0_1
-
-    # issue fixups
-    fixups(db_2_0_1, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    #fix all empty extern type users to default 'internal'
-    for usr in models.User.query().all():
-        if not usr.extern_name:
-            usr.extern_name = EXTERN_TYPE_INTERNAL
-            usr.extern_type = EXTERN_TYPE_INTERNAL
-            _SESSION().add(usr)
-            _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/021_version_2_0_2.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,70 +0,0 @@
-import os
-import logging
-import datetime
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_0_1
-    tbl = db_2_0_1.RepoGroup.__table__
-
-    created_on = Column('created_on', DateTime(timezone=False), nullable=True,
-                        default=datetime.datetime.now)
-    created_on.create(table=tbl)
-
-    #fix null values on certain columns when upgrading from older releases
-    tbl = db_2_0_1.UserLog.__table__
-    col = tbl.columns.user_id
-    col.alter(nullable=True)
-
-    tbl = db_2_0_1.UserFollowing.__table__
-    col = tbl.columns.follows_repository_id
-    col.alter(nullable=True)
-
-    tbl = db_2_0_1.UserFollowing.__table__
-    col = tbl.columns.follows_user_id
-    col.alter(nullable=True)
-
-    # issue fixups
-    fixups(db_2_0_1, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('Fixing default created on for repo groups')
-
-    for gr in models.RepoGroup.get_all():
-        gr.created_on = datetime.datetime.now()
-        _SESSION().add(gr)
-        _SESSION().commit()
-
-    repo_store_path = models.Ui.get_repos_location()
-    _store = os.path.join(repo_store_path, '.cache', 'largefiles')
-    notify('Setting largefiles usercache')
-    print _store
-
-    if not models.Ui.get_by_key('usercache'):
-        largefiles = models.Ui()
-        largefiles.ui_section = 'largefiles'
-        largefiles.ui_key = 'usercache'
-        largefiles.ui_value = _store
-        _SESSION().add(largefiles)
-        _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/022_version_2_0_2.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,63 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_0_2
-
-    # issue fixups
-    fixups(db_2_0_2, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('fixing new schema for landing_rev')
-
-    for repo in models.Repository.get_all():
-        print u'repo %s old landing rev is: %s' % (repo, repo.landing_rev)
-        _rev = repo.landing_rev[1]
-        _rev_type = 'rev'  # default
-
-        if _rev in ['default', 'master']:
-            _rev_type = 'branch'
-        elif _rev in ['tip']:
-            _rev_type = 'rev'
-        else:
-            try:
-                scm = repo.scm_instance
-                if scm:
-                    known_branches = scm.branches.keys()
-                    known_bookmarks = scm.bookmarks.keys()
-                    if _rev in known_branches:
-                        _rev_type = 'branch'
-                    elif _rev in known_bookmarks:
-                        _rev_type = 'book'
-            except Exception as e:
-                print e
-                print 'continue...'
-                #we don't want any error to break the process
-                pass
-
-        _new_landing_rev = '%s:%s' % (_rev_type, _rev)
-        print u'setting to %s' % _new_landing_rev
-        repo.landing_rev = _new_landing_rev
-        _SESSION().add(repo)
-        _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/023_version_2_1_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,35 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_1_0
-
-    tbl = db_2_1_0.UserApiKeys.__table__
-    tbl.create()
-
-    # issue fixups
-    fixups(db_2_1_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    pass
--- a/kallithea/lib/dbmigrate/versions/024_version_2_1_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,61 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-from kallithea.lib.utils2 import str2bool
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_1_0
-
-    # issue fixups
-    fixups(db_2_1_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    from pylons import config
-
-    notify('migrating options from .ini file')
-    use_gravatar = str2bool(config.get('use_gravatar'))
-    print('Setting gravatar use to: %s' % use_gravatar)
-    sett = models.Setting.create_or_update('use_gravatar',
-                                                    use_gravatar, 'bool')
-    _SESSION().add(sett)
-    _SESSION.commit()
-    #set the new format of gravatar URL
-    gravatar_url = models.User.DEFAULT_GRAVATAR_URL
-    if config.get('alternative_gravatar_url'):
-        gravatar_url = config.get('alternative_gravatar_url')
-
-    print('Setting gravatar url to:%s' % gravatar_url)
-    sett = models.Setting.create_or_update('gravatar_url',
-                                                    gravatar_url, 'unicode')
-    _SESSION().add(sett)
-    _SESSION.commit()
-
-    #now create new changed value of clone_url
-    clone_uri_tmpl = models.Repository.DEFAULT_CLONE_URI
-    print('Setting new clone url template to %s' % clone_uri_tmpl)
-
-    sett = models.Setting.create_or_update('clone_uri_tmpl',
-                                                    clone_uri_tmpl, 'unicode')
-    _SESSION().add(sett)
-    _SESSION.commit()
--- a/kallithea/lib/dbmigrate/versions/025_version_2_1_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,36 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_1_0
-
-    # issue fixups
-    fixups(db_2_1_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('Creating upgrade URL')
-    sett = models.Setting.create_or_update('update_url',
-                            models.Setting.DEFAULT_UPDATE_URL, 'unicode')
-    _SESSION().add(sett)
-    _SESSION.commit()
--- a/kallithea/lib/dbmigrate/versions/026_version_2_2_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_2_0
-
-    tbl = db_2_2_0.User.__table__
-
-    user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data
-    user_data.create(table=tbl)
-
-    # issue fixups
-    fixups(db_2_2_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    pass
--- a/kallithea/lib/dbmigrate/versions/027_version_2_2_0.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,52 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_2_0
-
-    # issue fixups
-    fixups(db_2_2_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    # ** create default permissions ** #
-    #=====================================
-    for p in models.Permission.PERMS:
-        if not models.Permission.get_by_key(p[0]):
-            new_perm = models.Permission()
-            new_perm.permission_name = p[0]
-            new_perm.permission_longname = p[0]  #translation err with p[1]
-            print 'Creating new permission %s' % p[0]
-            _SESSION().add(new_perm)
-
-    _SESSION().commit()
-
-    # ** set default create_on_write to active
-    user = models.User.get_default_user()
-    _def = 'hg.create.write_on_repogroup.true'
-    new = models.UserToPerm()
-    new.user = user
-    new.permission = models.Permission.get_by_key(_def)
-    print 'Setting default to %s' % _def
-    _SESSION().add(new)
-    _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/028_version_2_2_3.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_2_0
-
-    tbl = db_2_2_0.UserGroup.__table__
-
-    user_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data
-    user_data.create(table=tbl)
-
-    # issue fixups
-    fixups(db_2_2_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    pass
--- a/kallithea/lib/dbmigrate/versions/029_version_2_2_3.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_2_0
-
-    # issue fixups
-    fixups(db_2_2_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('Adding grid items options now...')
-
-    settings = [
-        ('admin_grid_items', 25, 'int'),  # old hardcoded value was 25
-    ]
-
-    for name, default, type_ in settings:
-        setting = models.Setting.get_by_name(name)
-        if not setting:
-            # if we don't have this option create it
-            setting = models.Setting(name, default, type_)
-        setting._app_settings_type = type_
-        _SESSION().add(setting)
-        _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/030_version_2_2_3.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_2_0
-
-    tbl = db_2_2_0.Repository.__table__
-
-    repo_state = Column("repo_state", String(255), nullable=True)
-    repo_state.create(table=tbl)
-
-    # issue fixups
-    fixups(db_2_2_0, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    pass
--- a/kallithea/lib/dbmigrate/versions/031_version_2_2_3.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,38 +0,0 @@
-import logging
-
-from sqlalchemy import *
-
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-from kallithea.lib.dbmigrate.versions import _reset_base, notify
-
-log = logging.getLogger(__name__)
-
-
-def upgrade(migrate_engine):
-    """
-    Upgrade operations go here.
-    Don't create your own engine; bind migrate_engine to your metadata
-    """
-    _reset_base(migrate_engine)
-    from kallithea.lib.dbmigrate.schema import db_2_2_3
-
-    # issue fixups
-    fixups(db_2_2_3, meta.Session)
-
-
-def downgrade(migrate_engine):
-    meta = MetaData()
-    meta.bind = migrate_engine
-
-
-def fixups(models, _SESSION):
-    notify('Creating repository states')
-    for repo in models.Repository.get_all():
-        _state = models.Repository.STATE_CREATED
-        print 'setting repo %s state to "%s"' % (repo, _state)
-        repo.repo_state = _state
-        _SESSION().add(repo)
-        _SESSION().commit()
--- a/kallithea/lib/dbmigrate/versions/__init__.py	Mon Jul 18 14:08:43 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-kallithea.lib.dbmigrate.versions
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Package containing new versions of database models
-
-This file was forked by the Kallithea project in July 2014.
-Original author and date, and relevant copyright and licensing information is below:
-:created_on: Dec 11, 2010
-:author: marcink
-:copyright: (c) 2013 RhodeCode GmbH, and others.
-:license: GPLv3, see LICENSE.md for more details.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.ext.declarative import declarative_base
-from kallithea.lib.dbmigrate.migrate import *
-from kallithea.lib.dbmigrate.migrate.changeset import *
-
-from kallithea.model import meta
-
-
-def notify(msg, caps=True):
-    """
-    Print a highlighted notification banner for migration messages
-    """
-    ml = len(msg) + (4 * 2)
-    formatted_msg = ('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml))
-    if caps:
-        formatted_msg = formatted_msg.upper()
-    print(formatted_msg)
-
-
-def _reset_base(migrate_engine):
-    # completely reset the metadata so SQLAlchemy does not reuse the previously declared Base
-    Base = declarative_base()
-    Base.metadata.clear()
-    Base.metadata = MetaData()
-    Base.metadata.bind = migrate_engine
-
-    # new session and base
-    #meta.Session = scoped_session(sessionmaker(expire_on_commit=True,))
-    #meta.Session.configure(bind=migrate_engine)
-    meta.Base = Base
-
-    notify('SQLA BASE RESET !')