# HG changeset patch
# User Mads Kiilerich
# Date 1604097858 -3600
# Node ID 3669e58f3002325ce06aac73d945fc067fafc8a4
# Parent  6bde1c0a04d4be70345ea14d63475b936d558043
# Parent  c387989f868f2d7712de7c4de351969c502cd1fd
Merge stable

diff -r c387989f868f -r 3669e58f3002 .coveragerc
--- a/.coveragerc	Wed Oct 28 14:58:18 2020 +0100
+++ b/.coveragerc	Fri Oct 30 23:44:18 2020 +0100
@@ -8,9 +8,6 @@
     kallithea/lib/dbmigrate/*
     # the tests themselves should not be part of the coverage report
     kallithea/tests/*
-    # the scm hooks are not run in the kallithea process
-    kallithea/config/post_receive_tmpl.py
-    kallithea/config/pre_receive_tmpl.py

 # same omit lines should be present in sections 'run' and 'report'
 [report]
@@ -23,9 +20,6 @@
     kallithea/lib/dbmigrate/*
     # the tests themselves should not be part of the coverage report
     kallithea/tests/*
-    # the scm hooks are not run in the kallithea process
-    kallithea/config/post_receive_tmpl.py
-    kallithea/config/pre_receive_tmpl.py

 [paths]
 source =
diff -r c387989f868f -r 3669e58f3002 .hgignore
--- a/.hgignore	Wed Oct 28 14:58:18 2020 +0100
+++ b/.hgignore	Fri Oct 30 23:44:18 2020 +0100
@@ -10,7 +10,6 @@
 *.rej
 *.bak
 .eggs/
-tarballcache/

 syntax: regexp
 ^rcextensions
@@ -52,4 +51,5 @@
 ^\.idea$
 ^\.cache$
 ^\.pytest_cache$
+^venv$
 /__pycache__$
diff -r c387989f868f -r 3669e58f3002 MANIFEST.in
--- a/MANIFEST.in	Wed Oct 28 14:58:18 2020 +0100
+++ b/MANIFEST.in	Fri Oct 30 23:44:18 2020 +0100
@@ -18,7 +18,6 @@
 recursive-include init.d *
 recursive-include kallithea/alembic *
 include kallithea/bin/ldap_sync.conf
-include kallithea/lib/paster_commands/template.ini.mako
 recursive-include kallithea/front-end *
 recursive-include kallithea/i18n *
 recursive-include kallithea/public *
diff -r c387989f868f -r 3669e58f3002 README.rst
--- a/README.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/README.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -74,8 +74,8 @@
   web interface using simple editor or upload binary files using simple form.
 - Powerful pull request driven review system with inline commenting, changeset
   statuses, and notification system.
-- Importing and syncing repositories from remote locations for Git_, Mercurial_
-  and Subversion.
+- Importing and syncing repositories from remote locations for Git_ and
+  Mercurial_.
 - Mako templates let you customize the look and feel of the application.
 - Beautiful diffs, annotations and source code browsing all colored by pygments.
   Raw diffs are made in Git-diff format for both VCS systems,
@@ -175,7 +175,6 @@
 .. _Mercurial: http://mercurial.selenic.com/
 .. _Bitbucket: http://bitbucket.org/
 .. _GitHub: http://github.com/
-.. _Subversion: http://subversion.tigris.org/
 .. _Git: http://git-scm.com/
 .. _Celery: http://celeryproject.org/
 .. _Software Freedom Conservancy: http://sfconservancy.org/
diff -r c387989f868f -r 3669e58f3002 dev_requirements.txt
--- a/dev_requirements.txt	Wed Oct 28 14:58:18 2020 +0100
+++ b/dev_requirements.txt	Fri Oct 30 23:44:18 2020 +0100
@@ -1,9 +1,9 @@
-pytest >= 4.6.6, < 5.4
+pytest >= 4.6.6, < 5.5
 pytest-sugar >= 0.9.2, < 0.10
 pytest-benchmark >= 3.2.2, < 3.3
 pytest-localserver >= 0.5.0, < 0.6
 mock >= 3.0.0, < 4.1
-Sphinx >= 1.8.0, < 2.4
+Sphinx >= 1.8.0, < 3.1
 WebTest >= 2.0.6, < 2.1
-isort == 4.3.21
-pyflakes == 2.1.1
+isort == 5.1.2
+pyflakes == 2.2.0
diff -r c387989f868f -r 3669e58f3002 development.ini
--- a/development.ini	Wed Oct 28 14:58:18 2020 +0100
+++ b/development.ini	Fri Oct 30 23:44:18 2020 +0100
@@ -67,11 +67,11 @@
 host = 0.0.0.0
 port = 5000

-## WAITRESS ##
+## Gearbox serve uses the Waitress web server ##
 use = egg:waitress#main
-## number of worker threads
+## avoid multi threading
 threads = 1
-## MAX BODY SIZE 100GB
+## allow push of repos bigger than the default of 1 GB
 max_request_body_size = 107374182400
 ## use poll instead of select, fixes fd limits, may not work on old
 ## windows systems.
@@ -102,7 +102,7 @@
 index_dir = %(here)s/data/index

 ## uncomment and set this path to use archive download cache
-archive_cache_dir = %(here)s/tarballcache
+archive_cache_dir = %(here)s/data/tarballcache

 ## change this to unique ID for security
 #app_instance_uuid = VERY-SECRET
@@ -346,7 +346,6 @@
 get trace_errors.smtp_password = smtp_password
 get trace_errors.smtp_use_tls = smtp_use_tls

-
 ##################################
 ## LOGVIEW CONFIG ##
 ##################################
@@ -359,10 +358,10 @@
 ## DB CONFIG ##
 #########################

-## SQLITE [default]
 sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60
-
-## see sqlalchemy docs for other backends
+#sqlalchemy.url = postgresql://kallithea:password@localhost/kallithea
+#sqlalchemy.url = mysql://kallithea:password@localhost/kallithea?charset=utf8mb4
+## Note: the mysql:// prefix should also be used for MariaDB

 sqlalchemy.pool_recycle = 3600
diff -r c387989f868f -r 3669e58f3002 docs/conf.py
--- a/docs/conf.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/conf.py	Fri Oct 30 23:44:18 2020 +0100
@@ -14,7 +14,7 @@
 import os
 import sys

-from kallithea import __version__
+import kallithea


 # If extensions (or modules to document with autodoc) are in another directory,
@@ -56,9 +56,9 @@
 # The short X.Y version.
 root = os.path.dirname(os.path.dirname(__file__))
 sys.path.append(root)
-version = __version__
+version = kallithea.__version__
 # The full version, including alpha/beta/rc tags.
-release = __version__
+release = kallithea.__version__

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff -r c387989f868f -r 3669e58f3002 docs/contributing.rst
--- a/docs/contributing.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/contributing.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -26,12 +26,13 @@
 Getting started
 ---------------

-To get started with Kallithea development::
+To get started with Kallithea development run the following commands in your
+bash shell::

     hg clone https://kallithea-scm.org/repos/kallithea
     cd kallithea
-    python3 -m venv ../kallithea-venv
-    source ../kallithea-venv/bin/activate
+    python3 -m venv venv
+    . venv/bin/activate
     pip install --upgrade pip setuptools
     pip install --upgrade -e . -r dev_requirements.txt python-ldap python-pam
     kallithea-cli config-create my.ini
@@ -84,6 +85,17 @@
 and the test suite creates repositories in the temporary directory.
 Linux systems with /tmp mounted noexec will thus fail.

+Tests can be run on PostgreSQL like::
+
+    sudo -u postgres createuser 'kallithea-test' --pwprompt  # password password
+    sudo -u postgres createdb 'kallithea-test' --owner 'kallithea-test'
+    REUSE_TEST_DB='postgresql://kallithea-test:password@localhost/kallithea-test' py.test
+
+Tests can be run on MariaDB/MySQL like::
+
+    echo "GRANT ALL PRIVILEGES ON \`kallithea-test\`.* TO 'kallithea-test'@'localhost' IDENTIFIED BY 'password'" | sudo -u mysql mysql
+    TEST_DB='mysql://kallithea-test:password@localhost/kallithea-test?charset=utf8mb4' py.test
+
 You can also use ``tox`` to run the tests with all supported Python versions.

 When running tests, Kallithea generates a `test.ini` based on template values
@@ -147,8 +159,9 @@
 lot about preservation of copyright and license information for existing code
 that is brought into the project.

-Contributions will be accepted in most formats -- such as commits hosted on your own Kallithea instance, or patches sent by
-email to the `kallithea-general`_ mailing list.
+Contributions will be accepted in most formats -- such as commits hosted on your
+own Kallithea instance, or patches sent by email to the `kallithea-general`_
+mailing list.

 Make sure to test your changes both manually and with the automatic tests
 before posting.
diff -r c387989f868f -r 3669e58f3002 docs/index.rst
--- a/docs/index.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/index.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -81,7 +81,6 @@
 .. _python: http://www.python.org/
 .. _django: http://www.djangoproject.com/
 .. _mercurial: https://www.mercurial-scm.org/
-.. _subversion: http://subversion.tigris.org/
 .. _git: http://git-scm.com/
 .. _celery: http://celeryproject.org/
 .. _Sphinx: http://sphinx.pocoo.org/
diff -r c387989f868f -r 3669e58f3002 docs/installation.rst
--- a/docs/installation.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/installation.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -19,12 +19,12 @@
   installations side by side or remove it entirely by just removing the
   virtualenv directory) and does not require root privileges.

-- :ref:`installation-without-virtualenv`: The alternative method of installing
-  a Kallithea release is using standard pip. The package will be installed in
-  the same location as all other Python packages you have ever installed. As a
-  result, removing it is not as straightforward as with a virtualenv, as you'd
-  have to remove its dependencies manually and make sure that they are not
-  needed by other packages.
+- Kallithea can also be installed with plain pip - globally or with ``--user``
+  or similar. The package will be installed in the same location as all other
+  Python packages you have ever installed. As a result, removing it is not as
+  straightforward as with a virtualenv, as you'd have to remove its
+  dependencies manually and make sure that they are not needed by other
+  packages. We recommend using virtualenv.

 Regardless of the installation method you may need to make sure you have
 appropriate development packages installed, as installation of some of the
@@ -49,17 +49,24 @@
 -----------------------------------

 To install Kallithea in a virtualenv using the stable branch of the development
-repository, follow the instructions below::
+repository, use the following commands in your bash shell::

     hg clone https://kallithea-scm.org/repos/kallithea -u stable
     cd kallithea
-    python3 -m venv ../kallithea-venv
-    . ../kallithea-venv/bin/activate
+    python3 -m venv venv
+    . venv/bin/activate
     pip install --upgrade pip setuptools
     pip install --upgrade -e .
     python3 setup.py compile_catalog   # for translation of the UI

-You can now proceed to :ref:`setup`.
+.. note::
+   This will install all Python dependencies into the virtualenv. Kallithea
+   itself will however only be installed as a pointer to the source location.
+   The source clone must thus be kept in the same location, and it shouldn't be
+   updated to other revisions unless you want to upgrade. Edits in the source
+   tree will have immediate impact (possibly after a restart of the service).
+
+You can now proceed to :ref:`install-front-end`.


 .. _installation-virtualenv:
@@ -73,27 +80,30 @@
 problematic when upgrading the system or Kallithea. An additional benefit of
 virtualenv is that it doesn't require root privileges.

-- Assuming you have installed virtualenv, create a new virtual environment
-  for example, in `/srv/kallithea/venv`, using the venv command::
+- Don't install as root - install as a dedicated user like ``kallithea``.
+  If necessary, create the top directory for the virtualenv (like
+  ``/srv/kallithea/venv``) as root and assign ownership to the user.
+
+  Make a parent folder for the virtualenv (and perhaps also Kallithea
+  configuration and data files) such as ``/srv/kallithea``. Create the
+  directory as root if necessary and grant ownership to the ``kallithea`` user.
+
+- Create a new virtual environment, for example in ``/srv/kallithea/venv``,
+  specifying the right Python binary::

     python3 -m venv /srv/kallithea/venv

 - Activate the virtualenv in your current shell session and make sure the
-  basic requirements are up-to-date by running::
+  basic requirements are up-to-date by running the following commands in your
+  bash shell::

     . /srv/kallithea/venv/bin/activate
     pip install --upgrade pip setuptools

-.. note:: You can't use UNIX ``sudo`` to source the ``virtualenv`` script; it
-   will "activate" a shell that terminates immediately. It is also perfectly
-   acceptable (and desirable) to create a virtualenv as a normal user.
+.. note:: You can't use UNIX ``sudo`` to source the ``activate`` script; it
+   will "activate" a shell that terminates immediately.

-- Make a folder for Kallithea data files, and configuration somewhere on the
-  filesystem. For example::
-
-    mkdir /srv/kallithea
-
-- Go into the created directory and run this command to install Kallithea::
+- Install Kallithea in the activated virtualenv::

     pip install --upgrade kallithea
@@ -105,31 +115,28 @@
   This might require installation of development packages using your
   distribution's package manager.

-  Alternatively, download a .tar.gz from http://pypi.python.org/pypi/Kallithea,
-  extract it and install from source by running::
+  Alternatively, download a .tar.gz from http://pypi.python.org/pypi/Kallithea,
+  extract it and install from source by running::

-    pip install --upgrade .
+    pip install --upgrade .

-  This will install Kallithea together with all other required Python
   libraries into the activated virtualenv.

-You can now proceed to :ref:`setup`.
-
-.. _installation-without-virtualenv:
+You can now proceed to :ref:`install-front-end`.

-Installing a released version without virtualenv
--------------------------------------------------
-
-For installation without virtualenv, 'just' use::
-
-    pip install kallithea
+Prepare front-end files
+-----------------------

-Note that this method requires root privileges and will install packages
-globally without using the system's package manager.
+Finally, the front-end files with CSS and JavaScript must be prepared. This
+depends on having some commands available in the shell search path: ``npm``
+version 6 or later, and ``node.js`` (version 12 or later) available as
+``node``. The installation method for these dependencies varies between
+operating systems and distributions.

-To install as a regular user in ``~/.local``, you can use::
+Prepare the front-end by running::

-    pip install --user kallithea
+    kallithea-cli front-end-build

 You can now proceed to :ref:`setup`.
diff -r c387989f868f -r 3669e58f3002 docs/overview.rst
--- a/docs/overview.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/overview.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -20,23 +20,27 @@
 2. **Install Kallithea software.**
    This makes the ``kallithea-cli`` command line tool available.

-3. **Create low level configuration file.**
+3. **Prepare front-end files**
+   Some front-end files must be fetched or created using ``npm`` and ``node``
+   tooling so they can be served to the client as static files.
+
+4. **Create low level configuration file.**
    Use ``kallithea-cli config-create`` to create a ``.ini`` file with database
    connection info, mail server information, configuration for the specified
    web server, etc.

-4. **Populate the database.**
+5. **Populate the database.**
    Use ``kallithea-cli db-create`` with the ``.ini`` file to create the
    database schema and insert the most basic information: the location of the
    repository store and an initial local admin user.

-5. **Configure the web server.**
+6. **Configure the web server.**
    The web server must invoke the WSGI entrypoint for the Kallithea software
    using the ``.ini`` file (and thus the database). This makes the web
    application available so the local admin user can log in and tweak the
    configuration further.

-6. **Configure users.**
+7. **Configure users.**
    The initial admin user can create additional local users, or configure how
    users can be created and authenticated from other user directories.

@@ -44,6 +48,45 @@
 :ref:`setup` for details on these steps.


+File system location
+--------------------
+
+Kallithea can be installed in many different ways. The main parts are:
+
+- A location for the Kallithea software and its dependencies. This includes
+  the Python code, template files, and front-end code. After installation, this
+  will be read-only (except when upgrading).
+
+- A location for the ``.ini`` configuration file that tells the Kallithea
+  instance which database to use (and thus also the repository location).
+  After installation, this will be read-only (except when upgrading).
+
+- A location for various data files and caches for the Kallithea instance. This
+  is by default in a ``data`` directory next to the ``.ini`` file. This will
+  have to be writable by the running Kallithea service.
+
+- A database. The ``.ini`` file specifies which database to use. The database
+  will be a separate service and live elsewhere in the filesystem if using
+  PostgreSQL or MariaDB/MySQL. If using SQLite, it will by default live next to
+  the ``.ini`` file, as ``kallithea.db``.
+
+- A location for the repositories that are hosted by this Kallithea instance.
+  This will have to be writable by the running Kallithea service. The path to
+  this location will be configured in the database.
+
+For production setups, one recommendation is to use ``/srv/kallithea`` for the
+``.ini`` and ``data``, place the virtualenv in ``venv``, and use a Kallithea
+clone in ``kallithea``. Create a ``kallithea`` user, let it own
+``/srv/kallithea``, and run as that user when installing.
+
+For simple setups, it is fine to just use something like a ``kallithea`` user
+with home in ``/home/kallithea`` and place everything there.
+
+For experiments, it might be convenient to run everything as yourself and work
+inside a clone of Kallithea, with the ``.ini`` and SQLite database in the root
+of the clone, and a virtualenv in ``venv``.
+
+
 Python environment
 ------------------

@@ -177,7 +220,7 @@
   to get a configuration starting point for your choice of web server.

   (Gearbox will do like ``paste`` and use the WSGI application entry point
-  ``kallithea.config.middleware:make_app`` as specified in ``setup.py``.)
+  ``kallithea.config.application:make_app`` as specified in ``setup.py``.)

 - `Apache httpd`_ can serve WSGI applications directly using mod_wsgi_ and a
   simple Python file with the necessary configuration. This is a good option if
@@ -216,13 +259,13 @@
 .. _Python: http://www.python.org/
 .. _Gunicorn: http://gunicorn.org/
 .. _Gevent: http://www.gevent.org/
-.. _Waitress: http://waitress.readthedocs.org/en/latest/
-.. _Gearbox: http://turbogears.readthedocs.io/en/latest/turbogears/gearbox.html
+.. _Waitress: https://docs.pylonsproject.org/projects/waitress/
+.. _Gearbox: https://turbogears.readthedocs.io/en/latest/turbogears/gearbox.html
 .. _PyPI: https://pypi.python.org/pypi
 .. _Apache httpd: http://httpd.apache.org/
-.. _mod_wsgi: https://code.google.com/p/modwsgi/
+.. _mod_wsgi: https://modwsgi.readthedocs.io/
 .. _isapi-wsgi: https://github.com/hexdump42/isapi-wsgi
-.. _uWSGI: https://uwsgi-docs.readthedocs.org/en/latest/
+.. _uWSGI: https://uwsgi-docs.readthedocs.io/
 .. _nginx: http://nginx.org/en/
 .. _iis: http://en.wikipedia.org/wiki/Internet_Information_Services
 .. _pip: http://en.wikipedia.org/wiki/Pip_%28package_manager%29
diff -r c387989f868f -r 3669e58f3002 docs/setup.rst
--- a/docs/setup.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/setup.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -5,35 +5,72 @@
 =====


-Setting up Kallithea
---------------------
+Setting up a Kallithea instance
+-------------------------------
+
+Some further details to the steps mentioned in the overview.

-First, you will need to create a Kallithea configuration file. Run the
-following command to do so::
+Create low level configuration file
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+First, you will need to create a Kallithea configuration file. The
+configuration file is a ``.ini`` file that contains various low level settings
+for Kallithea, e.g. configuration of how to use database, web server, email,
+and logging.

-    kallithea-cli config-create my.ini
+Change to the desired directory (such as ``/srv/kallithea``) as the right user
+and run the following command to create the file ``my.ini`` in the current
+directory::
+
+    kallithea-cli config-create my.ini http_server=waitress

-This will create the file ``my.ini`` in the current directory. This
-configuration file contains the various settings for Kallithea, e.g.
-proxy port, email settings, usage of static files, cache, Celery
-settings, and logging. Extra settings can be specified like::
+To get a good starting point for your configuration, specify the http server
+you intend to use. It can be ``waitress``, ``gearbox``, ``gevent``,
+``gunicorn``, or ``uwsgi``. (Apache ``mod_wsgi`` will not use this
+configuration file, and it is fine to keep the default http_server configuration
+unused. ``mod_wsgi`` is configured using ``httpd.conf`` directives and a WSGI
+wrapper script.)
+
+Extra custom settings can be specified like::

     kallithea-cli config-create my.ini host=8.8.8.8 "[handler_console]" formatter=color_formatter

-Next, you need to create the databases used by Kallithea. It is recommended to
-use PostgreSQL or SQLite (default). If you choose a database other than the
-default, ensure you properly adjust the database URL in your ``my.ini``
-configuration file to use this other database. Kallithea currently supports
-PostgreSQL, SQLite and MariaDB/MySQL databases. Create the database by running
-the following command::
+Populate the database
+^^^^^^^^^^^^^^^^^^^^^
+
+Next, you need to create the databases used by Kallithea. Kallithea currently
+supports PostgreSQL, SQLite and MariaDB/MySQL databases. It is recommended to
+start out using SQLite (the default) and move to PostgreSQL if it becomes a
+bottleneck or to get a "proper" database. MariaDB/MySQL is also supported.
+
+For PostgreSQL, run ``pip install psycopg2`` to get the database driver. Make
+sure the PostgreSQL server is initialized and running. Make sure you have a
+database user with password authentication with permissions to create databases
+- for example by running::
+
+    sudo -u postgres createuser 'kallithea' --pwprompt --createdb
+
+For MariaDB/MySQL, run ``pip install mysqlclient`` to get the ``MySQLdb``
+database driver. Make sure the database server is initialized and running. Make
+sure you have a database user with password authentication with permissions to
+create the database - for example by running::
+
+    echo 'CREATE USER "kallithea"@"localhost" IDENTIFIED BY "password"' | sudo -u mysql mysql
+    echo 'GRANT ALL PRIVILEGES ON `kallithea`.* TO "kallithea"@"localhost"' | sudo -u mysql mysql
+
+Check and adjust ``sqlalchemy.url`` in your ``my.ini`` configuration file to use
+this database.
+
+Create the database, tables, and initial content by running the following
+command::

     kallithea-cli db-create -c my.ini

-This will prompt you for a "root" path. This "root" path is the location where
-Kallithea will store all of its repositories on the current machine. After
-entering this "root" path ``db-create`` will also prompt you for a username
-and password for the initial admin account which ``db-create`` sets
-up for you.
+This will first prompt you for a "root" path. This "root" path is the location
+where Kallithea will store all of its repositories on the current machine. This
+location must be writable for the running Kallithea application. Next,
+``db-create`` will prompt you for a username and password for the initial admin
+account it sets up for you.

 The ``db-create`` values can also be given on the command line. Example::
@@ -48,19 +85,20 @@
 location to its database. (Note: make sure you specify the correct path to the
 root).

-.. note:: the given path for Mercurial_ repositories **must** be write
-          accessible for the application. It's very important since
-          the Kallithea web interface will work without write access,
-          but when trying to do a push it will fail with permission
-          denied errors unless it has write access.
+.. note:: It is also possible to use an existing database. For example,
+    when using PostgreSQL without granting general createdb privileges to
+    the PostgreSQL kallithea user, set ``sqlalchemy.url =
+    postgresql://kallithea:password@localhost/kallithea`` and create the
+    database like::
-Finally, the front-end files must be prepared. This requires ``npm`` version 6
-or later, which needs ``node.js`` (version 12 or later). Prepare the front-end
-by running::
+        sudo -u postgres createdb 'kallithea' --owner 'kallithea'
+        kallithea-cli db-create -c my.ini --reuse

-    kallithea-cli front-end-build
+Running
+^^^^^^^

-You are now ready to use Kallithea. To run it simply execute::
+You are now ready to use Kallithea. To run it using a gearbox web server,
+simply execute::

     gearbox serve -c my.ini
@@ -186,7 +224,7 @@

 Kallithea provides full text search of repositories using `Whoosh`__.

-.. __: https://whoosh.readthedocs.io/en/latest/
+.. __: https://whoosh.readthedocs.io/

 For an incremental index build, run::
@@ -556,43 +594,19 @@

     WSGIRestrictEmbedded On

-- Create a WSGI dispatch script, like the one below. Make sure you
-  check that the paths correctly point to where you installed Kallithea
-  and its Python Virtual Environment.
+- Create a WSGI dispatch script, like the one below. The ``WSGIDaemonProcess``
+  ``python-home`` directive will make sure it uses the right Python Virtual
+  Environment and that paste thus can pick up the right Kallithea
+  application.

   .. code-block:: python

-      import os
-      os.environ['PYTHON_EGG_CACHE'] = '/srv/kallithea/.egg-cache'
-
-      # sometimes it's needed to set the current dir
-      os.chdir('/srv/kallithea/')
-
-      import site
-      site.addsitedir("/srv/kallithea/venv/lib/python3.7/site-packages")
-
       ini = '/srv/kallithea/my.ini'
       from logging.config import fileConfig
       fileConfig(ini, {'__file__': ini, 'here': '/srv/kallithea'})
       from paste.deploy import loadapp
       application = loadapp('config:' + ini)

-  Or using proper virtualenv activation:
-
-  .. code-block:: python
-
-      activate_this = '/srv/kallithea/venv/bin/activate_this.py'
-      execfile(activate_this, dict(__file__=activate_this))
-
-      import os
-      os.environ['HOME'] = '/srv/kallithea'
-
-      ini = '/srv/kallithea/kallithea.ini'
-      from logging.config import fileConfig
-      fileConfig(ini, {'__file__': ini, 'here': '/srv/kallithea'})
-      from paste.deploy import loadapp
-      application = loadapp('config:' + ini)
-
 - Add the necessary ``WSGI*`` directives to the Apache Virtual Host
   configuration file, like in the example below. Notice that the WSGI dispatch
   script created above is referred to with the ``WSGIScriptAlias`` directive.
@@ -617,15 +631,6 @@

       WSGIScriptAlias / /srv/kallithea/dispatch.wsgi
       WSGIPassAuthorization On

-  Or if using a dispatcher WSGI script with proper virtualenv activation:
-
-  .. code-block:: apache
-
-      WSGIDaemonProcess kallithea processes=5 threads=1 maximum-requests=100 lang=en_US.utf8
-      WSGIProcessGroup kallithea
-      WSGIScriptAlias / /srv/kallithea/dispatch.wsgi
-      WSGIPassAuthorization On
-

 Other configuration files
 -------------------------
diff -r c387989f868f -r 3669e58f3002 docs/upgrade.rst
--- a/docs/upgrade.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/upgrade.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -39,8 +39,8 @@

 Make a copy of your Kallithea configuration (``.ini``) file.

-If you are using :ref:`rcextensions <customization>`, you should also
-make a copy of the entire ``rcextensions`` directory.
+If you are using custom :ref:`extensions <customization>`, you should also
+make a copy of the ``extensions.py`` file.

 Back up your database
 ^^^^^^^^^^^^^^^^^^^^^
diff -r c387989f868f -r 3669e58f3002 docs/usage/customization.rst
--- a/docs/usage/customization.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/usage/customization.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -39,13 +39,14 @@
 .. _less: http://lesscss.org/


-Behavioral customization: rcextensions
---------------------------------------
+Behavioral customization: Kallithea extensions
+----------------------------------------------

-Some behavioral customization can be done in Python using ``rcextensions``, a
-custom Python package that can extend Kallithea functionality.
+Some behavioral customization can be done in Python using Kallithea
+``extensions``, a custom Python file you can create to extend Kallithea
+functionality.

-With ``rcextensions`` it's possible to add additional mappings for Whoosh
+With ``extensions`` it's possible to add additional mappings for Whoosh
 indexing and statistics, to add additional code into the push/pull/create/delete
 repository hooks (for example to send signals to build bots such as Jenkins) and
 even to monkey-patch certain parts of the Kallithea source code (for example
@@ -55,9 +56,14 @@

     kallithea-cli extensions-create -c my.ini

-This will create an ``rcextensions`` package next to the specified ``ini`` file.
-See the ``__init__.py`` file inside the generated ``rcextensions`` package
-for more details.
+This will create an ``extensions.py`` file next to the specified ``ini`` file.
+You can find more details inside this file.
+
+For compatibility with previous releases of Kallithea, a directory named
+``rcextensions`` with a file ``__init__.py`` inside of it can also be used. If
+both an ``extensions.py`` file and an ``rcextensions`` directory are found, only
+``extensions.py`` will be loaded. Note that the name ``rcextensions`` is
+deprecated and support for it will be removed in a future release.


 Behavioral customization: code changes
diff -r c387989f868f -r 3669e58f3002 docs/usage/email.rst
--- a/docs/usage/email.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/usage/email.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -89,8 +89,8 @@
 References
 ----------

-- `Error Middleware (Pylons documentation) `_
-- `ErrorHandler (Pylons modules documentation) `_
+- `Error Middleware (Pylons documentation) `_
+- `ErrorHandler (Pylons modules documentation) `_

 .. _backlash: https://github.com/TurboGears/backlash
diff -r c387989f868f -r 3669e58f3002 docs/usage/general.rst
--- a/docs/usage/general.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/usage/general.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -118,22 +118,15 @@

 Trending source files are calculated based on a predefined dictionary of known
 types and extensions. If an extension is missing or you would like to scan
-custom files, it is possible to extend the ``LANGUAGES_EXTENSIONS_MAP``
-dictionary located in ``kallithea/config/conf.py`` with new types.
+custom files, it is possible to add additional file extensions with
+``EXTRA_MAPPINGS`` in your custom Kallithea extensions.py file. See
+:ref:`customization`.


 Cloning remote repositories
 ---------------------------

 Kallithea has the ability to clone repositories from given remote locations.
-Currently it supports the following options:
-
-- hg  -> hg clone
-- svn -> hg clone
-- git -> git clone
-
-.. note:: svn -> hg cloning requires the ``hgsubversion`` library to be
-          installed.

 If you need to clone repositories that are protected via basic authentication,
 you can pass the credentials in the URL, e.g.
diff -r c387989f868f -r 3669e58f3002 docs/usage/performance.rst
--- a/docs/usage/performance.rst	Wed Oct 28 14:58:18 2020 +0100
+++ b/docs/usage/performance.rst	Fri Oct 30 23:44:18 2020 +0100
@@ -48,42 +48,37 @@
 Horizontal scaling
 ------------------

-Scaling horizontally means running several Kallithea instances and let them
-share the load. That can give huge performance benefits when dealing with large
-amounts of traffic (many users, CI servers, etc.). Kallithea can be scaled
-horizontally on one (recommended) or multiple machines.
+Scaling horizontally means running several Kallithea instances (also known as
+worker processes) and let them share the load. That is essential to serve other
+users while processing a long-running request from a user. Usually, the
+bottleneck on a Kallithea server is not CPU but I/O speed - especially network
+speed. It is thus a good idea to run multiple worker processes on one server.

-It is generally possible to run WSGI applications multithreaded, so that
-several HTTP requests are served from the same Python process at once. That can
-in principle give better utilization of internal caches and less process
-overhead.
+.. note::

-One danger of running multithreaded is that program execution becomes much more
-complex; programs must be written to consider all combinations of events and
-problems might depend on timing and be impossible to reproduce.
+    Kallithea and the embedded Mercurial backend are not thread-safe. Each
+    worker process must thus be single-threaded.

-Kallithea can't promise to be thread-safe, just like the embedded Mercurial
-backend doesn't make any strong promises when used as Kallithea uses it.
-Instead, we recommend scaling by using multiple server processes.
+Web servers can usually launch multiple worker processes - for example ``mod_wsgi`` with the
+``WSGIDaemonProcess`` ``processes`` parameter or ``uWSGI`` or ``gunicorn`` with
+their ``workers`` setting.

-Web servers with multiple worker processes (such as ``mod_wsgi`` with the
-``WSGIDaemonProcess`` ``processes`` parameter) will work out of the box.
-
+Kallithea can also be scaled horizontally across multiple machines.
 In order to scale horizontally on multiple machines, you need to do the
 following:

-  - Each instance's ``data`` storage needs to be configured to be stored on a
-    shared disk storage, preferably together with repositories. This ``data``
-    dir contains template caches, sessions, whoosh index and is used for
-    task locking (so it is safe across multiple instances). Set the
-    ``cache_dir``, ``index_dir``, ``beaker.cache.data_dir``, ``beaker.cache.lock_dir``
-    variables in each .ini file to a shared location across Kallithea instances
-  - If using several Celery instances,
-    the message broker should be common to all of them (e.g., one
-    shared RabbitMQ server)
-  - Load balance using round robin or IP hash, recommended is writing LB rules
-    that will separate regular user traffic from automated processes like CI
-    servers or build bots.
+- Each instance's ``data`` storage needs to be configured to be stored on a
+  shared disk storage, preferably together with repositories. This ``data``
+  dir contains template caches, sessions, whoosh index and is used for
+  task locking (so it is safe across multiple instances). Set the
+  ``cache_dir``, ``index_dir``, ``beaker.cache.data_dir``, ``beaker.cache.lock_dir``
+  variables in each .ini file to a shared location across Kallithea instances
+- If using several Celery instances,
+  the message broker should be common to all of them (e.g., one
+  shared RabbitMQ server)
+- Load balance using round robin or IP hash, recommended is writing LB rules
+  that will separate regular user traffic from automated processes like CI
+  servers or build bots.


 Serve static files directly from the web server
@@ -125,3 +120,6 @@


 .. _SQLAlchemyGrate: https://github.com/shazow/sqlalchemygrate
+.. _mod_wsgi: https://modwsgi.readthedocs.io/
+.. _uWSGI: https://uwsgi-docs.readthedocs.io/
+.. _gunicorn: http://pypi.python.org/pypi/gunicorn
diff -r c387989f868f -r 3669e58f3002 init.d/kallithea-daemon-debian
--- a/init.d/kallithea-daemon-debian	Wed Oct 28 14:58:18 2020 +0100
+++ b/init.d/kallithea-daemon-debian	Fri Oct 30 23:44:18 2020 +0100
@@ -37,7 +37,7 @@

 start() {
   echo "Starting $APP_NAME"
-  PYTHON_EGG_CACHE="/tmp" start-stop-daemon -d $APP_PATH \
+  start-stop-daemon -d $APP_PATH \
       --start --quiet \
       --pidfile $PID_PATH \
       --user $RUN_AS \
diff -r c387989f868f -r 3669e58f3002 init.d/kallithea-daemon-gentoo
--- a/init.d/kallithea-daemon-gentoo	Wed Oct 28 14:58:18 2020 +0100
+++ b/init.d/kallithea-daemon-gentoo	Fri Oct 30 23:44:18 2020 +0100
@@ -33,7 +33,7 @@

 start() {
   ebegin "Starting $APP_NAME"
-  start-stop-daemon -d $APP_PATH -e PYTHON_EGG_CACHE="/tmp" \
+  start-stop-daemon -d $APP_PATH \
       --start --quiet \
       --pidfile $PID_PATH \
       --user $RUN_AS \
diff -r c387989f868f -r 3669e58f3002 init.d/kallithea-daemon-redhat
--- a/init.d/kallithea-daemon-redhat	Wed Oct 28 14:58:18 2020 +0100
+++ b/init.d/kallithea-daemon-redhat	Fri Oct 30 23:44:18 2020 +0100
@@ -63,7 +63,7 @@

 start_kallithea () {
     ensure_pid_dir
-    PYTHON_EGG_CACHE="/tmp" daemon --pidfile $PID_PATH \
+    daemon --pidfile $PID_PATH \
         --user $RUN_AS "$DAEMON $DAEMON_OPTS"
     RETVAL=$?
     [ $RETVAL -eq 0 ] && touch $LOCK_FILE
diff -r c387989f868f -r 3669e58f3002 kallithea/__init__.py
--- a/kallithea/__init__.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/__init__.py	Fri Oct 30 23:44:18 2020 +0100
@@ -34,7 +34,7 @@
 if sys.version_info < (3, 6):
     raise Exception('Kallithea requires python 3.6 or later')

-VERSION = (0, 6, 2)
+VERSION = (0, 6, 99)
 BACKENDS = {
     'hg': 'Mercurial repository',
     'git': 'Git repository',
@@ -45,6 +45,10 @@

 CONFIG = {}

+# URL prefix for non repository related links - must start with `/`
+ADMIN_PREFIX = '/_admin'
+URL_SEP = '/'
+
 # Linked module for extensions
 EXTENSIONS = {}
diff -r c387989f868f -r 3669e58f3002 kallithea/alembic/env.py
--- a/kallithea/alembic/env.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/alembic/env.py	Fri Oct 30 23:44:18 2020 +0100
@@ -21,7 +21,7 @@
 from alembic import context
 from sqlalchemy import engine_from_config, pool

-from kallithea.model import db
+from kallithea.model import meta


 # The alembic.config.Config object, which wraps the current .ini file.
@@ -93,7 +93,7 @@
         # Support autogeneration of migration scripts based on "diff" between
         # current database schema and kallithea.model.db schema.
-        target_metadata=db.Base.metadata,
+        target_metadata=meta.Base.metadata,
         include_object=include_in_autogeneration,
         render_as_batch=True,  # batch mode is needed for SQLite support
     )
diff -r c387989f868f -r 3669e58f3002 kallithea/alembic/versions/a020f7044fd6_rename_hooks.py
--- a/kallithea/alembic/versions/a020f7044fd6_rename_hooks.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/alembic/versions/a020f7044fd6_rename_hooks.py	Fri Oct 30 23:44:18 2020 +0100
@@ -29,7 +29,7 @@
 from alembic import op
 from sqlalchemy import MetaData, Table

-from kallithea.model.db import Ui
+from kallithea.model import db


 meta = MetaData()
@@ -37,7 +37,7 @@

 def upgrade():
     meta.bind = op.get_bind()
-    ui = Table(Ui.__tablename__, meta, autoload=True)
+    ui = Table(db.Ui.__tablename__, meta, autoload=True)

     ui.update(values={
         'ui_key': 'prechangegroup.push_lock_handling',
@@ -51,7 +51,7 @@

 def downgrade():
     meta.bind = op.get_bind()
-    ui = Table(Ui.__tablename__, meta, autoload=True)
+    ui = Table(db.Ui.__tablename__, meta, autoload=True)

     ui.update(values={
         'ui_key': 'prechangegroup.pre_push',
diff -r c387989f868f -r 3669e58f3002 kallithea/alembic/versions/ad357ccd9521_drop_locking.py
--- a/kallithea/alembic/versions/ad357ccd9521_drop_locking.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/alembic/versions/ad357ccd9521_drop_locking.py	Fri Oct 30 23:44:18 2020 +0100
@@ -30,7 +30,7 @@
 from alembic import op
 from sqlalchemy import MetaData, Table

-from kallithea.model.db import Ui
+from kallithea.model import db


 meta = MetaData()
@@ -45,7 +45,7 @@
         batch_op.drop_column('enable_locking')

     meta.bind = op.get_bind()
-    ui = Table(Ui.__tablename__, meta, autoload=True)
+    ui = Table(db.Ui.__tablename__, meta, autoload=True)

     ui.delete().where(ui.c.ui_key == 'prechangegroup.push_lock_handling').execute()
     ui.delete().where(ui.c.ui_key == 'preoutgoing.pull_lock_handling').execute()
diff -r c387989f868f -r 3669e58f3002 kallithea/alembic/versions/f62826179f39_add_unique_constraint_on_.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/kallithea/alembic/versions/f62826179f39_add_unique_constraint_on_.py	Fri Oct 30 23:44:18 2020 +0100
@@ -0,0 +1,73 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""add unique constraint on PullRequestReviewer
+
+Revision ID: f62826179f39
+Revises: a0a1bf09c143
+Create Date: 2020-06-15 12:30:37.420321
+
+"""
+
+# The following opaque hexadecimal identifiers ("revisions") are used
+# by Alembic to track this migration script and its relations to others.
+revision = 'f62826179f39'
+down_revision = 'a0a1bf09c143'
+branch_labels = None
+depends_on = None
+
+import sqlalchemy as sa
+from alembic import op
+
+from kallithea.model import db
+
+
+def upgrade():
+    session = sa.orm.session.Session(bind=op.get_bind())
+
+    # there may be existing duplicates in the database, remove them first
+
+    seen = set()
+    # duplicate_values contains one copy of each duplicated pair
+    duplicate_values = (
+        session
+        .query(db.PullRequestReviewer.pull_request_id, db.PullRequestReviewer.user_id)
+        .group_by(db.PullRequestReviewer.pull_request_id, db.PullRequestReviewer.user_id)
+        .having(sa.func.count(db.PullRequestReviewer.pull_request_reviewers_id) > 1)
+    )
+
+    for pull_request_id, user_id in duplicate_values:
+        # duplicate_occurrences contains all db records of the duplicate_value
+        # currently being processed
+        duplicate_occurrences = (
+            session
+            .query(db.PullRequestReviewer)
+            .filter(db.PullRequestReviewer.pull_request_id == pull_request_id)
+            .filter(db.PullRequestReviewer.user_id == user_id)
+        )
+        for prr in duplicate_occurrences:
+            if (pull_request_id, user_id) in seen:
+                session.delete(prr)
+            else:
+                seen.add((pull_request_id, user_id))
+
+    session.commit()
+
+    # after deleting all duplicates, add the unique constraint
+    with op.batch_alter_table('pull_request_reviewers', schema=None) as batch_op:
+        batch_op.create_unique_constraint(batch_op.f('uq_pull_request_reviewers_pull_request_id'), ['pull_request_id', 'user_id'])
+
+
+def downgrade():
+    with op.batch_alter_table('pull_request_reviewers', schema=None) as batch_op:
+        batch_op.drop_constraint(batch_op.f('uq_pull_request_reviewers_pull_request_id'), type_='unique')
diff -r c387989f868f -r 3669e58f3002 kallithea/bin/kallithea_cli_base.py
--- a/kallithea/bin/kallithea_cli_base.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/bin/kallithea_cli_base.py	Fri Oct 30 23:44:18 2020 +0100
@@ -23,7 +23,7 @@
 import paste.deploy

 import kallithea
-import kallithea.config.middleware
+import kallithea.config.application


 # kallithea_cli is usually invoked through the 'kallithea-cli' wrapper script
@@ -53,10 +53,10 @@
 def cli():
     """Various commands to manage a Kallithea instance."""

-def register_command(config_file=False, config_file_initialize_app=False, hidden=False):
+def register_command(needs_config_file=False, config_file_initialize_app=False, hidden=False):
     """Register a kallithea-cli subcommand.

-    If one of the config_file flags are true, a config file must be specified
+    If one of the needs_config_file flags are true, a config file must be specified
     with -c and it is read and logging is configured. The configuration is
     available in the kallithea.CONFIG dict.
@@ -64,20 +64,24 @@
     (including tg.config), and database access will also be fully initialized.
     """
     cli_command = cli.command(hidden=hidden)
-    if config_file or config_file_initialize_app:
+    if needs_config_file or config_file_initialize_app:
         def annotator(annotated):
             @click.option('--config_file', '-c', help="Path to .ini file with app configuration.",
                           type=click.Path(dir_okay=False, exists=True, readable=True), required=True)
             @functools.wraps(annotated)  # reuse meta data from the wrapped function so click can see other options
             def runtime_wrapper(config_file, *args, **kwargs):
                 path_to_ini_file = os.path.realpath(config_file)
-                kallithea.CONFIG = paste.deploy.appconfig('config:' + path_to_ini_file)
+                config = paste.deploy.appconfig('config:' + path_to_ini_file)
                 cp = configparser.ConfigParser(strict=False)
                 cp.read_string(read_config(path_to_ini_file, strip_section_prefix=annotated.__name__))
                 logging.config.fileConfig(cp, {'__file__': path_to_ini_file, 'here': os.path.dirname(path_to_ini_file)})
                 if config_file_initialize_app:
-                    kallithea.config.middleware.make_app(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
+                    if needs_config_file:  # special case for db creation: also call annotated function (with config parameter) *before* app initialization
+                        annotated(*args, config=config, **kwargs)
+                    kallithea.config.application.make_app(config.global_conf, **config.local_conf)
+                else:
+                    kallithea.CONFIG = dict(config)  # config is a dict subclass
                 return annotated(*args, **kwargs)
             return cli_command(runtime_wrapper)
         return annotator
diff -r c387989f868f -r 3669e58f3002 kallithea/bin/kallithea_cli_db.py
--- a/kallithea/bin/kallithea_cli_db.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/bin/kallithea_cli_db.py	Fri Oct 30 23:44:18 2020 +0100
@@ -16,10 +16,12 @@
 import kallithea
 import kallithea.bin.kallithea_cli_base as cli_base
 from kallithea.lib.db_manage import DbManage
-from kallithea.model.meta import Session
+from kallithea.model import meta


-@cli_base.register_command(config_file=True)
+@cli_base.register_command(needs_config_file=True, config_file_initialize_app=True)
+@click.option('--reuse/--no-reuse', default=False,
+              help='Reuse and clean existing database instead of dropping and creating (default: no reuse)')
 @click.option('--user', help='Username of administrator account.')
 @click.option('--password', help='Password for administrator account.')
 @click.option('--email', help='Email address of administrator account.')
@@ -28,7 +30,7 @@
 @click.option('--force-no', is_flag=True, help='Answer no to every question.')
 @click.option('--public-access/--no-public-access', default=True,
               help='Enable/disable public access on this installation (default: enable)')
-def db_create(user, password, email, repos, force_yes, force_no, public_access):
+def db_create(user, password, email, repos, force_yes, force_no, public_access, reuse, config=None):
     """Initialize the database.

     Create all required tables in the database specified in the configuration
@@ -37,44 +39,43 @@

     You can pass the answers to all questions as options to this command.
     """
-    dbconf = kallithea.CONFIG['sqlalchemy.url']
+    if config is not None:  # first called with config, before app initialization
+        dbconf = config['sqlalchemy.url']

-    # force_ask should be True (yes), False (no), or None (ask)
-    if force_yes:
-        force_ask = True
-    elif force_no:
-        force_ask = False
-    else:
-        force_ask = None
+        # force_ask should be True (yes), False (no), or None (ask)
+        if force_yes:
+            force_ask = True
+        elif force_no:
+            force_ask = False
+        else:
+            force_ask = None

-    cli_args = dict(
-        username=user,
-        password=password,
-        email=email,
-        repos_location=repos,
-        force_ask=force_ask,
-        public_access=public_access,
-    )
-    dbmanage = DbManage(dbconf=dbconf, root=kallithea.CONFIG['here'],
-                        tests=False, cli_args=cli_args)
-    dbmanage.create_tables(override=True)
-    repo_root_path = dbmanage.prompt_repo_root_path(None)
-    dbmanage.create_settings(repo_root_path)
-    dbmanage.create_default_user()
-    dbmanage.admin_prompt()
-    dbmanage.create_permissions()
-    dbmanage.populate_default_permissions()
-    Session().commit()
+        cli_args = dict(
+            username=user,
+            password=password,
+            email=email,
+            repos_location=repos,
+            force_ask=force_ask,
+            public_access=public_access,
+        )
+        dbmanage = DbManage(dbconf=dbconf, root=config['here'],
+                            cli_args=cli_args)
+        dbmanage.create_tables(reuse_database=reuse)
+        repo_root_path = dbmanage.prompt_repo_root_path(None)
+        dbmanage.create_settings(repo_root_path)
+        dbmanage.create_default_user()
+        dbmanage.create_admin_user()
+        dbmanage.create_permissions()
+        dbmanage.populate_default_permissions()
+        meta.Session().commit()

-    # initial repository scan
-    kallithea.config.middleware.make_app(
-        kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
-    added, _ = kallithea.lib.utils.repo2db_mapper(kallithea.model.scm.ScmModel().repo_scan())
-    if added:
-        click.echo('Initial repository scan: added following repositories:')
-        click.echo('\t%s' % '\n\t'.join(added))
-    else:
-        click.echo('Initial repository scan: no repositories found.')
+    else:  # then called again after app initialization
+        added, _ = kallithea.lib.utils.repo2db_mapper(kallithea.model.scm.ScmModel().repo_scan())
+        if added:
+            click.echo('Initial repository scan: added following repositories:')
+            click.echo('\t%s' % '\n\t'.join(added))
+        else:
+            click.echo('Initial repository scan: no repositories found.')

-    click.echo('Database set up successfully.')
-    click.echo("Don't forget to build the front-end using 'kallithea-cli front-end-build'.")
+        click.echo('Database set up successfully.')
+        click.echo("Don't forget to build the front-end using 'kallithea-cli front-end-build'.")
diff -r c387989f868f -r 3669e58f3002 kallithea/bin/kallithea_cli_extensions.py
--- a/kallithea/bin/kallithea_cli_extensions.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/bin/kallithea_cli_extensions.py	Fri Oct 30 23:44:18 2020 +0100
@@ -29,19 +29,19 @@
 from kallithea.lib.utils2 import ask_ok


-@cli_base.register_command(config_file=True)
+@cli_base.register_command(needs_config_file=True)
 def extensions_create():
     """Write template file for extending Kallithea in Python.

-    An rcextensions directory with a __init__.py file will be created next to
-    the ini file. Local customizations in that file will survive upgrades.
-    The file contains instructions on how it can be customized.
+    Create a template `extensions.py` file next to the ini file. Local
+    customizations in that file will survive upgrades. The file contains
+    instructions on how it can be customized.
     """
     here = kallithea.CONFIG['here']
     content = pkg_resources.resource_string(
-        'kallithea', os.path.join('config', 'rcextensions', '__init__.py')
+        'kallithea', os.path.join('templates', 'py', 'extensions.py')
     )
-    ext_file = os.path.join(here, 'rcextensions', '__init__.py')
+    ext_file = os.path.join(here, 'extensions.py')
     if os.path.exists(ext_file):
         msg = ('Extension file %s already exists, do you want '
                'to overwrite it ? [y/n] ') % ext_file
diff -r c387989f868f -r 3669e58f3002 kallithea/bin/kallithea_cli_iis.py
--- a/kallithea/bin/kallithea_cli_iis.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/bin/kallithea_cli_iis.py	Fri Oct 30 23:44:18 2020 +0100
@@ -57,7 +57,7 @@
 HandleCommandLine(params)
 '''

-@cli_base.register_command(config_file=True)
+@cli_base.register_command(needs_config_file=True)
 @click.option('--virtualdir', default='/',
               help='The virtual folder to install into on IIS.')
 def iis_install(virtualdir):
diff -r c387989f868f -r 3669e58f3002 kallithea/bin/kallithea_cli_index.py
--- a/kallithea/bin/kallithea_cli_index.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/bin/kallithea_cli_index.py	Fri Oct 30 23:44:18 2020 +0100
@@ -28,7 +28,7 @@
 import kallithea.bin.kallithea_cli_base as cli_base
 from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 from kallithea.lib.pidlock import DaemonLock, LockHeld
-from kallithea.lib.utils import load_rcextensions
+from kallithea.lib.utils import load_extensions
 from kallithea.model.repo import RepoModel


@@ -41,7 +41,7 @@
     """Create or update full text search index"""
     index_location = kallithea.CONFIG['index_dir']

-    load_rcextensions(kallithea.CONFIG['here'])
+    load_extensions(kallithea.CONFIG['here'])

     if not repo_location:
         repo_location = RepoModel().repos_path
diff -r c387989f868f -r 3669e58f3002 kallithea/bin/kallithea_cli_repo.py
--- a/kallithea/bin/kallithea_cli_repo.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/bin/kallithea_cli_repo.py	Fri Oct 30 23:44:18 2020 +0100
@@ -30,8 +30,7 @@
 import kallithea.bin.kallithea_cli_base as cli_base
 from kallithea.lib.utils import REMOVED_REPO_PAT, repo2db_mapper
 from kallithea.lib.utils2 import ask_ok
-from kallithea.model.db import Repository
-from kallithea.model.meta import Session
+from kallithea.model import db, meta
 from kallithea.model.scm import ScmModel


@@ -73,11 +72,11 @@
     updated.
     """
     if not repositories:
-        repo_list = Repository.query().all()
+        repo_list = db.Repository.query().all()
     else:
         repo_names = [n.strip() for n in repositories]
-        repo_list = list(Repository.query()
-                        .filter(Repository.repo_name.in_(repo_names)))
+        repo_list = list(db.Repository.query()
+                        .filter(db.Repository.repo_name.in_(repo_names)))

     for repo in repo_list:
         # update latest revision metadata in database
@@ -86,7 +85,7 @@
         # first access
         repo.set_invalidate()

-    Session().commit()
+    meta.Session().commit()

     click.echo('Updated database with information about latest change in the following %s repositories:' % (len(repo_list)))
     click.echo('\n'.join(repo.repo_name for repo in repo_list))
diff -r c387989f868f -r 3669e58f3002 kallithea/bin/kallithea_cli_ssh.py
--- a/kallithea/bin/kallithea_cli_ssh.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/bin/kallithea_cli_ssh.py	Fri Oct 30 23:44:18 2020 +0100
@@ -21,7 +21,7 @@

 import kallithea
 import kallithea.bin.kallithea_cli_base as cli_base
-from kallithea.lib.utils2 import str2bool
+from kallithea.lib.utils2 import asbool
 from kallithea.lib.vcs.backends.git.ssh import GitSshHandler
 from kallithea.lib.vcs.backends.hg.ssh import MercurialSshHandler
 from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException
@@ -40,8 +40,7 @@
     protocol access. The access will be granted as the specified user ID, and
     logged as using the specified key ID.
     """
-    ssh_enabled = kallithea.CONFIG.get('ssh_enabled', False)
-    if not str2bool(ssh_enabled):
+    if not asbool(kallithea.CONFIG.get('ssh_enabled', False)):
         sys.stderr.write("SSH access is disabled.\n")
         return sys.exit(1)
diff -r c387989f868f -r 3669e58f3002 kallithea/config/app_cfg.py
--- a/kallithea/config/app_cfg.py	Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/config/app_cfg.py	Fri Oct 30 23:44:18 2020 +0100
@@ -28,82 +28,57 @@
 from alembic.migration import MigrationContext
 from alembic.script.base import ScriptDirectory
 from sqlalchemy import create_engine
-from tg.configuration import AppConfig
-from tg.support.converters import asbool
+from tg import FullStackApplicationConfigurator

 import kallithea.lib.locale
 import kallithea.model.base
 import kallithea.model.meta
 from kallithea.lib import celerypylons
-from kallithea.lib.middleware.https_fixup import HttpsFixup
-from kallithea.lib.middleware.permanent_repo_url import PermanentRepoUrl
-from kallithea.lib.middleware.simplegit import SimpleGit
-from kallithea.lib.middleware.simplehg import SimpleHg
-from kallithea.lib.middleware.wrapper import RequestWrapper
-from kallithea.lib.utils import check_git_version, load_rcextensions, set_app_settings, set_indexer_config, set_vcs_config
-from kallithea.lib.utils2 import str2bool
+from kallithea.lib.utils import check_git_version, load_extensions, set_app_settings, set_indexer_config, set_vcs_config
+from kallithea.lib.utils2 import asbool
 from kallithea.model import db


 log = logging.getLogger(__name__)


-class KallitheaAppConfig(AppConfig):
-    # Note: AppConfig has a misleading name, as it's not the application
-    # configuration, but the application configurator. The AppConfig values are
-    # used as a template to create the actual configuration, which might
-    # overwrite or extend the one provided by the configurator template.
+base_config = FullStackApplicationConfigurator()

-    # To make it clear, AppConfig creates the config and sets into it the same
-    # values that AppConfig itself has. Then the values from the config file and
-    # gearbox options are loaded and merged into the configuration. Then an
-    # after_init_config(conf) method of AppConfig is called for any change that
-    # might depend on options provided by configuration files.
+base_config.update_blueprint({
+    'package': kallithea,

-    def __init__(self):
-        super(KallitheaAppConfig, self).__init__()
-
-        self['package'] = kallithea
+    # Rendering Engines Configuration
+    'renderers': [
+        'json',
+        'mako',
+    ],
+    'default_renderer': 'mako',
+    'use_dotted_templatenames': False,

-        self['prefer_toscawidgets2'] = False
-        self['use_toscawidgets'] = False
-
-        self['renderers'] = []
-
-        # Enable json in expose
-        self['renderers'].append('json')
+    # Configure Sessions, store data as JSON to avoid pickle security issues
+    'session.enabled': True,
+    'session.data_serializer': 'json',

-        # Configure template rendering
-        self['renderers'].append('mako')
-        self['default_renderer'] = 'mako'
-        self['use_dotted_templatenames'] = False
+    # Configure the base SQLALchemy Setup
+    'use_sqlalchemy': True,
+    'model': kallithea.model.base,
+    'DBSession': kallithea.model.meta.Session,

-        # Configure Sessions, store data as JSON to avoid pickle security issues
-        self['session.enabled'] = True
-        self['session.data_serializer'] = 'json'
-
-        # Configure the base SQLALchemy Setup
-        self['use_sqlalchemy'] = True
-        self['model'] = kallithea.model.base
-        self['DBSession'] = kallithea.model.meta.Session
+    # Configure App without an authentication backend.
+    'auth_backend': None,

-        # Configure App without an authentication backend.
-        self['auth_backend'] = None
-
-        # Use custom error page for these errors. By default, Turbogears2 does not add
-        # 400 in this list.
-        # Explicitly listing all is considered more robust than appending to defaults,
-        # in light of possible future framework changes.
-        self['errorpage.status_codes'] = [400, 401, 403, 404]
+    # Use custom error page for these errors. By default, Turbogears2 does not add
+    # 400 in this list.
+    # Explicitly listing all is considered more robust than appending to defaults,
+    # in light of possible future framework changes.
+    'errorpage.status_codes': [400, 401, 403, 404],

-        # Disable transaction manager -- currently Kallithea takes care of transactions itself
-        self['tm.enabled'] = False
+    # Disable transaction manager -- currently Kallithea takes care of transactions itself
+    'tm.enabled': False,

-        # Set the default i18n source language so TG doesn't search beyond 'en' in Accept-Language.
-        self['i18n.lang'] = 'en'
-
-
-base_config = KallitheaAppConfig()
+    # Set the default i18n source language so TG doesn't search beyond 'en' in Accept-Language.
+    'i18n.lang': 'en',
+})

 # DebugBar, a debug toolbar for TurboGears2.
 # (https://github.com/TurboGears/tgext.debugbar)
@@ -111,13 +86,13 @@
 # 'debug = true' (not in production!)
 # See the Kallithea documentation for more information.
try: + import kajiki # only to check its existence from tgext.debugbar import enable_debugbar - import kajiki # only to check its existence assert kajiki except ImportError: pass else: - base_config['renderers'].append('kajiki') + base_config.get_blueprint_value('renderers').append('kajiki') enable_debugbar(base_config) @@ -134,7 +109,7 @@ mercurial.encoding.encoding = hgencoding if config.get('ignore_alembic_revision', False): - log.warn('database alembic revision checking is disabled') + log.warning('database alembic revision checking is disabled') else: dbconf = config['sqlalchemy.url'] alembic_cfg = alembic.config.Config() @@ -160,11 +135,11 @@ # store some globals into kallithea kallithea.DEFAULT_USER_ID = db.User.get_default_user().user_id - if str2bool(config.get('use_celery')): + if asbool(config.get('use_celery')): kallithea.CELERY_APP = celerypylons.make_app() kallithea.CONFIG = config - load_rcextensions(root_path=config['here']) + load_extensions(root_path=config['here']) set_app_settings(config) @@ -188,27 +163,3 @@ tg.hooks.register('configure_new_app', setup_configuration) - - -def setup_application(app): - config = app.config - - # we want our low level middleware to get to the request ASAP. We don't - # need any stack middleware in them - especially no StatusCodeRedirect buffering - app = SimpleHg(app, config) - app = SimpleGit(app, config) - - # Enable https redirects based on HTTP_X_URL_SCHEME set by proxy - if any(asbool(config.get(x)) for x in ['https_fixup', 'force_https', 'use_htsts']): - app = HttpsFixup(app, config) - - app = PermanentRepoUrl(app, config) - - # Optional and undocumented wrapper - gives more verbose request/response logging, but has a slight overhead - if str2bool(config.get('use_wsgi_wrapper')): - app = RequestWrapper(app, config) - - return app - - -tg.hooks.register('before_config', setup_application) diff -r c387989f868f -r 3669e58f3002 kallithea/config/application.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/config/application.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""WSGI middleware initialization for the Kallithea application.""" + +from kallithea.config.app_cfg import base_config +from kallithea.config.middleware.https_fixup import HttpsFixup +from kallithea.config.middleware.permanent_repo_url import PermanentRepoUrl +from kallithea.config.middleware.simplegit import SimpleGit +from kallithea.config.middleware.simplehg import SimpleHg +from kallithea.config.middleware.wrapper import RequestWrapper +from kallithea.lib.utils2 import asbool + + +__all__ = ['make_app'] + + +def wrap_app(app): + """Wrap the TG WSGI application in Kallithea middleware""" + config = app.config + + # we want our low level middleware to get to the request ASAP. 
We don't + # need any stack middleware in them - especially no StatusCodeRedirect buffering + app = SimpleHg(app, config) + app = SimpleGit(app, config) + + # Enable https redirects based on HTTP_X_URL_SCHEME set by proxy + if any(asbool(config.get(x)) for x in ['https_fixup', 'force_https', 'use_htsts']): + app = HttpsFixup(app, config) + + app = PermanentRepoUrl(app, config) + + # Optional and undocumented wrapper - gives more verbose request/response logging, but has a slight overhead + if asbool(config.get('use_wsgi_wrapper')): + app = RequestWrapper(app, config) + + return app + + +def make_app(global_conf, **app_conf): + """ + Set up Kallithea with the settings found in the PasteDeploy configuration + file used. + + :param global_conf: The global settings for Kallithea (those + defined under the ``[DEFAULT]`` section). + :return: The Kallithea application with all the relevant middleware + loaded. + + This is the PasteDeploy factory for the Kallithea application. + + ``app_conf`` contains all the application-specific settings (those defined + under ``[app:main]``. + """ + assert app_conf.get('sqlalchemy.url') # must be called with a Kallithea .ini file, which for example must have this config option + assert global_conf.get('here') and global_conf.get('__file__') # app config should be initialized the paste way ... + + return base_config.make_wsgi_app(global_conf, app_conf, wrap_app=wrap_app) diff -r c387989f868f -r 3669e58f3002 kallithea/config/conf.py --- a/kallithea/config/conf.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -kallithea.config.conf -~~~~~~~~~~~~~~~~~~~~~ - -Various config settings for Kallithea - -This file was forked by the Kallithea project in July 2014. -Original author and date, and relevant copyright and licensing information is below: -:created_on: Mar 7, 2012 -:author: marcink -:copyright: (c) 2013 RhodeCode GmbH, and others. -:license: GPLv3, see LICENSE.md for more details. 
-""" - -from kallithea.lib import pygmentsutils - - -# language map is also used by whoosh indexer, which for those specified -# extensions will index it's content -LANGUAGES_EXTENSIONS_MAP = pygmentsutils.get_extension_descriptions() - -# Whoosh index targets - -# Extensions we want to index content of using whoosh -INDEX_EXTENSIONS = list(LANGUAGES_EXTENSIONS_MAP) - -# Filenames we want to index content of using whoosh -INDEX_FILENAMES = pygmentsutils.get_index_filenames() - -# list of readme files to search in file tree and display in summary -# attached weights defines the search order lower is first -ALL_READMES = [ - ('readme', 0), ('README', 0), ('Readme', 0), - ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1), - ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2), - ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2), - ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2), -] - -# extension together with weights to search lower is first -RST_EXTS = [ - ('', 0), ('.rst', 1), ('.rest', 1), - ('.RST', 2), ('.REST', 2), - ('.txt', 3), ('.TXT', 3) -] - -MARKDOWN_EXTS = [ - ('.md', 1), ('.MD', 1), - ('.mkdn', 2), ('.MKDN', 2), - ('.mdown', 3), ('.MDOWN', 3), - ('.markdown', 4), ('.MARKDOWN', 4) -] - -PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)] - -ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS diff -r c387989f868f -r 3669e58f3002 kallithea/config/environment.py --- a/kallithea/config/environment.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -"""WSGI environment setup for Kallithea.""" - -from kallithea.config.app_cfg import base_config - - -__all__ = ['load_environment'] - -# Use base_config to setup the environment loader function -load_environment = base_config.make_load_environment() diff -r c387989f868f -r 3669e58f3002 kallithea/config/middleware.py --- a/kallithea/config/middleware.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -"""WSGI middleware initialization for the Kallithea application.""" - -from kallithea.config.app_cfg import base_config -from kallithea.config.environment import load_environment - - -__all__ = ['make_app'] - -# Use base_config to setup the necessary PasteDeploy application factory. 
-# make_base_app will wrap the TurboGears2 app with all the middleware it needs. -make_base_app = base_config.setup_tg_wsgi_app(load_environment) - - -def make_app(global_conf, full_stack=True, **app_conf): - """ - Set up Kallithea with the settings found in the PasteDeploy configuration - file used. - - :param global_conf: The global settings for Kallithea (those - defined under the ``[DEFAULT]`` section). - :type global_conf: dict - :param full_stack: Should the whole TurboGears2 stack be set up? - :type full_stack: str or bool - :return: The Kallithea application with all the relevant middleware - loaded. - - This is the PasteDeploy factory for the Kallithea application. - - ``app_conf`` contains all the application-specific settings (those defined - under ``[app:main]``. - """ - assert app_conf.get('sqlalchemy.url') # must be called with a Kallithea .ini file, which for example must have this config option - assert global_conf.get('here') and global_conf.get('__file__') # app config should be initialized the paste way ... - return make_base_app(global_conf, full_stack=full_stack, **app_conf) diff -r c387989f868f -r 3669e58f3002 kallithea/config/middleware/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/config/middleware/__init__.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . diff -r c387989f868f -r 3669e58f3002 kallithea/config/middleware/appenlight.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/config/middleware/appenlight.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +""" +kallithea.lib.middleware.appenlight +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +middleware to handle appenlight publishing of errors + +This file was forked by the Kallithea project in July 2014. +Original author and date, and relevant copyright and licensing information is below: +:created_on: October 18, 2012 +:author: marcink +:copyright: (c) 2013 RhodeCode GmbH, and others. +:license: GPLv3, see LICENSE.md for more details. 
+""" + + +try: + from appenlight_client import make_appenlight_middleware +except ImportError: + AppEnlight = None +else: + AppEnlight = make_appenlight_middleware diff -r c387989f868f -r 3669e58f3002 kallithea/config/middleware/https_fixup.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/config/middleware/https_fixup.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +""" +kallithea.lib.middleware.https_fixup +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +middleware to handle https correctly + +This file was forked by the Kallithea project in July 2014. +Original author and date, and relevant copyright and licensing information is below: +:created_on: May 23, 2010 +:author: marcink +:copyright: (c) 2013 RhodeCode GmbH, and others. +:license: GPLv3, see LICENSE.md for more details. +""" + + +from kallithea.lib.utils2 import asbool + + +class HttpsFixup(object): + + def __init__(self, app, config): + self.application = app + self.config = config + + def __call__(self, environ, start_response): + self.__fixup(environ) + debug = asbool(self.config.get('debug')) + is_ssl = environ['wsgi.url_scheme'] == 'https' + + def custom_start_response(status, headers, exc_info=None): + if is_ssl and asbool(self.config.get('use_htsts')) and not debug: + headers.append(('Strict-Transport-Security', + 'max-age=8640000; includeSubDomains')) + return start_response(status, headers, exc_info) + + return self.application(environ, custom_start_response) + + def __fixup(self, environ): + """ + Function to fixup the environ as needed. In order to use this + middleware you should set this header inside your + proxy ie. nginx, apache etc. + """ + # DETECT PROTOCOL ! + if 'HTTP_X_URL_SCHEME' in environ: + proto = environ.get('HTTP_X_URL_SCHEME') + elif 'HTTP_X_FORWARDED_SCHEME' in environ: + proto = environ.get('HTTP_X_FORWARDED_SCHEME') + elif 'HTTP_X_FORWARDED_PROTO' in environ: + proto = environ.get('HTTP_X_FORWARDED_PROTO') + else: + proto = 'http' + org_proto = proto + + # if we have force, just override + if asbool(self.config.get('force_https')): + proto = 'https' + + environ['wsgi.url_scheme'] = proto + environ['wsgi._org_proto'] = org_proto diff -r c387989f868f -r 3669e58f3002 kallithea/config/middleware/permanent_repo_url.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/config/middleware/permanent_repo_url.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +""" +kallithea.lib.middleware.permanent_repo_url +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +middleware to handle permanent repo URLs, replacing PATH_INFO '/_123/yada' with +'/name/of/repo/yada' after looking 123 up in the database. +""" + + +from kallithea.lib.utils import fix_repo_id_name +from kallithea.lib.utils2 import safe_bytes, safe_str + + +class PermanentRepoUrl(object): + + def __init__(self, app, config): + self.application = app + self.config = config + + def __call__(self, environ, start_response): + # Extract path_info as get_path_info does, but do it explicitly because + # we also have to do the reverse operation when patching it back in + path_info = safe_str(environ['PATH_INFO'].encode('latin1')) + if path_info.startswith('/'): # it must + path_info = '/' + fix_repo_id_name(path_info[1:]) + environ['PATH_INFO'] = safe_bytes(path_info).decode('latin1') + + return self.application(environ, start_response) diff -r c387989f868f -r 3669e58f3002 kallithea/config/middleware/pygrack.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/config/middleware/pygrack.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,229 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +""" +kallithea.lib.middleware.pygrack +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Python implementation of git-http-backend's Smart HTTP protocol + +Based on original code from git_http_backend.py project. + +Copyright (c) 2010 Daniel Dotsenko +Copyright (c) 2012 Marcin Kuzminski + +This file was forked by the Kallithea project in July 2014. 
+""" + +import logging +import os +import socket +import traceback + +from dulwich.server import update_server_info +from dulwich.web import GunzipFilter, LimitedInputFilter +from webob import Request, Response, exc + +import kallithea +from kallithea.lib.utils2 import ascii_bytes +from kallithea.lib.vcs import subprocessio + + +log = logging.getLogger(__name__) + + +class FileWrapper(object): + + def __init__(self, fd, content_length): + self.fd = fd + self.content_length = content_length + self.remain = content_length + + def read(self, size): + if size <= self.remain: + try: + data = self.fd.read(size) + except socket.error: + raise IOError(self) + self.remain -= size + elif self.remain: + data = self.fd.read(self.remain) + self.remain = 0 + else: + data = None + return data + + def __repr__(self): + return '' % ( + self.fd, self.content_length, self.content_length - self.remain + ) + + +class GitRepository(object): + git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs']) + commands = ['git-upload-pack', 'git-receive-pack'] + + def __init__(self, repo_name, content_path): + files = set([f.lower() for f in os.listdir(content_path)]) + if not (self.git_folder_signature.intersection(files) + == self.git_folder_signature): + raise OSError('%s missing git signature' % content_path) + self.content_path = content_path + self.valid_accepts = ['application/x-%s-result' % + c for c in self.commands] + self.repo_name = repo_name + + def _get_fixedpath(self, path): + """ + Small fix for repo_path + + :param path: + """ + assert path.startswith('/' + self.repo_name + '/') + return path[len(self.repo_name) + 2:].strip('/') + + def inforefs(self, req, environ): + """ + WSGI Response producer for HTTP GET Git Smart + HTTP /info/refs request. + """ + + git_command = req.GET.get('service') + if git_command not in self.commands: + log.debug('command %s not allowed', git_command) + return exc.HTTPMethodNotAllowed() + + # From Documentation/technical/http-protocol.txt shipped with Git: + # + # Clients MUST verify the first pkt-line is `# service=$servicename`. + # Servers MUST set $servicename to be the request parameter value. + # Servers SHOULD include an LF at the end of this line. + # Clients MUST ignore an LF at the end of the line. + # + # smart_reply = PKT-LINE("# service=$servicename" LF) + # ref_list + # "0000" + server_advert = '# service=%s\n' % git_command + packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower() + _git_path = kallithea.CONFIG.get('git_path', 'git') + cmd = [_git_path, git_command[4:], + '--stateless-rpc', '--advertise-refs', self.content_path] + log.debug('handling cmd %s', cmd) + try: + out = subprocessio.SubprocessIOChunker(cmd, + starting_values=[ascii_bytes(packet_len + server_advert + '0000')] + ) + except EnvironmentError as e: + log.error(traceback.format_exc()) + raise exc.HTTPExpectationFailed() + resp = Response() + resp.content_type = 'application/x-%s-advertisement' % git_command + resp.charset = None + resp.app_iter = out + return resp + + def backend(self, req, environ): + """ + WSGI Response producer for HTTP POST Git Smart HTTP requests. + Reads commands and data from HTTP POST's body. 
+ returns an iterator obj with contents of git command's + response to stdout + """ + _git_path = kallithea.CONFIG.get('git_path', 'git') + git_command = self._get_fixedpath(req.path_info) + if git_command not in self.commands: + log.debug('command %s not allowed', git_command) + return exc.HTTPMethodNotAllowed() + + if 'CONTENT_LENGTH' in environ: + inputstream = FileWrapper(environ['wsgi.input'], + req.content_length) + else: + inputstream = environ['wsgi.input'] + + gitenv = dict(os.environ) + # forget all configs + gitenv['GIT_CONFIG_NOGLOBAL'] = '1' + cmd = [_git_path, git_command[4:], '--stateless-rpc', self.content_path] + log.debug('handling cmd %s', cmd) + try: + out = subprocessio.SubprocessIOChunker( + cmd, + inputstream=inputstream, + env=gitenv, + cwd=self.content_path, + ) + except EnvironmentError as e: + log.error(traceback.format_exc()) + raise exc.HTTPExpectationFailed() + + if git_command in ['git-receive-pack']: + # updating refs manually after each push. + # Needed for pre-1.7.0.4 git clients using regular HTTP mode. + + from kallithea.lib.vcs import get_repo + repo = get_repo(self.content_path) + if repo: + update_server_info(repo._repo) + + resp = Response() + resp.content_type = 'application/x-%s-result' % git_command + resp.charset = None + resp.app_iter = out + return resp + + def __call__(self, environ, start_response): + req = Request(environ) + _path = self._get_fixedpath(req.path_info) + if _path.startswith('info/refs'): + app = self.inforefs + elif req.accept.acceptable_offers(self.valid_accepts): + app = self.backend + try: + resp = app(req, environ) + except exc.HTTPException as e: + resp = e + log.error(traceback.format_exc()) + except Exception as e: + log.error(traceback.format_exc()) + resp = exc.HTTPInternalServerError() + return resp(environ, start_response) + + +class GitDirectory(object): + + def __init__(self, repo_root, repo_name): + repo_location = os.path.join(repo_root, repo_name) + if not os.path.isdir(repo_location): + raise OSError(repo_location) + + self.content_path = repo_location + self.repo_name = repo_name + self.repo_location = repo_location + + def __call__(self, environ, start_response): + content_path = self.content_path + try: + app = GitRepository(self.repo_name, content_path) + except (AssertionError, OSError): + content_path = os.path.join(content_path, '.git') + if os.path.isdir(content_path): + app = GitRepository(self.repo_name, content_path) + else: + return exc.HTTPNotFound()(environ, start_response) + return app(environ, start_response) + + +def make_wsgi_app(repo_name, repo_root): + app = GitDirectory(repo_root, repo_name) + return GunzipFilter(LimitedInputFilter(app)) diff -r c387989f868f -r 3669e58f3002 kallithea/config/middleware/simplegit.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/config/middleware/simplegit.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+""" +kallithea.lib.middleware.simplegit +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +SimpleGit middleware for handling Git protocol requests (push/clone etc.) +It's implemented with basic auth function + +This file was forked by the Kallithea project in July 2014. +Original author and date, and relevant copyright and licensing information is below: +:created_on: Apr 28, 2010 +:author: marcink +:copyright: (c) 2013 RhodeCode GmbH, and others. +:license: GPLv3, see LICENSE.md for more details. + +""" + + +import logging +import re + +from kallithea.config.middleware.pygrack import make_wsgi_app +from kallithea.lib.base import BaseVCSController, get_path_info +from kallithea.lib.hooks import log_pull_action +from kallithea.lib.utils import make_ui +from kallithea.model import db + + +log = logging.getLogger(__name__) + + +GIT_PROTO_PAT = re.compile(r'^/(.+)/(info/refs|git-upload-pack|git-receive-pack)$') + + +cmd_mapping = { + 'git-receive-pack': 'push', + 'git-upload-pack': 'pull', +} + + +class SimpleGit(BaseVCSController): + + scm_alias = 'git' + + @classmethod + def parse_request(cls, environ): + path_info = get_path_info(environ) + m = GIT_PROTO_PAT.match(path_info) + if m is None: + return None + + class parsed_request(object): + # See https://git-scm.com/book/en/v2/Git-Internals-Transfer-Protocols#_the_smart_protocol + repo_name = m.group(1).rstrip('/') + cmd = m.group(2) + + query_string = environ['QUERY_STRING'] + if cmd == 'info/refs' and query_string.startswith('service='): + service = query_string.split('=', 1)[1] + action = cmd_mapping.get(service) + else: + service = None + action = cmd_mapping.get(cmd) + + return parsed_request + + def _make_app(self, parsed_request): + """ + Return a pygrack wsgi application. + """ + pygrack_app = make_wsgi_app(parsed_request.repo_name, self.basepath) + + def wrapper_app(environ, start_response): + if (parsed_request.cmd == 'info/refs' and + parsed_request.service == 'git-upload-pack' + ): + baseui = make_ui() + repo = db.Repository.get_by_repo_name(parsed_request.repo_name) + scm_repo = repo.scm_instance + # Run hooks, like Mercurial outgoing.pull_logger does + log_pull_action(ui=baseui, repo=scm_repo._repo) + # Note: push hooks are handled by post-receive hook + + return pygrack_app(environ, start_response) + + return wrapper_app diff -r c387989f868f -r 3669e58f3002 kallithea/config/middleware/simplehg.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/config/middleware/simplehg.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +""" +kallithea.lib.middleware.simplehg +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +SimpleHg middleware for handling Mercurial protocol requests (push/clone etc.). +It's implemented with basic auth function + +This file was forked by the Kallithea project in July 2014. 
+Original author and date, and relevant copyright and licensing information is below: +:created_on: Apr 28, 2010 +:author: marcink +:copyright: (c) 2013 RhodeCode GmbH, and others. +:license: GPLv3, see LICENSE.md for more details. + +""" + + +import logging +import os +import urllib.parse + +import mercurial.hgweb + +from kallithea.lib.base import BaseVCSController, get_path_info +from kallithea.lib.utils import make_ui +from kallithea.lib.utils2 import safe_bytes + + +log = logging.getLogger(__name__) + + +def get_header_hgarg(environ): + """Decode the special Mercurial encoding of big requests over multiple headers. + >>> get_header_hgarg({}) + '' + >>> get_header_hgarg({'HTTP_X_HGARG_0': ' ', 'HTTP_X_HGARG_1': 'a','HTTP_X_HGARG_2': '','HTTP_X_HGARG_3': 'b+c %20'}) + 'ab+c %20' + """ + chunks = [] + i = 1 + while True: + v = environ.get('HTTP_X_HGARG_%d' % i) + if v is None: + break + chunks.append(v) + i += 1 + return ''.join(chunks) + + +cmd_mapping = { + # 'batch' is not in this list - it is handled explicitly + 'between': 'pull', + 'branches': 'pull', + 'branchmap': 'pull', + 'capabilities': 'pull', + 'changegroup': 'pull', + 'changegroupsubset': 'pull', + 'changesetdata': 'pull', + 'clonebundles': 'pull', + 'debugwireargs': 'pull', + 'filedata': 'pull', + 'getbundle': 'pull', + 'getlfile': 'pull', + 'heads': 'pull', + 'hello': 'pull', + 'known': 'pull', + 'lheads': 'pull', + 'listkeys': 'pull', + 'lookup': 'pull', + 'manifestdata': 'pull', + 'narrow_widen': 'pull', + 'protocaps': 'pull', + 'statlfile': 'pull', + 'stream_out': 'pull', + 'pushkey': 'push', + 'putlfile': 'push', + 'unbundle': 'push', + } + + +class SimpleHg(BaseVCSController): + + scm_alias = 'hg' + + @classmethod + def parse_request(cls, environ): + http_accept = environ.get('HTTP_ACCEPT', '') + if not http_accept.startswith('application/mercurial'): + return None + path_info = get_path_info(environ) + if not path_info.startswith('/'): # it must! + return None + + class parsed_request(object): + repo_name = path_info[1:].rstrip('/') + + query_string = environ['QUERY_STRING'] + + action = None + for qry in query_string.split('&'): + parts = qry.split('=', 1) + if len(parts) == 2 and parts[0] == 'cmd': + cmd = parts[1] + if cmd == 'batch': + hgarg = get_header_hgarg(environ) + if not hgarg.startswith('cmds='): + action = 'push' # paranoid and safe + break + action = 'pull' + for cmd_arg in hgarg[5:].split(';'): + cmd, _args = urllib.parse.unquote_plus(cmd_arg).split(' ', 1) + op = cmd_mapping.get(cmd, 'push') + if op != 'pull': + assert op == 'push' + action = 'push' + break + else: + action = cmd_mapping.get(cmd, 'push') + break # only process one cmd + + return parsed_request + + def _make_app(self, parsed_request): + """ + Make an hgweb wsgi application. 
+ """ + repo_name = parsed_request.repo_name + repo_path = os.path.join(self.basepath, repo_name) + baseui = make_ui(repo_path=repo_path) + hgweb_app = mercurial.hgweb.hgweb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui) + + def wrapper_app(environ, start_response): + environ['REPO_NAME'] = repo_name # used by mercurial.hgweb.hgweb + return hgweb_app(environ, start_response) + + return wrapper_app diff -r c387989f868f -r 3669e58f3002 kallithea/config/middleware/wrapper.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/config/middleware/wrapper.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +""" +kallithea.lib.middleware.wrapper +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Wrap app to measure request and response time ... all the way to the response +WSGI iterator has been closed. + +This file was forked by the Kallithea project in July 2014. +Original author and date, and relevant copyright and licensing information is below: +:created_on: May 23, 2013 +:author: marcink +:copyright: (c) 2013 RhodeCode GmbH, and others. +:license: GPLv3, see LICENSE.md for more details. +""" + +import logging +import time + +from kallithea.lib.base import _get_ip_addr, get_path_info + + +log = logging.getLogger(__name__) + + +class Meter: + + def __init__(self, start_response): + self._start_response = start_response + self._start = time.time() + self.status = None + self._size = 0 + + def duration(self): + return time.time() - self._start + + def start_response(self, status, response_headers, exc_info=None): + self.status = status + write = self._start_response(status, response_headers, exc_info) + def metered_write(s): + self.measure(s) + write(s) + return metered_write + + def measure(self, chunk): + self._size += len(chunk) + + def size(self): + return self._size + + +class ResultIter: + + def __init__(self, result, meter, description): + self._result_close = getattr(result, 'close', None) or (lambda: None) + self._next = iter(result).__next__ + self._meter = meter + self._description = description + + def __iter__(self): + return self + + def __next__(self): + chunk = self._next() + self._meter.measure(chunk) + return chunk + + def close(self): + self._result_close() + log.info("%s responded %r after %.3fs with %s bytes", self._description, self._meter.status, self._meter.duration(), self._meter.size()) + + +class RequestWrapper(object): + + def __init__(self, app, config): + self.application = app + self.config = config + + def __call__(self, environ, start_response): + meter = Meter(start_response) + description = "Request from %s for %s" % ( + _get_ip_addr(environ), + get_path_info(environ), + ) + log.info("%s received", description) + try: + result = self.application(environ, meter.start_response) + finally: + log.info("%s responding %r after %.3fs", description, meter.status, meter.duration()) + return ResultIter(result, meter, description) 
diff -r c387989f868f -r 3669e58f3002 kallithea/config/post_receive_tmpl.py --- a/kallithea/config/post_receive_tmpl.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,37 +0,0 @@ -"""Kallithea Git hook - -This hook is installed and maintained by Kallithea. It will be overwritten -by Kallithea - don't customize it manually! - -When Kallithea invokes Git, the KALLITHEA_EXTRAS environment variable will -contain additional info like the Kallithea instance and user info that this -hook will use. -""" - -import os -import sys - -import kallithea.lib.hooks - - -# Set output mode on windows to binary for stderr. -# This prevents python (or the windows console) from replacing \n with \r\n. -# Git doesn't display remote output lines that contain \r, -# and therefore without this modification git would display empty lines -# instead of the exception output. -if sys.platform == "win32": - import msvcrt - msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY) - -KALLITHEA_HOOK_VER = '_TMPL_' -os.environ['KALLITHEA_HOOK_VER'] = KALLITHEA_HOOK_VER - - -def main(): - repo_path = os.path.abspath('.') - git_stdin_lines = sys.stdin.readlines() - sys.exit(kallithea.lib.hooks.handle_git_post_receive(repo_path, git_stdin_lines)) - - -if __name__ == '__main__': - main() diff -r c387989f868f -r 3669e58f3002 kallithea/config/pre_receive_tmpl.py --- a/kallithea/config/pre_receive_tmpl.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,37 +0,0 @@ -"""Kallithea Git hook - -This hook is installed and maintained by Kallithea. It will be overwritten -by Kallithea - don't customize it manually! - -When Kallithea invokes Git, the KALLITHEA_EXTRAS environment variable will -contain additional info like the Kallithea instance and user info that this -hook will use. -""" - -import os -import sys - -import kallithea.lib.hooks - - -# Set output mode on windows to binary for stderr. -# This prevents python (or the windows console) from replacing \n with \r\n. -# Git doesn't display remote output lines that contain \r, -# and therefore without this modification git would display empty lines -# instead of the exception output. -if sys.platform == "win32": - import msvcrt - msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY) - -KALLITHEA_HOOK_VER = '_TMPL_' -os.environ['KALLITHEA_HOOK_VER'] = KALLITHEA_HOOK_VER - - -def main(): - repo_path = os.path.abspath('.') - git_stdin_lines = sys.stdin.readlines() - sys.exit(kallithea.lib.hooks.handle_git_pre_receive(repo_path, git_stdin_lines)) - - -if __name__ == '__main__': - main() diff -r c387989f868f -r 3669e58f3002 kallithea/config/rcextensions/__init__.py --- a/kallithea/config/rcextensions/__init__.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,225 +0,0 @@ -# Additional mappings that are not present in the pygments lexers -# used for building stats -# format is {'ext':['Names']} eg. {'py':['Python']} note: there can be -# more than one name for extension -# NOTE: that this will overide any mappings in LANGUAGES_EXTENSIONS_MAP -# build by pygments -EXTRA_MAPPINGS = {} - -# additional lexer definitions for custom files -# it's overrides pygments lexers, and uses defined name of lexer to colorize the -# files. 
Format is {'ext': 'lexer_name'} -# List of lexers can be printed running: -# python -c "import pprint;from pygments import lexers;pprint.pprint([(x[0], x[1]) for x in lexers.get_all_lexers()]);" - -EXTRA_LEXERS = {} - -#============================================================================== -# WHOOSH INDEX EXTENSIONS -#============================================================================== -# if INDEX_EXTENSIONS is [] it'll use pygments lexers extensions by default. -# To set your own just add to this list extensions to index with content -INDEX_EXTENSIONS = [] - -# additional extensions for indexing besides the default from pygments -# those gets added to INDEX_EXTENSIONS -EXTRA_INDEX_EXTENSIONS = [] - - -#============================================================================== -# POST CREATE REPOSITORY HOOK -#============================================================================== -# this function will be executed after each repository is created -def _crrepohook(*args, **kwargs): - """ - Post create repository HOOK - kwargs available: - :param repo_name: - :param repo_type: - :param description: - :param private: - :param created_on: - :param enable_downloads: - :param repo_id: - :param owner_id: - :param enable_statistics: - :param clone_uri: - :param fork_id: - :param group_id: - :param created_by: - """ - return 0 - - -CREATE_REPO_HOOK = _crrepohook - - -#============================================================================== -# PRE CREATE USER HOOK -#============================================================================== -# this function will be executed before each user is created -def _pre_cruserhook(*args, **kwargs): - """ - Pre create user HOOK, it returns a tuple of bool, reason. - If bool is False the user creation will be stopped and reason - will be displayed to the user. 
- kwargs available: - :param username: - :param password: - :param email: - :param firstname: - :param lastname: - :param active: - :param admin: - :param created_by: - """ - reason = 'allowed' - return True, reason - - -PRE_CREATE_USER_HOOK = _pre_cruserhook - -#============================================================================== -# POST CREATE USER HOOK -#============================================================================== -# this function will be executed after each user is created -def _cruserhook(*args, **kwargs): - """ - Post create user HOOK - kwargs available: - :param username: - :param full_name_or_username: - :param full_contact: - :param user_id: - :param name: - :param firstname: - :param short_contact: - :param admin: - :param lastname: - :param ip_addresses: - :param ldap_dn: - :param email: - :param api_key: - :param last_login: - :param full_name: - :param active: - :param password: - :param emails: - :param created_by: - """ - return 0 - - -CREATE_USER_HOOK = _cruserhook - - -#============================================================================== -# POST DELETE REPOSITORY HOOK -#============================================================================== -# this function will be executed after each repository deletion -def _dlrepohook(*args, **kwargs): - """ - Post delete repository HOOK - kwargs available: - :param repo_name: - :param repo_type: - :param description: - :param private: - :param created_on: - :param enable_downloads: - :param repo_id: - :param owner_id: - :param enable_statistics: - :param clone_uri: - :param fork_id: - :param group_id: - :param deleted_by: - :param deleted_on: - """ - return 0 - - -DELETE_REPO_HOOK = _dlrepohook - - -#============================================================================== -# POST DELETE USER HOOK -#============================================================================== -# this function will be executed after each user is deleted -def _dluserhook(*args, **kwargs): - """ - Post delete user HOOK - kwargs available: - :param username: - :param full_name_or_username: - :param full_contact: - :param user_id: - :param name: - :param firstname: - :param short_contact: - :param admin: - :param lastname: - :param ip_addresses: - :param ldap_dn: - :param email: - :param api_key: - :param last_login: - :param full_name: - :param active: - :param password: - :param emails: - :param deleted_by: - """ - return 0 - - -DELETE_USER_HOOK = _dluserhook - - -#============================================================================== -# POST PUSH HOOK -#============================================================================== - -# this function will be executed after each push it's executed after the -# build-in hook that Kallithea uses for logging pushes -def _pushhook(*args, **kwargs): - """ - Post push hook - kwargs available: - - :param config: path to .ini config used - :param scm: type of VS 'git' or 'hg' - :param username: name of user who pushed - :param ip: ip of who pushed - :param action: push - :param repository: repository name - :param pushed_revs: list of pushed revisions - """ - return 0 - - -PUSH_HOOK = _pushhook - - -#============================================================================== -# POST PULL HOOK -#============================================================================== - -# this function will be executed after each push it's executed after the -# build-in hook that Kallithea uses for logging pulls -def _pullhook(*args, **kwargs): - """ - Post pull 
hook - kwargs available:: - - :param config: path to .ini config used - :param scm: type of VS 'git' or 'hg' - :param username: name of user who pulled - :param ip: ip of who pulled - :param action: pull - :param repository: repository name - """ - return 0 - - -PULL_HOOK = _pullhook diff -r c387989f868f -r 3669e58f3002 kallithea/config/routing.py --- a/kallithea/config/routing.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,803 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -Routes configuration - -The more specific and detailed routes should be defined first so they -may take precedent over the more generic routes. For more information -refer to the routes manual at http://routes.groovie.org/docs/ -""" - -import routes -from tg import request - -from kallithea.lib.utils2 import safe_str - - -# prefix for non repository related links needs to be prefixed with `/` -ADMIN_PREFIX = '/_admin' - - -class Mapper(routes.Mapper): - """ - Subclassed Mapper with routematch patched to decode "unicode" str url to - *real* unicode str before applying matches and invoking controller methods. - """ - - def routematch(self, url=None, environ=None): - """ - routematch that also decode url from "fake bytes" to real unicode - string before matching and invoking controllers. - """ - # Process url like get_path_info does ... but PATH_INFO has already - # been retrieved from environ and is passed, so - let's just use that - # instead. - url = safe_str(url.encode('latin1')) - return super().routematch(url=url, environ=environ) - - -def make_map(config): - """Create, configure and return the routes Mapper""" - rmap = Mapper(directory=config['paths']['controllers'], - always_scan=config['debug']) - rmap.minimization = False - rmap.explicit = False - - from kallithea.lib.utils import is_valid_repo, is_valid_repo_group - - def check_repo(environ, match_dict): - """ - Check for valid repository for proper 404 handling. - Also, a bit of side effect modifying match_dict ... 
- """ - if match_dict.get('f_path'): - # fix for multiple initial slashes that causes errors - match_dict['f_path'] = match_dict['f_path'].lstrip('/') - - return is_valid_repo(match_dict['repo_name'], config['base_path']) - - def check_group(environ, match_dict): - """ - check for valid repository group for proper 404 handling - - :param environ: - :param match_dict: - """ - repo_group_name = match_dict.get('group_name') - return is_valid_repo_group(repo_group_name, config['base_path']) - - def check_group_skip_path(environ, match_dict): - """ - check for valid repository group for proper 404 handling, but skips - verification of existing path - - :param environ: - :param match_dict: - """ - repo_group_name = match_dict.get('group_name') - return is_valid_repo_group(repo_group_name, config['base_path'], - skip_path_check=True) - - def check_user_group(environ, match_dict): - """ - check for valid user group for proper 404 handling - - :param environ: - :param match_dict: - """ - return True - - def check_int(environ, match_dict): - return match_dict.get('id').isdigit() - - #========================================================================== - # CUSTOM ROUTES HERE - #========================================================================== - - # MAIN PAGE - rmap.connect('home', '/', controller='home') - rmap.connect('about', '/about', controller='home', action='about') - rmap.redirect('/favicon.ico', '/images/favicon.ico') - rmap.connect('repo_switcher_data', '/_repos', controller='home', - action='repo_switcher_data') - rmap.connect('users_and_groups_data', '/_users_and_groups', controller='home', - action='users_and_groups_data') - - rmap.connect('rst_help', - "http://docutils.sourceforge.net/docs/user/rst/quickref.html", - _static=True) - rmap.connect('kallithea_project_url', "https://kallithea-scm.org/", _static=True) - rmap.connect('issues_url', 'https://bitbucket.org/conservancy/kallithea/issues', _static=True) - - # ADMIN REPOSITORY ROUTES - with rmap.submapper(path_prefix=ADMIN_PREFIX, - controller='admin/repos') as m: - m.connect("repos", "/repos", - action="create", conditions=dict(method=["POST"])) - m.connect("repos", "/repos", - conditions=dict(method=["GET"])) - m.connect("new_repo", "/create_repository", - action="create_repository", conditions=dict(method=["GET"])) - m.connect("update_repo", "/repos/{repo_name:.*?}", - action="update", conditions=dict(method=["POST"], - function=check_repo)) - m.connect("delete_repo", "/repos/{repo_name:.*?}/delete", - action="delete", conditions=dict(method=["POST"])) - - # ADMIN REPOSITORY GROUPS ROUTES - with rmap.submapper(path_prefix=ADMIN_PREFIX, - controller='admin/repo_groups') as m: - m.connect("repos_groups", "/repo_groups", - action="create", conditions=dict(method=["POST"])) - m.connect("repos_groups", "/repo_groups", - conditions=dict(method=["GET"])) - m.connect("new_repos_group", "/repo_groups/new", - action="new", conditions=dict(method=["GET"])) - m.connect("update_repos_group", "/repo_groups/{group_name:.*?}", - action="update", conditions=dict(method=["POST"], - function=check_group)) - - m.connect("repos_group", "/repo_groups/{group_name:.*?}", - action="show", conditions=dict(method=["GET"], - function=check_group)) - - # EXTRAS REPO GROUP ROUTES - m.connect("edit_repo_group", "/repo_groups/{group_name:.*?}/edit", - action="edit", - conditions=dict(method=["GET"], function=check_group)) - - m.connect("edit_repo_group_advanced", "/repo_groups/{group_name:.*?}/edit/advanced", - action="edit_repo_group_advanced", - 
conditions=dict(method=["GET"], function=check_group)) - - m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions", - action="edit_repo_group_perms", - conditions=dict(method=["GET"], function=check_group)) - m.connect("edit_repo_group_perms_update", "/repo_groups/{group_name:.*?}/edit/permissions", - action="update_perms", - conditions=dict(method=["POST"], function=check_group)) - m.connect("edit_repo_group_perms_delete", "/repo_groups/{group_name:.*?}/edit/permissions/delete", - action="delete_perms", - conditions=dict(method=["POST"], function=check_group)) - - m.connect("delete_repo_group", "/repo_groups/{group_name:.*?}/delete", - action="delete", conditions=dict(method=["POST"], - function=check_group_skip_path)) - - # ADMIN USER ROUTES - with rmap.submapper(path_prefix=ADMIN_PREFIX, - controller='admin/users') as m: - m.connect("new_user", "/users/new", - action="create", conditions=dict(method=["POST"])) - m.connect("users", "/users", - conditions=dict(method=["GET"])) - m.connect("formatted_users", "/users.{format}", - conditions=dict(method=["GET"])) - m.connect("new_user", "/users/new", - action="new", conditions=dict(method=["GET"])) - m.connect("update_user", "/users/{id}", - action="update", conditions=dict(method=["POST"])) - m.connect("delete_user", "/users/{id}/delete", - action="delete", conditions=dict(method=["POST"])) - m.connect("edit_user", "/users/{id}/edit", - action="edit", conditions=dict(method=["GET"])) - - # EXTRAS USER ROUTES - m.connect("edit_user_advanced", "/users/{id}/edit/advanced", - action="edit_advanced", conditions=dict(method=["GET"])) - - m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys", - action="edit_api_keys", conditions=dict(method=["GET"])) - m.connect("edit_user_api_keys_update", "/users/{id}/edit/api_keys", - action="add_api_key", conditions=dict(method=["POST"])) - m.connect("edit_user_api_keys_delete", "/users/{id}/edit/api_keys/delete", - action="delete_api_key", conditions=dict(method=["POST"])) - - m.connect("edit_user_ssh_keys", "/users/{id}/edit/ssh_keys", - action="edit_ssh_keys", conditions=dict(method=["GET"])) - m.connect("edit_user_ssh_keys", "/users/{id}/edit/ssh_keys", - action="ssh_keys_add", conditions=dict(method=["POST"])) - m.connect("edit_user_ssh_keys_delete", "/users/{id}/edit/ssh_keys/delete", - action="ssh_keys_delete", conditions=dict(method=["POST"])) - - m.connect("edit_user_perms", "/users/{id}/edit/permissions", - action="edit_perms", conditions=dict(method=["GET"])) - m.connect("edit_user_perms_update", "/users/{id}/edit/permissions", - action="update_perms", conditions=dict(method=["POST"])) - - m.connect("edit_user_emails", "/users/{id}/edit/emails", - action="edit_emails", conditions=dict(method=["GET"])) - m.connect("edit_user_emails_update", "/users/{id}/edit/emails", - action="add_email", conditions=dict(method=["POST"])) - m.connect("edit_user_emails_delete", "/users/{id}/edit/emails/delete", - action="delete_email", conditions=dict(method=["POST"])) - - m.connect("edit_user_ips", "/users/{id}/edit/ips", - action="edit_ips", conditions=dict(method=["GET"])) - m.connect("edit_user_ips_update", "/users/{id}/edit/ips", - action="add_ip", conditions=dict(method=["POST"])) - m.connect("edit_user_ips_delete", "/users/{id}/edit/ips/delete", - action="delete_ip", conditions=dict(method=["POST"])) - - # ADMIN USER GROUPS REST ROUTES - with rmap.submapper(path_prefix=ADMIN_PREFIX, - controller='admin/user_groups') as m: - m.connect("users_groups", "/user_groups", - 
action="create", conditions=dict(method=["POST"])) - m.connect("users_groups", "/user_groups", - conditions=dict(method=["GET"])) - m.connect("new_users_group", "/user_groups/new", - action="new", conditions=dict(method=["GET"])) - m.connect("update_users_group", "/user_groups/{id}", - action="update", conditions=dict(method=["POST"])) - m.connect("delete_users_group", "/user_groups/{id}/delete", - action="delete", conditions=dict(method=["POST"])) - m.connect("edit_users_group", "/user_groups/{id}/edit", - action="edit", conditions=dict(method=["GET"]), - function=check_user_group) - - # EXTRAS USER GROUP ROUTES - m.connect("edit_user_group_default_perms", "/user_groups/{id}/edit/default_perms", - action="edit_default_perms", conditions=dict(method=["GET"])) - m.connect("edit_user_group_default_perms_update", "/user_groups/{id}/edit/default_perms", - action="update_default_perms", conditions=dict(method=["POST"])) - - m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms", - action="edit_perms", conditions=dict(method=["GET"])) - m.connect("edit_user_group_perms_update", "/user_groups/{id}/edit/perms", - action="update_perms", conditions=dict(method=["POST"])) - m.connect("edit_user_group_perms_delete", "/user_groups/{id}/edit/perms/delete", - action="delete_perms", conditions=dict(method=["POST"])) - - m.connect("edit_user_group_advanced", "/user_groups/{id}/edit/advanced", - action="edit_advanced", conditions=dict(method=["GET"])) - - m.connect("edit_user_group_members", "/user_groups/{id}/edit/members", - action="edit_members", conditions=dict(method=["GET"])) - - # ADMIN PERMISSIONS ROUTES - with rmap.submapper(path_prefix=ADMIN_PREFIX, - controller='admin/permissions') as m: - m.connect("admin_permissions", "/permissions", - action="permission_globals", conditions=dict(method=["POST"])) - m.connect("admin_permissions", "/permissions", - action="permission_globals", conditions=dict(method=["GET"])) - - m.connect("admin_permissions_ips", "/permissions/ips", - action="permission_ips", conditions=dict(method=["GET"])) - - m.connect("admin_permissions_perms", "/permissions/perms", - action="permission_perms", conditions=dict(method=["GET"])) - - # ADMIN DEFAULTS ROUTES - with rmap.submapper(path_prefix=ADMIN_PREFIX, - controller='admin/defaults') as m: - m.connect('defaults', '/defaults') - m.connect('defaults_update', 'defaults/{id}/update', - action="update", conditions=dict(method=["POST"])) - - # ADMIN AUTH SETTINGS - rmap.connect('auth_settings', '%s/auth' % ADMIN_PREFIX, - controller='admin/auth_settings', action='auth_settings', - conditions=dict(method=["POST"])) - rmap.connect('auth_home', '%s/auth' % ADMIN_PREFIX, - controller='admin/auth_settings') - - # ADMIN SETTINGS ROUTES - with rmap.submapper(path_prefix=ADMIN_PREFIX, - controller='admin/settings') as m: - m.connect("admin_settings", "/settings", - action="settings_vcs", conditions=dict(method=["POST"])) - m.connect("admin_settings", "/settings", - action="settings_vcs", conditions=dict(method=["GET"])) - - m.connect("admin_settings_mapping", "/settings/mapping", - action="settings_mapping", conditions=dict(method=["POST"])) - m.connect("admin_settings_mapping", "/settings/mapping", - action="settings_mapping", conditions=dict(method=["GET"])) - - m.connect("admin_settings_global", "/settings/global", - action="settings_global", conditions=dict(method=["POST"])) - m.connect("admin_settings_global", "/settings/global", - action="settings_global", conditions=dict(method=["GET"])) - - 
m.connect("admin_settings_visual", "/settings/visual", - action="settings_visual", conditions=dict(method=["POST"])) - m.connect("admin_settings_visual", "/settings/visual", - action="settings_visual", conditions=dict(method=["GET"])) - - m.connect("admin_settings_email", "/settings/email", - action="settings_email", conditions=dict(method=["POST"])) - m.connect("admin_settings_email", "/settings/email", - action="settings_email", conditions=dict(method=["GET"])) - - m.connect("admin_settings_hooks", "/settings/hooks", - action="settings_hooks", conditions=dict(method=["POST"])) - m.connect("admin_settings_hooks_delete", "/settings/hooks/delete", - action="settings_hooks", conditions=dict(method=["POST"])) - m.connect("admin_settings_hooks", "/settings/hooks", - action="settings_hooks", conditions=dict(method=["GET"])) - - m.connect("admin_settings_search", "/settings/search", - action="settings_search", conditions=dict(method=["POST"])) - m.connect("admin_settings_search", "/settings/search", - action="settings_search", conditions=dict(method=["GET"])) - - m.connect("admin_settings_system", "/settings/system", - action="settings_system", conditions=dict(method=["POST"])) - m.connect("admin_settings_system", "/settings/system", - action="settings_system", conditions=dict(method=["GET"])) - - # ADMIN MY ACCOUNT - with rmap.submapper(path_prefix=ADMIN_PREFIX, - controller='admin/my_account') as m: - - m.connect("my_account", "/my_account", - action="my_account", conditions=dict(method=["GET"])) - m.connect("my_account", "/my_account", - action="my_account", conditions=dict(method=["POST"])) - - m.connect("my_account_password", "/my_account/password", - action="my_account_password", conditions=dict(method=["GET"])) - m.connect("my_account_password", "/my_account/password", - action="my_account_password", conditions=dict(method=["POST"])) - - m.connect("my_account_repos", "/my_account/repos", - action="my_account_repos", conditions=dict(method=["GET"])) - - m.connect("my_account_watched", "/my_account/watched", - action="my_account_watched", conditions=dict(method=["GET"])) - - m.connect("my_account_perms", "/my_account/perms", - action="my_account_perms", conditions=dict(method=["GET"])) - - m.connect("my_account_emails", "/my_account/emails", - action="my_account_emails", conditions=dict(method=["GET"])) - m.connect("my_account_emails", "/my_account/emails", - action="my_account_emails_add", conditions=dict(method=["POST"])) - m.connect("my_account_emails_delete", "/my_account/emails/delete", - action="my_account_emails_delete", conditions=dict(method=["POST"])) - - m.connect("my_account_api_keys", "/my_account/api_keys", - action="my_account_api_keys", conditions=dict(method=["GET"])) - m.connect("my_account_api_keys", "/my_account/api_keys", - action="my_account_api_keys_add", conditions=dict(method=["POST"])) - m.connect("my_account_api_keys_delete", "/my_account/api_keys/delete", - action="my_account_api_keys_delete", conditions=dict(method=["POST"])) - - m.connect("my_account_ssh_keys", "/my_account/ssh_keys", - action="my_account_ssh_keys", conditions=dict(method=["GET"])) - m.connect("my_account_ssh_keys", "/my_account/ssh_keys", - action="my_account_ssh_keys_add", conditions=dict(method=["POST"])) - m.connect("my_account_ssh_keys_delete", "/my_account/ssh_keys/delete", - action="my_account_ssh_keys_delete", conditions=dict(method=["POST"])) - - # ADMIN GIST - with rmap.submapper(path_prefix=ADMIN_PREFIX, - controller='admin/gists') as m: - m.connect("gists", "/gists", - 
action="create", conditions=dict(method=["POST"])) - m.connect("gists", "/gists", - conditions=dict(method=["GET"])) - m.connect("new_gist", "/gists/new", - action="new", conditions=dict(method=["GET"])) - - m.connect("gist_delete", "/gists/{gist_id}/delete", - action="delete", conditions=dict(method=["POST"])) - m.connect("edit_gist", "/gists/{gist_id}/edit", - action="edit", conditions=dict(method=["GET", "POST"])) - m.connect("edit_gist_check_revision", "/gists/{gist_id}/edit/check_revision", - action="check_revision", conditions=dict(method=["POST"])) - - m.connect("gist", "/gists/{gist_id}", - action="show", conditions=dict(method=["GET"])) - m.connect("gist_rev", "/gists/{gist_id}/{revision}", - revision="tip", - action="show", conditions=dict(method=["GET"])) - m.connect("formatted_gist", "/gists/{gist_id}/{revision}/{format}", - revision="tip", - action="show", conditions=dict(method=["GET"])) - m.connect("formatted_gist_file", "/gists/{gist_id}/{revision}/{format}/{f_path:.*}", - revision='tip', - action="show", conditions=dict(method=["GET"])) - - # ADMIN MAIN PAGES - with rmap.submapper(path_prefix=ADMIN_PREFIX, - controller='admin/admin') as m: - m.connect('admin_home', '') - m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9. _-]*}', - action='add_repo') - #========================================================================== - # API V2 - #========================================================================== - with rmap.submapper(path_prefix=ADMIN_PREFIX, controller='api/api', - action='_dispatch') as m: - m.connect('api', '/api') - - # USER JOURNAL - rmap.connect('journal', '%s/journal' % ADMIN_PREFIX, - controller='journal') - rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX, - controller='journal', action='journal_rss') - rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX, - controller='journal', action='journal_atom') - - rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX, - controller='journal', action="public_journal") - - rmap.connect('public_journal_rss', '%s/public_journal/rss' % ADMIN_PREFIX, - controller='journal', action="public_journal_rss") - - rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % ADMIN_PREFIX, - controller='journal', action="public_journal_rss") - - rmap.connect('public_journal_atom', - '%s/public_journal/atom' % ADMIN_PREFIX, controller='journal', - action="public_journal_atom") - - rmap.connect('public_journal_atom_old', - '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal', - action="public_journal_atom") - - rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX, - controller='journal', action='toggle_following', - conditions=dict(method=["POST"])) - - # SEARCH - rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',) - rmap.connect('search_repo_admin', '%s/search/{repo_name:.*}' % ADMIN_PREFIX, - controller='search', - conditions=dict(function=check_repo)) - rmap.connect('search_repo', '/{repo_name:.*?}/search', - controller='search', - conditions=dict(function=check_repo), - ) - - # LOGIN/LOGOUT/REGISTER/SIGN IN - rmap.connect('session_csrf_secret_token', '%s/session_csrf_secret_token' % ADMIN_PREFIX, controller='login', action='session_csrf_secret_token') - rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login') - rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login', - action='logout') - - rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login', - action='register') - - 
rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX, - controller='login', action='password_reset') - - rmap.connect('reset_password_confirmation', - '%s/password_reset_confirmation' % ADMIN_PREFIX, - controller='login', action='password_reset_confirmation') - - # FEEDS - rmap.connect('rss_feed_home', '/{repo_name:.*?}/feed/rss', - controller='feed', action='rss', - conditions=dict(function=check_repo)) - - rmap.connect('atom_feed_home', '/{repo_name:.*?}/feed/atom', - controller='feed', action='atom', - conditions=dict(function=check_repo)) - - #========================================================================== - # REPOSITORY ROUTES - #========================================================================== - rmap.connect('repo_creating_home', '/{repo_name:.*?}/repo_creating', - controller='admin/repos', action='repo_creating') - rmap.connect('repo_check_home', '/{repo_name:.*?}/repo_check_creating', - controller='admin/repos', action='repo_check') - - rmap.connect('summary_home', '/{repo_name:.*?}', - controller='summary', - conditions=dict(function=check_repo)) - - # must be here for proper group/repo catching - rmap.connect('repos_group_home', '/{group_name:.*}', - controller='admin/repo_groups', action="show_by_name", - conditions=dict(function=check_group)) - rmap.connect('repo_stats_home', '/{repo_name:.*?}/statistics', - controller='summary', action='statistics', - conditions=dict(function=check_repo)) - - rmap.connect('repo_size', '/{repo_name:.*?}/repo_size', - controller='summary', action='repo_size', - conditions=dict(function=check_repo)) - - rmap.connect('repo_refs_data', '/{repo_name:.*?}/refs-data', - controller='home', action='repo_refs_data') - - rmap.connect('changeset_home', '/{repo_name:.*?}/changeset/{revision:.*}', - controller='changeset', revision='tip', - conditions=dict(function=check_repo)) - rmap.connect('changeset_children', '/{repo_name:.*?}/changeset_children/{revision}', - controller='changeset', revision='tip', action="changeset_children", - conditions=dict(function=check_repo)) - rmap.connect('changeset_parents', '/{repo_name:.*?}/changeset_parents/{revision}', - controller='changeset', revision='tip', action="changeset_parents", - conditions=dict(function=check_repo)) - - # repo edit options - rmap.connect("edit_repo", "/{repo_name:.*?}/settings", - controller='admin/repos', action="edit", - conditions=dict(method=["GET"], function=check_repo)) - - rmap.connect("edit_repo_perms", "/{repo_name:.*?}/settings/permissions", - controller='admin/repos', action="edit_permissions", - conditions=dict(method=["GET"], function=check_repo)) - rmap.connect("edit_repo_perms_update", "/{repo_name:.*?}/settings/permissions", - controller='admin/repos', action="edit_permissions_update", - conditions=dict(method=["POST"], function=check_repo)) - rmap.connect("edit_repo_perms_revoke", "/{repo_name:.*?}/settings/permissions/delete", - controller='admin/repos', action="edit_permissions_revoke", - conditions=dict(method=["POST"], function=check_repo)) - - rmap.connect("edit_repo_fields", "/{repo_name:.*?}/settings/fields", - controller='admin/repos', action="edit_fields", - conditions=dict(method=["GET"], function=check_repo)) - rmap.connect('create_repo_fields', "/{repo_name:.*?}/settings/fields/new", - controller='admin/repos', action="create_repo_field", - conditions=dict(method=["POST"], function=check_repo)) - rmap.connect('delete_repo_fields', "/{repo_name:.*?}/settings/fields/{field_id}/delete", - controller='admin/repos', 
action="delete_repo_field", - conditions=dict(method=["POST"], function=check_repo)) - - rmap.connect("edit_repo_advanced", "/{repo_name:.*?}/settings/advanced", - controller='admin/repos', action="edit_advanced", - conditions=dict(method=["GET"], function=check_repo)) - - rmap.connect("edit_repo_advanced_journal", "/{repo_name:.*?}/settings/advanced/journal", - controller='admin/repos', action="edit_advanced_journal", - conditions=dict(method=["POST"], function=check_repo)) - - rmap.connect("edit_repo_advanced_fork", "/{repo_name:.*?}/settings/advanced/fork", - controller='admin/repos', action="edit_advanced_fork", - conditions=dict(method=["POST"], function=check_repo)) - - rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote", - controller='admin/repos', action="edit_remote", - conditions=dict(method=["GET"], function=check_repo)) - rmap.connect("edit_repo_remote_update", "/{repo_name:.*?}/settings/remote", - controller='admin/repos', action="edit_remote", - conditions=dict(method=["POST"], function=check_repo)) - - rmap.connect("edit_repo_statistics", "/{repo_name:.*?}/settings/statistics", - controller='admin/repos', action="edit_statistics", - conditions=dict(method=["GET"], function=check_repo)) - rmap.connect("edit_repo_statistics_update", "/{repo_name:.*?}/settings/statistics", - controller='admin/repos', action="edit_statistics", - conditions=dict(method=["POST"], function=check_repo)) - - # still working url for backward compat. - rmap.connect('raw_changeset_home_depraced', - '/{repo_name:.*?}/raw-changeset/{revision}', - controller='changeset', action='changeset_raw', - revision='tip', conditions=dict(function=check_repo)) - - ## new URLs - rmap.connect('changeset_raw_home', - '/{repo_name:.*?}/changeset-diff/{revision}', - controller='changeset', action='changeset_raw', - revision='tip', conditions=dict(function=check_repo)) - - rmap.connect('changeset_patch_home', - '/{repo_name:.*?}/changeset-patch/{revision}', - controller='changeset', action='changeset_patch', - revision='tip', conditions=dict(function=check_repo)) - - rmap.connect('changeset_download_home', - '/{repo_name:.*?}/changeset-download/{revision}', - controller='changeset', action='changeset_download', - revision='tip', conditions=dict(function=check_repo)) - - rmap.connect('changeset_comment', - '/{repo_name:.*?}/changeset-comment/{revision}', - controller='changeset', revision='tip', action='comment', - conditions=dict(function=check_repo)) - - rmap.connect('changeset_comment_delete', - '/{repo_name:.*?}/changeset-comment/{comment_id}/delete', - controller='changeset', action='delete_comment', - conditions=dict(function=check_repo, method=["POST"])) - - rmap.connect('changeset_info', '/changeset_info/{repo_name:.*?}/{revision}', - controller='changeset', action='changeset_info') - - rmap.connect('compare_home', - '/{repo_name:.*?}/compare', - controller='compare', - conditions=dict(function=check_repo)) - - rmap.connect('compare_url', - '/{repo_name:.*?}/compare/{org_ref_type}@{org_ref_name:.*?}...{other_ref_type}@{other_ref_name:.*?}', - controller='compare', action='compare', - conditions=dict(function=check_repo), - requirements=dict( - org_ref_type='(branch|book|tag|rev|__other_ref_type__)', - other_ref_type='(branch|book|tag|rev|__org_ref_type__)') - ) - - rmap.connect('pullrequest_home', - '/{repo_name:.*?}/pull-request/new', controller='pullrequests', - conditions=dict(function=check_repo, - method=["GET"])) - - rmap.connect('pullrequest_repo_info', - 
'/{repo_name:.*?}/pull-request-repo-info', - controller='pullrequests', action='repo_info', - conditions=dict(function=check_repo, method=["GET"])) - - rmap.connect('pullrequest', - '/{repo_name:.*?}/pull-request/new', controller='pullrequests', - action='create', conditions=dict(function=check_repo, - method=["POST"])) - - rmap.connect('pullrequest_show', - '/{repo_name:.*?}/pull-request/{pull_request_id:\\d+}{extra:(/.*)?}', extra='', - controller='pullrequests', - action='show', conditions=dict(function=check_repo, - method=["GET"])) - rmap.connect('pullrequest_post', - '/{repo_name:.*?}/pull-request/{pull_request_id}', - controller='pullrequests', - action='post', conditions=dict(function=check_repo, - method=["POST"])) - rmap.connect('pullrequest_delete', - '/{repo_name:.*?}/pull-request/{pull_request_id}/delete', - controller='pullrequests', - action='delete', conditions=dict(function=check_repo, - method=["POST"])) - - rmap.connect('pullrequest_show_all', - '/{repo_name:.*?}/pull-request', - controller='pullrequests', - action='show_all', conditions=dict(function=check_repo, - method=["GET"])) - - rmap.connect('my_pullrequests', - '/my_pullrequests', - controller='pullrequests', - action='show_my', conditions=dict(method=["GET"])) - - rmap.connect('pullrequest_comment', - '/{repo_name:.*?}/pull-request-comment/{pull_request_id}', - controller='pullrequests', - action='comment', conditions=dict(function=check_repo, - method=["POST"])) - - rmap.connect('pullrequest_comment_delete', - '/{repo_name:.*?}/pull-request-comment/{comment_id}/delete', - controller='pullrequests', action='delete_comment', - conditions=dict(function=check_repo, method=["POST"])) - - rmap.connect('summary_home_summary', '/{repo_name:.*?}/summary', - controller='summary', conditions=dict(function=check_repo)) - - rmap.connect('changelog_home', '/{repo_name:.*?}/changelog', - controller='changelog', conditions=dict(function=check_repo)) - - rmap.connect('changelog_file_home', '/{repo_name:.*?}/changelog/{revision}/{f_path:.*}', - controller='changelog', - conditions=dict(function=check_repo)) - - rmap.connect('changelog_details', '/{repo_name:.*?}/changelog_details/{cs}', - controller='changelog', action='changelog_details', - conditions=dict(function=check_repo)) - - rmap.connect('files_home', '/{repo_name:.*?}/files/{revision}/{f_path:.*}', - controller='files', revision='tip', f_path='', - conditions=dict(function=check_repo)) - - rmap.connect('files_home_nopath', '/{repo_name:.*?}/files/{revision}', - controller='files', revision='tip', f_path='', - conditions=dict(function=check_repo)) - - rmap.connect('files_history_home', - '/{repo_name:.*?}/history/{revision}/{f_path:.*}', - controller='files', action='history', revision='tip', f_path='', - conditions=dict(function=check_repo)) - - rmap.connect('files_authors_home', - '/{repo_name:.*?}/authors/{revision}/{f_path:.*}', - controller='files', action='authors', revision='tip', f_path='', - conditions=dict(function=check_repo)) - - rmap.connect('files_diff_home', '/{repo_name:.*?}/diff/{f_path:.*}', - controller='files', action='diff', revision='tip', f_path='', - conditions=dict(function=check_repo)) - - rmap.connect('files_diff_2way_home', '/{repo_name:.*?}/diff-2way/{f_path:.+}', - controller='files', action='diff_2way', revision='tip', f_path='', - conditions=dict(function=check_repo)) - - rmap.connect('files_rawfile_home', - '/{repo_name:.*?}/rawfile/{revision}/{f_path:.*}', - controller='files', action='rawfile', revision='tip', - f_path='', 
conditions=dict(function=check_repo))
-
-    rmap.connect('files_raw_home',
-                 '/{repo_name:.*?}/raw/{revision}/{f_path:.*}',
-                 controller='files', action='raw', revision='tip', f_path='',
-                 conditions=dict(function=check_repo))
-
-    rmap.connect('files_annotate_home',
-                 '/{repo_name:.*?}/annotate/{revision}/{f_path:.*}',
-                 controller='files', revision='tip',
-                 f_path='', annotate='1', conditions=dict(function=check_repo))
-
-    rmap.connect('files_edit_home',
-                 '/{repo_name:.*?}/edit/{revision}/{f_path:.*}',
-                 controller='files', action='edit', revision='tip',
-                 f_path='', conditions=dict(function=check_repo))
-
-    rmap.connect('files_add_home',
-                 '/{repo_name:.*?}/add/{revision}/{f_path:.*}',
-                 controller='files', action='add', revision='tip',
-                 f_path='', conditions=dict(function=check_repo))
-
-    rmap.connect('files_delete_home',
-                 '/{repo_name:.*?}/delete/{revision}/{f_path:.*}',
-                 controller='files', action='delete', revision='tip',
-                 f_path='', conditions=dict(function=check_repo))
-
-    rmap.connect('files_archive_home', '/{repo_name:.*?}/archive/{fname}',
-                 controller='files', action='archivefile',
-                 conditions=dict(function=check_repo))
-
-    rmap.connect('files_nodelist_home',
-                 '/{repo_name:.*?}/nodelist/{revision}/{f_path:.*}',
-                 controller='files', action='nodelist',
-                 conditions=dict(function=check_repo))
-
-    rmap.connect('repo_fork_create_home', '/{repo_name:.*?}/fork',
-                 controller='forks', action='fork_create',
-                 conditions=dict(function=check_repo, method=["POST"]))
-
-    rmap.connect('repo_fork_home', '/{repo_name:.*?}/fork',
-                 controller='forks', action='fork',
-                 conditions=dict(function=check_repo))
-
-    rmap.connect('repo_forks_home', '/{repo_name:.*?}/forks',
-                 controller='forks', action='forks',
-                 conditions=dict(function=check_repo))
-
-    rmap.connect('repo_followers_home', '/{repo_name:.*?}/followers',
-                 controller='followers', action='followers',
-                 conditions=dict(function=check_repo))
-
-    return rmap
-
-
-class UrlGenerator(object):
-    """Emulate pylons.url in providing a wrapper around routes.url
-
-    This code was added during migration from Pylons to Turbogears2. Pylons
-    already provided a wrapper like this, but Turbogears2 does not.
-
-    When the routing of Kallithea is changed to use less Routes and more
-    Turbogears2-style routing, this class may disappear or change.
-
-    url() (the __call__ method) returns the URL based on a route name and
-    arguments.
-    url.current() returns the URL of the current page with arguments applied.
-
-    Refer to documentation of Routes for details:
-    https://routes.readthedocs.io/en/latest/generating.html#generation
-    """
-    def __call__(self, *args, **kwargs):
-        return request.environ['routes.url'](*args, **kwargs)
-
-    def current(self, *args, **kwargs):
-        return request.environ['routes.url'].current(*args, **kwargs)
-
-
-url = UrlGenerator()
diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/admin/admin.py
--- a/kallithea/controllers/admin/admin.py Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/controllers/admin/admin.py Fri Oct 30 23:44:18 2020 +0100
@@ -41,7 +41,7 @@
 from kallithea.lib.indexers import JOURNAL_SCHEMA
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import remove_prefix, remove_suffix, safe_int
-from kallithea.model.db import UserLog
+from kallithea.model import db
 
 
 log = logging.getLogger(__name__)
@@ -77,15 +77,15 @@
 
     def get_filterion(field, val, term):
         if field == 'repository':
-            field = getattr(UserLog, 'repository_name')
+            field = getattr(db.UserLog, 'repository_name')
         elif field == 'ip':
-            field = getattr(UserLog, 'user_ip')
+            field = getattr(db.UserLog, 'user_ip')
         elif field == 'date':
-            field = getattr(UserLog, 'action_date')
+            field = getattr(db.UserLog, 'action_date')
         elif field == 'username':
-            field = getattr(UserLog, 'username')
+            field = getattr(db.UserLog, 'username')
         else:
-            field = getattr(UserLog, field)
+            field = getattr(db.UserLog, field)
         log.debug('filter field: %s val=>%s', field, val)
         # sql filtering
@@ -126,15 +126,15 @@
 
     @HasPermissionAnyDecorator('hg.admin')
     def index(self):
-        users_log = UserLog.query() \
-            .options(joinedload(UserLog.user)) \
-            .options(joinedload(UserLog.repository))
+        users_log = db.UserLog.query() \
+            .options(joinedload(db.UserLog.user)) \
+            .options(joinedload(db.UserLog.repository))
 
         # FILTERING
         c.search_term = request.GET.get('filter')
         users_log = _journal_filter(users_log, c.search_term)
 
-        users_log = users_log.order_by(UserLog.action_date.desc())
+        users_log = users_log.order_by(db.UserLog.action_date.desc())
 
         p = safe_int(request.GET.get('page'), 1)
diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/admin/auth_settings.py
--- a/kallithea/controllers/admin/auth_settings.py Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/controllers/admin/auth_settings.py Fri Oct 30 23:44:18 2020 +0100
@@ -32,14 +32,13 @@
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound
 
-from kallithea.config.routing import url
 from kallithea.lib import auth_modules
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired
 from kallithea.lib.base import BaseController, render
-from kallithea.model.db import Setting
+from kallithea.lib.webutils import url
+from kallithea.model import db, meta
 from kallithea.model.forms import AuthSettingsForm
-from kallithea.model.meta import Session
 
 
 log = logging.getLogger(__name__)
@@ -77,7 +76,7 @@
             if "default" in v:
                 c.defaults[fullname] = v["default"]
             # Current values will be the default on the form, if there are any
-            setting = Setting.get_by_name(fullname)
+            setting = db.Setting.get_by_name(fullname)
             if setting is not None:
                 c.defaults[fullname] = setting.app_settings_value
         if defaults:
@@ -131,8 +130,8 @@
                     # we want to store it comma separated inside our settings
                     v = ','.join(v)
                 log.debug("%s = %s", k, str(v))
-                setting = Setting.create_or_update(k, v)
-            Session().commit()
+                setting = db.Setting.create_or_update(k, v)
+            meta.Session().commit()
             h.flash(_('Auth settings updated successfully'),
                     category='success')
         except
formencode.Invalid as errors: diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/admin/defaults.py --- a/kallithea/controllers/admin/defaults.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/admin/defaults.py Fri Oct 30 23:44:18 2020 +0100 @@ -34,13 +34,12 @@ from tg.i18n import ugettext as _ from webob.exc import HTTPFound -from kallithea.config.routing import url from kallithea.lib import helpers as h from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired from kallithea.lib.base import BaseController, render -from kallithea.model.db import Setting +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.forms import DefaultsForm -from kallithea.model.meta import Session log = logging.getLogger(__name__) @@ -54,7 +53,7 @@ super(DefaultsController, self)._before(*args, **kwargs) def index(self, format='html'): - defaults = Setting.get_default_repo_settings() + defaults = db.Setting.get_default_repo_settings() return htmlfill.render( render('admin/defaults/defaults.html'), @@ -69,8 +68,8 @@ try: form_result = _form.to_python(dict(request.POST)) for k, v in form_result.items(): - setting = Setting.create_or_update(k, v) - Session().commit() + setting = db.Setting.create_or_update(k, v) + meta.Session().commit() h.flash(_('Default settings updated successfully'), category='success') diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/admin/gists.py --- a/kallithea/controllers/admin/gists.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/admin/gists.py Fri Oct 30 23:44:18 2020 +0100 @@ -35,24 +35,22 @@ from tg.i18n import ugettext as _ from webob.exc import HTTPForbidden, HTTPFound, HTTPNotFound -from kallithea.config.routing import url from kallithea.lib import helpers as h from kallithea.lib.auth import LoginRequired from kallithea.lib.base import BaseController, jsonify, render from kallithea.lib.page import Page from kallithea.lib.utils2 import safe_int, safe_str, time_to_datetime from kallithea.lib.vcs.exceptions import NodeNotChangedError, VCSError -from kallithea.model.db import Gist +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.forms import GistForm from kallithea.model.gist import GistModel -from kallithea.model.meta import Session log = logging.getLogger(__name__) class GistsController(BaseController): - """REST Controller styled on the Atom Publishing Protocol""" def __load_defaults(self, extra_values=None): c.lifetime_values = [ @@ -77,28 +75,28 @@ elif c.show_private: url_params['private'] = 1 - gists = Gist().query() \ + gists = db.Gist().query() \ .filter_by(is_expired=False) \ - .order_by(Gist.created_on.desc()) + .order_by(db.Gist.created_on.desc()) # MY private if c.show_private and not c.show_public: - gists = gists.filter(Gist.gist_type == Gist.GIST_PRIVATE) \ - .filter(Gist.owner_id == request.authuser.user_id) + gists = gists.filter(db.Gist.gist_type == db.Gist.GIST_PRIVATE) \ + .filter(db.Gist.owner_id == request.authuser.user_id) # MY public elif c.show_public and not c.show_private: - gists = gists.filter(Gist.gist_type == Gist.GIST_PUBLIC) \ - .filter(Gist.owner_id == request.authuser.user_id) + gists = gists.filter(db.Gist.gist_type == db.Gist.GIST_PUBLIC) \ + .filter(db.Gist.owner_id == request.authuser.user_id) # MY public+private elif c.show_private and c.show_public: - gists = gists.filter(or_(Gist.gist_type == Gist.GIST_PUBLIC, - Gist.gist_type == Gist.GIST_PRIVATE)) \ - .filter(Gist.owner_id == 
request.authuser.user_id) + gists = gists.filter(or_(db.Gist.gist_type == db.Gist.GIST_PUBLIC, + db.Gist.gist_type == db.Gist.GIST_PRIVATE)) \ + .filter(db.Gist.owner_id == request.authuser.user_id) # default show ALL public gists if not c.show_public and not c.show_private: - gists = gists.filter(Gist.gist_type == Gist.GIST_PUBLIC) + gists = gists.filter(db.Gist.gist_type == db.Gist.GIST_PUBLIC) c.gists = gists p = safe_int(request.GET.get('page'), 1) @@ -113,7 +111,7 @@ try: form_result = gist_form.to_python(dict(request.POST)) # TODO: multiple files support, from the form - filename = form_result['filename'] or Gist.DEFAULT_FILENAME + filename = form_result['filename'] or db.Gist.DEFAULT_FILENAME nodes = { filename: { 'content': form_result['content'], @@ -121,7 +119,7 @@ } } _public = form_result['public'] - gist_type = Gist.GIST_PUBLIC if _public else Gist.GIST_PRIVATE + gist_type = db.Gist.GIST_PUBLIC if _public else db.Gist.GIST_PRIVATE gist = GistModel().create( description=form_result['description'], owner=request.authuser.user_id, @@ -130,7 +128,7 @@ gist_type=gist_type, lifetime=form_result['lifetime'] ) - Session().commit() + meta.Session().commit() new_gist_id = gist.gist_access_id except formencode.Invalid as errors: defaults = errors.value @@ -160,7 +158,7 @@ owner = gist.owner_id == request.authuser.user_id if h.HasPermissionAny('hg.admin')() or owner: GistModel().delete(gist) - Session().commit() + meta.Session().commit() h.flash(_('Deleted gist %s') % gist.gist_access_id, category='success') else: raise HTTPForbidden() @@ -169,7 +167,7 @@ @LoginRequired(allow_default_user=True) def show(self, gist_id, revision='tip', format='html', f_path=None): - c.gist = Gist.get_or_404(gist_id) + c.gist = db.Gist.get_or_404(gist_id) if c.gist.is_expired: log.error('Gist expired at %s', @@ -192,7 +190,7 @@ @LoginRequired() def edit(self, gist_id, format='html'): - c.gist = Gist.get_or_404(gist_id) + c.gist = db.Gist.get_or_404(gist_id) if c.gist.is_expired: log.error('Gist expired at %s', @@ -233,12 +231,12 @@ lifetime=rpost['lifetime'] ) - Session().commit() + meta.Session().commit() h.flash(_('Successfully updated gist content'), category='success') except NodeNotChangedError: # raised if nothing was changed in repo itself. 
We anyway then # store only DB stuff for gist - Session().commit() + meta.Session().commit() h.flash(_('Successfully updated gist data'), category='success') except Exception: log.error(traceback.format_exc()) @@ -252,7 +250,7 @@ @LoginRequired() @jsonify def check_revision(self, gist_id): - c.gist = Gist.get_or_404(gist_id) + c.gist = db.Gist.get_or_404(gist_id) last_rev = c.gist.scm_instance.get_changeset() success = True revision = request.POST.get('revision') diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/admin/my_account.py --- a/kallithea/controllers/admin/my_account.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/admin/my_account.py Fri Oct 30 23:44:18 2020 +0100 @@ -35,16 +35,15 @@ from tg.i18n import ugettext as _ from webob.exc import HTTPFound -from kallithea.config.routing import url from kallithea.lib import auth_modules from kallithea.lib import helpers as h from kallithea.lib.auth import AuthUser, LoginRequired from kallithea.lib.base import BaseController, IfSshEnabled, render from kallithea.lib.utils2 import generate_api_key, safe_int +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.api_key import ApiKeyModel -from kallithea.model.db import Repository, User, UserEmailMap, UserFollowing from kallithea.model.forms import PasswordChangeForm, UserForm -from kallithea.model.meta import Session from kallithea.model.repo import RepoModel from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException from kallithea.model.user import UserModel @@ -54,18 +53,13 @@ class MyAccountController(BaseController): - """REST Controller styled on the Atom Publishing Protocol""" - # To properly map this controller, ensure your config/routing.py - # file has a resource setup: - # map.resource('setting', 'settings', controller='admin/settings', - # path_prefix='/admin', name_prefix='admin_') @LoginRequired() def _before(self, *args, **kwargs): super(MyAccountController, self)._before(*args, **kwargs) def __load_data(self): - c.user = User.get(request.authuser.user_id) + c.user = db.User.get(request.authuser.user_id) if c.user.is_default_user: h.flash(_("You can't edit this user since it's" " crucial for entire application"), category='warning') @@ -74,14 +68,14 @@ def _load_my_repos_data(self, watched=False): if watched: admin = False - repos_list = Session().query(Repository) \ - .join(UserFollowing) \ - .filter(UserFollowing.user_id == + repos_list = meta.Session().query(db.Repository) \ + .join(db.UserFollowing) \ + .filter(db.UserFollowing.user_id == request.authuser.user_id).all() else: admin = True - repos_list = Session().query(Repository) \ - .filter(Repository.owner_id == + repos_list = meta.Session().query(db.Repository) \ + .filter(db.Repository.owner_id == request.authuser.user_id).all() return RepoModel().get_repos_as_dict(repos_list, admin=admin) @@ -91,7 +85,7 @@ self.__load_data() c.perm_user = AuthUser(user_id=request.authuser.user_id) managed_fields = auth_modules.get_managed_fields(c.user) - def_user_perms = AuthUser(dbuser=User.get_default_user()).permissions['global'] + def_user_perms = AuthUser(dbuser=db.User.get_default_user()).global_permissions if 'hg.register.none' in def_user_perms: managed_fields.extend(['username', 'firstname', 'lastname', 'email']) @@ -118,7 +112,7 @@ skip_attrs=skip_attrs) h.flash(_('Your account was updated successfully'), category='success') - Session().commit() + meta.Session().commit() update = True except formencode.Invalid as errors: @@ -153,7 +147,7 @@ try: 
form_result = _form.to_python(request.POST) UserModel().update(request.authuser.user_id, form_result) - Session().commit() + meta.Session().commit() h.flash(_("Successfully updated password"), category='success') except formencode.Invalid as errors: return htmlfill.render( @@ -196,8 +190,8 @@ c.active = 'emails' self.__load_data() - c.user_email_map = UserEmailMap.query() \ - .filter(UserEmailMap.user == c.user).all() + c.user_email_map = db.UserEmailMap.query() \ + .filter(db.UserEmailMap.user == c.user).all() return render('admin/my_account/my_account.html') def my_account_emails_add(self): @@ -205,7 +199,7 @@ try: UserModel().add_extra_email(request.authuser.user_id, email) - Session().commit() + meta.Session().commit() h.flash(_("Added email %s to user") % email, category='success') except formencode.Invalid as error: msg = error.error_dict['email'] @@ -220,7 +214,7 @@ email_id = request.POST.get('del_email_id') user_model = UserModel() user_model.delete_extra_email(request.authuser.user_id, email_id) - Session().commit() + meta.Session().commit() h.flash(_("Removed email from user"), category='success') raise HTTPFound(location=url('my_account_emails')) @@ -244,20 +238,20 @@ lifetime = safe_int(request.POST.get('lifetime'), -1) description = request.POST.get('description') ApiKeyModel().create(request.authuser.user_id, description, lifetime) - Session().commit() + meta.Session().commit() h.flash(_("API key successfully created"), category='success') raise HTTPFound(location=url('my_account_api_keys')) def my_account_api_keys_delete(self): api_key = request.POST.get('del_api_key') if request.POST.get('del_api_key_builtin'): - user = User.get(request.authuser.user_id) + user = db.User.get(request.authuser.user_id) user.api_key = generate_api_key() - Session().commit() + meta.Session().commit() h.flash(_("API key successfully reset"), category='success') elif api_key: ApiKeyModel().delete(api_key, request.authuser.user_id) - Session().commit() + meta.Session().commit() h.flash(_("API key successfully deleted"), category='success') raise HTTPFound(location=url('my_account_api_keys')) @@ -276,7 +270,7 @@ try: new_ssh_key = SshKeyModel().create(request.authuser.user_id, description, public_key) - Session().commit() + meta.Session().commit() SshKeyModel().write_authorized_keys() h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success') except SshKeyModelException as e: @@ -288,7 +282,7 @@ fingerprint = request.POST.get('del_public_key_fingerprint') try: SshKeyModel().delete(fingerprint, request.authuser.user_id) - Session().commit() + meta.Session().commit() SshKeyModel().write_authorized_keys() h.flash(_("SSH key successfully deleted"), category='success') except SshKeyModelException as e: diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/admin/permissions.py --- a/kallithea/controllers/admin/permissions.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/admin/permissions.py Fri Oct 30 23:44:18 2020 +0100 @@ -36,13 +36,12 @@ from tg.i18n import ugettext as _ from webob.exc import HTTPFound -from kallithea.config.routing import url from kallithea.lib import helpers as h from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator, LoginRequired from kallithea.lib.base import BaseController, render -from kallithea.model.db import User, UserIpMap +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.forms import DefaultPermissionsForm -from kallithea.model.meta import Session from 
kallithea.model.permission import PermissionModel @@ -50,10 +49,6 @@ class PermissionsController(BaseController): - """REST Controller styled on the Atom Publishing Protocol""" - # To properly map this controller, ensure your config/routing.py - # file has a resource setup: - # map.resource('permission', 'permissions') @LoginRequired() @HasPermissionAnyDecorator('hg.admin') @@ -61,18 +56,22 @@ super(PermissionsController, self)._before(*args, **kwargs) def __load_data(self): + # Permissions for the Default user on new repositories c.repo_perms_choices = [('repository.none', _('None'),), ('repository.read', _('Read'),), ('repository.write', _('Write'),), ('repository.admin', _('Admin'),)] + # Permissions for the Default user on new repository groups c.group_perms_choices = [('group.none', _('None'),), ('group.read', _('Read'),), ('group.write', _('Write'),), ('group.admin', _('Admin'),)] + # Permissions for the Default user on new user groups c.user_group_perms_choices = [('usergroup.none', _('None'),), ('usergroup.read', _('Read'),), ('usergroup.write', _('Write'),), ('usergroup.admin', _('Admin'),)] + # Registration - allow new Users to create an account c.register_choices = [ ('hg.register.none', _('Disabled')), @@ -80,26 +79,18 @@ _('Allowed with manual account activation')), ('hg.register.auto_activate', _('Allowed with automatic account activation')), ] - + # External auth account activation c.extern_activate_choices = [ ('hg.extern_activate.manual', _('Manual activation of external account')), ('hg.extern_activate.auto', _('Automatic activation of external account')), ] - + # Top level repository creation c.repo_create_choices = [('hg.create.none', _('Disabled')), ('hg.create.repository', _('Enabled'))] - - c.repo_create_on_write_choices = [ - ('hg.create.write_on_repogroup.true', _('Enabled')), - ('hg.create.write_on_repogroup.false', _('Disabled')), - ] - + # User group creation c.user_group_create_choices = [('hg.usergroup.create.false', _('Disabled')), ('hg.usergroup.create.true', _('Enabled'))] - - c.repo_group_create_choices = [('hg.repogroup.create.false', _('Disabled')), - ('hg.repogroup.create.true', _('Enabled'))] - + # Repository forking: c.fork_choices = [('hg.fork.none', _('Disabled')), ('hg.fork.repository', _('Enabled'))] @@ -112,8 +103,6 @@ [x[0] for x in c.group_perms_choices], [x[0] for x in c.user_group_perms_choices], [x[0] for x in c.repo_create_choices], - [x[0] for x in c.repo_create_on_write_choices], - [x[0] for x in c.repo_group_create_choices], [x[0] for x in c.user_group_create_choices], [x[0] for x in c.fork_choices], [x[0] for x in c.register_choices], @@ -123,7 +112,7 @@ form_result = _form.to_python(dict(request.POST)) form_result.update({'perm_user_name': 'default'}) PermissionModel().update(form_result) - Session().commit() + meta.Session().commit() h.flash(_('Global permissions updated successfully'), category='success') @@ -144,7 +133,7 @@ raise HTTPFound(location=url('admin_permissions')) - c.user = User.get_default_user() + c.user = db.User.get_default_user() defaults = {'anonymous': c.user.active} for p in c.user.user_perms: @@ -157,15 +146,9 @@ if p.permission.permission_name.startswith('usergroup.'): defaults['default_user_group_perm'] = p.permission.permission_name - if p.permission.permission_name.startswith('hg.create.write_on_repogroup.'): - defaults['create_on_write'] = p.permission.permission_name - elif p.permission.permission_name.startswith('hg.create.'): defaults['default_repo_create'] = p.permission.permission_name - if 
p.permission.permission_name.startswith('hg.repogroup.'): - defaults['default_repo_group_create'] = p.permission.permission_name - if p.permission.permission_name.startswith('hg.usergroup.'): defaults['default_user_group_create'] = p.permission.permission_name @@ -186,14 +169,14 @@ def permission_ips(self): c.active = 'ips' - c.user = User.get_default_user() - c.user_ip_map = UserIpMap.query() \ - .filter(UserIpMap.user == c.user).all() + c.user = db.User.get_default_user() + c.user_ip_map = db.UserIpMap.query() \ + .filter(db.UserIpMap.user == c.user).all() return render('admin/permissions/permissions.html') def permission_perms(self): c.active = 'perms' - c.user = User.get_default_user() + c.user = db.User.get_default_user() c.perm_user = AuthUser(dbuser=c.user) return render('admin/permissions/permissions.html') diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/admin/repo_groups.py --- a/kallithea/controllers/admin/repo_groups.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/admin/repo_groups.py Fri Oct 30 23:44:18 2020 +0100 @@ -36,14 +36,13 @@ from tg.i18n import ungettext from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound -from kallithea.config.routing import url from kallithea.lib import helpers as h from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoGroupPermissionLevelDecorator, LoginRequired from kallithea.lib.base import BaseController, render from kallithea.lib.utils2 import safe_int -from kallithea.model.db import RepoGroup, Repository +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.forms import RepoGroupForm, RepoGroupPermsForm -from kallithea.model.meta import Session from kallithea.model.repo import RepoModel from kallithea.model.repo_group import RepoGroupModel from kallithea.model.scm import AvailableRepoGroupChoices, RepoGroupList @@ -63,7 +62,7 @@ exclude is used for not moving group to itself TODO: also exclude descendants Note: only admin can create top level groups """ - repo_groups = AvailableRepoGroupChoices([], 'admin', extras) + repo_groups = AvailableRepoGroupChoices('admin', extras) exclude_group_ids = set(rg.group_id for rg in exclude) c.repo_groups = [rg for rg in repo_groups if rg[0] not in exclude_group_ids] @@ -74,7 +73,7 @@ :param group_id: """ - repo_group = RepoGroup.get_or_404(group_id) + repo_group = db.RepoGroup.get_or_404(group_id) data = repo_group.get_dict() data['group_name'] = repo_group.name @@ -98,7 +97,7 @@ return False def index(self, format='html'): - _list = RepoGroup.query(sorted=True).all() + _list = db.RepoGroup.query(sorted=True).all() group_iter = RepoGroupList(_list, perm_level='admin') repo_groups_data = [] _tmpl_lookup = app_globals.mako_lookup @@ -150,7 +149,7 @@ owner=request.authuser.user_id, # TODO: make editable copy_permissions=form_result['group_copy_permissions'] ) - Session().commit() + meta.Session().commit() # TODO: in future action_logger(, '', '', '') except formencode.Invalid as errors: return htmlfill.render( @@ -181,7 +180,7 @@ # we pass in parent group into creation form, thus we know # what would be the group, we can check perms here ! 
group_id = safe_int(request.GET.get('parent_group')) - group = RepoGroup.get(group_id) if group_id else None + group = db.RepoGroup.get(group_id) if group_id else None group_name = group.group_name if group else None if HasRepoGroupPermissionLevel('admin')(group_name, 'group create'): pass @@ -193,7 +192,7 @@ @HasRepoGroupPermissionLevelDecorator('admin') def update(self, group_name): - c.repo_group = RepoGroup.guess_instance(group_name) + c.repo_group = db.RepoGroup.guess_instance(group_name) self.__load_defaults(extras=[c.repo_group.parent_group], exclude=[c.repo_group]) @@ -215,7 +214,7 @@ form_result = repo_group_form.to_python(dict(request.POST)) new_gr = RepoGroupModel().update(group_name, form_result) - Session().commit() + meta.Session().commit() h.flash(_('Updated repository group %s') % form_result['group_name'], category='success') # we now have new name ! @@ -239,7 +238,7 @@ @HasRepoGroupPermissionLevelDecorator('admin') def delete(self, group_name): - gr = c.repo_group = RepoGroup.guess_instance(group_name) + gr = c.repo_group = db.RepoGroup.guess_instance(group_name) repos = gr.repositories.all() if repos: h.flash(_('This group contains %s repositories and cannot be ' @@ -254,7 +253,7 @@ try: RepoGroupModel().delete(group_name) - Session().commit() + meta.Session().commit() h.flash(_('Removed repository group %s') % group_name, category='success') # TODO: in future action_logger(, '', '', '') @@ -273,7 +272,7 @@ the group by id view instead """ group_name = group_name.rstrip('/') - id_ = RepoGroup.get_by_group_name(group_name) + id_ = db.RepoGroup.get_by_group_name(group_name) if id_: return self.show(group_name) raise HTTPNotFound @@ -282,12 +281,12 @@ def show(self, group_name): c.active = 'settings' - c.group = c.repo_group = RepoGroup.guess_instance(group_name) + c.group = c.repo_group = db.RepoGroup.guess_instance(group_name) - groups = RepoGroup.query(sorted=True).filter_by(parent_group=c.group).all() + groups = db.RepoGroup.query(sorted=True).filter_by(parent_group=c.group).all() repo_groups_list = self.scm_model.get_repo_groups(groups) - repos_list = Repository.query(sorted=True).filter_by(group=c.group).all() + repos_list = db.Repository.query(sorted=True).filter_by(group=c.group).all() c.data = RepoModel().get_repos_as_dict(repos_list, repo_groups_list=repo_groups_list, short_name=True) @@ -298,7 +297,7 @@ def edit(self, group_name): c.active = 'settings' - c.repo_group = RepoGroup.guess_instance(group_name) + c.repo_group = db.RepoGroup.guess_instance(group_name) self.__load_defaults(extras=[c.repo_group.parent_group], exclude=[c.repo_group]) defaults = self.__load_data(c.repo_group.group_id) @@ -313,14 +312,14 @@ @HasRepoGroupPermissionLevelDecorator('admin') def edit_repo_group_advanced(self, group_name): c.active = 'advanced' - c.repo_group = RepoGroup.guess_instance(group_name) + c.repo_group = db.RepoGroup.guess_instance(group_name) return render('admin/repo_groups/repo_group_edit.html') @HasRepoGroupPermissionLevelDecorator('admin') def edit_repo_group_perms(self, group_name): c.active = 'perms' - c.repo_group = RepoGroup.guess_instance(group_name) + c.repo_group = db.RepoGroup.guess_instance(group_name) self.__load_defaults() defaults = self.__load_data(c.repo_group.group_id) @@ -339,7 +338,7 @@ :param group_name: """ - c.repo_group = RepoGroup.guess_instance(group_name) + c.repo_group = db.RepoGroup.guess_instance(group_name) valid_recursive_choices = ['none', 'repos', 'groups', 'all'] form_result = 
RepoGroupPermsForm(valid_recursive_choices)().to_python(request.POST) if not request.authuser.is_admin: @@ -358,7 +357,7 @@ # TODO: implement this #action_logger(request.authuser, 'admin_changed_repo_permissions', # repo_name, request.ip_addr) - Session().commit() + meta.Session().commit() h.flash(_('Repository group permissions updated'), category='success') raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name)) @@ -388,7 +387,7 @@ obj_type='user_group', recursive=recursive) - Session().commit() + meta.Session().commit() except Exception: log.error(traceback.format_exc()) h.flash(_('An error occurred during revoking of permission'), diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/admin/repos.py --- a/kallithea/controllers/admin/repos.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/admin/repos.py Fri Oct 30 23:44:18 2020 +0100 @@ -37,17 +37,16 @@ from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound import kallithea -from kallithea.config.routing import url from kallithea.lib import helpers as h -from kallithea.lib.auth import HasPermissionAny, HasRepoPermissionLevelDecorator, LoginRequired, NotAnonymous +from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired, NotAnonymous from kallithea.lib.base import BaseRepoController, jsonify, render from kallithea.lib.exceptions import AttachedForksError from kallithea.lib.utils import action_logger from kallithea.lib.utils2 import safe_int from kallithea.lib.vcs import RepositoryError -from kallithea.model.db import RepoGroup, Repository, RepositoryField, Setting, UserFollowing +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.forms import RepoFieldForm, RepoForm, RepoPermsForm -from kallithea.model.meta import Session from kallithea.model.repo import RepoModel from kallithea.model.scm import AvailableRepoGroupChoices, RepoList, ScmModel @@ -56,11 +55,6 @@ class ReposController(BaseRepoController): - """ - REST Controller styled on the Atom Publishing Protocol""" - # To properly map this controller, ensure your config/routing.py - # file has a resource setup: - # map.resource('repo', 'repos') @LoginRequired(allow_default_user=True) def _before(self, *args, **kwargs): @@ -76,14 +70,9 @@ return repo_obj def __load_defaults(self, repo=None): - top_perms = ['hg.create.repository'] - if HasPermissionAny('hg.create.write_on_repogroup.true')(): - repo_group_perm_level = 'write' - else: - repo_group_perm_level = 'admin' extras = [] if repo is None else [repo.group] - c.repo_groups = AvailableRepoGroupChoices(top_perms, repo_group_perm_level, extras) + c.repo_groups = AvailableRepoGroupChoices('write', extras) c.landing_revs_choices, c.landing_revs = ScmModel().get_repo_landing_revs(repo) @@ -101,7 +90,7 @@ return defaults def index(self, format='html'): - repos_list = RepoList(Repository.query(sorted=True).all(), perm_level='admin') + repos_list = RepoList(db.Repository.query(sorted=True).all(), perm_level='admin') # the repo list will be filtered to only show repos where the user has read permissions repos_data = RepoModel().get_repos_as_dict(repos_list, admin=True) # data used to render the grid @@ -151,9 +140,9 @@ parent_group = request.GET.get('parent_group') ## apply the defaults from defaults page - defaults = Setting.get_default_repo_settings(strip_prefix=True) + defaults = db.Setting.get_default_repo_settings(strip_prefix=True) if parent_group: - prg = RepoGroup.get(parent_group) + prg = 
db.RepoGroup.get(parent_group) if prg is None or not any(rgc[0] == prg.group_id for rgc in c.repo_groups): raise HTTPForbidden @@ -187,8 +176,8 @@ if task_result.failed(): raise HTTPInternalServerError(task_result.traceback) - repo = Repository.get_by_repo_name(repo_name) - if repo and repo.repo_state == Repository.STATE_CREATED: + repo = db.Repository.get_by_repo_name(repo_name) + if repo and repo.repo_state == db.Repository.STATE_CREATED: if repo.clone_uri: h.flash(_('Created repository %s from %s') % (repo.repo_name, repo.clone_uri_hidden), category='success') @@ -212,12 +201,12 @@ c.repo_info = self._load_repo() self.__load_defaults(c.repo_info) c.active = 'settings' - c.repo_fields = RepositoryField.query() \ - .filter(RepositoryField.repository == c.repo_info).all() + c.repo_fields = db.RepositoryField.query() \ + .filter(db.RepositoryField.repository == c.repo_info).all() repo_model = RepoModel() changed_name = repo_name - repo = Repository.get_by_repo_name(repo_name) + repo = db.Repository.get_by_repo_name(repo_name) old_data = { 'repo_name': repo_name, 'repo_group': repo.group.get_dict() if repo.group else {}, @@ -236,7 +225,7 @@ changed_name = repo.repo_name action_logger(request.authuser, 'admin_updated_repo', changed_name, request.ip_addr) - Session().commit() + meta.Session().commit() except formencode.Invalid as errors: log.info(errors) defaults = self.__load_data() @@ -278,7 +267,7 @@ repo_name, request.ip_addr) ScmModel().mark_for_invalidation(repo_name) h.flash(_('Deleted repository %s') % repo_name, category='success') - Session().commit() + meta.Session().commit() except AttachedForksError: h.flash(_('Cannot delete repository %s which still has forks') % repo_name, category='warning') @@ -295,8 +284,8 @@ @HasRepoPermissionLevelDecorator('admin') def edit(self, repo_name): defaults = self.__load_data() - c.repo_fields = RepositoryField.query() \ - .filter(RepositoryField.repository == c.repo_info).all() + c.repo_fields = db.RepositoryField.query() \ + .filter(db.RepositoryField.repository == c.repo_info).all() c.active = 'settings' return htmlfill.render( render('admin/repos/repo_edit.html'), @@ -324,7 +313,7 @@ # TODO: implement this #action_logger(request.authuser, 'admin_changed_repo_permissions', # repo_name, request.ip_addr) - Session().commit() + meta.Session().commit() h.flash(_('Repository permissions updated'), category='success') raise HTTPFound(location=url('edit_repo_perms', repo_name=repo_name)) @@ -351,7 +340,7 @@ # TODO: implement this #action_logger(request.authuser, 'admin_revoked_repo_permissions', # repo_name, request.ip_addr) - Session().commit() + meta.Session().commit() except Exception: log.error(traceback.format_exc()) h.flash(_('An error occurred during revoking of permission'), @@ -362,8 +351,8 @@ @HasRepoPermissionLevelDecorator('admin') def edit_fields(self, repo_name): c.repo_info = self._load_repo() - c.repo_fields = RepositoryField.query() \ - .filter(RepositoryField.repository == c.repo_info).all() + c.repo_fields = db.RepositoryField.query() \ + .filter(db.RepositoryField.repository == c.repo_info).all() c.active = 'fields' if request.POST: @@ -374,15 +363,15 @@ def create_repo_field(self, repo_name): try: form_result = RepoFieldForm()().to_python(dict(request.POST)) - new_field = RepositoryField() - new_field.repository = Repository.get_by_repo_name(repo_name) + new_field = db.RepositoryField() + new_field.repository = db.Repository.get_by_repo_name(repo_name) new_field.field_key = form_result['new_field_key'] new_field.field_type = 
form_result['new_field_type'] # python type new_field.field_value = form_result['new_field_value'] # set initial blank value new_field.field_desc = form_result['new_field_desc'] new_field.field_label = form_result['new_field_label'] - Session().add(new_field) - Session().commit() + meta.Session().add(new_field) + meta.Session().commit() except formencode.Invalid as e: h.flash(_('Field validation error: %s') % e.msg, category='error') except Exception as e: @@ -392,10 +381,10 @@ @HasRepoPermissionLevelDecorator('admin') def delete_repo_field(self, repo_name, field_id): - field = RepositoryField.get_or_404(field_id) + field = db.RepositoryField.get_or_404(field_id) try: - Session().delete(field) - Session().commit() + meta.Session().delete(field) + meta.Session().commit() except Exception as e: log.error(traceback.format_exc()) msg = _('An error occurred during removal of field') @@ -406,11 +395,11 @@ def edit_advanced(self, repo_name): c.repo_info = self._load_repo() c.default_user_id = kallithea.DEFAULT_USER_ID - c.in_public_journal = UserFollowing.query() \ - .filter(UserFollowing.user_id == c.default_user_id) \ - .filter(UserFollowing.follows_repository == c.repo_info).scalar() + c.in_public_journal = db.UserFollowing.query() \ + .filter(db.UserFollowing.user_id == c.default_user_id) \ + .filter(db.UserFollowing.follows_repository == c.repo_info).scalar() - _repos = Repository.query(sorted=True).all() + _repos = db.Repository.query(sorted=True).all() read_access_repos = RepoList(_repos, perm_level='read') c.repos_list = [(None, _('-- Not a fork --'))] c.repos_list += [(x.repo_id, x.repo_name) @@ -441,12 +430,12 @@ """ try: - repo_id = Repository.get_by_repo_name(repo_name).repo_id + repo_id = db.Repository.get_by_repo_name(repo_name).repo_id user_id = kallithea.DEFAULT_USER_ID self.scm_model.toggle_following_repo(repo_id, user_id) h.flash(_('Updated repository visibility in public journal'), category='success') - Session().commit() + meta.Session().commit() except Exception: h.flash(_('An error occurred during setting this' ' repository in public journal'), @@ -465,7 +454,7 @@ repo = ScmModel().mark_as_fork(repo_name, fork_id, request.authuser.username) fork = repo.fork.repo_name if repo.fork else _('Nothing') - Session().commit() + meta.Session().commit() h.flash(_('Marked repository %s as fork of %s') % (repo_name, fork), category='success') except RepositoryError as e: @@ -516,7 +505,7 @@ if request.POST: try: RepoModel().delete_stats(repo_name) - Session().commit() + meta.Session().commit() except Exception as e: log.error(traceback.format_exc()) h.flash(_('An error occurred during deletion of repository stats'), diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/admin/settings.py --- a/kallithea/controllers/admin/settings.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/admin/settings.py Fri Oct 30 23:44:18 2020 +0100 @@ -35,18 +35,16 @@ from tg.i18n import ugettext as _ from webob.exc import HTTPFound -from kallithea.config.routing import url from kallithea.lib import helpers as h from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired from kallithea.lib.base import BaseController, render from kallithea.lib.celerylib import tasks -from kallithea.lib.exceptions import HgsubversionImportError from kallithea.lib.utils import repo2db_mapper, set_app_settings from kallithea.lib.utils2 import safe_str from kallithea.lib.vcs import VCSError -from kallithea.model.db import Repository, Setting, Ui +from kallithea.lib.webutils import url +from 
kallithea.model import db, meta from kallithea.model.forms import ApplicationSettingsForm, ApplicationUiSettingsForm, ApplicationVisualisationForm -from kallithea.model.meta import Session from kallithea.model.notification import EmailNotificationModel from kallithea.model.scm import ScmModel @@ -55,18 +53,13 @@ class SettingsController(BaseController): - """REST Controller styled on the Atom Publishing Protocol""" - # To properly map this controller, ensure your config/routing.py - # file has a resource setup: - # map.resource('setting', 'settings', controller='admin/settings', - # path_prefix='/admin', name_prefix='admin_') @LoginRequired(allow_default_user=True) def _before(self, *args, **kwargs): super(SettingsController, self)._before(*args, **kwargs) def _get_hg_ui_settings(self): - ret = Ui.query().all() + ret = db.Ui.query().all() settings = {} for each in ret: @@ -101,48 +94,33 @@ try: if c.visual.allow_repo_location_change: - sett = Ui.get_by_key('paths', '/') + sett = db.Ui.get_by_key('paths', '/') sett.ui_value = form_result['paths_root_path'] # HOOKS - sett = Ui.get_by_key('hooks', Ui.HOOK_UPDATE) + sett = db.Ui.get_by_key('hooks', db.Ui.HOOK_UPDATE) sett.ui_active = form_result['hooks_changegroup_update'] - sett = Ui.get_by_key('hooks', Ui.HOOK_REPO_SIZE) + sett = db.Ui.get_by_key('hooks', db.Ui.HOOK_REPO_SIZE) sett.ui_active = form_result['hooks_changegroup_repo_size'] ## EXTENSIONS - sett = Ui.get_or_create('extensions', 'largefiles') + sett = db.Ui.get_or_create('extensions', 'largefiles') sett.ui_active = form_result['extensions_largefiles'] - sett = Ui.get_or_create('extensions', 'hgsubversion') - sett.ui_active = form_result['extensions_hgsubversion'] - if sett.ui_active: - try: - import hgsubversion # pragma: no cover - assert hgsubversion - except ImportError: - raise HgsubversionImportError - -# sett = Ui.get_or_create('extensions', 'hggit') +# sett = db.Ui.get_or_create('extensions', 'hggit') # sett.ui_active = form_result['extensions_hggit'] - Session().commit() + meta.Session().commit() h.flash(_('Updated VCS settings'), category='success') - except HgsubversionImportError: - log.error(traceback.format_exc()) - h.flash(_('Unable to activate hgsubversion support. 
' - 'The "hgsubversion" library is missing'), - category='error') - except Exception: log.error(traceback.format_exc()) h.flash(_('Error occurred while updating ' 'application settings'), category='error') - defaults = Setting.get_app_settings() + defaults = db.Setting.get_app_settings() defaults.update(self._get_hg_ui_settings()) return htmlfill.render( @@ -180,7 +158,7 @@ if invalidate_cache: log.debug('invalidating all repositories cache') i = 0 - for repo in Repository.query(): + for repo in db.Repository.query(): try: ScmModel().mark_for_invalidation(repo.repo_name) i += 1 @@ -190,7 +168,7 @@ raise HTTPFound(location=url('admin_settings_mapping')) - defaults = Setting.get_app_settings() + defaults = db.Setting.get_app_settings() defaults.update(self._get_hg_ui_settings()) return htmlfill.render( @@ -223,9 +201,9 @@ 'captcha_public_key', 'captcha_private_key', ): - Setting.create_or_update(setting, form_result[setting]) + db.Setting.create_or_update(setting, form_result[setting]) - Session().commit() + meta.Session().commit() set_app_settings(config) h.flash(_('Updated application settings'), category='success') @@ -237,7 +215,7 @@ raise HTTPFound(location=url('admin_settings_global')) - defaults = Setting.get_app_settings() + defaults = db.Setting.get_app_settings() defaults.update(self._get_hg_ui_settings()) return htmlfill.render( @@ -277,9 +255,9 @@ ('clone_ssh_tmpl', 'clone_ssh_tmpl', 'unicode'), ] for setting, form_key, type_ in settings: - Setting.create_or_update(setting, form_result[form_key], type_) + db.Setting.create_or_update(setting, form_result[form_key], type_) - Session().commit() + meta.Session().commit() set_app_settings(config) h.flash(_('Updated visualisation settings'), category='success') @@ -292,7 +270,7 @@ raise HTTPFound(location=url('admin_settings_visual')) - defaults = Setting.get_app_settings() + defaults = db.Setting.get_app_settings() defaults.update(self._get_hg_ui_settings()) return htmlfill.render( @@ -328,7 +306,7 @@ h.flash(_('Send email task created'), category='success') raise HTTPFound(location=url('admin_settings_email')) - defaults = Setting.get_app_settings() + defaults = db.Setting.get_app_settings() defaults.update(self._get_hg_ui_settings()) import kallithea @@ -352,16 +330,16 @@ try: ui_key = ui_key and ui_key.strip() - if ui_key in (x.ui_key for x in Ui.get_custom_hooks()): + if ui_key in (x.ui_key for x in db.Ui.get_custom_hooks()): h.flash(_('Hook already exists'), category='error') - elif ui_key in (x.ui_key for x in Ui.get_builtin_hooks()): + elif ui_key in (x.ui_key for x in db.Ui.get_builtin_hooks()): h.flash(_('Builtin hooks are read-only. 
Please use another hook name.'), category='error') elif ui_value and ui_key: - Ui.create_or_update_hook(ui_key, ui_value) + db.Ui.create_or_update_hook(ui_key, ui_value) h.flash(_('Added new hook'), category='success') elif hook_id: - Ui.delete(hook_id) - Session().commit() + db.Ui.delete(hook_id) + meta.Session().commit() # check for edits update = False @@ -370,12 +348,12 @@ _d.get('hook_ui_value_new', []), _d.get('hook_ui_value', [])): if v != ov: - Ui.create_or_update_hook(k, v) + db.Ui.create_or_update_hook(k, v) update = True if update: h.flash(_('Updated hooks'), category='success') - Session().commit() + meta.Session().commit() except Exception: log.error(traceback.format_exc()) h.flash(_('Error occurred during hook creation'), @@ -383,11 +361,11 @@ raise HTTPFound(location=url('admin_settings_hooks')) - defaults = Setting.get_app_settings() + defaults = db.Setting.get_app_settings() defaults.update(self._get_hg_ui_settings()) - c.hooks = Ui.get_builtin_hooks() - c.custom_hooks = Ui.get_custom_hooks() + c.hooks = db.Ui.get_builtin_hooks() + c.custom_hooks = db.Ui.get_custom_hooks() return htmlfill.render( render('admin/settings/settings.html'), @@ -405,7 +383,7 @@ h.flash(_('Whoosh reindex task scheduled'), category='success') raise HTTPFound(location=url('admin_settings_search')) - defaults = Setting.get_app_settings() + defaults = db.Setting.get_app_settings() defaults.update(self._get_hg_ui_settings()) return htmlfill.render( @@ -418,12 +396,12 @@ def settings_system(self): c.active = 'system' - defaults = Setting.get_app_settings() + defaults = db.Setting.get_app_settings() defaults.update(self._get_hg_ui_settings()) import kallithea c.ini = kallithea.CONFIG - server_info = Setting.get_server_info() + server_info = db.Setting.get_server_info() for key, val in server_info.items(): setattr(c, key, val) diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/admin/user_groups.py --- a/kallithea/controllers/admin/user_groups.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/admin/user_groups.py Fri Oct 30 23:44:18 2020 +0100 @@ -37,16 +37,15 @@ from tg.i18n import ugettext as _ from webob.exc import HTTPFound, HTTPInternalServerError -from kallithea.config.routing import url from kallithea.lib import helpers as h from kallithea.lib.auth import HasPermissionAnyDecorator, HasUserGroupPermissionLevelDecorator, LoginRequired from kallithea.lib.base import BaseController, render from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException from kallithea.lib.utils import action_logger from kallithea.lib.utils2 import safe_int, safe_str -from kallithea.model.db import User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.forms import CustomDefaultPermissionsForm, UserGroupForm, UserGroupPermsForm -from kallithea.model.meta import Session from kallithea.model.scm import UserGroupList from kallithea.model.user_group import UserGroupModel @@ -55,7 +54,6 @@ class UserGroupsController(BaseController): - """REST Controller styled on the Atom Publishing Protocol""" @LoginRequired(allow_default_user=True) def _before(self, *args, **kwargs): @@ -67,7 +65,7 @@ c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] c.available_members = sorted(((x.user_id, x.username) for x in - User.query().all()), + db.User.query().all()), key=lambda u: u[1].lower()) def __load_defaults(self, user_group_id): @@ -76,13 +74,13 @@ 
:param user_group_id: """ - user_group = UserGroup.get_or_404(user_group_id) + user_group = db.UserGroup.get_or_404(user_group_id) data = user_group.get_dict() return data def index(self, format='html'): - _list = UserGroup.query() \ - .order_by(func.lower(UserGroup.users_group_name)) \ + _list = db.UserGroup.query() \ + .order_by(func.lower(db.UserGroup.users_group_name)) \ .all() group_iter = UserGroupList(_list, perm_level='admin') user_groups_data = [] @@ -133,7 +131,7 @@ None, request.ip_addr) h.flash(h.HTML(_('Created user group %s')) % h.link_to(gr, url('edit_users_group', id=ug.users_group_id)), category='success') - Session().commit() + meta.Session().commit() except formencode.Invalid as errors: return htmlfill.render( render('admin/user_groups/user_group_add.html'), @@ -155,7 +153,7 @@ @HasUserGroupPermissionLevelDecorator('admin') def update(self, id): - c.user_group = UserGroup.get_or_404(id) + c.user_group = db.UserGroup.get_or_404(id) c.active = 'settings' self.__load_data(id) @@ -173,7 +171,7 @@ 'admin_updated_users_group:%s' % gr, None, request.ip_addr) h.flash(_('Updated user group %s') % gr, category='success') - Session().commit() + meta.Session().commit() except formencode.Invalid as errors: ug_model = UserGroupModel() defaults = errors.value @@ -201,10 +199,10 @@ @HasUserGroupPermissionLevelDecorator('admin') def delete(self, id): - usr_gr = UserGroup.get_or_404(id) + usr_gr = db.UserGroup.get_or_404(id) try: UserGroupModel().delete(usr_gr) - Session().commit() + meta.Session().commit() h.flash(_('Successfully deleted user group'), category='success') except UserGroupsAssignedException as e: h.flash(e, category='error') @@ -216,7 +214,7 @@ @HasUserGroupPermissionLevelDecorator('admin') def edit(self, id, format='html'): - c.user_group = UserGroup.get_or_404(id) + c.user_group = db.UserGroup.get_or_404(id) c.active = 'settings' self.__load_data(id) @@ -231,7 +229,7 @@ @HasUserGroupPermissionLevelDecorator('admin') def edit_perms(self, id): - c.user_group = UserGroup.get_or_404(id) + c.user_group = db.UserGroup.get_or_404(id) c.active = 'perms' defaults = {} @@ -258,7 +256,7 @@ :param id: """ - user_group = UserGroup.get_or_404(id) + user_group = db.UserGroup.get_or_404(id) form = UserGroupPermsForm()().to_python(request.POST) # set the permissions ! 
@@ -271,7 +269,7 @@ # TODO: implement this #action_logger(request.authuser, 'admin_changed_repo_permissions', # repo_name, request.ip_addr) - Session().commit() + meta.Session().commit() h.flash(_('User group permissions updated'), category='success') raise HTTPFound(location=url('edit_user_group_perms', id=id)) @@ -296,7 +294,7 @@ elif obj_type == 'user_group': UserGroupModel().revoke_user_group_permission(target_user_group=id, user_group=obj_id) - Session().commit() + meta.Session().commit() except Exception: log.error(traceback.format_exc()) h.flash(_('An error occurred during revoking of permission'), @@ -305,27 +303,27 @@ @HasUserGroupPermissionLevelDecorator('admin') def edit_default_perms(self, id): - c.user_group = UserGroup.get_or_404(id) + c.user_group = db.UserGroup.get_or_404(id) c.active = 'default_perms' permissions = { 'repositories': {}, 'repositories_groups': {} } - ugroup_repo_perms = UserGroupRepoToPerm.query() \ - .options(joinedload(UserGroupRepoToPerm.permission)) \ - .options(joinedload(UserGroupRepoToPerm.repository)) \ - .filter(UserGroupRepoToPerm.users_group_id == id) \ + ugroup_repo_perms = db.UserGroupRepoToPerm.query() \ + .options(joinedload(db.UserGroupRepoToPerm.permission)) \ + .options(joinedload(db.UserGroupRepoToPerm.repository)) \ + .filter(db.UserGroupRepoToPerm.users_group_id == id) \ .all() for gr in ugroup_repo_perms: permissions['repositories'][gr.repository.repo_name] \ = gr.permission.permission_name - ugroup_group_perms = UserGroupRepoGroupToPerm.query() \ - .options(joinedload(UserGroupRepoGroupToPerm.permission)) \ - .options(joinedload(UserGroupRepoGroupToPerm.group)) \ - .filter(UserGroupRepoGroupToPerm.users_group_id == id) \ + ugroup_group_perms = db.UserGroupRepoGroupToPerm.query() \ + .options(joinedload(db.UserGroupRepoGroupToPerm.permission)) \ + .options(joinedload(db.UserGroupRepoGroupToPerm.group)) \ + .filter(db.UserGroupRepoGroupToPerm.users_group_id == id) \ .all() for gr in ugroup_group_perms: @@ -354,7 +352,7 @@ @HasUserGroupPermissionLevelDecorator('admin') def update_default_perms(self, id): - user_group = UserGroup.get_or_404(id) + user_group = db.UserGroup.get_or_404(id) try: form = CustomDefaultPermissionsForm()() @@ -362,11 +360,11 @@ usergroup_model = UserGroupModel() - defs = UserGroupToPerm.query() \ - .filter(UserGroupToPerm.users_group == user_group) \ + defs = db.UserGroupToPerm.query() \ + .filter(db.UserGroupToPerm.users_group == user_group) \ .all() for ug in defs: - Session().delete(ug) + meta.Session().delete(ug) if form_result['create_repo_perm']: usergroup_model.grant_perm(id, 'hg.create.repository') @@ -382,7 +380,7 @@ usergroup_model.grant_perm(id, 'hg.fork.none') h.flash(_("Updated permissions"), category='success') - Session().commit() + meta.Session().commit() except Exception: log.error(traceback.format_exc()) h.flash(_('An error occurred during permissions saving'), @@ -392,7 +390,7 @@ @HasUserGroupPermissionLevelDecorator('admin') def edit_advanced(self, id): - c.user_group = UserGroup.get_or_404(id) + c.user_group = db.UserGroup.get_or_404(id) c.active = 'advanced' c.group_members_obj = sorted((x.user for x in c.user_group.members), key=lambda u: u.username.lower()) @@ -400,7 +398,7 @@ @HasUserGroupPermissionLevelDecorator('admin') def edit_members(self, id): - c.user_group = UserGroup.get_or_404(id) + c.user_group = db.UserGroup.get_or_404(id) c.active = 'members' c.group_members_obj = sorted((x.user for x in c.user_group.members), key=lambda u: u.username.lower()) diff -r c387989f868f -r 
3669e58f3002 kallithea/controllers/admin/users.py --- a/kallithea/controllers/admin/users.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/admin/users.py Fri Oct 30 23:44:18 2020 +0100 @@ -37,7 +37,6 @@ from webob.exc import HTTPFound, HTTPNotFound import kallithea -from kallithea.config.routing import url from kallithea.lib import auth_modules from kallithea.lib import helpers as h from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator, LoginRequired @@ -45,10 +44,10 @@ from kallithea.lib.exceptions import DefaultUserException, UserCreationError, UserOwnsReposException from kallithea.lib.utils import action_logger from kallithea.lib.utils2 import datetime_to_time, generate_api_key, safe_int +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.api_key import ApiKeyModel -from kallithea.model.db import User, UserEmailMap, UserIpMap, UserToPerm from kallithea.model.forms import CustomDefaultPermissionsForm, UserForm -from kallithea.model.meta import Session from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException from kallithea.model.user import UserModel @@ -57,7 +56,6 @@ class UsersController(BaseController): - """REST Controller styled on the Atom Publishing Protocol""" @LoginRequired() @HasPermissionAnyDecorator('hg.admin') @@ -65,9 +63,9 @@ super(UsersController, self)._before(*args, **kwargs) def index(self, format='html'): - c.users_list = User.query().order_by(User.username) \ + c.users_list = db.User.query().order_by(db.User.username) \ .filter_by(is_default_user=False) \ - .order_by(func.lower(User.username)) \ + .order_by(func.lower(db.User.username)) \ .all() users_data = [] @@ -109,7 +107,7 @@ return render('admin/users/users.html') def create(self): - c.default_extern_type = User.DEFAULT_AUTH_TYPE + c.default_extern_type = db.User.DEFAULT_AUTH_TYPE c.default_extern_name = '' user_model = UserModel() user_form = UserForm()() @@ -120,7 +118,7 @@ None, request.ip_addr) h.flash(_('Created user %s') % user.username, category='success') - Session().commit() + meta.Session().commit() except formencode.Invalid as errors: return htmlfill.render( render('admin/users/user_add.html'), @@ -138,7 +136,7 @@ raise HTTPFound(location=url('edit_user', id=user.user_id)) def new(self, format='html'): - c.default_extern_type = User.DEFAULT_AUTH_TYPE + c.default_extern_type = db.User.DEFAULT_AUTH_TYPE c.default_extern_name = '' return render('admin/users/user_add.html') @@ -158,7 +156,7 @@ action_logger(request.authuser, 'admin_updated_user:%s' % usr, None, request.ip_addr) h.flash(_('User updated successfully'), category='success') - Session().commit() + meta.Session().commit() except formencode.Invalid as errors: defaults = errors.value e = errors.error_dict or {} @@ -181,10 +179,11 @@ raise HTTPFound(location=url('edit_user', id=id)) def delete(self, id): - usr = User.get_or_404(id) + usr = db.User.get_or_404(id) + has_ssh_keys = bool(usr.ssh_keys) try: UserModel().delete(usr) - Session().commit() + meta.Session().commit() h.flash(_('Successfully deleted user'), category='success') except (UserOwnsReposException, DefaultUserException) as e: h.flash(e, category='warning') @@ -192,11 +191,14 @@ log.error(traceback.format_exc()) h.flash(_('An error occurred during deletion of user'), category='error') + else: + if has_ssh_keys: + SshKeyModel().write_authorized_keys() raise HTTPFound(location=url('users')) def _get_user_or_raise_if_default(self, id): try: - return User.get_or_404(id, allow_default=False) + return 
db.User.get_or_404(id, allow_default=False) except DefaultUserException: h.flash(_("The default user cannot be edited"), category='warning') raise HTTPNotFound @@ -265,7 +267,7 @@ lifetime = safe_int(request.POST.get('lifetime'), -1) description = request.POST.get('description') ApiKeyModel().create(c.user.user_id, description, lifetime) - Session().commit() + meta.Session().commit() h.flash(_("API key successfully created"), category='success') raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id)) @@ -275,11 +277,11 @@ api_key = request.POST.get('del_api_key') if request.POST.get('del_api_key_builtin'): c.user.api_key = generate_api_key() - Session().commit() + meta.Session().commit() h.flash(_("API key successfully reset"), category='success') elif api_key: ApiKeyModel().delete(api_key, c.user.user_id) - Session().commit() + meta.Session().commit() h.flash(_("API key successfully deleted"), category='success') raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id)) @@ -315,11 +317,11 @@ user_model = UserModel() - defs = UserToPerm.query() \ - .filter(UserToPerm.user == user) \ + defs = db.UserToPerm.query() \ + .filter(db.UserToPerm.user == user) \ .all() for ug in defs: - Session().delete(ug) + meta.Session().delete(ug) if form_result['create_repo_perm']: user_model.grant_perm(id, 'hg.create.repository') @@ -334,7 +336,7 @@ else: user_model.grant_perm(id, 'hg.fork.none') h.flash(_("Updated permissions"), category='success') - Session().commit() + meta.Session().commit() except Exception: log.error(traceback.format_exc()) h.flash(_('An error occurred during permissions saving'), @@ -344,8 +346,8 @@ def edit_emails(self, id): c.user = self._get_user_or_raise_if_default(id) c.active = 'emails' - c.user_email_map = UserEmailMap.query() \ - .filter(UserEmailMap.user == c.user).all() + c.user_email_map = db.UserEmailMap.query() \ + .filter(db.UserEmailMap.user == c.user).all() defaults = c.user.get_dict() return htmlfill.render( @@ -361,7 +363,7 @@ try: user_model.add_extra_email(id, email) - Session().commit() + meta.Session().commit() h.flash(_("Added email %s to user") % email, category='success') except formencode.Invalid as error: msg = error.error_dict['email'] @@ -377,18 +379,18 @@ email_id = request.POST.get('del_email_id') user_model = UserModel() user_model.delete_extra_email(id, email_id) - Session().commit() + meta.Session().commit() h.flash(_("Removed email from user"), category='success') raise HTTPFound(location=url('edit_user_emails', id=id)) def edit_ips(self, id): c.user = self._get_user_or_raise_if_default(id) c.active = 'ips' - c.user_ip_map = UserIpMap.query() \ - .filter(UserIpMap.user == c.user).all() + c.user_ip_map = db.UserIpMap.query() \ + .filter(db.UserIpMap.user == c.user).all() - c.default_user_ip_map = UserIpMap.query() \ - .filter(UserIpMap.user_id == kallithea.DEFAULT_USER_ID).all() + c.default_user_ip_map = db.UserIpMap.query() \ + .filter(db.UserIpMap.user_id == kallithea.DEFAULT_USER_ID).all() defaults = c.user.get_dict() return htmlfill.render( @@ -403,7 +405,7 @@ try: user_model.add_extra_ip(id, ip) - Session().commit() + meta.Session().commit() h.flash(_("Added IP address %s to user whitelist") % ip, category='success') except formencode.Invalid as error: msg = error.error_dict['ip'] @@ -421,7 +423,7 @@ ip_id = request.POST.get('del_ip_id') user_model = UserModel() user_model.delete_extra_ip(id, ip_id) - Session().commit() + meta.Session().commit() h.flash(_("Removed IP address from user whitelist"), category='success') 
if 'default_user' in request.POST: @@ -449,7 +451,7 @@ try: new_ssh_key = SshKeyModel().create(c.user.user_id, description, public_key) - Session().commit() + meta.Session().commit() SshKeyModel().write_authorized_keys() h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success') except SshKeyModelException as e: @@ -463,7 +465,7 @@ fingerprint = request.POST.get('del_public_key_fingerprint') try: SshKeyModel().delete(fingerprint, c.user.user_id) - Session().commit() + meta.Session().commit() SshKeyModel().write_authorized_keys() h.flash(_("SSH key successfully deleted"), category='success') except SshKeyModelException as e: diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/api/__init__.py --- a/kallithea/controllers/api/__init__.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/api/__init__.py Fri Oct 30 23:44:18 2020 +0100 @@ -40,7 +40,7 @@ from kallithea.lib.base import _get_ip_addr as _get_ip from kallithea.lib.base import get_path_info from kallithea.lib.utils2 import ascii_bytes -from kallithea.model.db import User +from kallithea.model import db log = logging.getLogger('JSONRPC') @@ -145,7 +145,7 @@ # check if we can find this session using api_key try: - u = User.get_by_api_key(self._req_api_key) + u = db.User.get_by_api_key(self._req_api_key) auth_user = AuthUser.make(dbuser=u, ip_addr=ip_addr) if auth_user is None: raise JSONRPCErrorResponse(retid=self._req_id, diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/api/api.py --- a/kallithea/controllers/api/api.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/api/api.py Fri Oct 30 23:44:18 2020 +0100 @@ -36,14 +36,12 @@ HasUserGroupPermissionLevel) from kallithea.lib.exceptions import DefaultUserException, UserGroupsAssignedException from kallithea.lib.utils import action_logger, repo2db_mapper -from kallithea.lib.utils2 import OAttr, Optional from kallithea.lib.vcs.backends.base import EmptyChangeset from kallithea.lib.vcs.exceptions import EmptyRepositoryError +from kallithea.model import db, meta from kallithea.model.changeset_status import ChangesetStatusModel from kallithea.model.comment import ChangesetCommentsModel -from kallithea.model.db import ChangesetStatus, Gist, Permission, PullRequest, RepoGroup, Repository, Setting, User, UserGroup, UserIpMap from kallithea.model.gist import GistModel -from kallithea.model.meta import Session from kallithea.model.pull_request import PullRequestModel from kallithea.model.repo import RepoModel from kallithea.model.repo_group import RepoGroupModel @@ -57,10 +55,10 @@ def store_update(updates, attr, name): """ - Stores param in updates dict if it's not instance of Optional - allows easy updates of passed in params + Stores param in updates dict if it's not None (i.e. if user explicitly set + a parameter). This allows easy updates of passed in params. 
""" - if not isinstance(attr, Optional): + if attr is not None: updates[name] = attr @@ -94,7 +92,7 @@ :param repogroupid: """ - repo_group = RepoGroup.guess_instance(repogroupid) + repo_group = db.RepoGroup.guess_instance(repogroupid) if repo_group is None: raise JSONRPCError( 'repository group `%s` does not exist' % (repogroupid,)) @@ -119,7 +117,7 @@ :param permid: """ - perm = Permission.get_by_key(permid) + perm = db.Permission.get_by_key(permid) if perm is None: raise JSONRPCError('permission `%s` does not exist' % (permid,)) if prefix: @@ -161,7 +159,7 @@ return args @HasPermissionAnyDecorator('hg.admin') - def pull(self, repoid, clone_uri=Optional(None)): + def pull(self, repoid, clone_uri=None): """ Triggers a pull from remote location on given repo. Can be used to automatically keep remote repos up to date. This command can be executed @@ -197,7 +195,7 @@ ScmModel().pull_changes(repo.repo_name, request.authuser.username, request.ip_addr, - clone_uri=Optional.extract(clone_uri)) + clone_uri=clone_uri) return dict( msg='Pulled from `%s`' % repo.repo_name, repository=repo.repo_name @@ -209,7 +207,7 @@ ) @HasPermissionAnyDecorator('hg.admin') - def rescan_repos(self, remove_obsolete=Optional(False)): + def rescan_repos(self, remove_obsolete=False): """ Triggers rescan repositories action. If remove_obsolete is set than also delete repos that are in database but not in the filesystem. @@ -240,7 +238,7 @@ """ try: - rm_obsolete = Optional.extract(remove_obsolete) + rm_obsolete = remove_obsolete added, removed = repo2db_mapper(ScmModel().repo_scan(), remove_obsolete=rm_obsolete) return {'added': added, 'removed': removed} @@ -295,7 +293,7 @@ ) @HasPermissionAnyDecorator('hg.admin') - def get_ip(self, userid=Optional(OAttr('apiuser'))): + def get_ip(self, userid=None): """ Shows IP address as seen from Kallithea server, together with all defined IP addresses for given user. If userid is not passed data is @@ -321,10 +319,10 @@ } """ - if isinstance(userid, Optional): + if userid is None: userid = request.authuser.user_id user = get_user_or_error(userid) - ips = UserIpMap.query().filter(UserIpMap.user == user).all() + ips = db.UserIpMap.query().filter(db.UserIpMap.user == user).all() return dict( server_ip_addr=request.ip_addr, user_ips=ips @@ -350,9 +348,9 @@ } error : null """ - return Setting.get_server_info() + return db.Setting.get_server_info() - def get_user(self, userid=Optional(OAttr('apiuser'))): + def get_user(self, userid=None): """ Gets a user by username or user_id, Returns empty result if user is not found. 
If userid param is skipped it is set to id of user who is @@ -397,12 +395,12 @@ if not HasPermissionAny('hg.admin')(): # make sure normal user does not pass someone else userid, # he is not allowed to do that - if not isinstance(userid, Optional) and userid != request.authuser.user_id: + if userid is not None and userid != request.authuser.user_id: raise JSONRPCError( 'userid is not the same as your user' ) - if isinstance(userid, Optional): + if userid is None: userid = request.authuser.user_id user = get_user_or_error(userid) @@ -426,17 +424,17 @@ return [ user.get_api_data() - for user in User.query() - .order_by(User.username) + for user in db.User.query() + .order_by(db.User.username) .filter_by(is_default_user=False) ] @HasPermissionAnyDecorator('hg.admin') - def create_user(self, username, email, password=Optional(''), - firstname=Optional(''), lastname=Optional(''), - active=Optional(True), admin=Optional(False), - extern_type=Optional(User.DEFAULT_AUTH_TYPE), - extern_name=Optional('')): + def create_user(self, username, email, password='', + firstname='', lastname='', + active=True, admin=False, + extern_type=db.User.DEFAULT_AUTH_TYPE, + extern_name=''): """ Creates new user. Returns new user object. This command can be executed only using api_key belonging to user with admin rights. @@ -484,25 +482,25 @@ """ - if User.get_by_username(username): + if db.User.get_by_username(username): raise JSONRPCError("user `%s` already exist" % (username,)) - if User.get_by_email(email): + if db.User.get_by_email(email): raise JSONRPCError("email `%s` already exist" % (email,)) try: user = UserModel().create_or_update( - username=Optional.extract(username), - password=Optional.extract(password), - email=Optional.extract(email), - firstname=Optional.extract(firstname), - lastname=Optional.extract(lastname), - active=Optional.extract(active), - admin=Optional.extract(admin), - extern_type=Optional.extract(extern_type), - extern_name=Optional.extract(extern_name) + username=username, + password=password, + email=email, + firstname=firstname, + lastname=lastname, + active=active, + admin=admin, + extern_type=extern_type, + extern_name=extern_name ) - Session().commit() + meta.Session().commit() return dict( msg='created new user `%s`' % username, user=user.get_api_data() @@ -512,11 +510,11 @@ raise JSONRPCError('failed to create user `%s`' % (username,)) @HasPermissionAnyDecorator('hg.admin') - def update_user(self, userid, username=Optional(None), - email=Optional(None), password=Optional(None), - firstname=Optional(None), lastname=Optional(None), - active=Optional(None), admin=Optional(None), - extern_type=Optional(None), extern_name=Optional(None)): + def update_user(self, userid, username=None, + email=None, password=None, + firstname=None, lastname=None, + active=None, admin=None, + extern_type=None, extern_name=None): """ updates given user if such user exists. This command can be executed only using api_key belonging to user with admin rights. 
@@ -580,7 +578,7 @@ store_update(updates, extern_type, 'extern_type') user = UserModel().update_user(user, **updates) - Session().commit() + meta.Session().commit() return dict( msg='updated user ID:%s %s' % (user.user_id, user.username), user=user.get_api_data() @@ -623,7 +621,7 @@ try: UserModel().delete(userid) - Session().commit() + meta.Session().commit() return dict( msg='deleted user ID:%s %s' % (user.user_id, user.username), user=None @@ -682,12 +680,12 @@ return [ user_group.get_api_data() - for user_group in UserGroupList(UserGroup.query().all(), perm_level='read') + for user_group in UserGroupList(db.UserGroup.query().all(), perm_level='read') ] @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') - def create_user_group(self, group_name, description=Optional(''), - owner=Optional(OAttr('apiuser')), active=Optional(True)): + def create_user_group(self, group_name, description='', + owner=None, active=True): """ Creates new user group. This command can be executed only using api_key belonging to user with admin rights or an user who has create user group @@ -727,15 +725,13 @@ raise JSONRPCError("user group `%s` already exist" % (group_name,)) try: - if isinstance(owner, Optional): + if owner is None: owner = request.authuser.user_id owner = get_user_or_error(owner) - active = Optional.extract(active) - description = Optional.extract(description) ug = UserGroupModel().create(name=group_name, description=description, owner=owner, active=active) - Session().commit() + meta.Session().commit() return dict( msg='created new user group `%s`' % group_name, user_group=ug.get_api_data() @@ -745,9 +741,9 @@ raise JSONRPCError('failed to create group `%s`' % (group_name,)) # permission check inside - def update_user_group(self, usergroupid, group_name=Optional(''), - description=Optional(''), owner=Optional(None), - active=Optional(True)): + def update_user_group(self, usergroupid, group_name=None, + description=None, owner=None, + active=None): """ Updates given usergroup. 
This command can be executed only using api_key belonging to user with admin rights or an admin of given user group @@ -786,7 +782,7 @@ if not HasUserGroupPermissionLevel('admin')(user_group.users_group_name): raise JSONRPCError('user group `%s` does not exist' % (usergroupid,)) - if not isinstance(owner, Optional): + if owner is not None: owner = get_user_or_error(owner) updates = {} @@ -796,7 +792,7 @@ store_update(updates, active, 'users_group_active') try: UserGroupModel().update(user_group, updates) - Session().commit() + meta.Session().commit() return dict( msg='updated user group ID:%s %s' % (user_group.users_group_id, user_group.users_group_name), @@ -842,7 +838,7 @@ try: UserGroupModel().delete(user_group) - Session().commit() + meta.Session().commit() return dict( msg='deleted user group ID:%s %s' % (user_group.users_group_id, user_group.users_group_name), @@ -903,7 +899,7 @@ user.username, user_group.users_group_name ) msg = msg if success else 'User is already in that group' - Session().commit() + meta.Session().commit() return dict( success=success, @@ -951,7 +947,7 @@ user.username, user_group.users_group_name ) msg = msg if success else "User wasn't in group" - Session().commit() + meta.Session().commit() return dict(success=success, msg=msg) except Exception: log.error(traceback.format_exc()) @@ -963,8 +959,8 @@ # permission check inside def get_repo(self, repoid, - with_revision_names=Optional(False), - with_pullrequests=Optional(False)): + with_revision_names=False, + with_pullrequests=False): """ Gets an existing repository by it's name or repository_id. Members will return either users_group or user associated to that repository. This command can be @@ -1064,8 +1060,8 @@ for uf in repo.followers ] - data = repo.get_api_data(with_revision_names=Optional.extract(with_revision_names), - with_pullrequests=Optional.extract(with_pullrequests)) + data = repo.get_api_data(with_revision_names=with_revision_names, + with_pullrequests=with_pullrequests) data['members'] = members data['followers'] = followers return data @@ -1103,7 +1099,7 @@ if not HasPermissionAny('hg.admin')(): repos = RepoModel().get_all_user_repos(user=request.authuser.user_id) else: - repos = Repository.query() + repos = db.Repository.query() return [ repo.get_api_data() @@ -1112,7 +1108,7 @@ # permission check inside def get_repo_nodes(self, repoid, revision, root_path, - ret_type=Optional('all')): + ret_type='all'): """ returns a list of nodes and it's children in a flat list for a given path at given revision. It's possible to specify ret_type to show only `files` or @@ -1147,7 +1143,7 @@ if not HasRepoPermissionLevel('read')(repo.repo_name): raise JSONRPCError('repository `%s` does not exist' % (repoid,)) - ret_type = Optional.extract(ret_type) + ret_type = ret_type _map = {} try: _d, _f = ScmModel().get_nodes(repo, revision, root_path, @@ -1168,13 +1164,13 @@ ) @HasPermissionAnyDecorator('hg.admin', 'hg.create.repository') - def create_repo(self, repo_name, owner=Optional(OAttr('apiuser')), - repo_type=Optional('hg'), description=Optional(''), - private=Optional(False), clone_uri=Optional(None), - landing_rev=Optional('rev:tip'), - enable_statistics=Optional(False), - enable_downloads=Optional(False), - copy_permissions=Optional(False)): + def create_repo(self, repo_name, owner=None, + repo_type=None, description='', + private=False, clone_uri=None, + landing_rev='rev:tip', + enable_statistics=None, + enable_downloads=None, + copy_permissions=False): """ Creates a repository. 
The repository name contains the full path, but the parent repository group must exist. For example "foo/bar/baz" require the groups @@ -1225,12 +1221,12 @@ """ if not HasPermissionAny('hg.admin')(): - if not isinstance(owner, Optional): + if owner is not None: # forbid setting owner for non-admins raise JSONRPCError( 'Only Kallithea admin can specify `owner` param' ) - if isinstance(owner, Optional): + if owner is None: owner = request.authuser.user_id owner = get_user_or_error(owner) @@ -1238,27 +1234,22 @@ if RepoModel().get_by_repo_name(repo_name): raise JSONRPCError("repo `%s` already exist" % repo_name) - defs = Setting.get_default_repo_settings(strip_prefix=True) - if isinstance(private, Optional): - private = defs.get('repo_private') or Optional.extract(private) - if isinstance(repo_type, Optional): + defs = db.Setting.get_default_repo_settings(strip_prefix=True) + if private is None: + private = defs.get('repo_private') or False + if repo_type is None: repo_type = defs.get('repo_type') - if isinstance(enable_statistics, Optional): + if enable_statistics is None: enable_statistics = defs.get('repo_enable_statistics') - if isinstance(enable_downloads, Optional): + if enable_downloads is None: enable_downloads = defs.get('repo_enable_downloads') - clone_uri = Optional.extract(clone_uri) - description = Optional.extract(description) - landing_rev = Optional.extract(landing_rev) - copy_permissions = Optional.extract(copy_permissions) - try: repo_name_parts = repo_name.split('/') repo_group = None if len(repo_name_parts) > 1: group_name = '/'.join(repo_name_parts[:-1]) - repo_group = RepoGroup.get_by_group_name(group_name) + repo_group = db.RepoGroup.get_by_group_name(group_name) if repo_group is None: raise JSONRPCError("repo group `%s` not found" % group_name) data = dict( @@ -1291,13 +1282,13 @@ 'failed to create repository `%s`' % (repo_name,)) # permission check inside - def update_repo(self, repoid, name=Optional(None), - owner=Optional(OAttr('apiuser')), - group=Optional(None), - description=Optional(''), private=Optional(False), - clone_uri=Optional(None), landing_rev=Optional('rev:tip'), - enable_statistics=Optional(False), - enable_downloads=Optional(False)): + def update_repo(self, repoid, name=None, + owner=None, + group=None, + description=None, private=None, + clone_uri=None, landing_rev=None, + enable_statistics=None, + enable_downloads=None): """ Updates repo @@ -1324,7 +1315,7 @@ ): raise JSONRPCError('no permission to create (or move) repositories') - if not isinstance(owner, Optional): + if owner is not None: # forbid setting owner for non-admins raise JSONRPCError( 'Only Kallithea admin can specify `owner` param' @@ -1332,7 +1323,7 @@ updates = {} repo_group = group - if not isinstance(repo_group, Optional): + if repo_group is not None: repo_group = get_repo_group_or_error(repo_group) repo_group = repo_group.group_id try: @@ -1347,7 +1338,7 @@ store_update(updates, enable_downloads, 'repo_enable_downloads') RepoModel().update(repo, **updates) - Session().commit() + meta.Session().commit() return dict( msg='updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), repository=repo.get_api_data() @@ -1358,9 +1349,9 @@ @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository') def fork_repo(self, repoid, fork_name, - owner=Optional(OAttr('apiuser')), - description=Optional(''), copy_permissions=Optional(False), - private=Optional(False), landing_rev=Optional('rev:tip')): + owner=None, + description='', copy_permissions=False, + private=False, landing_rev='rev:tip'): """ 
Creates a fork of given repo. In case of using celery this will immediately return success message, while fork is going to be created @@ -1413,7 +1404,7 @@ if HasPermissionAny('hg.admin')(): pass elif HasRepoPermissionLevel('read')(repo.repo_name): - if not isinstance(owner, Optional): + if owner is not None: # forbid setting owner for non-admins raise JSONRPCError( 'Only Kallithea admin can specify `owner` param' @@ -1424,7 +1415,7 @@ else: raise JSONRPCError('repository `%s` does not exist' % (repoid,)) - if isinstance(owner, Optional): + if owner is None: owner = request.authuser.user_id owner = get_user_or_error(owner) @@ -1434,7 +1425,7 @@ repo_group = None if len(fork_name_parts) > 1: group_name = '/'.join(fork_name_parts[:-1]) - repo_group = RepoGroup.get_by_group_name(group_name) + repo_group = db.RepoGroup.get_by_group_name(group_name) if repo_group is None: raise JSONRPCError("repo group `%s` not found" % group_name) @@ -1443,10 +1434,10 @@ repo_name_full=fork_name, repo_group=repo_group, repo_type=repo.repo_type, - description=Optional.extract(description), - private=Optional.extract(private), - copy_permissions=Optional.extract(copy_permissions), - landing_rev=Optional.extract(landing_rev), + description=description, + private=private, + copy_permissions=copy_permissions, + landing_rev=landing_rev, update_after_clone=False, fork_parent_id=repo.repo_id, ) @@ -1468,7 +1459,7 @@ ) # permission check inside - def delete_repo(self, repoid, forks=Optional('')): + def delete_repo(self, repoid, forks=''): """ Deletes a repository. This command can be executed only using api_key belonging to user with admin rights or regular user that have admin access to repository. @@ -1497,7 +1488,7 @@ raise JSONRPCError('repository `%s` does not exist' % (repoid,)) try: - handle_forks = Optional.extract(forks) + handle_forks = forks _forks_msg = '' _forks = [f for f in repo.forks] if handle_forks == 'detach': @@ -1511,7 +1502,7 @@ ) RepoModel().delete(repo, forks=forks) - Session().commit() + meta.Session().commit() return dict( msg='Deleted repository `%s`%s' % (repo.repo_name, _forks_msg), success=True @@ -1552,7 +1543,7 @@ RepoModel().grant_user_permission(repo=repo, user=user, perm=perm) - Session().commit() + meta.Session().commit() return dict( msg='Granted perm: `%s` for user: `%s` in repo: `%s`' % ( perm.permission_name, user.username, repo.repo_name @@ -1592,7 +1583,7 @@ user = get_user_or_error(userid) try: RepoModel().revoke_user_permission(repo=repo, user=user) - Session().commit() + meta.Session().commit() return dict( msg='Revoked perm for user: `%s` in repo: `%s`' % ( user.username, repo.repo_name @@ -1654,7 +1645,7 @@ RepoModel().grant_user_group_permission( repo=repo, group_name=user_group, perm=perm) - Session().commit() + meta.Session().commit() return dict( msg='Granted perm: `%s` for user group: `%s` in ' 'repo: `%s`' % ( @@ -1704,7 +1695,7 @@ RepoModel().revoke_user_group_permission( repo=repo, group_name=user_group) - Session().commit() + meta.Session().commit() return dict( msg='Revoked perm for user group: `%s` in repo: `%s`' % ( user_group.users_group_name, repo.repo_name @@ -1764,14 +1755,14 @@ """ return [ repo_group.get_api_data() - for repo_group in RepoGroup.query() + for repo_group in db.RepoGroup.query() ] @HasPermissionAnyDecorator('hg.admin') - def create_repo_group(self, group_name, description=Optional(''), - owner=Optional(OAttr('apiuser')), - parent=Optional(None), - copy_permissions=Optional(False)): + def create_repo_group(self, group_name, description='', + 
owner=None, + parent=None, + copy_permissions=False): """ Creates a repository group. This command can be executed only using api_key belonging to user with admin rights. @@ -1805,17 +1796,16 @@ } """ - if RepoGroup.get_by_group_name(group_name): + if db.RepoGroup.get_by_group_name(group_name): raise JSONRPCError("repo group `%s` already exist" % (group_name,)) - if isinstance(owner, Optional): + if owner is None: owner = request.authuser.user_id - group_description = Optional.extract(description) - parent_group = Optional.extract(parent) - if not isinstance(parent, Optional): - parent_group = get_repo_group_or_error(parent_group) + group_description = description + parent_group = None + if parent is not None: + parent_group = get_repo_group_or_error(parent) - copy_permissions = Optional.extract(copy_permissions) try: repo_group = RepoGroupModel().create( group_name=group_name, @@ -1824,7 +1814,7 @@ parent=parent_group, copy_permissions=copy_permissions ) - Session().commit() + meta.Session().commit() return dict( msg='created new repo group `%s`' % group_name, repo_group=repo_group.get_api_data() @@ -1835,10 +1825,10 @@ raise JSONRPCError('failed to create repo group `%s`' % (group_name,)) @HasPermissionAnyDecorator('hg.admin') - def update_repo_group(self, repogroupid, group_name=Optional(''), - description=Optional(''), - owner=Optional(OAttr('apiuser')), - parent=Optional(None)): + def update_repo_group(self, repogroupid, group_name=None, + description=None, + owner=None, + parent=None): repo_group = get_repo_group_or_error(repogroupid) updates = {} @@ -1848,7 +1838,7 @@ store_update(updates, owner, 'owner') store_update(updates, parent, 'parent_group') repo_group = RepoGroupModel().update(repo_group, updates) - Session().commit() + meta.Session().commit() return dict( msg='updated repository group ID:%s %s' % (repo_group.group_id, repo_group.group_name), @@ -1888,7 +1878,7 @@ try: RepoGroupModel().delete(repo_group) - Session().commit() + meta.Session().commit() return dict( msg='deleted repo group ID:%s %s' % (repo_group.group_id, repo_group.group_name), @@ -1902,7 +1892,7 @@ # permission check inside def grant_user_permission_to_repo_group(self, repogroupid, userid, - perm, apply_to_children=Optional('none')): + perm, apply_to_children='none'): """ Grant permission for user on given repository group, or update existing one if found. This command can be executed only using api_key belonging @@ -1944,7 +1934,6 @@ user = get_user_or_error(userid) perm = get_perm_or_error(perm, prefix='group.') - apply_to_children = Optional.extract(apply_to_children) try: RepoGroupModel().add_permission(repo_group=repo_group, @@ -1952,7 +1941,7 @@ obj_type="user", perm=perm, recursive=apply_to_children) - Session().commit() + meta.Session().commit() return dict( msg='Granted perm: `%s` (recursive:%s) for user: `%s` in repo group: `%s`' % ( perm.permission_name, apply_to_children, user.username, repo_group.name @@ -1967,7 +1956,7 @@ # permission check inside def revoke_user_permission_from_repo_group(self, repogroupid, userid, - apply_to_children=Optional('none')): + apply_to_children='none'): """ Revoke permission for user on given repository group. 
This command can be executed only using api_key belonging to user with admin rights, or @@ -2006,7 +1995,6 @@ raise JSONRPCError('repository group `%s` does not exist' % (repogroupid,)) user = get_user_or_error(userid) - apply_to_children = Optional.extract(apply_to_children) try: RepoGroupModel().delete_permission(repo_group=repo_group, @@ -2014,7 +2002,7 @@ obj_type="user", recursive=apply_to_children) - Session().commit() + meta.Session().commit() return dict( msg='Revoked perm (recursive:%s) for user: `%s` in repo group: `%s`' % ( apply_to_children, user.username, repo_group.name @@ -2030,7 +2018,7 @@ # permission check inside def grant_user_group_permission_to_repo_group( self, repogroupid, usergroupid, perm, - apply_to_children=Optional('none')): + apply_to_children='none'): """ Grant permission for user group on given repository group, or update existing one if found. This command can be executed only using @@ -2077,15 +2065,13 @@ raise JSONRPCError( 'user group `%s` does not exist' % (usergroupid,)) - apply_to_children = Optional.extract(apply_to_children) - try: RepoGroupModel().add_permission(repo_group=repo_group, obj=user_group, obj_type="user_group", perm=perm, recursive=apply_to_children) - Session().commit() + meta.Session().commit() return dict( msg='Granted perm: `%s` (recursive:%s) for user group: `%s` in repo group: `%s`' % ( perm.permission_name, apply_to_children, @@ -2105,7 +2091,7 @@ # permission check inside def revoke_user_group_permission_from_repo_group( self, repogroupid, usergroupid, - apply_to_children=Optional('none')): + apply_to_children='none'): """ Revoke permission for user group on given repository. This command can be executed only using api_key belonging to user with admin rights, or @@ -2147,14 +2133,12 @@ raise JSONRPCError( 'user group `%s` does not exist' % (usergroupid,)) - apply_to_children = Optional.extract(apply_to_children) - try: RepoGroupModel().delete_permission(repo_group=repo_group, obj=user_group, obj_type="user_group", recursive=apply_to_children) - Session().commit() + meta.Session().commit() return dict( msg='Revoked perm (recursive:%s) for user group: `%s` in repo group: `%s`' % ( apply_to_children, user_group.users_group_name, repo_group.name @@ -2182,7 +2166,7 @@ raise JSONRPCError('gist `%s` does not exist' % (gistid,)) return gist.get_api_data() - def get_gists(self, userid=Optional(OAttr('apiuser'))): + def get_gists(self, userid=None): """ Get all gists for given user. 
If userid is empty returned gists are for user who called the api @@ -2193,27 +2177,27 @@ if not HasPermissionAny('hg.admin')(): # make sure normal user does not pass someone else userid, # he is not allowed to do that - if not isinstance(userid, Optional) and userid != request.authuser.user_id: + if userid is not None and userid != request.authuser.user_id: raise JSONRPCError( 'userid is not the same as your user' ) - if isinstance(userid, Optional): + if userid is None: user_id = request.authuser.user_id else: user_id = get_user_or_error(userid).user_id return [ gist.get_api_data() - for gist in Gist().query() + for gist in db.Gist().query() .filter_by(is_expired=False) - .filter(Gist.owner_id == user_id) - .order_by(Gist.created_on.desc()) + .filter(db.Gist.owner_id == user_id) + .order_by(db.Gist.created_on.desc()) ] - def create_gist(self, files, owner=Optional(OAttr('apiuser')), - gist_type=Optional(Gist.GIST_PUBLIC), lifetime=Optional(-1), - description=Optional('')): + def create_gist(self, files, owner=None, + gist_type=db.Gist.GIST_PUBLIC, lifetime=-1, + description=''): """ Creates new Gist @@ -2250,13 +2234,10 @@ """ try: - if isinstance(owner, Optional): + if owner is None: owner = request.authuser.user_id owner = get_user_or_error(owner) - description = Optional.extract(description) - gist_type = Optional.extract(gist_type) - lifetime = Optional.extract(lifetime) gist = GistModel().create(description=description, owner=owner, @@ -2264,7 +2245,7 @@ gist_mapping=files, gist_type=gist_type, lifetime=lifetime) - Session().commit() + meta.Session().commit() return dict( msg='created new gist', gist=gist.get_api_data() @@ -2273,12 +2254,6 @@ log.error(traceback.format_exc()) raise JSONRPCError('failed to create gist') - # def update_gist(self, gistid, files, owner=Optional(OAttr('apiuser')), - # gist_type=Optional(Gist.GIST_PUBLIC), - # gist_lifetime=Optional(-1), gist_description=Optional('')): - # gist = get_gist_or_error(gistid) - # updates = {} - # permission check inside def delete_gist(self, gistid): """ @@ -2312,7 +2287,7 @@ try: GistModel().delete(gist) - Session().commit() + meta.Session().commit() return dict( msg='deleted gist ID:%s' % (gist.gist_access_id,), gist=None @@ -2342,7 +2317,7 @@ raise JSONRPCError('Repository is empty') # permission check inside - def get_changeset(self, repoid, raw_id, with_reviews=Optional(False)): + def get_changeset(self, repoid, raw_id, with_reviews=False): repo = get_repo_or_error(repoid) if not HasRepoPermissionLevel('read')(repo.repo_name): raise JSONRPCError('Access denied to repo %s' % repo.repo_name) @@ -2352,7 +2327,6 @@ info = dict(changeset.as_dict()) - with_reviews = Optional.extract(with_reviews) if with_reviews: reviews = ChangesetStatusModel().get_statuses( repo.repo_name, raw_id) @@ -2365,7 +2339,7 @@ """ Get given pull request by id """ - pull_request = PullRequest.get(pullrequest_id) + pull_request = db.PullRequest.get(pullrequest_id) if pull_request is None: raise JSONRPCError('pull request `%s` does not exist' % (pullrequest_id,)) if not HasRepoPermissionLevel('read')(pull_request.org_repo.repo_name): @@ -2378,7 +2352,7 @@ Add comment, close and change status of pull request. 
""" apiuser = get_user_or_error(request.authuser.user_id) - pull_request = PullRequest.get(pull_request_id) + pull_request = db.PullRequest.get(pull_request_id) if pull_request is None: raise JSONRPCError('pull request `%s` does not exist' % (pull_request_id,)) if (not HasRepoPermissionLevel('read')(pull_request.org_repo.repo_name)): @@ -2400,7 +2374,7 @@ pull_request=pull_request.pull_request_id, f_path=None, line_no=None, - status_change=ChangesetStatus.get_status_lbl(status), + status_change=db.ChangesetStatus.get_status_lbl(status), closing_pr=close_pr ) action_logger(apiuser, @@ -2419,5 +2393,51 @@ action_logger(apiuser, 'user_closed_pull_request:%s' % pull_request_id, pull_request.org_repo, request.ip_addr) - Session().commit() + meta.Session().commit() return True + + # permission check inside + def edit_reviewers(self, pull_request_id, add=None, remove=None): + """ + Add and/or remove one or more reviewers to a pull request, by username + or user ID. Reviewers are specified either as a single-user string or + as a JSON list of one or more strings. + """ + if add is None and remove is None: + raise JSONRPCError('''Invalid request. Neither 'add' nor 'remove' is specified.''') + + pull_request = db.PullRequest.get(pull_request_id) + if pull_request is None: + raise JSONRPCError('pull request `%s` does not exist' % (pull_request_id,)) + + apiuser = get_user_or_error(request.authuser.user_id) + is_owner = apiuser.user_id == pull_request.owner_id + is_repo_admin = HasRepoPermissionLevel('admin')(pull_request.other_repo.repo_name) + if not (apiuser.admin or is_repo_admin or is_owner): + raise JSONRPCError('No permission to edit reviewers of this pull request. User needs to be admin or pull request owner.') + if pull_request.is_closed(): + raise JSONRPCError('Cannot edit reviewers of a closed pull request.') + + if not isinstance(add, list): + add = [add] + if not isinstance(remove, list): + remove = [remove] + + # look up actual user objects from given name or id. Bail out if unknown. + add_objs = set(get_user_or_error(user) for user in add if user is not None) + remove_objs = set(get_user_or_error(user) for user in remove if user is not None) + + new_reviewers = redundant_reviewers = set() + if add_objs: + new_reviewers, redundant_reviewers = PullRequestModel().add_reviewers(apiuser, pull_request, add_objs) + if remove_objs: + PullRequestModel().remove_reviewers(apiuser, pull_request, remove_objs) + + meta.Session().commit() + + return { + 'added': [x.username for x in new_reviewers], + 'already_present': [x.username for x in redundant_reviewers], + # NOTE: no explicit check that removed reviewers were actually present. 
+ 'removed': [x.username for x in remove_objs], + } diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/changelog.py --- a/kallithea/controllers/changelog.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/changelog.py Fri Oct 30 23:44:18 2020 +0100 @@ -34,13 +34,13 @@ from webob.exc import HTTPBadRequest, HTTPFound, HTTPNotFound import kallithea.lib.helpers as h -from kallithea.config.routing import url from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired from kallithea.lib.base import BaseRepoController, render from kallithea.lib.graphmod import graph_data from kallithea.lib.page import Page from kallithea.lib.utils2 import safe_int from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, NodeDoesNotExistError, RepositoryError +from kallithea.lib.webutils import url log = logging.getLogger(__name__) diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/changeset.py --- a/kallithea/controllers/changeset.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/changeset.py Fri Oct 30 23:44:18 2020 +0100 @@ -28,7 +28,7 @@ import binascii import logging import traceback -from collections import OrderedDict, defaultdict +from collections import OrderedDict from tg import request, response from tg import tmpl_context as c @@ -44,128 +44,15 @@ from kallithea.lib.utils2 import ascii_str, safe_str from kallithea.lib.vcs.backends.base import EmptyChangeset from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError +from kallithea.model import db, meta from kallithea.model.changeset_status import ChangesetStatusModel from kallithea.model.comment import ChangesetCommentsModel -from kallithea.model.db import ChangesetComment, ChangesetStatus -from kallithea.model.meta import Session from kallithea.model.pull_request import PullRequestModel log = logging.getLogger(__name__) -def _update_with_GET(params, GET): - for k in ['diff1', 'diff2', 'diff']: - params[k] += GET.getall(k) - - -def anchor_url(revision, path, GET): - fid = h.FID(revision, path) - return h.url.current(anchor=fid, **dict(GET)) - - -def get_ignore_ws(fid, GET): - ig_ws_global = GET.get('ignorews') - ig_ws = [k for k in GET.getall(fid) if k.startswith('WS')] - if ig_ws: - try: - return int(ig_ws[0].split(':')[-1]) - except ValueError: - raise HTTPBadRequest() - return ig_ws_global - - -def _ignorews_url(GET, fileid=None): - fileid = str(fileid) if fileid else None - params = defaultdict(list) - _update_with_GET(params, GET) - lbl = _('Show whitespace') - ig_ws = get_ignore_ws(fileid, GET) - ln_ctx = get_line_ctx(fileid, GET) - # global option - if fileid is None: - if ig_ws is None: - params['ignorews'] += [1] - lbl = _('Ignore whitespace') - ctx_key = 'context' - ctx_val = ln_ctx - # per file options - else: - if ig_ws is None: - params[fileid] += ['WS:1'] - lbl = _('Ignore whitespace') - - ctx_key = fileid - ctx_val = 'C:%s' % ln_ctx - # if we have passed in ln_ctx pass it along to our params - if ln_ctx: - params[ctx_key] += [ctx_val] - - params['anchor'] = fileid - icon = h.literal('') - return h.link_to(icon, h.url.current(**params), title=lbl, **{'data-toggle': 'tooltip'}) - - -def get_line_ctx(fid, GET): - ln_ctx_global = GET.get('context') - if fid: - ln_ctx = [k for k in GET.getall(fid) if k.startswith('C')] - else: - _ln_ctx = [k for k in GET if k.startswith('C')] - ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global - if ln_ctx: - ln_ctx = [ln_ctx] - - if ln_ctx: - 
retval = ln_ctx[0].split(':')[-1] - else: - retval = ln_ctx_global - - try: - return int(retval) - except Exception: - return 3 - - -def _context_url(GET, fileid=None): - """ - Generates url for context lines - - :param fileid: - """ - - fileid = str(fileid) if fileid else None - ig_ws = get_ignore_ws(fileid, GET) - ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2 - - params = defaultdict(list) - _update_with_GET(params, GET) - - # global option - if fileid is None: - if ln_ctx > 0: - params['context'] += [ln_ctx] - - if ig_ws: - ig_ws_key = 'ignorews' - ig_ws_val = 1 - - # per file option - else: - params[fileid] += ['C:%s' % ln_ctx] - ig_ws_key = fileid - ig_ws_val = 'WS:%s' % 1 - - if ig_ws: - params[ig_ws_key] += [ig_ws_val] - - lbl = _('Increase diff context to %(num)s lines') % {'num': ln_ctx} - - params['anchor'] = fileid - icon = h.literal('') - return h.link_to(icon, h.url.current(**params), title=lbl, **{'data-toggle': 'tooltip'}) - - def create_cs_pr_comment(repo_name, revision=None, pull_request=None, allowed_to_change_status=True): """ Add a comment to the specified changeset or pull request, using POST values @@ -209,7 +96,7 @@ h.HasRepoPermissionLevel('admin')(pull_request.other_repo.repo_name) ) and not pull_request.is_closed(): PullRequestModel().delete(pull_request) - Session().commit() + meta.Session().commit() h.flash(_('Successfully deleted pull request %s') % pull_request_id, category='success') return { @@ -227,7 +114,7 @@ pull_request=pull_request_id, f_path=f_path or None, line_no=line_no or None, - status_change=ChangesetStatus.get_status_lbl(status) if status else None, + status_change=db.ChangesetStatus.get_status_lbl(status) if status else None, closing_pr=close_pr, ) @@ -253,7 +140,7 @@ 'user_closed_pull_request:%s' % pull_request_id, c.db_repo, request.ip_addr) - Session().commit() + meta.Session().commit() data = { 'target_id': h.safeid(request.POST.get('f_path')), @@ -268,7 +155,7 @@ def delete_cs_pr_comment(repo_name, comment_id): """Delete a comment from a changeset or pull request""" - co = ChangesetComment.get_or_404(comment_id) + co = db.ChangesetComment.get_or_404(comment_id) if co.repo.repo_name != repo_name: raise HTTPNotFound() if co.pull_request and co.pull_request.is_closed(): @@ -279,7 +166,7 @@ repo_admin = h.HasRepoPermissionLevel('admin')(repo_name) if h.HasPermissionAny('hg.admin')() or repo_admin or owner: ChangesetCommentsModel().delete(comment=co) - Session().commit() + meta.Session().commit() return True else: raise HTTPForbidden() @@ -292,9 +179,6 @@ def _index(self, revision, method): c.pull_request = None - c.anchor_url = anchor_url - c.ignorews_url = _ignorews_url - c.context_url = _context_url c.fulldiff = request.GET.get('fulldiff') # for reporting number of changed files # get ranges of revisions if preset rev_range = revision.split('...')[:2] @@ -325,7 +209,7 @@ c.lines_added = 0 # count of lines added c.lines_deleted = 0 # count of lines removes - c.changeset_statuses = ChangesetStatus.STATUSES + c.changeset_statuses = db.ChangesetStatus.STATUSES comments = dict() c.statuses = [] c.inline_comments = [] @@ -357,11 +241,10 @@ cs2 = changeset.raw_id cs1 = changeset.parents[0].raw_id if changeset.parents else EmptyChangeset().raw_id - context_lcl = get_line_ctx('', request.GET) - ign_whitespace_lcl = get_ignore_ws('', request.GET) - + ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET) + diff_context_size = h.get_diff_context_size(request.GET) raw_diff = diffs.get_diff(c.db_repo_scm_instance, cs1, cs2, - 
ignore_whitespace=ign_whitespace_lcl, context=context_lcl) + ignore_whitespace=ignore_whitespace_diff, context=diff_context_size) diff_limit = None if c.fulldiff else self.cut_off_limit file_diff_data = [] if method == 'show': diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/compare.py --- a/kallithea/controllers/compare.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/compare.py Fri Oct 30 23:44:18 2020 +0100 @@ -28,23 +28,19 @@ import logging -import re -import mercurial.unionrepo from tg import request from tg import tmpl_context as c from tg.i18n import ugettext as _ from webob.exc import HTTPBadRequest, HTTPFound, HTTPNotFound -from kallithea.config.routing import url -from kallithea.controllers.changeset import _context_url, _ignorews_url from kallithea.lib import diffs from kallithea.lib import helpers as h from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired from kallithea.lib.base import BaseRepoController, render from kallithea.lib.graphmod import graph_data -from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes, safe_int -from kallithea.model.db import Repository +from kallithea.lib.webutils import url +from kallithea.model import db log = logging.getLogger(__name__) @@ -63,7 +59,7 @@ if other_repo is None: c.cs_repo = c.a_repo else: - c.cs_repo = Repository.get_by_repo_name(other_repo) + c.cs_repo = db.Repository.get_by_repo_name(other_repo) if c.cs_repo is None: msg = _('Could not find other repository %s') % other_repo h.flash(msg, category='error') @@ -75,104 +71,6 @@ h.flash(msg, category='error') raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name)) - @staticmethod - def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev): - """ - Returns lists of changesets that can be merged from org_repo@org_rev - to other_repo@other_rev - ... and the other way - ... and the ancestors that would be used for merge - - :param org_repo: repo object, that is most likely the original repo we forked from - :param org_rev: the revision we want our compare to be made - :param other_repo: repo object, most likely the fork of org_repo. It has - all changesets that we need to obtain - :param other_rev: revision we want out compare to be made on other_repo - """ - ancestors = None - if org_rev == other_rev: - org_changesets = [] - other_changesets = [] - - elif alias == 'hg': - # case two independent repos - if org_repo != other_repo: - hgrepo = mercurial.unionrepo.makeunionrepository(other_repo.baseui, - safe_bytes(other_repo.path), - safe_bytes(org_repo.path)) - # all ancestors of other_rev will be in other_repo and - # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot - - # no remote compare do it on the same repository - else: - hgrepo = other_repo._repo - - ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in - hgrepo.revs(b"id(%s) & ::id(%s)", ascii_bytes(other_rev), ascii_bytes(org_rev))] - if ancestors: - log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev) - else: - log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev) - ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in - hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive! 
- - other_changesets = [ - other_repo.get_changeset(rev) - for rev in hgrepo.revs( - b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", - ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev)) - ] - org_changesets = [ - org_repo.get_changeset(ascii_str(hgrepo[rev].hex())) - for rev in hgrepo.revs( - b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", - ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev)) - ] - - elif alias == 'git': - if org_repo != other_repo: - from dulwich.repo import Repo - from dulwich.client import SubprocessGitClient - - gitrepo = Repo(org_repo.path) - SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo) - - gitrepo_remote = Repo(other_repo.path) - SubprocessGitClient(thin_packs=False).fetch(org_repo.path, gitrepo_remote) - - revs = [ - ascii_str(x.commit.id) - for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)], - exclude=[ascii_bytes(org_rev)]) - ] - other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)] - if other_changesets: - ancestors = [other_changesets[0].parents[0].raw_id] - else: - # no changesets from other repo, ancestor is the other_rev - ancestors = [other_rev] - - gitrepo.close() - gitrepo_remote.close() - - else: - so = org_repo.run_git_command( - ['log', '--reverse', '--pretty=format:%H', - '-s', '%s..%s' % (org_rev, other_rev)] - ) - other_changesets = [org_repo.get_changeset(cs) - for cs in re.findall(r'[0-9a-fA-F]{40}', so)] - so = org_repo.run_git_command( - ['merge-base', org_rev, other_rev] - ) - ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]] - org_changesets = [] - - else: - raise Exception('Bad alias only git and hg is allowed') - - return other_changesets, org_changesets, ancestors - @LoginRequired(allow_default_user=True) @HasRepoPermissionLevelDecorator('read') def index(self, repo_name): @@ -208,12 +106,8 @@ other_repo=c.a_repo.repo_name, other_ref_type=org_ref_type, other_ref_name=org_ref_name, merge=merge or '') - - # set callbacks for generating markup for icons - c.ignorews_url = _ignorews_url - c.context_url = _context_url - ignore_whitespace = request.GET.get('ignorews') == '1' - line_context = safe_int(request.GET.get('context'), 3) + ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET) + diff_context_size = h.get_diff_context_size(request.GET) c.a_rev = self._get_ref_rev(c.a_repo, org_ref_type, org_ref_name, returnempty=True) @@ -225,9 +119,8 @@ c.cs_ref_name = other_ref_name c.cs_ref_type = other_ref_type - c.cs_ranges, c.cs_ranges_org, c.ancestors = self._get_changesets( - c.a_repo.scm_instance.alias, c.a_repo.scm_instance, c.a_rev, - c.cs_repo.scm_instance, c.cs_rev) + c.cs_ranges, c.cs_ranges_org, c.ancestors = c.a_repo.scm_instance.get_diff_changesets( + c.a_rev, c.cs_repo.scm_instance, c.cs_rev) raw_ids = [x.raw_id for x in c.cs_ranges] c.cs_comments = c.cs_repo.get_comments(raw_ids) c.cs_statuses = c.cs_repo.statuses(raw_ids) @@ -275,8 +168,8 @@ log.debug('running diff between %s and %s in %s', rev1, c.cs_rev, org_repo.scm_instance.path) raw_diff = diffs.get_diff(org_repo.scm_instance, rev1=rev1, rev2=c.cs_rev, - ignore_whitespace=ignore_whitespace, - context=line_context) + ignore_whitespace=ignore_whitespace_diff, + context=diff_context_size) diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit) c.limited_diff = diff_processor.limited_diff diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/feed.py --- a/kallithea/controllers/feed.py Wed Oct 28 14:58:18 2020 +0100 +++ 
b/kallithea/controllers/feed.py Fri Oct 30 23:44:18 2020 +0100 @@ -33,13 +33,13 @@ from tg import tmpl_context as c from tg.i18n import ugettext as _ -from kallithea import CONFIG +import kallithea from kallithea.lib import feeds from kallithea.lib import helpers as h from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired from kallithea.lib.base import BaseRepoController from kallithea.lib.diffs import DiffProcessor -from kallithea.lib.utils2 import safe_int, safe_str, str2bool +from kallithea.lib.utils2 import asbool, safe_int, safe_str log = logging.getLogger(__name__) @@ -67,7 +67,7 @@ desc_msg.append('tag: %s
' % tag) changes = [] - diff_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024)) + diff_limit = safe_int(kallithea.CONFIG.get('rss_cut_off_limit', 32 * 1024)) raw_diff = cs.diff() diff_processor = DiffProcessor(raw_diff, diff_limit=diff_limit, @@ -92,7 +92,7 @@ desc_msg.append(h.urlify_text(cs.message)) desc_msg.append('\n') desc_msg.extend(changes) - if str2bool(CONFIG.get('rss_include_diff', False)): + if asbool(kallithea.CONFIG.get('rss_include_diff', False)): desc_msg.append('\n\n') desc_msg.append(safe_str(raw_diff)) desc_msg.append('') @@ -109,7 +109,7 @@ description=_('Changes on %s repository') % repo_name, ) - rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20)) + rss_items_per_page = safe_int(kallithea.CONFIG.get('rss_items_per_page', 20)) entries=[] for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])): entries.append(dict( diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/files.py --- a/kallithea/controllers/files.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/files.py Fri Oct 30 23:44:18 2020 +0100 @@ -38,21 +38,20 @@ from tg.i18n import ugettext as _ from webob.exc import HTTPFound, HTTPNotFound -from kallithea.config.routing import url -from kallithea.controllers.changeset import _context_url, _ignorews_url, anchor_url, get_ignore_ws, get_line_ctx +import kallithea from kallithea.lib import diffs from kallithea.lib import helpers as h from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired from kallithea.lib.base import BaseRepoController, jsonify, render from kallithea.lib.exceptions import NonRelativePathError from kallithea.lib.utils import action_logger -from kallithea.lib.utils2 import convert_line_endings, detect_mode, safe_int, safe_str, str2bool +from kallithea.lib.utils2 import asbool, convert_line_endings, detect_mode, safe_str from kallithea.lib.vcs.backends.base import EmptyChangeset from kallithea.lib.vcs.conf import settings from kallithea.lib.vcs.exceptions import (ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError, NodeDoesNotExistError, NodeError, RepositoryError, VCSError) from kallithea.lib.vcs.nodes import FileNode -from kallithea.model import db +from kallithea.lib.webutils import url from kallithea.model.repo import RepoModel from kallithea.model.scm import ScmModel @@ -233,7 +232,7 @@ file_node = self.__get_filenode(cs, f_path) response.content_disposition = \ - 'attachment; filename=%s' % f_path.split(db.URL_SEP)[-1] + 'attachment; filename=%s' % f_path.split(kallithea.URL_SEP)[-1] response.content_type = file_node.mimetype return file_node.content @@ -505,13 +504,12 @@ except (ImproperArchiveTypeError, KeyError): return _('Unknown archive type') - from kallithea import CONFIG rev_name = cs.raw_id[:12] archive_name = '%s-%s%s' % (repo_name.replace('/', '_'), rev_name, ext) archive_path = None cached_archive_path = None - archive_cache_dir = CONFIG.get('archive_cache_dir') + archive_cache_dir = kallithea.CONFIG.get('archive_cache_dir') if archive_cache_dir and not subrepos: # TODO: subrepo caching? 
if not os.path.isdir(archive_cache_dir): os.makedirs(archive_cache_dir) @@ -558,8 +556,8 @@ @LoginRequired(allow_default_user=True) @HasRepoPermissionLevelDecorator('read') def diff(self, repo_name, f_path): - ignore_whitespace = request.GET.get('ignorews') == '1' - line_context = safe_int(request.GET.get('context'), 3) + ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET) + diff_context_size = h.get_diff_context_size(request.GET) diff2 = request.GET.get('diff2', '') diff1 = request.GET.get('diff1', '') or diff2 c.action = request.GET.get('diff') @@ -567,9 +565,6 @@ c.f_path = f_path c.big_diff = False fulldiff = request.GET.get('fulldiff') - c.anchor_url = anchor_url - c.ignorews_url = _ignorews_url - c.context_url = _context_url c.changes = OrderedDict() c.changes[diff2] = [] @@ -577,7 +572,7 @@ # to reduce JS and callbacks if request.GET.get('show_rev'): - if str2bool(request.GET.get('annotate', 'False')): + if asbool(request.GET.get('annotate', 'False')): _url = url('files_annotate_home', repo_name=c.repo_name, revision=diff1, f_path=c.f_path) else: @@ -624,8 +619,8 @@ if c.action == 'download': raw_diff = diffs.get_gitdiff(node1, node2, - ignore_whitespace=ignore_whitespace, - context=line_context) + ignore_whitespace=ignore_whitespace_diff, + context=diff_context_size) diff_name = '%s_vs_%s.diff' % (diff1, diff2) response.content_type = 'text/plain' response.content_disposition = ( @@ -635,25 +630,21 @@ elif c.action == 'raw': raw_diff = diffs.get_gitdiff(node1, node2, - ignore_whitespace=ignore_whitespace, - context=line_context) + ignore_whitespace=ignore_whitespace_diff, + context=diff_context_size) response.content_type = 'text/plain' return raw_diff else: fid = h.FID(diff2, node2.path) - line_context_lcl = get_line_ctx(fid, request.GET) - ign_whitespace_lcl = get_ignore_ws(fid, request.GET) - diff_limit = None if fulldiff else self.cut_off_limit c.a_rev, c.cs_rev, a_path, diff, st, op = diffs.wrapped_diff(filenode_old=node1, filenode_new=node2, diff_limit=diff_limit, - ignore_whitespace=ign_whitespace_lcl, - line_context=line_context_lcl, + ignore_whitespace=ignore_whitespace_diff, + line_context=diff_context_size, enable_comments=False) c.file_diff_data = [(fid, fid, op, a_path, node2.path, diff, st)] - return render('files/file_diff.html') @LoginRequired(allow_default_user=True) diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/followers.py --- a/kallithea/controllers/followers.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/followers.py Fri Oct 30 23:44:18 2020 +0100 @@ -34,7 +34,7 @@ from kallithea.lib.base import BaseRepoController, render from kallithea.lib.page import Page from kallithea.lib.utils2 import safe_int -from kallithea.model.db import UserFollowing +from kallithea.model import db log = logging.getLogger(__name__) @@ -47,8 +47,8 @@ def followers(self, repo_name): p = safe_int(request.GET.get('page'), 1) repo_id = c.db_repo.repo_id - d = UserFollowing.get_repo_followers(repo_id) \ - .order_by(UserFollowing.follows_from) + d = db.UserFollowing.get_repo_followers(repo_id) \ + .order_by(db.UserFollowing.follows_from) c.followers_pager = Page(d, page=p, items_per_page=20) if request.environ.get('HTTP_X_PARTIAL_XHR'): diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/forks.py --- a/kallithea/controllers/forks.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/forks.py Fri Oct 30 23:44:18 2020 +0100 @@ -37,12 +37,12 @@ import kallithea import kallithea.lib.helpers as h -from kallithea.config.routing import url 
-from kallithea.lib.auth import HasPermissionAny, HasPermissionAnyDecorator, HasRepoPermissionLevel, HasRepoPermissionLevelDecorator, LoginRequired +from kallithea.lib.auth import HasPermissionAnyDecorator, HasRepoPermissionLevel, HasRepoPermissionLevelDecorator, LoginRequired from kallithea.lib.base import BaseRepoController, render from kallithea.lib.page import Page from kallithea.lib.utils2 import safe_int -from kallithea.model.db import Repository, Ui, UserFollowing +from kallithea.lib.webutils import url +from kallithea.model import db from kallithea.model.forms import RepoForkForm from kallithea.model.repo import RepoModel from kallithea.model.scm import AvailableRepoGroupChoices, ScmModel @@ -54,15 +54,11 @@ class ForksController(BaseRepoController): def __load_defaults(self): - if HasPermissionAny('hg.create.write_on_repogroup.true')(): - repo_group_perm_level = 'write' - else: - repo_group_perm_level = 'admin' - c.repo_groups = AvailableRepoGroupChoices(['hg.create.repository'], repo_group_perm_level) + c.repo_groups = AvailableRepoGroupChoices('write') c.landing_revs_choices, c.landing_revs = ScmModel().get_repo_landing_revs() - c.can_update = Ui.get_by_key('hooks', Ui.HOOK_UPDATE).ui_active + c.can_update = db.Ui.get_by_key('hooks', db.Ui.HOOK_UPDATE).ui_active def __load_data(self): """ @@ -78,9 +74,9 @@ raise HTTPFound(location=url('repos')) c.default_user_id = kallithea.DEFAULT_USER_ID - c.in_public_journal = UserFollowing.query() \ - .filter(UserFollowing.user_id == c.default_user_id) \ - .filter(UserFollowing.follows_repository == c.repo_info).scalar() + c.in_public_journal = db.UserFollowing.query() \ + .filter(db.UserFollowing.user_id == c.default_user_id) \ + .filter(db.UserFollowing.follows_repository == c.repo_info).scalar() if c.repo_info.stats: last_rev = c.repo_info.stats.stat_on_revision + 1 @@ -112,7 +108,7 @@ p = safe_int(request.GET.get('page'), 1) repo_id = c.db_repo.repo_id d = [] - for r in Repository.get_repo_forks(repo_id): + for r in db.Repository.get_repo_forks(repo_id): if not HasRepoPermissionLevel('read')(r.repo_name, 'get forks check'): continue d.append(r) @@ -127,7 +123,7 @@ @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository') @HasRepoPermissionLevelDecorator('read') def fork(self, repo_name): - c.repo_info = Repository.get_by_repo_name(repo_name) + c.repo_info = db.Repository.get_by_repo_name(repo_name) if not c.repo_info: h.not_mapped_error(repo_name) raise HTTPFound(location=url('home')) @@ -145,7 +141,7 @@ @HasRepoPermissionLevelDecorator('read') def fork_create(self, repo_name): self.__load_defaults() - c.repo_info = Repository.get_by_repo_name(repo_name) + c.repo_info = db.Repository.get_by_repo_name(repo_name) _form = RepoForkForm(old_data={'repo_type': c.repo_info.repo_type}, repo_groups=c.repo_groups, landing_revs=c.landing_revs_choices)() @@ -155,7 +151,7 @@ form_result = _form.to_python(dict(request.POST)) # an approximation that is better than nothing - if not Ui.get_by_key('hooks', Ui.HOOK_UPDATE).ui_active: + if not db.Ui.get_by_key('hooks', db.Ui.HOOK_UPDATE).ui_active: form_result['update_after_clone'] = False # create fork is done sometimes async on celery, db transaction diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/home.py --- a/kallithea/controllers/home.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/home.py Fri Oct 30 23:44:18 2020 +0100 @@ -38,7 +38,7 @@ from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired from kallithea.lib.base import BaseController, jsonify, 
render from kallithea.lib.utils2 import safe_str -from kallithea.model.db import RepoGroup, Repository, User, UserGroup +from kallithea.model import db from kallithea.model.repo import RepoModel from kallithea.model.scm import UserGroupList @@ -56,7 +56,7 @@ c.group = None repo_groups_list = self.scm_model.get_repo_groups() - repos_list = Repository.query(sorted=True).filter_by(group=None).all() + repos_list = db.Repository.query(sorted=True).filter_by(group=None).all() c.data = RepoModel().get_repos_as_dict(repos_list, repo_groups_list=repo_groups_list, @@ -68,9 +68,9 @@ @jsonify def repo_switcher_data(self): if request.is_xhr: - all_repos = Repository.query(sorted=True).all() + all_repos = db.Repository.query(sorted=True).all() repo_iter = self.scm_model.get_repos(all_repos) - all_groups = RepoGroup.query(sorted=True).all() + all_groups = db.RepoGroup.query(sorted=True).all() repo_groups_iter = self.scm_model.get_repo_groups(all_groups) res = [{ @@ -111,7 +111,7 @@ @HasRepoPermissionLevelDecorator('read') @jsonify def repo_refs_data(self, repo_name): - repo = Repository.get_by_repo_name(repo_name).scm_instance + repo = db.Repository.get_by_repo_name(repo_name).scm_instance res = [] _branches = repo.branches.items() if _branches: @@ -163,19 +163,20 @@ if 'users' in types: user_list = [] if key: - u = User.get_by_username(key) + u = db.User.get_by_username(key) if u: user_list = [u] elif query: - user_list = User.query() \ - .filter(User.is_default_user == False) \ - .filter(User.active == True) \ + user_list = db.User.query() \ + .filter(db.User.is_default_user == False) \ + .filter(db.User.active == True) \ .filter(or_( - User.username.ilike("%%" + query + "%%"), - User.name.ilike("%%" + query + "%%"), - User.lastname.ilike("%%" + query + "%%"), + db.User.username.ilike("%%" + query + "%%"), + db.User.name.concat(' ').concat(db.User.lastname).ilike("%%" + query + "%%"), + db.User.lastname.concat(' ').concat(db.User.name).ilike("%%" + query + "%%"), + db.User.email.ilike("%%" + query + "%%"), )) \ - .order_by(User.username) \ + .order_by(db.User.username) \ .limit(500) \ .all() for u in user_list: @@ -191,14 +192,14 @@ if 'groups' in types: grp_list = [] if key: - grp = UserGroup.get_by_group_name(key) + grp = db.UserGroup.get_by_group_name(key) if grp: grp_list = [grp] elif query: - grp_list = UserGroup.query() \ - .filter(UserGroup.users_group_name.ilike("%%" + query + "%%")) \ - .filter(UserGroup.users_group_active == True) \ - .order_by(UserGroup.users_group_name) \ + grp_list = db.UserGroup.query() \ + .filter(db.UserGroup.users_group_name.ilike("%%" + query + "%%")) \ + .filter(db.UserGroup.users_group_active == True) \ + .order_by(db.UserGroup.users_group_name) \ .limit(500) \ .all() for g in UserGroupList(grp_list, perm_level='read'): diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/journal.py --- a/kallithea/controllers/journal.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/journal.py Fri Oct 30 23:44:18 2020 +0100 @@ -43,8 +43,7 @@ from kallithea.lib.base import BaseController, render from kallithea.lib.page import Page from kallithea.lib.utils2 import AttributeDict, safe_int -from kallithea.model.db import Repository, User, UserFollowing, UserLog -from kallithea.model.meta import Session +from kallithea.model import db, meta from kallithea.model.repo import RepoModel @@ -84,20 +83,20 @@ filtering_criterion = None if repo_ids and user_ids: - filtering_criterion = or_(UserLog.repository_id.in_(repo_ids), - UserLog.user_id.in_(user_ids)) + 
filtering_criterion = or_(db.UserLog.repository_id.in_(repo_ids), + db.UserLog.user_id.in_(user_ids)) if repo_ids and not user_ids: - filtering_criterion = UserLog.repository_id.in_(repo_ids) + filtering_criterion = db.UserLog.repository_id.in_(repo_ids) if not repo_ids and user_ids: - filtering_criterion = UserLog.user_id.in_(user_ids) + filtering_criterion = db.UserLog.user_id.in_(user_ids) if filtering_criterion is not None: - journal = UserLog.query() \ - .options(joinedload(UserLog.user)) \ - .options(joinedload(UserLog.repository)) + journal = db.UserLog.query() \ + .options(joinedload(db.UserLog.user)) \ + .options(joinedload(db.UserLog.repository)) # filter journal = _journal_filter(journal, c.search_term) journal = journal.filter(filtering_criterion) \ - .order_by(UserLog.action_date.desc()) + .order_by(db.UserLog.action_date.desc()) else: journal = [] @@ -166,10 +165,10 @@ def index(self): # Return a rendered template p = safe_int(request.GET.get('page'), 1) - c.user = User.get(request.authuser.user_id) - c.following = UserFollowing.query() \ - .filter(UserFollowing.user_id == request.authuser.user_id) \ - .options(joinedload(UserFollowing.follows_repository)) \ + c.user = db.User.get(request.authuser.user_id) + c.following = db.UserFollowing.query() \ + .filter(db.UserFollowing.user_id == request.authuser.user_id) \ + .options(joinedload(db.UserFollowing.follows_repository)) \ .all() journal = self._get_journal_data(c.following) @@ -181,7 +180,7 @@ if request.environ.get('HTTP_X_PARTIAL_XHR'): return render('journal/journal_data.html') - repos_list = Repository.query(sorted=True) \ + repos_list = db.Repository.query(sorted=True) \ .filter_by(owner_id=request.authuser.user_id).all() repos_data = RepoModel().get_repos_as_dict(repos_list, admin=True) @@ -193,18 +192,18 @@ @LoginRequired() def journal_atom(self): """Produce a simple atom-1.0 feed""" - following = UserFollowing.query() \ - .filter(UserFollowing.user_id == request.authuser.user_id) \ - .options(joinedload(UserFollowing.follows_repository)) \ + following = db.UserFollowing.query() \ + .filter(db.UserFollowing.user_id == request.authuser.user_id) \ + .options(joinedload(db.UserFollowing.follows_repository)) \ .all() return self._atom_feed(following, public=False) @LoginRequired() def journal_rss(self): """Produce a simple rss2 feed""" - following = UserFollowing.query() \ - .filter(UserFollowing.user_id == request.authuser.user_id) \ - .options(joinedload(UserFollowing.follows_repository)) \ + following = db.UserFollowing.query() \ + .filter(db.UserFollowing.user_id == request.authuser.user_id) \ + .options(joinedload(db.UserFollowing.follows_repository)) \ .all() return self._rss_feed(following, public=False) @@ -215,7 +214,7 @@ try: self.scm_model.toggle_following_user(user_id, request.authuser.user_id) - Session().commit() + meta.Session().commit() return 'ok' except Exception: log.error(traceback.format_exc()) @@ -226,7 +225,7 @@ try: self.scm_model.toggle_following_repo(repo_id, request.authuser.user_id) - Session().commit() + meta.Session().commit() return 'ok' except Exception: log.error(traceback.format_exc()) @@ -239,9 +238,9 @@ # Return a rendered template p = safe_int(request.GET.get('page'), 1) - c.following = UserFollowing.query() \ - .filter(UserFollowing.user_id == request.authuser.user_id) \ - .options(joinedload(UserFollowing.follows_repository)) \ + c.following = db.UserFollowing.query() \ + .filter(db.UserFollowing.user_id == request.authuser.user_id) \ + 
.options(joinedload(db.UserFollowing.follows_repository)) \ .all() journal = self._get_journal_data(c.following) @@ -258,9 +257,9 @@ @LoginRequired(allow_default_user=True) def public_journal_atom(self): """Produce a simple atom-1.0 feed""" - c.following = UserFollowing.query() \ - .filter(UserFollowing.user_id == request.authuser.user_id) \ - .options(joinedload(UserFollowing.follows_repository)) \ + c.following = db.UserFollowing.query() \ + .filter(db.UserFollowing.user_id == request.authuser.user_id) \ + .options(joinedload(db.UserFollowing.follows_repository)) \ .all() return self._atom_feed(c.following) @@ -268,9 +267,9 @@ @LoginRequired(allow_default_user=True) def public_journal_rss(self): """Produce a simple rss2 feed""" - c.following = UserFollowing.query() \ - .filter(UserFollowing.user_id == request.authuser.user_id) \ - .options(joinedload(UserFollowing.follows_repository)) \ + c.following = db.UserFollowing.query() \ + .filter(db.UserFollowing.user_id == request.authuser.user_id) \ + .options(joinedload(db.UserFollowing.follows_repository)) \ .all() return self._rss_feed(c.following) diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/login.py --- a/kallithea/controllers/login.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/login.py Fri Oct 30 23:44:18 2020 +0100 @@ -37,13 +37,12 @@ from webob.exc import HTTPBadRequest, HTTPFound import kallithea.lib.helpers as h -from kallithea.config.routing import url from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator from kallithea.lib.base import BaseController, log_in_user, render from kallithea.lib.exceptions import UserCreationError -from kallithea.model.db import Setting, User +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.forms import LoginForm, PasswordResetConfirmationForm, PasswordResetRequestForm, RegisterForm -from kallithea.model.meta import Session from kallithea.model.user import UserModel @@ -82,7 +81,7 @@ # login_form will check username/password using ValidAuth and report failure to the user c.form_result = login_form.to_python(dict(request.POST)) username = c.form_result['username'] - user = User.get_by_username_or_email(username) + user = db.User.get_by_username_or_email(username) assert user is not None # the same user get just passed in the form validation except formencode.Invalid as errors: defaults = errors.value @@ -118,10 +117,10 @@ @HasPermissionAnyDecorator('hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate') def register(self): - def_user_perms = AuthUser(dbuser=User.get_default_user()).permissions['global'] + def_user_perms = AuthUser(dbuser=db.User.get_default_user()).global_permissions c.auto_active = 'hg.register.auto_activate' in def_user_perms - settings = Setting.get_app_settings() + settings = db.Setting.get_app_settings() captcha_private_key = settings.get('captcha_private_key') c.captcha_active = bool(captcha_private_key) c.captcha_public_key = settings.get('captcha_public_key') @@ -147,7 +146,7 @@ UserModel().create_registration(form_result) h.flash(_('You have successfully registered with %s') % (c.site_name or 'Kallithea'), category='success') - Session().commit() + meta.Session().commit() raise HTTPFound(location=url('login_home')) except formencode.Invalid as errors: @@ -168,7 +167,7 @@ return render('/register.html') def password_reset(self): - settings = Setting.get_app_settings() + settings = db.Setting.get_app_settings() captcha_private_key = settings.get('captcha_private_key') 
c.captcha_active = bool(captcha_private_key) c.captcha_public_key = settings.get('captcha_public_key') diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/pullrequests.py --- a/kallithea/controllers/pullrequests.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/pullrequests.py Fri Oct 30 23:44:18 2020 +0100 @@ -35,8 +35,7 @@ from tg.i18n import ugettext as _ from webob.exc import HTTPBadRequest, HTTPForbidden, HTTPFound, HTTPNotFound -from kallithea.config.routing import url -from kallithea.controllers.changeset import _context_url, _ignorews_url, create_cs_pr_comment, delete_cs_pr_comment +from kallithea.controllers.changeset import create_cs_pr_comment, delete_cs_pr_comment from kallithea.lib import diffs from kallithea.lib import helpers as h from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired @@ -45,11 +44,11 @@ from kallithea.lib.page import Page from kallithea.lib.utils2 import ascii_bytes, safe_bytes, safe_int from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.changeset_status import ChangesetStatusModel from kallithea.model.comment import ChangesetCommentsModel -from kallithea.model.db import ChangesetStatus, PullRequest, PullRequestReviewer, Repository, User from kallithea.model.forms import PullRequestForm, PullRequestPostForm -from kallithea.model.meta import Session from kallithea.model.pull_request import CreatePullRequestAction, CreatePullRequestIterationAction, PullRequestModel @@ -59,7 +58,7 @@ def _get_reviewer(user_id): """Look up user by ID and validate it as a potential reviewer.""" try: - user = User.get(int(user_id)) + user = db.User.get(int(user_id)) except ValueError: user = None @@ -183,9 +182,9 @@ return False owner = request.authuser.user_id == pull_request.owner_id - reviewer = PullRequestReviewer.query() \ - .filter(PullRequestReviewer.pull_request == pull_request) \ - .filter(PullRequestReviewer.user_id == request.authuser.user_id) \ + reviewer = db.PullRequestReviewer.query() \ + .filter(db.PullRequestReviewer.pull_request == pull_request) \ + .filter(db.PullRequestReviewer.user_id == request.authuser.user_id) \ .count() != 0 return request.authuser.admin or owner or reviewer @@ -202,7 +201,7 @@ url_params['closed'] = 1 p = safe_int(request.GET.get('page'), 1) - q = PullRequest.query(include_closed=c.closed, sorted=True) + q = db.PullRequest.query(include_closed=c.closed, sorted=True) if c.from_: q = q.filter_by(org_repo=c.db_repo) else: @@ -217,15 +216,15 @@ def show_my(self): c.closed = request.GET.get('closed') or '' - c.my_pull_requests = PullRequest.query( + c.my_pull_requests = db.PullRequest.query( include_closed=c.closed, sorted=True, ).filter_by(owner_id=request.authuser.user_id).all() c.participate_in_pull_requests = [] c.participate_in_pull_requests_todo = [] - done_status = set([ChangesetStatus.STATUS_APPROVED, ChangesetStatus.STATUS_REJECTED]) - for pr in PullRequest.query( + done_status = set([db.ChangesetStatus.STATUS_APPROVED, db.ChangesetStatus.STATUS_REJECTED]) + for pr in db.PullRequest.query( include_closed=c.closed, reviewer_id=request.authuser.user_id, sorted=True, @@ -320,16 +319,16 @@ # heads up: org and other might seem backward here ... 
org_ref = _form['org_ref'] # will have merge_rev as rev but symbolic name - org_repo = Repository.guess_instance(_form['org_repo']) + org_repo = db.Repository.guess_instance(_form['org_repo']) other_ref = _form['other_ref'] # will have symbolic name and head revision - other_repo = Repository.guess_instance(_form['other_repo']) + other_repo = db.Repository.guess_instance(_form['other_repo']) reviewers = [] title = _form['pullrequest_title'] description = _form['pullrequest_desc'].strip() - owner = User.get(request.authuser.user_id) + owner = db.User.get(request.authuser.user_id) try: cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers) @@ -339,7 +338,7 @@ try: pull_request = cmd.execute() - Session().commit() + meta.Session().commit() except Exception: h.flash(_('Error occurred while creating pull request'), category='error') @@ -351,7 +350,7 @@ raise HTTPFound(location=pull_request.url()) def create_new_iteration(self, old_pull_request, new_rev, title, description, reviewers): - owner = User.get(request.authuser.user_id) + owner = db.User.get(request.authuser.user_id) new_org_rev = self._get_ref_rev(old_pull_request.org_repo, 'rev', new_rev) new_other_rev = self._get_ref_rev(old_pull_request.other_repo, old_pull_request.other_ref_parts[0], old_pull_request.other_ref_parts[1]) try: @@ -362,7 +361,7 @@ try: pull_request = cmd.execute() - Session().commit() + meta.Session().commit() except Exception: h.flash(_('Error occurred while creating pull request'), category='error') @@ -377,7 +376,7 @@ @LoginRequired() @HasRepoPermissionLevelDecorator('read') def post(self, repo_name, pull_request_id): - pull_request = PullRequest.get_or_404(pull_request_id) + pull_request = db.PullRequest.get_or_404(pull_request_id) if pull_request.is_closed(): raise HTTPForbidden() assert pull_request.other_repo.repo_name == repo_name @@ -418,14 +417,14 @@ old_description = pull_request.description pull_request.title = _form['pullrequest_title'] pull_request.description = _form['pullrequest_desc'].strip() or _('No description') - pull_request.owner = User.get_by_username(_form['owner']) - user = User.get(request.authuser.user_id) + pull_request.owner = db.User.get_by_username(_form['owner']) + user = db.User.get(request.authuser.user_id) PullRequestModel().mention_from_description(user, pull_request, old_description) PullRequestModel().add_reviewers(user, pull_request, added_reviewers) PullRequestModel().remove_reviewers(user, pull_request, removed_reviewers) - Session().commit() + meta.Session().commit() h.flash(_('Pull request updated'), category='success') raise HTTPFound(location=pull_request.url()) @@ -434,11 +433,11 @@ @HasRepoPermissionLevelDecorator('read') @jsonify def delete(self, repo_name, pull_request_id): - pull_request = PullRequest.get_or_404(pull_request_id) + pull_request = db.PullRequest.get_or_404(pull_request_id) # only owner can delete it ! 
if pull_request.owner_id == request.authuser.user_id: PullRequestModel().delete(pull_request) - Session().commit() + meta.Session().commit() h.flash(_('Successfully deleted pull request'), category='success') raise HTTPFound(location=url('my_pullrequests')) @@ -447,7 +446,7 @@ @LoginRequired(allow_default_user=True) @HasRepoPermissionLevelDecorator('read') def show(self, repo_name, pull_request_id, extra=None): - c.pull_request = PullRequest.get_or_404(pull_request_id) + c.pull_request = db.PullRequest.get_or_404(pull_request_id) c.allowed_to_change_status = self._is_allowed_to_change_status(c.pull_request) cc_model = ChangesetCommentsModel() cs_model = ChangesetStatusModel() @@ -571,10 +570,8 @@ c.cs_comments = c.cs_repo.get_comments(raw_ids) c.cs_statuses = c.cs_repo.statuses(raw_ids) - ignore_whitespace = request.GET.get('ignorews') == '1' - line_context = safe_int(request.GET.get('context'), 3) - c.ignorews_url = _ignorews_url - c.context_url = _context_url + ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET) + diff_context_size = h.get_diff_context_size(request.GET) fulldiff = request.GET.get('fulldiff') diff_limit = None if fulldiff else self.cut_off_limit @@ -583,7 +580,7 @@ c.a_rev, c.cs_rev, org_scm_instance.path) try: raw_diff = diffs.get_diff(org_scm_instance, rev1=c.a_rev, rev2=c.cs_rev, - ignore_whitespace=ignore_whitespace, context=line_context) + ignore_whitespace=ignore_whitespace_diff, context=diff_context_size) except ChangesetDoesNotExistError: raw_diff = safe_bytes(_("The diff can't be shown - the PR revisions could not be found.")) diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit) @@ -618,7 +615,7 @@ c.pull_request_pending_reviewers, c.current_voting_result, ) = cs_model.calculate_pull_request_result(c.pull_request) - c.changeset_statuses = ChangesetStatus.STATUSES + c.changeset_statuses = db.ChangesetStatus.STATUSES c.is_ajax_preview = False c.ancestors = None # [c.a_rev] ... but that is shown in an other way @@ -628,7 +625,7 @@ @HasRepoPermissionLevelDecorator('read') @jsonify def comment(self, repo_name, pull_request_id): - pull_request = PullRequest.get_or_404(pull_request_id) + pull_request = db.PullRequest.get_or_404(pull_request_id) allowed_to_change_status = self._is_allowed_to_change_status(pull_request) return create_cs_pr_comment(repo_name, pull_request=pull_request, allowed_to_change_status=allowed_to_change_status) diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/root.py --- a/kallithea/controllers/root.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/root.py Fri Oct 30 23:44:18 2020 +0100 @@ -14,8 +14,8 @@ from tg import config from tgext.routes import RoutedController -from kallithea.config.routing import make_map from kallithea.controllers.error import ErrorController +from kallithea.controllers.routing import make_map from kallithea.lib.base import BaseController diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/routing.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/controllers/routing.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,774 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +""" +Routes configuration + +The more specific and detailed routes should be defined first so they +may take precedent over the more generic routes. For more information +refer to the routes manual at http://routes.groovie.org/docs/ +""" + +import routes + +import kallithea +from kallithea.lib.utils2 import safe_str + + +class Mapper(routes.Mapper): + """ + Subclassed Mapper with routematch patched to decode "unicode" str url to + *real* unicode str before applying matches and invoking controller methods. + """ + + def routematch(self, url=None, environ=None): + """ + routematch that also decode url from "fake bytes" to real unicode + string before matching and invoking controllers. + """ + # Process url like get_path_info does ... but PATH_INFO has already + # been retrieved from environ and is passed, so - let's just use that + # instead. + url = safe_str(url.encode('latin1')) + return super().routematch(url=url, environ=environ) + + +def make_map(config): + """Create, configure and return the routes Mapper""" + rmap = Mapper(directory=config['paths']['controllers'], + always_scan=config['debug']) + rmap.minimization = False + rmap.explicit = False + + from kallithea.lib.utils import is_valid_repo, is_valid_repo_group + + def check_repo(environ, match_dict): + """ + Check for valid repository for proper 404 handling. + Also, a bit of side effect modifying match_dict ... + """ + if match_dict.get('f_path'): + # fix for multiple initial slashes that causes errors + match_dict['f_path'] = match_dict['f_path'].lstrip('/') + + return is_valid_repo(match_dict['repo_name'], config['base_path']) + + def check_group(environ, match_dict): + """ + check for valid repository group for proper 404 handling + + :param environ: + :param match_dict: + """ + repo_group_name = match_dict.get('group_name') + return is_valid_repo_group(repo_group_name, config['base_path']) + + def check_group_skip_path(environ, match_dict): + """ + check for valid repository group for proper 404 handling, but skips + verification of existing path + + :param environ: + :param match_dict: + """ + repo_group_name = match_dict.get('group_name') + return is_valid_repo_group(repo_group_name, config['base_path'], + skip_path_check=True) + + def check_user_group(environ, match_dict): + """ + check for valid user group for proper 404 handling + + :param environ: + :param match_dict: + """ + return True + + def check_int(environ, match_dict): + return match_dict.get('id').isdigit() + + #========================================================================== + # CUSTOM ROUTES HERE + #========================================================================== + + # MAIN PAGE + rmap.connect('home', '/', controller='home') + rmap.connect('about', '/about', controller='home', action='about') + rmap.redirect('/favicon.ico', '/images/favicon.ico') + rmap.connect('repo_switcher_data', '/_repos', controller='home', + action='repo_switcher_data') + rmap.connect('users_and_groups_data', '/_users_and_groups', controller='home', + action='users_and_groups_data') + + rmap.connect('rst_help', + "http://docutils.sourceforge.net/docs/user/rst/quickref.html", + _static=True) + 
rmap.connect('kallithea_project_url', "https://kallithea-scm.org/", _static=True) + rmap.connect('issues_url', 'https://bitbucket.org/conservancy/kallithea/issues', _static=True) + + # ADMIN REPOSITORY ROUTES + ADMIN_PREFIX = kallithea.ADMIN_PREFIX + with rmap.submapper(path_prefix=ADMIN_PREFIX, + controller='admin/repos') as m: + m.connect("repos", "/repos", + action="create", conditions=dict(method=["POST"])) + m.connect("repos", "/repos", + conditions=dict(method=["GET"])) + m.connect("new_repo", "/create_repository", + action="create_repository", conditions=dict(method=["GET"])) + m.connect("update_repo", "/repos/{repo_name:.*?}", + action="update", conditions=dict(method=["POST"], + function=check_repo)) + m.connect("delete_repo", "/repos/{repo_name:.*?}/delete", + action="delete", conditions=dict(method=["POST"])) + + # ADMIN REPOSITORY GROUPS ROUTES + with rmap.submapper(path_prefix=ADMIN_PREFIX, + controller='admin/repo_groups') as m: + m.connect("repos_groups", "/repo_groups", + action="create", conditions=dict(method=["POST"])) + m.connect("repos_groups", "/repo_groups", + conditions=dict(method=["GET"])) + m.connect("new_repos_group", "/repo_groups/new", + action="new", conditions=dict(method=["GET"])) + m.connect("update_repos_group", "/repo_groups/{group_name:.*?}", + action="update", conditions=dict(method=["POST"], + function=check_group)) + + m.connect("repos_group", "/repo_groups/{group_name:.*?}", + action="show", conditions=dict(method=["GET"], + function=check_group)) + + # EXTRAS REPO GROUP ROUTES + m.connect("edit_repo_group", "/repo_groups/{group_name:.*?}/edit", + action="edit", + conditions=dict(method=["GET"], function=check_group)) + + m.connect("edit_repo_group_advanced", "/repo_groups/{group_name:.*?}/edit/advanced", + action="edit_repo_group_advanced", + conditions=dict(method=["GET"], function=check_group)) + + m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions", + action="edit_repo_group_perms", + conditions=dict(method=["GET"], function=check_group)) + m.connect("edit_repo_group_perms_update", "/repo_groups/{group_name:.*?}/edit/permissions", + action="update_perms", + conditions=dict(method=["POST"], function=check_group)) + m.connect("edit_repo_group_perms_delete", "/repo_groups/{group_name:.*?}/edit/permissions/delete", + action="delete_perms", + conditions=dict(method=["POST"], function=check_group)) + + m.connect("delete_repo_group", "/repo_groups/{group_name:.*?}/delete", + action="delete", conditions=dict(method=["POST"], + function=check_group_skip_path)) + + # ADMIN USER ROUTES + with rmap.submapper(path_prefix=ADMIN_PREFIX, + controller='admin/users') as m: + m.connect("new_user", "/users/new", + action="create", conditions=dict(method=["POST"])) + m.connect("users", "/users", + conditions=dict(method=["GET"])) + m.connect("formatted_users", "/users.{format}", + conditions=dict(method=["GET"])) + m.connect("new_user", "/users/new", + action="new", conditions=dict(method=["GET"])) + m.connect("update_user", "/users/{id}", + action="update", conditions=dict(method=["POST"])) + m.connect("delete_user", "/users/{id}/delete", + action="delete", conditions=dict(method=["POST"])) + m.connect("edit_user", "/users/{id}/edit", + action="edit", conditions=dict(method=["GET"])) + + # EXTRAS USER ROUTES + m.connect("edit_user_advanced", "/users/{id}/edit/advanced", + action="edit_advanced", conditions=dict(method=["GET"])) + + m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys", + action="edit_api_keys", 
conditions=dict(method=["GET"])) + m.connect("edit_user_api_keys_update", "/users/{id}/edit/api_keys", + action="add_api_key", conditions=dict(method=["POST"])) + m.connect("edit_user_api_keys_delete", "/users/{id}/edit/api_keys/delete", + action="delete_api_key", conditions=dict(method=["POST"])) + + m.connect("edit_user_ssh_keys", "/users/{id}/edit/ssh_keys", + action="edit_ssh_keys", conditions=dict(method=["GET"])) + m.connect("edit_user_ssh_keys", "/users/{id}/edit/ssh_keys", + action="ssh_keys_add", conditions=dict(method=["POST"])) + m.connect("edit_user_ssh_keys_delete", "/users/{id}/edit/ssh_keys/delete", + action="ssh_keys_delete", conditions=dict(method=["POST"])) + + m.connect("edit_user_perms", "/users/{id}/edit/permissions", + action="edit_perms", conditions=dict(method=["GET"])) + m.connect("edit_user_perms_update", "/users/{id}/edit/permissions", + action="update_perms", conditions=dict(method=["POST"])) + + m.connect("edit_user_emails", "/users/{id}/edit/emails", + action="edit_emails", conditions=dict(method=["GET"])) + m.connect("edit_user_emails_update", "/users/{id}/edit/emails", + action="add_email", conditions=dict(method=["POST"])) + m.connect("edit_user_emails_delete", "/users/{id}/edit/emails/delete", + action="delete_email", conditions=dict(method=["POST"])) + + m.connect("edit_user_ips", "/users/{id}/edit/ips", + action="edit_ips", conditions=dict(method=["GET"])) + m.connect("edit_user_ips_update", "/users/{id}/edit/ips", + action="add_ip", conditions=dict(method=["POST"])) + m.connect("edit_user_ips_delete", "/users/{id}/edit/ips/delete", + action="delete_ip", conditions=dict(method=["POST"])) + + # ADMIN USER GROUPS REST ROUTES + with rmap.submapper(path_prefix=ADMIN_PREFIX, + controller='admin/user_groups') as m: + m.connect("users_groups", "/user_groups", + action="create", conditions=dict(method=["POST"])) + m.connect("users_groups", "/user_groups", + conditions=dict(method=["GET"])) + m.connect("new_users_group", "/user_groups/new", + action="new", conditions=dict(method=["GET"])) + m.connect("update_users_group", "/user_groups/{id}", + action="update", conditions=dict(method=["POST"])) + m.connect("delete_users_group", "/user_groups/{id}/delete", + action="delete", conditions=dict(method=["POST"])) + m.connect("edit_users_group", "/user_groups/{id}/edit", + action="edit", conditions=dict(method=["GET"]), + function=check_user_group) + + # EXTRAS USER GROUP ROUTES + m.connect("edit_user_group_default_perms", "/user_groups/{id}/edit/default_perms", + action="edit_default_perms", conditions=dict(method=["GET"])) + m.connect("edit_user_group_default_perms_update", "/user_groups/{id}/edit/default_perms", + action="update_default_perms", conditions=dict(method=["POST"])) + + m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms", + action="edit_perms", conditions=dict(method=["GET"])) + m.connect("edit_user_group_perms_update", "/user_groups/{id}/edit/perms", + action="update_perms", conditions=dict(method=["POST"])) + m.connect("edit_user_group_perms_delete", "/user_groups/{id}/edit/perms/delete", + action="delete_perms", conditions=dict(method=["POST"])) + + m.connect("edit_user_group_advanced", "/user_groups/{id}/edit/advanced", + action="edit_advanced", conditions=dict(method=["GET"])) + + m.connect("edit_user_group_members", "/user_groups/{id}/edit/members", + action="edit_members", conditions=dict(method=["GET"])) + + # ADMIN PERMISSIONS ROUTES + with rmap.submapper(path_prefix=ADMIN_PREFIX, + controller='admin/permissions') as m: + 
m.connect("admin_permissions", "/permissions", + action="permission_globals", conditions=dict(method=["POST"])) + m.connect("admin_permissions", "/permissions", + action="permission_globals", conditions=dict(method=["GET"])) + + m.connect("admin_permissions_ips", "/permissions/ips", + action="permission_ips", conditions=dict(method=["GET"])) + + m.connect("admin_permissions_perms", "/permissions/perms", + action="permission_perms", conditions=dict(method=["GET"])) + + # ADMIN DEFAULTS ROUTES + with rmap.submapper(path_prefix=ADMIN_PREFIX, + controller='admin/defaults') as m: + m.connect('defaults', '/defaults') + m.connect('defaults_update', 'defaults/{id}/update', + action="update", conditions=dict(method=["POST"])) + + # ADMIN AUTH SETTINGS + rmap.connect('auth_settings', '%s/auth' % ADMIN_PREFIX, + controller='admin/auth_settings', action='auth_settings', + conditions=dict(method=["POST"])) + rmap.connect('auth_home', '%s/auth' % ADMIN_PREFIX, + controller='admin/auth_settings') + + # ADMIN SETTINGS ROUTES + with rmap.submapper(path_prefix=ADMIN_PREFIX, + controller='admin/settings') as m: + m.connect("admin_settings", "/settings", + action="settings_vcs", conditions=dict(method=["POST"])) + m.connect("admin_settings", "/settings", + action="settings_vcs", conditions=dict(method=["GET"])) + + m.connect("admin_settings_mapping", "/settings/mapping", + action="settings_mapping", conditions=dict(method=["POST"])) + m.connect("admin_settings_mapping", "/settings/mapping", + action="settings_mapping", conditions=dict(method=["GET"])) + + m.connect("admin_settings_global", "/settings/global", + action="settings_global", conditions=dict(method=["POST"])) + m.connect("admin_settings_global", "/settings/global", + action="settings_global", conditions=dict(method=["GET"])) + + m.connect("admin_settings_visual", "/settings/visual", + action="settings_visual", conditions=dict(method=["POST"])) + m.connect("admin_settings_visual", "/settings/visual", + action="settings_visual", conditions=dict(method=["GET"])) + + m.connect("admin_settings_email", "/settings/email", + action="settings_email", conditions=dict(method=["POST"])) + m.connect("admin_settings_email", "/settings/email", + action="settings_email", conditions=dict(method=["GET"])) + + m.connect("admin_settings_hooks", "/settings/hooks", + action="settings_hooks", conditions=dict(method=["POST"])) + m.connect("admin_settings_hooks_delete", "/settings/hooks/delete", + action="settings_hooks", conditions=dict(method=["POST"])) + m.connect("admin_settings_hooks", "/settings/hooks", + action="settings_hooks", conditions=dict(method=["GET"])) + + m.connect("admin_settings_search", "/settings/search", + action="settings_search", conditions=dict(method=["POST"])) + m.connect("admin_settings_search", "/settings/search", + action="settings_search", conditions=dict(method=["GET"])) + + m.connect("admin_settings_system", "/settings/system", + action="settings_system", conditions=dict(method=["POST"])) + m.connect("admin_settings_system", "/settings/system", + action="settings_system", conditions=dict(method=["GET"])) + + # ADMIN MY ACCOUNT + with rmap.submapper(path_prefix=ADMIN_PREFIX, + controller='admin/my_account') as m: + + m.connect("my_account", "/my_account", + action="my_account", conditions=dict(method=["GET"])) + m.connect("my_account", "/my_account", + action="my_account", conditions=dict(method=["POST"])) + + m.connect("my_account_password", "/my_account/password", + action="my_account_password", conditions=dict(method=["GET"])) + 
m.connect("my_account_password", "/my_account/password", + action="my_account_password", conditions=dict(method=["POST"])) + + m.connect("my_account_repos", "/my_account/repos", + action="my_account_repos", conditions=dict(method=["GET"])) + + m.connect("my_account_watched", "/my_account/watched", + action="my_account_watched", conditions=dict(method=["GET"])) + + m.connect("my_account_perms", "/my_account/perms", + action="my_account_perms", conditions=dict(method=["GET"])) + + m.connect("my_account_emails", "/my_account/emails", + action="my_account_emails", conditions=dict(method=["GET"])) + m.connect("my_account_emails", "/my_account/emails", + action="my_account_emails_add", conditions=dict(method=["POST"])) + m.connect("my_account_emails_delete", "/my_account/emails/delete", + action="my_account_emails_delete", conditions=dict(method=["POST"])) + + m.connect("my_account_api_keys", "/my_account/api_keys", + action="my_account_api_keys", conditions=dict(method=["GET"])) + m.connect("my_account_api_keys", "/my_account/api_keys", + action="my_account_api_keys_add", conditions=dict(method=["POST"])) + m.connect("my_account_api_keys_delete", "/my_account/api_keys/delete", + action="my_account_api_keys_delete", conditions=dict(method=["POST"])) + + m.connect("my_account_ssh_keys", "/my_account/ssh_keys", + action="my_account_ssh_keys", conditions=dict(method=["GET"])) + m.connect("my_account_ssh_keys", "/my_account/ssh_keys", + action="my_account_ssh_keys_add", conditions=dict(method=["POST"])) + m.connect("my_account_ssh_keys_delete", "/my_account/ssh_keys/delete", + action="my_account_ssh_keys_delete", conditions=dict(method=["POST"])) + + # ADMIN GIST + with rmap.submapper(path_prefix=ADMIN_PREFIX, + controller='admin/gists') as m: + m.connect("gists", "/gists", + action="create", conditions=dict(method=["POST"])) + m.connect("gists", "/gists", + conditions=dict(method=["GET"])) + m.connect("new_gist", "/gists/new", + action="new", conditions=dict(method=["GET"])) + + m.connect("gist_delete", "/gists/{gist_id}/delete", + action="delete", conditions=dict(method=["POST"])) + m.connect("edit_gist", "/gists/{gist_id}/edit", + action="edit", conditions=dict(method=["GET", "POST"])) + m.connect("edit_gist_check_revision", "/gists/{gist_id}/edit/check_revision", + action="check_revision", conditions=dict(method=["POST"])) + + m.connect("gist", "/gists/{gist_id}", + action="show", conditions=dict(method=["GET"])) + m.connect("gist_rev", "/gists/{gist_id}/{revision}", + revision="tip", + action="show", conditions=dict(method=["GET"])) + m.connect("formatted_gist", "/gists/{gist_id}/{revision}/{format}", + revision="tip", + action="show", conditions=dict(method=["GET"])) + m.connect("formatted_gist_file", "/gists/{gist_id}/{revision}/{format}/{f_path:.*}", + revision='tip', + action="show", conditions=dict(method=["GET"])) + + # ADMIN MAIN PAGES + with rmap.submapper(path_prefix=ADMIN_PREFIX, + controller='admin/admin') as m: + m.connect('admin_home', '') + m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9. 
_-]*}', + action='add_repo') + #========================================================================== + # API V2 + #========================================================================== + with rmap.submapper(path_prefix=ADMIN_PREFIX, controller='api/api', + action='_dispatch') as m: + m.connect('api', '/api') + + # USER JOURNAL + rmap.connect('journal', '%s/journal' % ADMIN_PREFIX, + controller='journal') + rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX, + controller='journal', action='journal_rss') + rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX, + controller='journal', action='journal_atom') + + rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX, + controller='journal', action="public_journal") + + rmap.connect('public_journal_rss', '%s/public_journal/rss' % ADMIN_PREFIX, + controller='journal', action="public_journal_rss") + + rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % ADMIN_PREFIX, + controller='journal', action="public_journal_rss") + + rmap.connect('public_journal_atom', + '%s/public_journal/atom' % ADMIN_PREFIX, controller='journal', + action="public_journal_atom") + + rmap.connect('public_journal_atom_old', + '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal', + action="public_journal_atom") + + rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX, + controller='journal', action='toggle_following', + conditions=dict(method=["POST"])) + + # SEARCH + rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',) + rmap.connect('search_repo_admin', '%s/search/{repo_name:.*}' % ADMIN_PREFIX, + controller='search', + conditions=dict(function=check_repo)) + rmap.connect('search_repo', '/{repo_name:.*?}/search', + controller='search', + conditions=dict(function=check_repo), + ) + + # LOGIN/LOGOUT/REGISTER/SIGN IN + rmap.connect('session_csrf_secret_token', '%s/session_csrf_secret_token' % ADMIN_PREFIX, controller='login', action='session_csrf_secret_token') + rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login') + rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login', + action='logout') + + rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login', + action='register') + + rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX, + controller='login', action='password_reset') + + rmap.connect('reset_password_confirmation', + '%s/password_reset_confirmation' % ADMIN_PREFIX, + controller='login', action='password_reset_confirmation') + + # FEEDS + rmap.connect('rss_feed_home', '/{repo_name:.*?}/feed/rss', + controller='feed', action='rss', + conditions=dict(function=check_repo)) + + rmap.connect('atom_feed_home', '/{repo_name:.*?}/feed/atom', + controller='feed', action='atom', + conditions=dict(function=check_repo)) + + #========================================================================== + # REPOSITORY ROUTES + #========================================================================== + rmap.connect('repo_creating_home', '/{repo_name:.*?}/repo_creating', + controller='admin/repos', action='repo_creating') + rmap.connect('repo_check_home', '/{repo_name:.*?}/repo_check_creating', + controller='admin/repos', action='repo_check') + + rmap.connect('summary_home', '/{repo_name:.*?}', + controller='summary', + conditions=dict(function=check_repo)) + + # must be here for proper group/repo catching + rmap.connect('repos_group_home', '/{group_name:.*}', + controller='admin/repo_groups', action="show_by_name", + 
conditions=dict(function=check_group)) + rmap.connect('repo_stats_home', '/{repo_name:.*?}/statistics', + controller='summary', action='statistics', + conditions=dict(function=check_repo)) + + rmap.connect('repo_size', '/{repo_name:.*?}/repo_size', + controller='summary', action='repo_size', + conditions=dict(function=check_repo)) + + rmap.connect('repo_refs_data', '/{repo_name:.*?}/refs-data', + controller='home', action='repo_refs_data') + + rmap.connect('changeset_home', '/{repo_name:.*?}/changeset/{revision:.*}', + controller='changeset', revision='tip', + conditions=dict(function=check_repo)) + rmap.connect('changeset_children', '/{repo_name:.*?}/changeset_children/{revision}', + controller='changeset', revision='tip', action="changeset_children", + conditions=dict(function=check_repo)) + rmap.connect('changeset_parents', '/{repo_name:.*?}/changeset_parents/{revision}', + controller='changeset', revision='tip', action="changeset_parents", + conditions=dict(function=check_repo)) + + # repo edit options + rmap.connect("edit_repo", "/{repo_name:.*?}/settings", + controller='admin/repos', action="edit", + conditions=dict(method=["GET"], function=check_repo)) + + rmap.connect("edit_repo_perms", "/{repo_name:.*?}/settings/permissions", + controller='admin/repos', action="edit_permissions", + conditions=dict(method=["GET"], function=check_repo)) + rmap.connect("edit_repo_perms_update", "/{repo_name:.*?}/settings/permissions", + controller='admin/repos', action="edit_permissions_update", + conditions=dict(method=["POST"], function=check_repo)) + rmap.connect("edit_repo_perms_revoke", "/{repo_name:.*?}/settings/permissions/delete", + controller='admin/repos', action="edit_permissions_revoke", + conditions=dict(method=["POST"], function=check_repo)) + + rmap.connect("edit_repo_fields", "/{repo_name:.*?}/settings/fields", + controller='admin/repos', action="edit_fields", + conditions=dict(method=["GET"], function=check_repo)) + rmap.connect('create_repo_fields', "/{repo_name:.*?}/settings/fields/new", + controller='admin/repos', action="create_repo_field", + conditions=dict(method=["POST"], function=check_repo)) + rmap.connect('delete_repo_fields', "/{repo_name:.*?}/settings/fields/{field_id}/delete", + controller='admin/repos', action="delete_repo_field", + conditions=dict(method=["POST"], function=check_repo)) + + rmap.connect("edit_repo_advanced", "/{repo_name:.*?}/settings/advanced", + controller='admin/repos', action="edit_advanced", + conditions=dict(method=["GET"], function=check_repo)) + + rmap.connect("edit_repo_advanced_journal", "/{repo_name:.*?}/settings/advanced/journal", + controller='admin/repos', action="edit_advanced_journal", + conditions=dict(method=["POST"], function=check_repo)) + + rmap.connect("edit_repo_advanced_fork", "/{repo_name:.*?}/settings/advanced/fork", + controller='admin/repos', action="edit_advanced_fork", + conditions=dict(method=["POST"], function=check_repo)) + + rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote", + controller='admin/repos', action="edit_remote", + conditions=dict(method=["GET"], function=check_repo)) + rmap.connect("edit_repo_remote_update", "/{repo_name:.*?}/settings/remote", + controller='admin/repos', action="edit_remote", + conditions=dict(method=["POST"], function=check_repo)) + + rmap.connect("edit_repo_statistics", "/{repo_name:.*?}/settings/statistics", + controller='admin/repos', action="edit_statistics", + conditions=dict(method=["GET"], function=check_repo)) + rmap.connect("edit_repo_statistics_update", 
"/{repo_name:.*?}/settings/statistics", + controller='admin/repos', action="edit_statistics", + conditions=dict(method=["POST"], function=check_repo)) + + # still working url for backward compat. + rmap.connect('raw_changeset_home_depraced', + '/{repo_name:.*?}/raw-changeset/{revision}', + controller='changeset', action='changeset_raw', + revision='tip', conditions=dict(function=check_repo)) + + ## new URLs + rmap.connect('changeset_raw_home', + '/{repo_name:.*?}/changeset-diff/{revision}', + controller='changeset', action='changeset_raw', + revision='tip', conditions=dict(function=check_repo)) + + rmap.connect('changeset_patch_home', + '/{repo_name:.*?}/changeset-patch/{revision}', + controller='changeset', action='changeset_patch', + revision='tip', conditions=dict(function=check_repo)) + + rmap.connect('changeset_download_home', + '/{repo_name:.*?}/changeset-download/{revision}', + controller='changeset', action='changeset_download', + revision='tip', conditions=dict(function=check_repo)) + + rmap.connect('changeset_comment', + '/{repo_name:.*?}/changeset-comment/{revision}', + controller='changeset', revision='tip', action='comment', + conditions=dict(function=check_repo)) + + rmap.connect('changeset_comment_delete', + '/{repo_name:.*?}/changeset-comment/{comment_id}/delete', + controller='changeset', action='delete_comment', + conditions=dict(function=check_repo, method=["POST"])) + + rmap.connect('changeset_info', '/changeset_info/{repo_name:.*?}/{revision}', + controller='changeset', action='changeset_info') + + rmap.connect('compare_home', + '/{repo_name:.*?}/compare', + controller='compare', + conditions=dict(function=check_repo)) + + rmap.connect('compare_url', + '/{repo_name:.*?}/compare/{org_ref_type}@{org_ref_name:.*?}...{other_ref_type}@{other_ref_name:.*?}', + controller='compare', action='compare', + conditions=dict(function=check_repo), + requirements=dict( + org_ref_type='(branch|book|tag|rev|__other_ref_type__)', + other_ref_type='(branch|book|tag|rev|__org_ref_type__)') + ) + + rmap.connect('pullrequest_home', + '/{repo_name:.*?}/pull-request/new', controller='pullrequests', + conditions=dict(function=check_repo, + method=["GET"])) + + rmap.connect('pullrequest_repo_info', + '/{repo_name:.*?}/pull-request-repo-info', + controller='pullrequests', action='repo_info', + conditions=dict(function=check_repo, method=["GET"])) + + rmap.connect('pullrequest', + '/{repo_name:.*?}/pull-request/new', controller='pullrequests', + action='create', conditions=dict(function=check_repo, + method=["POST"])) + + rmap.connect('pullrequest_show', + '/{repo_name:.*?}/pull-request/{pull_request_id:\\d+}{extra:(/.*)?}', extra='', + controller='pullrequests', + action='show', conditions=dict(function=check_repo, + method=["GET"])) + rmap.connect('pullrequest_post', + '/{repo_name:.*?}/pull-request/{pull_request_id}', + controller='pullrequests', + action='post', conditions=dict(function=check_repo, + method=["POST"])) + rmap.connect('pullrequest_delete', + '/{repo_name:.*?}/pull-request/{pull_request_id}/delete', + controller='pullrequests', + action='delete', conditions=dict(function=check_repo, + method=["POST"])) + + rmap.connect('pullrequest_show_all', + '/{repo_name:.*?}/pull-request', + controller='pullrequests', + action='show_all', conditions=dict(function=check_repo, + method=["GET"])) + + rmap.connect('my_pullrequests', + '/my_pullrequests', + controller='pullrequests', + action='show_my', conditions=dict(method=["GET"])) + + rmap.connect('pullrequest_comment', + 
'/{repo_name:.*?}/pull-request-comment/{pull_request_id}', + controller='pullrequests', + action='comment', conditions=dict(function=check_repo, + method=["POST"])) + + rmap.connect('pullrequest_comment_delete', + '/{repo_name:.*?}/pull-request-comment/{comment_id}/delete', + controller='pullrequests', action='delete_comment', + conditions=dict(function=check_repo, method=["POST"])) + + rmap.connect('summary_home_summary', '/{repo_name:.*?}/summary', + controller='summary', conditions=dict(function=check_repo)) + + rmap.connect('changelog_home', '/{repo_name:.*?}/changelog', + controller='changelog', conditions=dict(function=check_repo)) + + rmap.connect('changelog_file_home', '/{repo_name:.*?}/changelog/{revision}/{f_path:.*}', + controller='changelog', + conditions=dict(function=check_repo)) + + rmap.connect('changelog_details', '/{repo_name:.*?}/changelog_details/{cs}', + controller='changelog', action='changelog_details', + conditions=dict(function=check_repo)) + + rmap.connect('files_home', '/{repo_name:.*?}/files/{revision}/{f_path:.*}', + controller='files', revision='tip', f_path='', + conditions=dict(function=check_repo)) + + rmap.connect('files_home_nopath', '/{repo_name:.*?}/files/{revision}', + controller='files', revision='tip', f_path='', + conditions=dict(function=check_repo)) + + rmap.connect('files_history_home', + '/{repo_name:.*?}/history/{revision}/{f_path:.*}', + controller='files', action='history', revision='tip', f_path='', + conditions=dict(function=check_repo)) + + rmap.connect('files_authors_home', + '/{repo_name:.*?}/authors/{revision}/{f_path:.*}', + controller='files', action='authors', revision='tip', f_path='', + conditions=dict(function=check_repo)) + + rmap.connect('files_diff_home', '/{repo_name:.*?}/diff/{f_path:.*}', + controller='files', action='diff', revision='tip', f_path='', + conditions=dict(function=check_repo)) + + rmap.connect('files_diff_2way_home', '/{repo_name:.*?}/diff-2way/{f_path:.+}', + controller='files', action='diff_2way', revision='tip', f_path='', + conditions=dict(function=check_repo)) + + rmap.connect('files_rawfile_home', + '/{repo_name:.*?}/rawfile/{revision}/{f_path:.*}', + controller='files', action='rawfile', revision='tip', + f_path='', conditions=dict(function=check_repo)) + + rmap.connect('files_raw_home', + '/{repo_name:.*?}/raw/{revision}/{f_path:.*}', + controller='files', action='raw', revision='tip', f_path='', + conditions=dict(function=check_repo)) + + rmap.connect('files_annotate_home', + '/{repo_name:.*?}/annotate/{revision}/{f_path:.*}', + controller='files', revision='tip', + f_path='', annotate='1', conditions=dict(function=check_repo)) + + rmap.connect('files_edit_home', + '/{repo_name:.*?}/edit/{revision}/{f_path:.*}', + controller='files', action='edit', revision='tip', + f_path='', conditions=dict(function=check_repo)) + + rmap.connect('files_add_home', + '/{repo_name:.*?}/add/{revision}/{f_path:.*}', + controller='files', action='add', revision='tip', + f_path='', conditions=dict(function=check_repo)) + + rmap.connect('files_delete_home', + '/{repo_name:.*?}/delete/{revision}/{f_path:.*}', + controller='files', action='delete', revision='tip', + f_path='', conditions=dict(function=check_repo)) + + rmap.connect('files_archive_home', '/{repo_name:.*?}/archive/{fname}', + controller='files', action='archivefile', + conditions=dict(function=check_repo)) + + rmap.connect('files_nodelist_home', + '/{repo_name:.*?}/nodelist/{revision}/{f_path:.*}', + controller='files', action='nodelist', + 
conditions=dict(function=check_repo)) + + rmap.connect('repo_fork_create_home', '/{repo_name:.*?}/fork', + controller='forks', action='fork_create', + conditions=dict(function=check_repo, method=["POST"])) + + rmap.connect('repo_fork_home', '/{repo_name:.*?}/fork', + controller='forks', action='fork', + conditions=dict(function=check_repo)) + + rmap.connect('repo_forks_home', '/{repo_name:.*?}/forks', + controller='forks', action='forks', + conditions=dict(function=check_repo)) + + rmap.connect('repo_followers_home', '/{repo_name:.*?}/followers', + controller='followers', action='followers', + conditions=dict(function=check_repo)) + + return rmap diff -r c387989f868f -r 3669e58f3002 kallithea/controllers/summary.py --- a/kallithea/controllers/summary.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/controllers/summary.py Fri Oct 30 23:44:18 2020 +0100 @@ -39,18 +39,18 @@ from webob.exc import HTTPBadRequest import kallithea.lib.helpers as h -from kallithea.config.conf import ALL_EXTS, ALL_READMES, LANGUAGES_EXTENSIONS_MAP from kallithea.lib import ext_json from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired from kallithea.lib.base import BaseRepoController, jsonify, render from kallithea.lib.celerylib.tasks import get_commits_stats +from kallithea.lib.conf import ALL_EXTS, ALL_READMES, LANGUAGES_EXTENSIONS_MAP from kallithea.lib.markup_renderer import MarkupRenderer from kallithea.lib.page import Page from kallithea.lib.utils2 import safe_int, safe_str from kallithea.lib.vcs.backends.base import EmptyChangeset from kallithea.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, NodeDoesNotExistError from kallithea.lib.vcs.nodes import FileNode -from kallithea.model.db import Statistics +from kallithea.model import db log = logging.getLogger(__name__) @@ -131,8 +131,8 @@ else: c.show_stats = False - stats = Statistics.query() \ - .filter(Statistics.repository == c.db_repo) \ + stats = db.Statistics.query() \ + .filter(db.Statistics.repository == c.db_repo) \ .scalar() c.stats_percentage = 0 @@ -181,8 +181,8 @@ c.ts_min = ts_min_m c.ts_max = ts_max_y - stats = Statistics.query() \ - .filter(Statistics.repository == c.db_repo) \ + stats = db.Statistics.query() \ + .filter(db.Statistics.repository == c.db_repo) \ .scalar() c.stats_percentage = 0 if stats and stats.languages: diff -r c387989f868f -r 3669e58f3002 kallithea/front-end/kallithea-diff.less --- a/kallithea/front-end/kallithea-diff.less Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/front-end/kallithea-diff.less Fri Oct 30 23:44:18 2020 +0100 @@ -62,6 +62,7 @@ border-collapse: collapse; border-radius: 0px !important; width: 100%; + table-layout: fixed; /* line coloring */ .context { @@ -105,31 +106,26 @@ border-color: rgba(0, 0, 0, 0.3); } - /* line numbers */ - .lineno { - padding-left: 2px; - padding-right: 2px !important; - width: 30px; + /* line number columns */ + td.lineno { + width: 4em; border-right: 1px solid @panel-default-border !important; vertical-align: middle !important; - text-align: center; - } - .lineno.new { - text-align: right; - } - .lineno.old { - text-align: right; - } - .lineno a { - color: #aaa !important; font-size: 11px; font-family: @font-family-monospace; line-height: normal; - padding-left: 6px; - padding-right: 6px; - display: block; + text-align: center; + } + td.lineno[colspan="2"] { + width: 8em; } - .line:hover .lineno a { + td.lineno a { + color: #aaa !important; + display: inline-block; + min-width: 2em; + text-align: right; + } + tr.line:hover td.lineno a { 
color: #333 !important; } /** CODE **/ @@ -172,27 +168,24 @@ left: -8px; box-sizing: border-box; } -/* comment bubble, only visible when in a commentable diff */ -.commentable-diff tr.line.add:hover td .add-bubble, -.commentable-diff tr.line.del:hover td .add-bubble, -.commentable-diff tr.line.unmod:hover td .add-bubble { +.commentable-diff tr.line:hover td .add-bubble { display: block; z-index: 1; } .add-bubble div { background: @kallithea-theme-main-color; - width: 16px; - height: 16px; - line-height: 14px; + width: 1.2em; + height: 1.2em; + line-height: 1em; cursor: pointer; - padding: 0 2px 2px 0.5px; + padding: 0.1em 0.1em 0.1em 0.12em; border: 1px solid @kallithea-theme-main-color; - border-radius: 3px; + border-radius: 0.2em; box-sizing: border-box; overflow: hidden; } .add-bubble div:before { - font-size: 14px; + font-size: 1em; color: #ffffff; font-family: "kallithea"; content: '\1f5ea'; diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/be/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/be/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/be/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -462,13 +462,6 @@ msgid "Updated VCS settings" msgstr "Абноўлены налады VCS" -msgid "" -"Unable to activate hgsubversion support. The \"hgsubversion\" library is " -"missing" -msgstr "" -"Немагчыма ўключыць падтрымку hgsubversion. Бібліятэка hgsubversion " -"адсутнічае" - msgid "Error occurred while updating application settings" msgstr "Памылка пры абнаўленні наладаў праграмы" @@ -944,13 +937,6 @@ msgid "Invalid repository URL" msgstr "Няслушны URL рэпазітара" -msgid "" -"Invalid repository URL. It must be a valid http, https, ssh, svn+http or " -"svn+https URL" -msgstr "" -"Няслушны URL рэпазітара. Ён мусіць быць карэктным URL http, https, ssh, " -"svn+http ці svn+https" - msgid "Fork has to be the same type as parent" msgstr "Тып форка будзе супадаць з бацькоўскім" @@ -1675,9 +1661,6 @@ msgid "Enable largefiles extension" msgstr "Уключыць падтрымку вялікіх файлаў" -msgid "Enable hgsubversion extension" -msgstr "Уключыць падтрымку hgsubversion" - msgid "Location of repositories" msgstr "Месцазнаходжанне рэпазітароў" diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/da/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/da/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/da/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -495,13 +495,6 @@ msgid "Updated VCS settings" msgstr "Opdateret VCS-indstillinger" -msgid "" -"Unable to activate hgsubversion support. The \"hgsubversion\" library is " -"missing" -msgstr "" -"Ude af stand til at aktivere hgsubversion understøttelse. \"hgsubversion" -"\" biblioteket mangler" - msgid "Error occurred while updating application settings" msgstr "Der opstod en fejl ved opdatering af applikationsindstillinger" diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/de/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/de/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/de/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -486,13 +486,6 @@ msgid "Updated VCS settings" msgstr "VCS-Einstellungen aktualisiert" -msgid "" -"Unable to activate hgsubversion support. The \"hgsubversion\" library is " -"missing" -msgstr "" -"hgsubversion-Unterstützung konnte nicht aktiviert werden. 
Die " -"\"hgsubversion\"-Bibliothek fehlt" - msgid "Error occurred while updating application settings" msgstr "" "Ein Fehler ist während der Aktualisierung der Applikationseinstellungen " @@ -1021,13 +1014,6 @@ msgid "Invalid repository URL" msgstr "Ungültige Repository-URL" -msgid "" -"Invalid repository URL. It must be a valid http, https, ssh, svn+http or " -"svn+https URL" -msgstr "" -"Ungültige Repository-URL. Es muss eine gültige http, https, ssh, svn+http " -"oder svn+https URL sein" - msgid "Fork has to be the same type as parent" msgstr "Forke um den selben typ wie der Vorgesetze zu haben" @@ -2103,17 +2089,6 @@ msgid "Enable largefiles extension" msgstr "Erweiterung largefiles aktivieren" -msgid "Enable hgsubversion extension" -msgstr "Erweiterung hgsubversion aktivieren" - -msgid "" -"Requires hgsubversion library to be installed. Enables cloning of remote " -"Subversion repositories while converting them to Mercurial." -msgstr "" -"Erfordert die Installation der hgsubversion-Bibliothek. Ermöglicht das " -"Klonen von entfernten Subversion-Repositories während der Konvertierung " -"zu Mercurial." - msgid "Location of repositories" msgstr "Ort der Repositories" diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/el/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/el/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/el/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -540,13 +540,6 @@ msgid "Updated VCS settings" msgstr "Ενημερωμένες ρυθμίσεις VCS" -msgid "" -"Unable to activate hgsubversion support. The \"hgsubversion\" library is " -"missing" -msgstr "" -"Δεν γίνεται να ενεργοποιηθεί υποστήριξη για το hgsubversion. Λείπει η " -"βιβλιοθήκη \"hgsubversion\"" - msgid "Error occurred while updating application settings" msgstr "Παρουσιάστηκε σφάλμα κατά την ενημέρωση των ρυθμίσεων της εφαρμογής" @@ -1131,13 +1124,6 @@ msgid "Invalid repository URL" msgstr "Μη έγκυρη διεύθυνση URL αποθετηρίου" -msgid "" -"Invalid repository URL. It must be a valid http, https, ssh, svn+http or " -"svn+https URL" -msgstr "" -"Μη έγκυρη διεύθυνση URL του αποθετηρίου. Πρέπει να είναι μια έγκυρη http, " -"https, ssh, svn+http ή svn+https διεύθυνση URL" - msgid "You don't have permissions to create repository in this group" msgstr "Δεν έχετε δικαιώματα δημιουργίας αποθετηρίου σε αυτήν την ομάδα" @@ -2260,17 +2246,6 @@ msgid "Enable largefiles extension" msgstr "Ενεργοποίηση επέκτασης μεγάλων αρχείων" -msgid "Enable hgsubversion extension" -msgstr "Ενεργοποίηση επέκτασης hgsubversion" - -msgid "" -"Requires hgsubversion library to be installed. Enables cloning of remote " -"Subversion repositories while converting them to Mercurial." -msgstr "" -"Απαιτεί την εγκατάσταση της βιβλιοθήκης hgsubversion. Ενεργοποιεί την " -"κλωνοποίηση απομακρυσμένων Subversion αποθετηρίων και τη μετατροπή τους " -"σε Mercurial." - msgid "Location of repositories" msgstr "Τοποθεσία αποθετηρίων" diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/fr/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/fr/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/fr/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -549,13 +549,6 @@ msgid "Updated VCS settings" msgstr "Réglages des gestionnaires de versions mis à jour" -msgid "" -"Unable to activate hgsubversion support. The \"hgsubversion\" library is " -"missing" -msgstr "" -"Impossible d'activer la prise en charge de hgsubversion. 
La bibliothèque " -"« hgsubversion » est manquante" - msgid "Error occurred while updating application settings" msgstr "" "Une erreur est survenue durant la mise à jour des réglages de " @@ -1227,13 +1220,6 @@ msgid "Invalid repository URL" msgstr "URL de dépôt invalide" -msgid "" -"Invalid repository URL. It must be a valid http, https, ssh, svn+http or " -"svn+https URL" -msgstr "" -"URL de dépôt invalide. Ce doit être une URL valide de type http, https, " -"ssh, svn+http ou svn+https" - msgid "Fork has to be the same type as parent" msgstr "Le fork doit être du même type que le parent" @@ -2390,16 +2376,6 @@ msgid "Enable largefiles extension" msgstr "Activer l'extension largefiles" -msgid "Enable hgsubversion extension" -msgstr "Activer l'extension hgsubversion" - -msgid "" -"Requires hgsubversion library to be installed. Enables cloning of remote " -"Subversion repositories while converting them to Mercurial." -msgstr "" -"La bibliothèque hgsubversion doit être installée. Elle permet de cloner " -"des dépôts SVN distants et de les migrer vers Mercurial." - msgid "Location of repositories" msgstr "Emplacement des dépôts" diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/ja/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/ja/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/ja/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -435,13 +435,6 @@ msgid "Updated VCS settings" msgstr "VCS設定を更新しました" -msgid "" -"Unable to activate hgsubversion support. The \"hgsubversion\" library is " -"missing" -msgstr "" -"\"hgsubversion\"ライブラリが見つからないため、hgsubversionサポートを有効に" -"出来ません" - msgid "Error occurred while updating application settings" msgstr "アプリケーション設定の更新中にエラーが発生しました" @@ -1811,16 +1804,6 @@ msgid "Enable largefiles extension" msgstr "largefilesエクステンションを有効にする" -msgid "Enable hgsubversion extension" -msgstr "hgsubversionエクステンションを有効にする" - -msgid "" -"Requires hgsubversion library to be installed. Enables cloning of remote " -"Subversion repositories while converting them to Mercurial." -msgstr "" -"hgsubversion ライブラリのインストールが必要です。リモートのSVNリポジトリを" -"クローンしてMercurialリポジトリに変換するすることが可能です。" - msgid "Location of repositories" msgstr "リポジトリの場所" diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/pl/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/pl/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/pl/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -1355,9 +1355,6 @@ msgid "Enable largefiles extension" msgstr "Rozszerzenia dużych plików" -msgid "Enable hgsubversion extension" -msgstr "Rozszerzenia hgsubversion" - msgid "" "Click to unlock. You must restart Kallithea in order to make this setting " "take effect." diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/pt/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/pt/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/pt/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -1011,9 +1011,6 @@ msgid "Enable largefiles extension" msgstr "Ativar extensão largefiles" -msgid "Enable hgsubversion extension" -msgstr "Ativar extensão hgsubversion" - msgid "" "Click to unlock. You must restart Kallithea in order to make this setting " "take effect." 
diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/pt_BR/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/pt_BR/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/pt_BR/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -1009,9 +1009,6 @@ msgid "Enable largefiles extension" msgstr "Habilitar extensão largefiles" -msgid "Enable hgsubversion extension" -msgstr "Habilitar extensão hgsubversion" - msgid "" "Click to unlock. You must restart Kallithea in order to make this setting " "take effect." diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/ru/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/ru/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/ru/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -527,13 +527,6 @@ msgid "Updated VCS settings" msgstr "Обновлены настройки VCS" -msgid "" -"Unable to activate hgsubversion support. The \"hgsubversion\" library is " -"missing" -msgstr "" -"Невозможно включить поддержку hgsubversion. Библиотека «hgsubversion» " -"отсутствует" - msgid "Error occurred while updating application settings" msgstr "Произошла ошибка при обновлении настроек приложения" @@ -1194,13 +1187,6 @@ msgid "Invalid repository URL" msgstr "Недопустимый URL репозитория" -msgid "" -"Invalid repository URL. It must be a valid http, https, ssh, svn+http or " -"svn+https URL" -msgstr "" -"Недопустимый URL репозитория. Требуется корректный http, https, ssh, svn" -"+http или svn+https URL" - msgid "Fork has to be the same type as parent" msgstr "Форк будет иметь тот же тип, что и родительский" @@ -2342,16 +2328,6 @@ msgid "Enable largefiles extension" msgstr "Включить поддержку больших файлов" -msgid "Enable hgsubversion extension" -msgstr "Включить поддержку hgsubversion" - -msgid "" -"Requires hgsubversion library to be installed. Enables cloning of remote " -"Subversion repositories while converting them to Mercurial." -msgstr "" -"Требует наличия библиотеки hgsubversion. Включает клонирование удалённых " -"репозиториев Subversion с последующим конвертированием в Mercurial." - msgid "Location of repositories" msgstr "Местонахождение репозиториев" diff -r c387989f868f -r 3669e58f3002 kallithea/i18n/uk/LC_MESSAGES/kallithea.po --- a/kallithea/i18n/uk/LC_MESSAGES/kallithea.po Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/i18n/uk/LC_MESSAGES/kallithea.po Fri Oct 30 23:44:18 2020 +0100 @@ -515,13 +515,6 @@ msgid "Updated VCS settings" msgstr "Оновлені налаштування VCS" -msgid "" -"Unable to activate hgsubversion support. The \"hgsubversion\" library is " -"missing" -msgstr "" -"Не вдається активувати підтримку hgsubversion. Бібліотека \"hgsubversion" -"\" відсутня" - msgid "Error occurred while updating application settings" msgstr "Під час оновлення параметрів застосунку сталася помилка" @@ -1429,16 +1422,6 @@ msgid "Enable largefiles extension" msgstr "Увімкнути розширення largefiles" -msgid "Enable hgsubversion extension" -msgstr "Увімкнути розширення hgsubversion" - -msgid "" -"Requires hgsubversion library to be installed. Enables cloning of remote " -"Subversion repositories while converting them to Mercurial." -msgstr "" -"Потрібна установка бібліотеки hgsubversion. Дозволяє клонувати віддалені " -"сховища Subversion під час перетворення їх у Mercurial." 
- msgid "Location of repositories" msgstr "Розташування репозиторіїв" diff -r c387989f868f -r 3669e58f3002 kallithea/lib/annotate.py --- a/kallithea/lib/annotate.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/annotate.py Fri Oct 30 23:44:18 2020 +0100 @@ -28,6 +28,7 @@ from pygments import highlight from pygments.formatters import HtmlFormatter +from kallithea.lib.pygmentsutils import get_custom_lexer from kallithea.lib.vcs.exceptions import VCSError from kallithea.lib.vcs.nodes import FileNode from kallithea.lib.vcs.utils import safe_str @@ -48,7 +49,6 @@ :param headers: dictionary with headers (keys are whats in ``order`` parameter) """ - from kallithea.lib.pygmentsutils import get_custom_lexer options['linenos'] = True formatter = AnnotateHtmlFormatter(filenode=filenode, annotate_from_changeset_func=annotate_from_changeset_func, order=order, diff -r c387989f868f -r 3669e58f3002 kallithea/lib/auth.py --- a/kallithea/lib/auth.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/auth.py Fri Oct 30 23:44:18 2020 +0100 @@ -40,13 +40,11 @@ from webob.exc import HTTPForbidden, HTTPFound import kallithea -from kallithea.config.routing import url from kallithea.lib.utils import get_repo_group_slug, get_repo_slug, get_user_group_slug from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes from kallithea.lib.vcs.utils.lazy import LazyProperty -from kallithea.model.db import (Permission, UserApiKeys, UserGroup, UserGroupMember, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm, - UserGroupUserGroupToPerm, UserIpMap, UserToPerm) -from kallithea.model.meta import Session +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.user import UserModel @@ -117,232 +115,18 @@ return False -def _cached_perms_data(user_id, user_is_admin): - RK = 'repositories' - GK = 'repositories_groups' - UK = 'user_groups' - GLOBAL = 'global' - PERM_WEIGHTS = Permission.PERM_WEIGHTS - permissions = {RK: {}, GK: {}, UK: {}, GLOBAL: set()} - - def bump_permission(kind, key, new_perm): - """Add a new permission for kind and key. - Assuming the permissions are comparable, set the new permission if it - has higher weight, else drop it and keep the old permission. 
- """ - cur_perm = permissions[kind][key] - new_perm_val = PERM_WEIGHTS[new_perm] - cur_perm_val = PERM_WEIGHTS[cur_perm] - if new_perm_val > cur_perm_val: - permissions[kind][key] = new_perm - - #====================================================================== - # fetch default permissions - #====================================================================== - default_repo_perms = Permission.get_default_perms(kallithea.DEFAULT_USER_ID) - default_repo_groups_perms = Permission.get_default_group_perms(kallithea.DEFAULT_USER_ID) - default_user_group_perms = Permission.get_default_user_group_perms(kallithea.DEFAULT_USER_ID) - - if user_is_admin: - #================================================================== - # admin users have all rights; - # based on default permissions, just set everything to admin - #================================================================== - permissions[GLOBAL].add('hg.admin') - permissions[GLOBAL].add('hg.create.write_on_repogroup.true') - - # repositories - for perm in default_repo_perms: - r_k = perm.repository.repo_name - p = 'repository.admin' - permissions[RK][r_k] = p - - # repository groups - for perm in default_repo_groups_perms: - rg_k = perm.group.group_name - p = 'group.admin' - permissions[GK][rg_k] = p - - # user groups - for perm in default_user_group_perms: - u_k = perm.user_group.users_group_name - p = 'usergroup.admin' - permissions[UK][u_k] = p - return permissions - - #================================================================== - # SET DEFAULTS GLOBAL, REPOS, REPOSITORY GROUPS - #================================================================== - - # default global permissions taken from the default user - default_global_perms = UserToPerm.query() \ - .filter(UserToPerm.user_id == kallithea.DEFAULT_USER_ID) \ - .options(joinedload(UserToPerm.permission)) - - for perm in default_global_perms: - permissions[GLOBAL].add(perm.permission.permission_name) - - # defaults for repositories, taken from default user - for perm in default_repo_perms: - r_k = perm.repository.repo_name - if perm.repository.owner_id == user_id: - p = 'repository.admin' - elif perm.repository.private: - p = 'repository.none' - else: - p = perm.permission.permission_name - permissions[RK][r_k] = p - - # defaults for repository groups taken from default user permission - # on given group - for perm in default_repo_groups_perms: - rg_k = perm.group.group_name - p = perm.permission.permission_name - permissions[GK][rg_k] = p - - # defaults for user groups taken from default user permission - # on given user group - for perm in default_user_group_perms: - u_k = perm.user_group.users_group_name - p = perm.permission.permission_name - permissions[UK][u_k] = p - - #====================================================================== - # !! Augment GLOBALS with user permissions if any found !! 
- #====================================================================== +PERM_WEIGHTS = db.Permission.PERM_WEIGHTS - # USER GROUPS comes first - # user group global permissions - user_perms_from_users_groups = Session().query(UserGroupToPerm) \ - .options(joinedload(UserGroupToPerm.permission)) \ - .join((UserGroupMember, UserGroupToPerm.users_group_id == - UserGroupMember.users_group_id)) \ - .filter(UserGroupMember.user_id == user_id) \ - .join((UserGroup, UserGroupMember.users_group_id == - UserGroup.users_group_id)) \ - .filter(UserGroup.users_group_active == True) \ - .order_by(UserGroupToPerm.users_group_id) \ - .all() - # need to group here by groups since user can be in more than - # one group - _grouped = [[x, list(y)] for x, y in - itertools.groupby(user_perms_from_users_groups, - lambda x:x.users_group)] - for gr, perms in _grouped: - for perm in perms: - permissions[GLOBAL].add(perm.permission.permission_name) - - # user specific global permissions - user_perms = Session().query(UserToPerm) \ - .options(joinedload(UserToPerm.permission)) \ - .filter(UserToPerm.user_id == user_id).all() - - for perm in user_perms: - permissions[GLOBAL].add(perm.permission.permission_name) - - # for each kind of global permissions, only keep the one with heighest weight - kind_max_perm = {} - for perm in sorted(permissions[GLOBAL], key=lambda n: PERM_WEIGHTS[n]): - kind = perm.rsplit('.', 1)[0] - kind_max_perm[kind] = perm - permissions[GLOBAL] = set(kind_max_perm.values()) - ## END GLOBAL PERMISSIONS - - #====================================================================== - # !! PERMISSIONS FOR REPOSITORIES !! - #====================================================================== - #====================================================================== - # check if user is part of user groups for this repository and - # fill in his permission from it. - #====================================================================== - - # user group for repositories permissions - user_repo_perms_from_users_groups = \ - Session().query(UserGroupRepoToPerm) \ - .join((UserGroup, UserGroupRepoToPerm.users_group_id == - UserGroup.users_group_id)) \ - .filter(UserGroup.users_group_active == True) \ - .join((UserGroupMember, UserGroupRepoToPerm.users_group_id == - UserGroupMember.users_group_id)) \ - .filter(UserGroupMember.user_id == user_id) \ - .options(joinedload(UserGroupRepoToPerm.repository)) \ - .options(joinedload(UserGroupRepoToPerm.permission)) \ - .all() - - for perm in user_repo_perms_from_users_groups: - bump_permission(RK, - perm.repository.repo_name, - perm.permission.permission_name) - - # user permissions for repositories - user_repo_perms = Permission.get_default_perms(user_id) - for perm in user_repo_perms: - bump_permission(RK, - perm.repository.repo_name, - perm.permission.permission_name) - - #====================================================================== - # !! PERMISSIONS FOR REPOSITORY GROUPS !! - #====================================================================== - #====================================================================== - # check if user is part of user groups for this repository groups and - # fill in his permission from it. 
- #====================================================================== - # user group for repo groups permissions - user_repo_group_perms_from_users_groups = \ - Session().query(UserGroupRepoGroupToPerm) \ - .join((UserGroup, UserGroupRepoGroupToPerm.users_group_id == - UserGroup.users_group_id)) \ - .filter(UserGroup.users_group_active == True) \ - .join((UserGroupMember, UserGroupRepoGroupToPerm.users_group_id - == UserGroupMember.users_group_id)) \ - .filter(UserGroupMember.user_id == user_id) \ - .options(joinedload(UserGroupRepoGroupToPerm.permission)) \ - .all() - - for perm in user_repo_group_perms_from_users_groups: - bump_permission(GK, - perm.group.group_name, - perm.permission.permission_name) - - # user explicit permissions for repository groups - user_repo_groups_perms = Permission.get_default_group_perms(user_id) - for perm in user_repo_groups_perms: - bump_permission(GK, - perm.group.group_name, - perm.permission.permission_name) - - #====================================================================== - # !! PERMISSIONS FOR USER GROUPS !! - #====================================================================== - # user group for user group permissions - user_group_user_groups_perms = \ - Session().query(UserGroupUserGroupToPerm) \ - .join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id - == UserGroup.users_group_id)) \ - .join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id - == UserGroupMember.users_group_id)) \ - .filter(UserGroupMember.user_id == user_id) \ - .join((UserGroup, UserGroupMember.users_group_id == - UserGroup.users_group_id), aliased=True, from_joinpoint=True) \ - .filter(UserGroup.users_group_active == True) \ - .options(joinedload(UserGroupUserGroupToPerm.permission)) \ - .all() - - for perm in user_group_user_groups_perms: - bump_permission(UK, - perm.target_user_group.users_group_name, - perm.permission.permission_name) - - # user explicit permission for user groups - user_user_groups_perms = Permission.get_default_user_group_perms(user_id) - for perm in user_user_groups_perms: - bump_permission(UK, - perm.user_group.users_group_name, - perm.permission.permission_name) - - return permissions - +def bump_permission(permissions, key, new_perm): + """Add a new permission for key to permissions. + Assuming the permissions are comparable, set the new permission if it + has higher weight, else drop it and keep the old permission. 
+ """ + cur_perm = permissions[key] + new_perm_val = PERM_WEIGHTS[new_perm] + cur_perm_val = PERM_WEIGHTS[cur_perm] + if new_perm_val > cur_perm_val: + permissions[key] = new_perm class AuthUser(object): """ @@ -430,16 +214,206 @@ log.debug('Auth User is now %s', self) @LazyProperty + def global_permissions(self): + log.debug('Getting global permissions for %s', self) + + if self.is_admin: + return set(['hg.admin']) + + global_permissions = set() + + # default global permissions from the default user + default_global_perms = db.UserToPerm.query() \ + .filter(db.UserToPerm.user_id == kallithea.DEFAULT_USER_ID) \ + .options(joinedload(db.UserToPerm.permission)) + for perm in default_global_perms: + global_permissions.add(perm.permission.permission_name) + + # user group global permissions + user_perms_from_users_groups = meta.Session().query(db.UserGroupToPerm) \ + .options(joinedload(db.UserGroupToPerm.permission)) \ + .join((db.UserGroupMember, db.UserGroupToPerm.users_group_id == + db.UserGroupMember.users_group_id)) \ + .filter(db.UserGroupMember.user_id == self.user_id) \ + .join((db.UserGroup, db.UserGroupMember.users_group_id == + db.UserGroup.users_group_id)) \ + .filter(db.UserGroup.users_group_active == True) \ + .order_by(db.UserGroupToPerm.users_group_id) \ + .all() + # need to group here by groups since user can be in more than + # one group + _grouped = [[x, list(y)] for x, y in + itertools.groupby(user_perms_from_users_groups, + lambda x:x.users_group)] + for gr, perms in _grouped: + for perm in perms: + global_permissions.add(perm.permission.permission_name) + + # user specific global permissions + user_perms = meta.Session().query(db.UserToPerm) \ + .options(joinedload(db.UserToPerm.permission)) \ + .filter(db.UserToPerm.user_id == self.user_id).all() + for perm in user_perms: + global_permissions.add(perm.permission.permission_name) + + # for each kind of global permissions, only keep the one with highest weight + kind_max_perm = {} + for perm in sorted(global_permissions, key=lambda n: PERM_WEIGHTS.get(n, -1)): + kind = perm.rsplit('.', 1)[0] + kind_max_perm[kind] = perm + return set(kind_max_perm.values()) + + @LazyProperty + def repository_permissions(self): + log.debug('Getting repository permissions for %s', self) + repository_permissions = {} + default_repo_perms = db.Permission.get_default_perms(kallithea.DEFAULT_USER_ID) + + if self.is_admin: + for perm in default_repo_perms: + r_k = perm.repository.repo_name + p = 'repository.admin' + repository_permissions[r_k] = p + + else: + # defaults for repositories from default user + for perm in default_repo_perms: + r_k = perm.repository.repo_name + if perm.repository.owner_id == self.user_id: + p = 'repository.admin' + elif perm.repository.private: + p = 'repository.none' + else: + p = perm.permission.permission_name + repository_permissions[r_k] = p + + # user group repository permissions + user_repo_perms_from_users_groups = \ + meta.Session().query(db.UserGroupRepoToPerm) \ + .join((db.UserGroup, db.UserGroupRepoToPerm.users_group_id == + db.UserGroup.users_group_id)) \ + .filter(db.UserGroup.users_group_active == True) \ + .join((db.UserGroupMember, db.UserGroupRepoToPerm.users_group_id == + db.UserGroupMember.users_group_id)) \ + .filter(db.UserGroupMember.user_id == self.user_id) \ + .options(joinedload(db.UserGroupRepoToPerm.repository)) \ + .options(joinedload(db.UserGroupRepoToPerm.permission)) \ + .all() + for perm in user_repo_perms_from_users_groups: + bump_permission(repository_permissions, +
perm.repository.repo_name, + perm.permission.permission_name) + + # user permissions for repositories + user_repo_perms = db.Permission.get_default_perms(self.user_id) + for perm in user_repo_perms: + bump_permission(repository_permissions, + perm.repository.repo_name, + perm.permission.permission_name) + + return repository_permissions + + @LazyProperty + def repository_group_permissions(self): + log.debug('Getting repository group permissions for %s', self) + repository_group_permissions = {} + default_repo_groups_perms = db.Permission.get_default_group_perms(kallithea.DEFAULT_USER_ID) + + if self.is_admin: + for perm in default_repo_groups_perms: + rg_k = perm.group.group_name + p = 'group.admin' + repository_group_permissions[rg_k] = p + + else: + # defaults for repository groups taken from default user permission + # on given group + for perm in default_repo_groups_perms: + rg_k = perm.group.group_name + p = perm.permission.permission_name + repository_group_permissions[rg_k] = p + + # user group for repo groups permissions + user_repo_group_perms_from_users_groups = \ + meta.Session().query(db.UserGroupRepoGroupToPerm) \ + .join((db.UserGroup, db.UserGroupRepoGroupToPerm.users_group_id == + db.UserGroup.users_group_id)) \ + .filter(db.UserGroup.users_group_active == True) \ + .join((db.UserGroupMember, db.UserGroupRepoGroupToPerm.users_group_id + == db.UserGroupMember.users_group_id)) \ + .filter(db.UserGroupMember.user_id == self.user_id) \ + .options(joinedload(db.UserGroupRepoGroupToPerm.permission)) \ + .all() + for perm in user_repo_group_perms_from_users_groups: + bump_permission(repository_group_permissions, + perm.group.group_name, + perm.permission.permission_name) + + # user explicit permissions for repository groups + user_repo_groups_perms = db.Permission.get_default_group_perms(self.user_id) + for perm in user_repo_groups_perms: + bump_permission(repository_group_permissions, + perm.group.group_name, + perm.permission.permission_name) + + return repository_group_permissions + + @LazyProperty + def user_group_permissions(self): + log.debug('Getting user group permissions for %s', self) + user_group_permissions = {} + default_user_group_perms = db.Permission.get_default_user_group_perms(kallithea.DEFAULT_USER_ID) + + if self.is_admin: + for perm in default_user_group_perms: + u_k = perm.user_group.users_group_name + p = 'usergroup.admin' + user_group_permissions[u_k] = p + + else: + # defaults for user groups taken from default user permission + # on given user group + for perm in default_user_group_perms: + u_k = perm.user_group.users_group_name + p = perm.permission.permission_name + user_group_permissions[u_k] = p + + # user group for user group permissions + user_group_user_groups_perms = \ + meta.Session().query(db.UserGroupUserGroupToPerm) \ + .join((db.UserGroup, db.UserGroupUserGroupToPerm.target_user_group_id + == db.UserGroup.users_group_id)) \ + .join((db.UserGroupMember, db.UserGroupUserGroupToPerm.user_group_id + == db.UserGroupMember.users_group_id)) \ + .filter(db.UserGroupMember.user_id == self.user_id) \ + .join((db.UserGroup, db.UserGroupMember.users_group_id == + db.UserGroup.users_group_id), aliased=True, from_joinpoint=True) \ + .filter(db.UserGroup.users_group_active == True) \ + .options(joinedload(db.UserGroupUserGroupToPerm.permission)) \ + .all() + for perm in user_group_user_groups_perms: + bump_permission(user_group_permissions, + perm.target_user_group.users_group_name, + perm.permission.permission_name) + + # user explicit permission for user 
groups + user_user_groups_perms = db.Permission.get_default_user_group_perms(self.user_id) + for perm in user_user_groups_perms: + bump_permission(user_group_permissions, + perm.user_group.users_group_name, + perm.permission.permission_name) + + return user_group_permissions + + @LazyProperty def permissions(self): - """ - Fills user permission attribute with permissions taken from database - works for permissions given for repositories, and for permissions that - are granted to groups - - :param user: `AuthUser` instance - """ - log.debug('Getting PERMISSION tree for %s', self) - return _cached_perms_data(self.user_id, self.is_admin) + """dict with all 4 kinds of permissions - mainly for backwards compatibility""" + return { + 'global': self.global_permissions, + 'repositories': self.repository_permissions, + 'repositories_groups': self.repository_group_permissions, + 'user_groups': self.user_group_permissions, + } def has_repository_permission_level(self, repo_name, level, purpose=None): required_perms = { @@ -447,7 +421,7 @@ 'write': ['repository.write', 'repository.admin'], 'admin': ['repository.admin'], }[level] - actual_perm = self.permissions['repositories'].get(repo_name) + actual_perm = self.repository_permissions.get(repo_name) ok = actual_perm in required_perms log.debug('Checking if user %r can %r repo %r (%s): %s (has %r)', self.username, level, repo_name, purpose, ok, actual_perm) @@ -459,7 +433,7 @@ 'write': ['group.write', 'group.admin'], 'admin': ['group.admin'], }[level] - actual_perm = self.permissions['repositories_groups'].get(repo_group_name) + actual_perm = self.repository_group_permissions.get(repo_group_name) ok = actual_perm in required_perms log.debug('Checking if user %r can %r repo group %r (%s): %s (has %r)', self.username, level, repo_group_name, purpose, ok, actual_perm) @@ -471,7 +445,7 @@ 'write': ['usergroup.write', 'usergroup.admin'], 'admin': ['usergroup.admin'], }[level] - actual_perm = self.permissions['user_groups'].get(user_group_name) + actual_perm = self.user_group_permissions.get(user_group_name) ok = actual_perm in required_perms log.debug('Checking if user %r can %r user group %r (%s): %s (has %r)', self.username, level, user_group_name, purpose, ok, actual_perm) @@ -483,7 +457,7 @@ def _get_api_keys(self): api_keys = [self.api_key] - for api_key in UserApiKeys.query() \ + for api_key in db.UserApiKeys.query() \ .filter_by(user_id=self.user_id, is_expired=False): api_keys.append(api_key.api_key) @@ -498,7 +472,7 @@ """ Returns list of repositories you're an admin of """ - return [x[0] for x in self.permissions['repositories'].items() + return [x[0] for x in self.repository_permissions.items() if x[1] == 'repository.admin'] @property @@ -506,7 +480,7 @@ """ Returns list of repository groups you're an admin of """ - return [x[0] for x in self.permissions['repositories_groups'].items() + return [x[0] for x in self.repository_group_permissions.items() if x[1] == 'group.admin'] @property @@ -514,7 +488,7 @@ """ Returns list of user groups you're an admin of """ - return [x[0] for x in self.permissions['user_groups'].items() + return [x[0] for x in self.user_group_permissions.items() if x[1] == 'usergroup.admin'] def __repr__(self): @@ -542,7 +516,7 @@ def get_allowed_ips(cls, user_id): _set = set() - default_ips = UserIpMap.query().filter(UserIpMap.user_id == kallithea.DEFAULT_USER_ID) + default_ips = db.UserIpMap.query().filter(db.UserIpMap.user_id == kallithea.DEFAULT_USER_ID) for ip in default_ips: try: _set.add(ip.ip_addr) @@ -551,7 +525,7 @@ #
deleted objects here, we just skip them pass - user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id) + user_ips = db.UserIpMap.query().filter(db.UserIpMap.user_id == user_id) for ip in user_ips: try: _set.add(ip.ip_addr) @@ -673,8 +647,7 @@ """ def check_permissions(self, user): - global_permissions = user.permissions['global'] # usually very short - return any(p in global_permissions for p in self.required_perms) + return any(p in user.global_permissions for p in self.required_perms) class _PermDecorator(_PermsDecorator): @@ -740,8 +713,7 @@ class HasPermissionAny(_PermsFunction): def __call__(self, purpose=None): - global_permissions = request.authuser.permissions['global'] # usually very short - ok = any(p in global_permissions for p in self.required_perms) + ok = any(p in request.authuser.global_permissions for p in self.required_perms) log.debug('Check %s for global %s (%s): %s', request.authuser.username, self.required_perms, purpose, ok) @@ -784,7 +756,7 @@ def __call__(self, authuser, repo_name, purpose=None): try: - ok = authuser.permissions['repositories'][repo_name] in self.required_perms + ok = authuser.repository_permissions[repo_name] in self.required_perms except KeyError: ok = False diff -r c387989f868f -r 3669e58f3002 kallithea/lib/auth_modules/__init__.py --- a/kallithea/lib/auth_modules/__init__.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/auth_modules/__init__.py Fri Oct 30 23:44:18 2020 +0100 @@ -18,12 +18,12 @@ import importlib import logging import traceback +from inspect import isfunction from kallithea.lib.auth import AuthUser, PasswordGenerator from kallithea.lib.compat import hybrid_property -from kallithea.lib.utils2 import str2bool -from kallithea.model.db import Setting, User -from kallithea.model.meta import Session +from kallithea.lib.utils2 import asbool +from kallithea.model import db, meta, validators from kallithea.model.user import UserModel from kallithea.model.user_group import UserGroupModel @@ -38,7 +38,6 @@ self.kwargs = kwargs def __call__(self, *args, **kwargs): - from inspect import isfunction formencode_obj = self.formencode_obj if isfunction(formencode_obj): # case we wrap validators into functions @@ -69,8 +68,7 @@ self.validator_name = name def __call__(self, *args, **kwargs): - from kallithea.model import validators as v - obj = getattr(v, self.validator_name) + obj = getattr(validators, self.validator_name) #log.debug('Initializing lazy formencode object: %s', obj) return LazyFormencode(obj, *args, **kwargs) @@ -135,7 +133,7 @@ log.debug('Trying to fetch user `%s` from Kallithea database', username) if username: - user = User.get_by_username_or_email(username) + user = db.User.get_by_username_or_email(username) else: log.debug('provided username:`%s` is empty skipping...', username) return user @@ -182,8 +180,8 @@ OVERRIDING THIS METHOD WILL CAUSE YOUR PLUGIN TO FAIL. 
""" - rcsettings = self.settings() - rcsettings.insert(0, { + settings = self.settings() + settings.insert(0, { "name": "enabled", "validator": self.validators.StringBoolean(if_missing=False), "type": "bool", @@ -191,7 +189,7 @@ "formname": "Enabled" } ) - return rcsettings + return settings def auth(self, userobj, username, passwd, settings, **kwargs): """ @@ -240,7 +238,7 @@ userobj, username, passwd, settings, **kwargs) if user_data is not None: if userobj is None: # external authentication of unknown user that will be created soon - def_user_perms = AuthUser(dbuser=User.get_default_user()).permissions['global'] + def_user_perms = AuthUser(dbuser=db.User.get_default_user()).global_permissions active = 'hg.extern_activate.auto' in def_user_perms else: active = userobj.active @@ -267,7 +265,7 @@ # created from plugins. We store this info in _group_data JSON field groups = user_data['groups'] or [] UserGroupModel().enforce_groups(user, groups, self.name) - Session().commit() + meta.Session().commit() return user_data @@ -316,7 +314,7 @@ def get_auth_plugins(): """Return a list of instances of plugins that are available and enabled""" auth_plugins = [] - for plugin_name in Setting.get_by_name("auth_plugins").app_settings_value: + for plugin_name in db.Setting.get_by_name("auth_plugins").app_settings_value: try: plugin = loadplugin(plugin_name) except Exception: @@ -346,11 +344,11 @@ plugin_settings = {} for v in plugin.plugin_settings(): conf_key = "auth_%s_%s" % (plugin_name, v["name"]) - setting = Setting.get_by_name(conf_key) + setting = db.Setting.get_by_name(conf_key) plugin_settings[v["name"]] = setting.app_settings_value if setting else None log.debug('Settings for auth plugin %s: %s', plugin_name, plugin_settings) - if not str2bool(plugin_settings["enabled"]): + if not asbool(plugin_settings["enabled"]): log.info("Authentication plugin %s is disabled, skipping for %s", module, username) continue diff -r c387989f868f -r 3669e58f3002 kallithea/lib/auth_modules/auth_container.py --- a/kallithea/lib/auth_modules/auth_container.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/auth_modules/auth_container.py Fri Oct 30 23:44:18 2020 +0100 @@ -29,8 +29,8 @@ from kallithea.lib import auth_modules from kallithea.lib.compat import hybrid_property -from kallithea.lib.utils2 import str2bool -from kallithea.model.db import Setting +from kallithea.lib.utils2 import asbool +from kallithea.model import db log = logging.getLogger(__name__) @@ -131,7 +131,7 @@ username = environ.get(header) log.debug('extracted %s:%s', header, username) - if username and str2bool(settings.get('clean_username')): + if username and asbool(settings.get('clean_username')): log.debug('Received username %s from container', username) username = self._clean_username(username) log.debug('New cleanup user is: %s', username) @@ -212,10 +212,10 @@ def get_managed_fields(self): fields = ['username', 'password'] - if(Setting.get_by_name('auth_container_email_header').app_settings_value): + if(db.Setting.get_by_name('auth_container_email_header').app_settings_value): fields.append('email') - if(Setting.get_by_name('auth_container_firstname_header').app_settings_value): + if(db.Setting.get_by_name('auth_container_firstname_header').app_settings_value): fields.append('firstname') - if(Setting.get_by_name('auth_container_lastname_header').app_settings_value): + if(db.Setting.get_by_name('auth_container_lastname_header').app_settings_value): fields.append('lastname') return fields diff -r c387989f868f -r 3669e58f3002 
kallithea/lib/auth_modules/auth_internal.py --- a/kallithea/lib/auth_modules/auth_internal.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/auth_modules/auth_internal.py Fri Oct 30 23:44:18 2020 +0100 @@ -28,7 +28,7 @@ import logging -from kallithea.lib import auth_modules +from kallithea.lib import auth, auth_modules from kallithea.lib.compat import hybrid_property @@ -78,7 +78,6 @@ } log.debug('user data: %s', user_data) - from kallithea.lib import auth password_match = auth.check_password(password, userobj.password) if userobj.is_default_user: log.info('user %s authenticated correctly as anonymous user', diff -r c387989f868f -r 3669e58f3002 kallithea/lib/base.py --- a/kallithea/lib/base.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/base.py Fri Oct 30 23:44:18 2020 +0100 @@ -43,16 +43,15 @@ from tg import tmpl_context as c from tg.i18n import ugettext as _ -from kallithea import BACKENDS, __version__ -from kallithea.config.routing import url +import kallithea from kallithea.lib import auth_modules, ext_json from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware from kallithea.lib.exceptions import UserCreationError from kallithea.lib.utils import get_repo_slug, is_valid_repo -from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, safe_str, set_hook_environment, str2bool +from kallithea.lib.utils2 import AttributeDict, asbool, ascii_bytes, safe_int, safe_str, set_hook_environment from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError -from kallithea.model import meta -from kallithea.model.db import PullRequest, Repository, Setting, User +from kallithea.lib.webutils import url +from kallithea.model import db, meta from kallithea.model.scm import ScmModel @@ -223,7 +222,7 @@ Returns (None, wsgi_app) to send the wsgi_app response to the client. """ # Use anonymous access if allowed for action on repo. 
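Since this patch consistently replaces str2bool() with asbool() for parsing string settings, here is a rough, hedged approximation of what such a conversion does; the real helper is imported from kallithea.lib.utils2 and this sketch is only illustrative::

    def asbool_sketch(obj):
        # accept common textual spellings of true/false, as asbool-style helpers do
        if isinstance(obj, str):
            obj = obj.strip().lower()
            if obj in ('true', 'yes', 'on', 'y', 't', '1'):
                return True
            if obj in ('false', 'no', 'off', 'n', 'f', '0'):
                return False
            raise ValueError('String is not true/false: %r' % obj)
        return bool(obj)

    print(asbool_sketch('True'), asbool_sketch('0'), asbool_sketch(None))  # True False False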
- default_user = User.get_default_user() + default_user = db.User.get_default_user() default_authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr) if default_authuser is None: log.debug('No anonymous access at all') # move on to proper user auth @@ -260,7 +259,7 @@ # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME #============================================================== try: - user = User.get_by_username_or_email(username) + user = db.User.get_by_username_or_email(username) except Exception: log.error(traceback.format_exc()) return None, webob.exc.HTTPInternalServerError() @@ -368,24 +367,24 @@ log.error('CSRF check failed') raise webob.exc.HTTPForbidden() - c.kallithea_version = __version__ - rc_config = Setting.get_app_settings() + c.kallithea_version = kallithea.__version__ + settings = db.Setting.get_app_settings() # Visual options c.visual = AttributeDict({}) ## DB stored - c.visual.show_public_icon = str2bool(rc_config.get('show_public_icon')) - c.visual.show_private_icon = str2bool(rc_config.get('show_private_icon')) - c.visual.stylify_metalabels = str2bool(rc_config.get('stylify_metalabels')) - c.visual.page_size = safe_int(rc_config.get('dashboard_items', 100)) - c.visual.admin_grid_items = safe_int(rc_config.get('admin_grid_items', 100)) - c.visual.repository_fields = str2bool(rc_config.get('repository_fields')) - c.visual.show_version = str2bool(rc_config.get('show_version')) - c.visual.use_gravatar = str2bool(rc_config.get('use_gravatar')) - c.visual.gravatar_url = rc_config.get('gravatar_url') + c.visual.show_public_icon = asbool(settings.get('show_public_icon')) + c.visual.show_private_icon = asbool(settings.get('show_private_icon')) + c.visual.stylify_metalabels = asbool(settings.get('stylify_metalabels')) + c.visual.page_size = safe_int(settings.get('dashboard_items', 100)) + c.visual.admin_grid_items = safe_int(settings.get('admin_grid_items', 100)) + c.visual.repository_fields = asbool(settings.get('repository_fields')) + c.visual.show_version = asbool(settings.get('show_version')) + c.visual.use_gravatar = asbool(settings.get('use_gravatar')) + c.visual.gravatar_url = settings.get('gravatar_url') - c.ga_code = rc_config.get('ga_code') + c.ga_code = settings.get('ga_code') # TODO: replace undocumented backwards compatibility hack with db upgrade and rename ga_code if c.ga_code and '<' not in c.ga_code: c.ga_code = '''''' % c.ga_code - c.site_name = rc_config.get('title') - c.clone_uri_tmpl = rc_config.get('clone_uri_tmpl') or Repository.DEFAULT_CLONE_URI - c.clone_ssh_tmpl = rc_config.get('clone_ssh_tmpl') or Repository.DEFAULT_CLONE_SSH + c.site_name = settings.get('title') + c.clone_uri_tmpl = settings.get('clone_uri_tmpl') or db.Repository.DEFAULT_CLONE_URI + c.clone_ssh_tmpl = settings.get('clone_ssh_tmpl') or db.Repository.DEFAULT_CLONE_SSH ## INI stored - c.visual.allow_repo_location_change = str2bool(config.get('allow_repo_location_change', True)) - c.visual.allow_custom_hooks_settings = str2bool(config.get('allow_custom_hooks_settings', True)) - c.ssh_enabled = str2bool(config.get('ssh_enabled', False)) + c.visual.allow_repo_location_change = asbool(config.get('allow_repo_location_change', True)) + c.visual.allow_custom_hooks_settings = asbool(config.get('allow_custom_hooks_settings', True)) + c.ssh_enabled = asbool(config.get('ssh_enabled', False)) c.instance_id = config.get('instance_id') c.issues_url = config.get('bugtracker', url('issues_url')) # END CONFIG VARS c.repo_name = get_repo_slug(request) # can be empty - c.backends = 
list(BACKENDS) + c.backends = list(kallithea.BACKENDS) self.cut_off_limit = safe_int(config.get('cut_off_limit')) - c.my_pr_count = PullRequest.query(reviewer_id=request.authuser.user_id, include_closed=False).count() + c.my_pr_count = db.PullRequest.query(reviewer_id=request.authuser.user_id, include_closed=False).count() self.scm_model = ScmModel() @@ -450,11 +449,11 @@ else: if user_info is not None: username = user_info['username'] - user = User.get_by_username(username, case_insensitive=True) + user = db.User.get_by_username(username, case_insensitive=True) return log_in_user(user, remember=False, is_external_auth=True, ip_addr=ip_addr) # User is default user (if active) or anonymous - default_user = User.get_default_user() + default_user = db.User.get_default_user() authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr) if authuser is None: # fall back to anonymous authuser = AuthUser(dbuser=default_user) # TODO: somehow use .make? @@ -513,7 +512,7 @@ needs_csrf_check = request.method not in ['GET', 'HEAD'] else: - dbuser = User.get_by_api_key(api_key) + dbuser = db.User.get_by_api_key(api_key) if dbuser is None: log.info('No db user found for authentication with API key ****%s from %s', api_key[-4:], ip_addr) @@ -553,7 +552,7 @@ def _before(self, *args, **kwargs): super(BaseRepoController, self)._before(*args, **kwargs) if c.repo_name: # extracted from request by base-base BaseController._before - _dbr = Repository.get_by_repo_name(c.repo_name) + _dbr = db.Repository.get_by_repo_name(c.repo_name) if not _dbr: return @@ -565,7 +564,7 @@ if route in ['delete_repo']: return - if _dbr.repo_state in [Repository.STATE_PENDING]: + if _dbr.repo_state in [db.Repository.STATE_PENDING]: if route in ['repo_creating_home']: return check_url = url('repo_creating_home', repo_name=c.repo_name) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/celerylib/__init__.py --- a/kallithea/lib/celerylib/__init__.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/celerylib/__init__.py Fri Oct 30 23:44:18 2020 +0100 @@ -28,7 +28,7 @@ import logging import os -from hashlib import md5 +from hashlib import sha1 from decorator import decorator from tg import config @@ -94,7 +94,7 @@ func_name = str(func.__name__) if hasattr(func, '__name__') else str(func) lockkey = 'task_%s.lock' % \ - md5(safe_bytes(func_name + '-' + '-'.join(str(x) for x in params))).hexdigest() + sha1(safe_bytes(func_name + '-' + '-'.join(str(x) for x in params))).hexdigest() return lockkey diff -r c387989f868f -r 3669e58f3002 kallithea/lib/celerylib/tasks.py --- a/kallithea/lib/celerylib/tasks.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/celerylib/tasks.py Fri Oct 30 23:44:18 2020 +0100 @@ -26,8 +26,12 @@ :license: GPLv3, see LICENSE.md for more details. 
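The md5-to-sha1 switch in the task lock key above can be exercised on its own; a small sketch with hypothetical task parameters::

    from hashlib import sha1

    def task_lockkey(func_name, *params):
        # same scheme as the lock key construction above: task name plus its
        # parameters, hashed to build a stable lock file name
        raw = (func_name + '-' + '-'.join(str(x) for x in params)).encode('utf-8')
        return 'task_%s.lock' % sha1(raw).hexdigest()

    print(task_lockkey('whoosh_index', '/srv/repos', True))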
""" +import email.mime.multipart +import email.mime.text import email.utils import os +import smtplib +import time import traceback from collections import OrderedDict from operator import itemgetter @@ -37,14 +41,15 @@ from tg import config import kallithea -from kallithea.lib import celerylib, ext_json +from kallithea.lib import celerylib, conf, ext_json from kallithea.lib.helpers import person from kallithea.lib.hooks import log_create_repository -from kallithea.lib.rcmail.smtp_mailer import SmtpMailer +from kallithea.lib.indexers.daemon import WhooshIndexingDaemon from kallithea.lib.utils import action_logger -from kallithea.lib.utils2 import ascii_bytes, str2bool +from kallithea.lib.utils2 import asbool, ascii_bytes from kallithea.lib.vcs.utils import author_email -from kallithea.model.db import RepoGroup, Repository, Statistics, User +from kallithea.model import db +from kallithea.model.repo import RepoModel __all__ = ['whoosh_index', 'get_commits_stats', 'send_email'] @@ -57,7 +62,6 @@ @celerylib.locked_task @celerylib.dbsession def whoosh_index(repo_location, full_index): - from kallithea.lib.indexers.daemon import WhooshIndexingDaemon celerylib.get_session() # initialize database connection index_location = config['index_dir'] @@ -86,7 +90,7 @@ co_day_auth_aggr = {} commits_by_day_aggregate = {} - repo = Repository.get_by_repo_name(repo_name) + repo = db.Repository.get_by_repo_name(repo_name) if repo is None: return True @@ -103,10 +107,10 @@ last_cs = None timegetter = itemgetter('time') - dbrepo = DBS.query(Repository) \ - .filter(Repository.repo_name == repo_name).scalar() - cur_stats = DBS.query(Statistics) \ - .filter(Statistics.repository == dbrepo).scalar() + dbrepo = DBS.query(db.Repository) \ + .filter(db.Repository.repo_name == repo_name).scalar() + cur_stats = DBS.query(db.Statistics) \ + .filter(db.Statistics.repository == dbrepo).scalar() if cur_stats is not None: last_rev = cur_stats.stat_on_revision @@ -193,7 +197,7 @@ "schema": ["commits"], } - stats = cur_stats if cur_stats else Statistics() + stats = cur_stats if cur_stats else db.Statistics() stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr)) stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data)) @@ -240,7 +244,7 @@ :param recipients: list of recipients, if this is None, the defined email address from field 'email_to' and all admins is used instead :param subject: subject of the mail - :param body: body of the mail + :param body: plain text body of the mail :param html_body: html version of body :param headers: dictionary of prepopulated e-mail headers :param from_name: full name to be used as sender of this mail - often a @@ -260,8 +264,8 @@ if not recipients: # if recipients are not defined we send to email_config + all admins - recipients = [u.email for u in User.query() - .filter(User.admin == True).all()] + recipients = [u.email for u in db.User.query() + .filter(db.User.admin == True).all()] if email_config.get('email_to') is not None: recipients += email_config.get('email_to').split(',') @@ -274,25 +278,24 @@ log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients)) # SMTP sender - envelope_from = email_config.get('app_email_from', 'Kallithea') + app_email_from = email_config.get('app_email_from', 'Kallithea') # 'From' header if from_name is not None: # set From header based on from_name but with a generic e-mail address # In case app_email_from is in "Some Name " format, we first # extract the e-mail address. 
- envelope_addr = author_email(envelope_from) + envelope_addr = author_email(app_email_from) headers['From'] = '"%s" <%s>' % ( email.utils.quote('%s (no-reply)' % from_name), envelope_addr) - user = email_config.get('smtp_username') - passwd = email_config.get('smtp_password') - mail_server = email_config.get('smtp_server') - mail_port = email_config.get('smtp_port') - tls = str2bool(email_config.get('smtp_use_tls')) - ssl = str2bool(email_config.get('smtp_use_ssl')) - debug = str2bool(email_config.get('debug')) - smtp_auth = email_config.get('smtp_auth') + smtp_server = email_config.get('smtp_server') + smtp_port = email_config.get('smtp_port') + smtp_use_tls = asbool(email_config.get('smtp_use_tls')) + smtp_use_ssl = asbool(email_config.get('smtp_use_ssl')) + smtp_auth = email_config.get('smtp_auth') # undocumented - overrule automatic choice of auth mechanism + smtp_username = email_config.get('smtp_username') + smtp_password = email_config.get('smtp_password') logmsg = ("Mail details:\n" "recipients: %s\n" @@ -302,17 +305,45 @@ "html:\n%s\n" % (' '.join(recipients), headers, subject, body, html_body)) - if mail_server: + if smtp_server: log.debug("Sending e-mail. " + logmsg) else: log.error("SMTP mail server not configured - cannot send e-mail.") log.warning(logmsg) return False + msg = email.mime.multipart.MIMEMultipart('alternative') + msg['Subject'] = subject + msg['From'] = app_email_from # fallback - might be overridden by a header + msg['To'] = ', '.join(recipients) + msg['Date'] = email.utils.formatdate(time.time()) + + for key, value in headers.items(): + msg[key] = value + + msg.attach(email.mime.text.MIMEText(body, 'plain')) + msg.attach(email.mime.text.MIMEText(html_body, 'html')) + try: - m = SmtpMailer(envelope_from, user, passwd, mail_server, smtp_auth, - mail_port, ssl, tls, debug=debug) - m.send(recipients, subject, body, html_body, headers=headers) + if smtp_use_ssl: + smtp_serv = smtplib.SMTP_SSL(smtp_server, smtp_port) + else: + smtp_serv = smtplib.SMTP(smtp_server, smtp_port) + + if smtp_use_tls: + smtp_serv.starttls() + + if smtp_auth: + smtp_serv.ehlo() # populate esmtp_features + smtp_serv.esmtp_features["auth"] = smtp_auth + + if smtp_username and smtp_password is not None: + smtp_serv.login(smtp_username, smtp_password) + + smtp_serv.sendmail(app_email_from, recipients, msg.as_string()) + smtp_serv.quit() + + log.info('Mail was sent to: %s' % recipients) except: log.error('Mail sending failed') log.error(traceback.format_exc()) @@ -323,12 +354,9 @@ @celerylib.task @celerylib.dbsession def create_repo(form_data, cur_user): - from kallithea.model.repo import RepoModel - from kallithea.model.db import Setting - DBS = celerylib.get_session() - cur_user = User.guess_instance(cur_user) + cur_user = db.User.guess_instance(cur_user) owner = cur_user repo_name = form_data['repo_name'] @@ -342,10 +370,10 @@ copy_fork_permissions = form_data.get('copy_permissions') copy_group_permissions = form_data.get('repo_copy_permissions') fork_of = form_data.get('fork_parent_id') - state = form_data.get('repo_state', Repository.STATE_PENDING) + state = form_data.get('repo_state', db.Repository.STATE_PENDING) # repo creation defaults, private and repo_type are filled in form - defs = Setting.get_default_repo_settings(strip_prefix=True) + defs = db.Setting.get_default_repo_settings(strip_prefix=True) enable_statistics = defs.get('repo_enable_statistics') enable_downloads = defs.get('repo_enable_downloads') @@ -375,25 +403,25 @@ RepoModel()._create_filesystem_repo( repo_name=repo_name, 
repo_type=repo_type, - repo_group=RepoGroup.guess_instance(repo_group), + repo_group=db.RepoGroup.guess_instance(repo_group), clone_uri=clone_uri, ) - repo = Repository.get_by_repo_name(repo_name_full) + repo = db.Repository.get_by_repo_name(repo_name_full) log_create_repository(repo.get_dict(), created_by=owner.username) # update repo changeset caches initially repo.update_changeset_cache() # set new created state - repo.set_state(Repository.STATE_CREATED) + repo.set_state(db.Repository.STATE_CREATED) DBS.commit() except Exception as e: log.warning('Exception %s occurred when forking repository, ' 'doing cleanup...' % e) # rollback things manually ! - repo = Repository.get_by_repo_name(repo_name_full) + repo = db.Repository.get_by_repo_name(repo_name_full) if repo: - Repository.delete(repo.repo_id) + db.Repository.delete(repo.repo_id) DBS.commit() RepoModel()._delete_filesystem_repo(repo) raise @@ -410,12 +438,10 @@ :param form_data: :param cur_user: """ - from kallithea.model.repo import RepoModel - DBS = celerylib.get_session() base_path = kallithea.CONFIG['base_path'] - cur_user = User.guess_instance(cur_user) + cur_user = db.User.guess_instance(cur_user) repo_name = form_data['repo_name'] # fork in this case repo_name_full = form_data['repo_name_full'] @@ -429,7 +455,7 @@ copy_fork_permissions = form_data.get('copy_permissions') try: - fork_of = Repository.guess_instance(form_data.get('fork_parent_id')) + fork_of = db.Repository.guess_instance(form_data.get('fork_parent_id')) RepoModel()._create_repo( repo_name=repo_name_full, @@ -453,25 +479,25 @@ RepoModel()._create_filesystem_repo( repo_name=repo_name, repo_type=repo_type, - repo_group=RepoGroup.guess_instance(repo_group), + repo_group=db.RepoGroup.guess_instance(repo_group), clone_uri=source_repo_path, ) - repo = Repository.get_by_repo_name(repo_name_full) + repo = db.Repository.get_by_repo_name(repo_name_full) log_create_repository(repo.get_dict(), created_by=owner.username) # update repo changeset caches initially repo.update_changeset_cache() # set new created state - repo.set_state(Repository.STATE_CREATED) + repo.set_state(db.Repository.STATE_CREATED) DBS.commit() except Exception as e: log.warning('Exception %s occurred when forking repository, ' 'doing cleanup...' % e) # rollback things manually ! - repo = Repository.get_by_repo_name(repo_name_full) + repo = db.Repository.get_by_repo_name(repo_name_full) if repo: - Repository.delete(repo.repo_id) + db.Repository.delete(repo.repo_id) DBS.commit() RepoModel()._delete_filesystem_repo(repo) raise @@ -480,8 +506,7 @@ def __get_codes_stats(repo_name): - from kallithea.config.conf import LANGUAGES_EXTENSIONS_MAP - repo = Repository.get_by_repo_name(repo_name).scm_instance + repo = db.Repository.get_by_repo_name(repo_name).scm_instance tip = repo.get_changeset() code_stats = {} @@ -489,7 +514,7 @@ for _topnode, _dirnodes, filenodes in tip.walk('/'): for filenode in filenodes: ext = filenode.extension.lower() - if ext in LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary: + if ext in conf.LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary: if ext in code_stats: code_stats[ext] += 1 else: diff -r c387989f868f -r 3669e58f3002 kallithea/lib/colored_formatter.py --- a/kallithea/lib/colored_formatter.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/colored_formatter.py Fri Oct 30 23:44:18 2020 +0100 @@ -13,6 +13,7 @@ # along with this program. If not, see . 
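The extension counting in __get_codes_stats() above follows a simple tally pattern; a standalone sketch with a made-up allow list standing in for conf.LANGUAGES_EXTENSIONS_MAP::

    LANGUAGES_EXTENSIONS_MAP = {'py': 'Python', 'rst': 'reStructuredText'}  # example subset
    code_stats = {}
    for filename in ('setup.py', 'README.rst', 'kallithea/model/db.py', 'Makefile'):
        ext = filename.rsplit('.', 1)[-1].lower() if '.' in filename else ''
        if ext in LANGUAGES_EXTENSIONS_MAP:
            code_stats[ext] = code_stats.get(ext, 0) + 1
    print(code_stats)  # {'py': 2, 'rst': 1}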
import logging +import sys BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(30, 38) @@ -65,15 +66,18 @@ def __init__(self, *args, **kwargs): # can't do super(...) here because Formatter is an old school class logging.Formatter.__init__(self, *args, **kwargs) + self.plain = not getattr(sys.stderr, 'isatty', lambda: False)() def format(self, record): """ Changes record's levelname to use with COLORS enum """ + def_record = logging.Formatter.format(self, record) + if self.plain: + return def_record levelname = record.levelname start = COLOR_SEQ % (COLORS[levelname]) - def_record = logging.Formatter.format(self, record) end = RESET_SEQ colored_record = ''.join([start, def_record, end]) @@ -85,14 +89,17 @@ def __init__(self, *args, **kwargs): # can't do super(...) here because Formatter is an old school class logging.Formatter.__init__(self, *args, **kwargs) + self.plain = not getattr(sys.stderr, 'isatty', lambda: False)() def format(self, record): """ Changes record's levelname to use with COLORS enum """ + def_record = format_sql(logging.Formatter.format(self, record)) + if self.plain: + return def_record start = COLOR_SEQ % (COLORS['SQL']) - def_record = format_sql(logging.Formatter.format(self, record)) end = RESET_SEQ colored_record = ''.join([start, def_record, end]) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/conf.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/lib/conf.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +""" +kallithea.lib.conf +~~~~~~~~~~~~~~~~~~~~~ + +Various config settings for Kallithea + +This file was forked by the Kallithea project in July 2014. +Original author and date, and relevant copyright and licensing information is below: +:created_on: Mar 7, 2012 +:author: marcink +:copyright: (c) 2013 RhodeCode GmbH, and others. +:license: GPLv3, see LICENSE.md for more details. 
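The colored log formatter now falls back to plain output when stderr is not a terminal; the core of that check can be tried separately. The ANSI escape sequences below are typical values assumed for the example, the module defines its own COLOR_SEQ and RESET_SEQ constants::

    import sys

    plain = not getattr(sys.stderr, 'isatty', lambda: False)()
    COLOR_SEQ = "\033[1;%dm"   # assumed ANSI color prefix
    RESET_SEQ = "\033[0m"      # assumed ANSI reset
    record_text = 'INFO  example log line'
    print(record_text if plain else COLOR_SEQ % 32 + record_text + RESET_SEQ)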
+""" + +from kallithea.lib import pygmentsutils + + +# language map is also used by whoosh indexer, which for those specified +# extensions will index it's content +LANGUAGES_EXTENSIONS_MAP = pygmentsutils.get_extension_descriptions() + +# Whoosh index targets + +# Extensions we want to index content of using whoosh +INDEX_EXTENSIONS = list(LANGUAGES_EXTENSIONS_MAP) + +# Filenames we want to index content of using whoosh +INDEX_FILENAMES = pygmentsutils.get_index_filenames() + +# list of readme files to search in file tree and display in summary +# attached weights defines the search order lower is first +ALL_READMES = [ + ('readme', 0), ('README', 0), ('Readme', 0), + ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1), + ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2), + ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2), + ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2), +] + +# extension together with weights to search lower is first +RST_EXTS = [ + ('', 0), ('.rst', 1), ('.rest', 1), + ('.RST', 2), ('.REST', 2), + ('.txt', 3), ('.TXT', 3) +] + +MARKDOWN_EXTS = [ + ('.md', 1), ('.MD', 1), + ('.mkdn', 2), ('.MKDN', 2), + ('.mdown', 3), ('.MDOWN', 3), + ('.markdown', 4), ('.MARKDOWN', 4) +] + +PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)] + +ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS diff -r c387989f868f -r 3669e58f3002 kallithea/lib/db_manage.py --- a/kallithea/lib/db_manage.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/db_manage.py Fri Oct 30 23:44:18 2020 +0100 @@ -26,6 +26,7 @@ :license: GPLv3, see LICENSE.md for more details. """ +import getpass import logging import os import sys @@ -36,12 +37,10 @@ import sqlalchemy from sqlalchemy.engine import create_engine +from kallithea.lib.utils2 import ask_ok +from kallithea.model import db, meta from kallithea.model.base import init_model -from kallithea.model.db import Permission, RepoGroup, Repository, Setting, Ui, User, UserRepoGroupToPerm, UserToPerm -#from kallithea.model import meta -from kallithea.model.meta import Base, Session from kallithea.model.permission import PermissionModel -from kallithea.model.repo_group import RepoGroupModel from kallithea.model.user import UserModel @@ -49,9 +48,8 @@ class DbManage(object): - def __init__(self, dbconf, root, tests=False, SESSION=None, cli_args=None): + def __init__(self, dbconf, root, SESSION=None, cli_args=None): self.dbname = dbconf.split('/')[-1] - self.tests = tests self.root = root self.dburi = dbconf self.cli_args = cli_args or {} @@ -62,7 +60,6 @@ force_ask = self.cli_args.get('force_ask') if force_ask is not None: return force_ask - from kallithea.lib.utils2 import ask_ok return ask_ok(msg) def init_db(self, SESSION=None): @@ -72,48 +69,49 @@ # init new sessions engine = create_engine(self.dburi) init_model(engine) - self.sa = Session() - - def create_tables(self, override=False): - """ - Create a auth database - """ + self.sa = meta.Session() - log.info("Any existing database is going to be destroyed") - if self.tests: - destroy = True + def create_tables(self, reuse_database=False): + """ + Create database (optional) and tables. + If reuse_database is false, the database will be dropped (if it exists) + and a new one created. If true, the existing database will be reused + and cleaned for content. + """ + url = sqlalchemy.engine.url.make_url(self.dburi) + database = url.database + if reuse_database: + log.info("The content of the database %r will be destroyed and new tables created." 
% database) else: - destroy = self._ask_ok('Are you sure to destroy old database ? [y/n]') - if not destroy: + log.info("The existing database %r will be destroyed and a new one created." % database) + + if not self._ask_ok('Are you sure to destroy old database? [y/n]'): print('Nothing done.') sys.exit(0) - if destroy: - # drop and re-create old schemas - url = sqlalchemy.engine.url.make_url(self.dburi) - database = url.database - - # Some databases enforce foreign key constraints and Base.metadata.drop_all() doesn't work + if reuse_database: + meta.Base.metadata.drop_all() + else: if url.drivername == 'mysql': url.database = None # don't connect to the database (it might not exist) engine = sqlalchemy.create_engine(url) with engine.connect() as conn: - conn.execute('DROP DATABASE IF EXISTS ' + database) - conn.execute('CREATE DATABASE ' + database) + conn.execute('DROP DATABASE IF EXISTS `%s`' % database) + conn.execute('CREATE DATABASE `%s` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci' % database) elif url.drivername == 'postgresql': from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT url.database = 'postgres' # connect to the system database (as the real one might not exist) engine = sqlalchemy.create_engine(url) with engine.connect() as conn: conn.connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) - conn.execute('DROP DATABASE IF EXISTS ' + database) - conn.execute('CREATE DATABASE ' + database) + conn.execute('DROP DATABASE IF EXISTS "%s"' % database) + conn.execute('CREATE DATABASE "%s"' % database) else: + # Some databases enforce foreign key constraints and Base.metadata.drop_all() doesn't work, but this is # known to work on SQLite - possibly not on other databases with strong referential integrity - Base.metadata.drop_all() + meta.Base.metadata.drop_all() - checkfirst = not override - Base.metadata.create_all(checkfirst=checkfirst) + meta.Base.metadata.create_all(checkfirst=False) # Create an Alembic configuration and generate the version table, # "stamping" it with the most recent Alembic migration revision, to @@ -128,91 +126,36 @@ log.info('Created tables for %s', self.dbname) - def fix_repo_paths(self): - """ - Fixes a old kallithea version path into new one without a '*' - """ - - paths = Ui.query() \ - .filter(Ui.ui_key == '/') \ - .scalar() - - paths.ui_value = paths.ui_value.replace('*', '') - - self.sa.commit() + def create_admin_user(self): + username = self.cli_args.get('username') + password = self.cli_args.get('password') + email = self.cli_args.get('email') - def fix_default_user(self): - """ - Fixes a old default user with some 'nicer' default values, - used mostly for anonymous access - """ - def_user = User.query().filter_by(is_default_user=True).one() - - def_user.name = 'Anonymous' - def_user.lastname = 'User' - def_user.email = 'anonymous@kallithea-scm.org' - - self.sa.commit() + def get_password(): + password = getpass.getpass('Specify admin password ' + '(min 6 chars):') + confirm = getpass.getpass('Confirm password:') - def fix_settings(self): - """ - Fixes kallithea settings adds ga_code key for google analytics - """ - - hgsettings3 = Setting('ga_code', '') - - self.sa.add(hgsettings3) - self.sa.commit() - - def admin_prompt(self, second=False): - if not self.tests: - import getpass + if password != confirm: + log.error('passwords mismatch') + return False + if len(password) < 6: + log.error('password is to short use at least 6 characters') + return False - username = self.cli_args.get('username') - password = 
self.cli_args.get('password') - email = self.cli_args.get('email') - - def get_password(): - password = getpass.getpass('Specify admin password ' - '(min 6 chars):') - confirm = getpass.getpass('Confirm password:') - - if password != confirm: - log.error('passwords mismatch') - return False - if len(password) < 6: - log.error('password is to short use at least 6 characters') - return False - - return password - if username is None: - username = input('Specify admin username:') - if password is None: + return password + if username is None: + username = input('Specify admin username:') + if password is None: + password = get_password() + if not password: + # second try password = get_password() if not password: - # second try - password = get_password() - if not password: - sys.exit() - if email is None: - email = input('Specify admin email:') - self.create_user(username, password, email, True) - else: - log.info('creating admin and regular test users') - from kallithea.tests.base import TEST_USER_ADMIN_LOGIN, \ - TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \ - TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \ - TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \ - TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL - - self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, - TEST_USER_ADMIN_EMAIL, True) - - self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, - TEST_USER_REGULAR_EMAIL, False) - - self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, - TEST_USER_REGULAR2_EMAIL, False) + sys.exit() + if email is None: + email = input('Specify admin email:') + self.create_user(username, password, email, True) def create_auth_plugin_options(self, skip_existing=False): """ @@ -223,10 +166,10 @@ for k, v, t in [('auth_plugins', 'kallithea.lib.auth_modules.auth_internal', 'list'), ('auth_internal_enabled', 'True', 'bool')]: - if skip_existing and Setting.get_by_name(k) is not None: + if skip_existing and db.Setting.get_by_name(k) is not None: log.debug('Skipping option %s', k) continue - setting = Setting(k, v, t) + setting = db.Setting(k, v, t) self.sa.add(setting) def create_default_options(self, skip_existing=False): @@ -238,51 +181,12 @@ ('default_repo_private', False, 'bool'), ('default_repo_type', 'hg', 'unicode') ]: - if skip_existing and Setting.get_by_name(k) is not None: + if skip_existing and db.Setting.get_by_name(k) is not None: log.debug('Skipping option %s', k) continue - setting = Setting(k, v, t) + setting = db.Setting(k, v, t) self.sa.add(setting) - def fixup_groups(self): - def_usr = User.get_default_user() - for g in RepoGroup.query().all(): - g.group_name = g.get_new_name(g.name) - # get default perm - default = UserRepoGroupToPerm.query() \ - .filter(UserRepoGroupToPerm.group == g) \ - .filter(UserRepoGroupToPerm.user == def_usr) \ - .scalar() - - if default is None: - log.debug('missing default permission for group %s adding', g) - RepoGroupModel()._create_default_perms(g) - - def reset_permissions(self, username): - """ - Resets permissions to default state, useful when old systems had - bad permissions, we must clean them up - - :param username: - """ - default_user = User.get_by_username(username) - if not default_user: - return - - u2p = UserToPerm.query() \ - .filter(UserToPerm.user == default_user).all() - fixed = False - if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS): - for p in u2p: - Session().delete(p) - fixed = True - self.populate_default_permissions() - return fixed - - def update_repo_info(self): - for repo in 
Repository.query(): - repo.update_changeset_cache() - def prompt_repo_root_path(self, test_repo_path='', retries=3): _path = self.cli_args.get('repos_location') if retries == 3: @@ -290,7 +194,7 @@ if _path is not None: path = _path - elif not self.tests and not test_repo_path: + elif not test_repo_path: path = input( 'Enter a valid absolute path to store repositories. ' 'All repositories in that path will be added automatically:' @@ -340,15 +244,14 @@ ui_config = [ ('paths', '/', repo_root_path, True), #('phases', 'publish', 'false', False) - ('hooks', Ui.HOOK_UPDATE, 'hg update >&2', False), - ('hooks', Ui.HOOK_REPO_SIZE, 'python:kallithea.lib.hooks.repo_size', True), + ('hooks', db.Ui.HOOK_UPDATE, 'hg update >&2', False), + ('hooks', db.Ui.HOOK_REPO_SIZE, 'python:kallithea.lib.hooks.repo_size', True), ('extensions', 'largefiles', '', True), ('largefiles', 'usercache', os.path.join(repo_root_path, '.cache', 'largefiles'), True), - ('extensions', 'hgsubversion', '', False), ('extensions', 'hggit', '', False), ] for ui_section, ui_key, ui_value, ui_active in ui_config: - ui_conf = Ui( + ui_conf = db.Ui( ui_section=ui_section, ui_key=ui_key, ui_value=ui_value, @@ -366,12 +269,12 @@ ('admin_grid_items', 25, 'int'), ('show_version', True, 'bool'), ('use_gravatar', True, 'bool'), - ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'), - ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'), - ('clone_ssh_tmpl', Repository.DEFAULT_CLONE_SSH, 'unicode'), + ('gravatar_url', db.User.DEFAULT_GRAVATAR_URL, 'unicode'), + ('clone_uri_tmpl', db.Repository.DEFAULT_CLONE_URI, 'unicode'), + ('clone_ssh_tmpl', db.Repository.DEFAULT_CLONE_SSH, 'unicode'), ] for key, val, type_ in settings: - sett = Setting(key, val, type_) + sett = db.Setting(key, val, type_) self.sa.add(sett) self.create_auth_plugin_options() @@ -384,12 +287,12 @@ UserModel().create_or_update(username, password, email, firstname='Kallithea', lastname='Admin', active=True, admin=admin, - extern_type=User.DEFAULT_AUTH_TYPE) + extern_type=db.User.DEFAULT_AUTH_TYPE) def create_default_user(self): log.info('creating default user') # create default user for handling default permissions. - user = UserModel().create_or_update(username=User.DEFAULT_USER_NAME, + user = UserModel().create_or_update(username=db.User.DEFAULT_USER_NAME, password=str(uuid.uuid1())[:20], email='anonymous@kallithea-scm.org', firstname='Anonymous', @@ -399,7 +302,7 @@ if self.cli_args.get('public_access') is False: log.info('Public access disabled') user.active = False - Session().commit() + meta.Session().commit() def create_permissions(self): """ @@ -416,4 +319,4 @@ permissions that are missing, and not alter already defined ones """ log.info('creating default user permissions') - PermissionModel().create_default_permissions(user=User.DEFAULT_USER_NAME) + PermissionModel().create_default_permissions(user=db.User.DEFAULT_USER_NAME) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/diffs.py --- a/kallithea/lib/diffs.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/diffs.py Fri Oct 30 23:44:18 2020 +0100 @@ -70,80 +70,72 @@ """ Return given diff as html table with customized css classes """ - def _link_to_if(condition, label, url): - """ - Generates a link if condition is meet or just the label if not. 
- """ - - if condition: - return '''''' % { - 'url': url, - 'label': label - } - else: - return label - _html_empty = True _html = [] _html.append('''\n''' % { 'table_class': table_class }) - for diff in parsed_lines: - for line in diff['chunks']: + for file_info in parsed_lines: + count_no_lineno = 0 # counter to allow comments on lines without new/old line numbers + for chunk in file_info['chunks']: _html_empty = False - for change in line: + for change in chunk: _html.append('''\n''' % { 'lc': line_class, 'action': change['action'] }) - anchor_old_id = '' - anchor_new_id = '' - anchor_old = "%(filename)s_o%(oldline_no)s" % { - 'filename': _safe_id(diff['filename']), - 'oldline_no': change['old_lineno'] - } - anchor_new = "%(filename)s_n%(oldline_no)s" % { - 'filename': _safe_id(diff['filename']), - 'oldline_no': change['new_lineno'] - } - cond_old = (change['old_lineno'] != '...' and - change['old_lineno']) - cond_new = (change['new_lineno'] != '...' and - change['new_lineno']) - no_lineno = (change['old_lineno'] == '...' and - change['new_lineno'] == '...') - if cond_old: - anchor_old_id = 'id="%s"' % anchor_old - if cond_new: - anchor_new_id = 'id="%s"' % anchor_new - ########################################################### - # OLD LINE NUMBER - ########################################################### - _html.append('''\t\n''') - ########################################################### - # NEW LINE NUMBER - ########################################################### - - if not no_lineno: + if change['old_lineno'] or change['new_lineno']: + ########################################################### + # OLD LINE NUMBER + ########################################################### + anchor_old = "%(filename)s_o%(oldline_no)s" % { + 'filename': _safe_id(file_info['filename']), + 'oldline_no': change['old_lineno'] + } + anchor_old_id = '' + if change['old_lineno']: + anchor_old_id = 'id="%s"' % anchor_old + _html.append('''\t\n''') + ########################################################### + # NEW LINE NUMBER + ########################################################### + anchor_new = "%(filename)s_n%(newline_no)s" % { + 'filename': _safe_id(file_info['filename']), + 'newline_no': change['new_lineno'] + } + anchor_new_id = '' + if change['new_lineno']: + anchor_new_id = 'id="%s"' % anchor_new _html.append('''\t\n''') + else: + ########################################################### + # NO LINE NUMBER + ########################################################### + anchor = "%(filename)s_%(count_no_lineno)s" % { + 'filename': _safe_id(file_info['filename']), + 'count_no_lineno': count_no_lineno, + } + count_no_lineno += 1 + _html.append('''\t\n''') ########################################################### @@ -453,12 +445,49 @@ return self.adds, self.removes -_escape_re = re.compile(r'(&)|(<)|(>)|(\t)|(\r)|(?<=.)( \n| $)') +_escape_re = re.compile(r'(&)|(<)|(>)|(\t)|(\r)|(?<=.)( \n| $)|(\t\n|\t$)') -def _escaper(string): - """ - Do HTML escaping/markup +def _escaper(diff_line): + r""" + Do HTML escaping/markup of a single diff line (including first +/- column) + + >>> _escaper('foobar') + 'foobar' + >>> _escaper('@foo & bar') + '@foo & bar' + >>> _escaper('+foo < bar') + '+foo < bar' + >>> _escaper('-foo > bar') + '-foo > bar' + >>> _escaper(' ') + ' <foo>' + >>> _escaper(' foo\tbar') + ' foo\tbar' + >>> _escaper(' foo\rbar\r') + ' foobar' + >>> _escaper(' foo\t') + ' foo\t' + >>> _escaper(' foo ') + ' foo ' + >>> _escaper(' foo ') + ' foo ' + >>> _escaper(' ') + ' ' + >>> 
_escaper(' ') + ' ' + >>> _escaper(' \t') + ' \t' + >>> _escaper(' \t ') + ' \t ' + >>> _escaper(' \t') + ' \t' + >>> _escaper(' \t\t ') + ' \t\t ' + >>> _escaper(' \t\t') + ' \t\t' + >>> _escaper(' foo&bar ') + ' foo&bar<baz> ' """ def substitute(m): @@ -470,14 +499,16 @@ if groups[2]: return '>' if groups[3]: - return '\t' + return '\t' # Note: trailing tabs will get a longer match later if groups[4]: return '' if groups[5]: return ' ' + if groups[6]: + return '\t' assert False - return _escape_re.sub(substitute, safe_str(string)) + return _escape_re.sub(substitute, diff_line) _git_header_re = re.compile(br""" @@ -542,7 +573,7 @@ rest = diff_chunk[match.end():] if rest and _header_next_check.match(rest): raise Exception('cannot parse %s diff header: %r followed by %r' % (vcs, safe_str(bytes(diff_chunk[:match.end()])), safe_str(bytes(rest[:1000])))) - diff_lines = (_escaper(m.group(0)) for m in re.finditer(br'.*\n|.+$', rest)) # don't split on \r as str.splitlines do + diff_lines = (_escaper(safe_str(m.group(0))) for m in re.finditer(br'.*\n|.+$', rest)) # don't split on \r as str.splitlines do return meta_info, diff_lines @@ -585,8 +616,8 @@ # skip context only if it's first line if int(gr[0]) > 1: lines.append({ - 'old_lineno': '...', - 'new_lineno': '...', + 'old_lineno': '', + 'new_lineno': '', 'action': 'context', 'line': line, }) @@ -630,8 +661,8 @@ # we need to append to lines, since this is not # counted in the line specs of diff lines.append({ - 'old_lineno': '...', - 'new_lineno': '...', + 'old_lineno': '', + 'new_lineno': '', 'action': 'context', 'line': line, }) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/exceptions.py --- a/kallithea/lib/exceptions.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/exceptions.py Fri Oct 30 23:44:18 2020 +0100 @@ -74,8 +74,5 @@ pass -class HgsubversionImportError(Exception): - pass - class InvalidCloneUriException(Exception): pass diff -r c387989f868f -r 3669e58f3002 kallithea/lib/helpers.py --- a/kallithea/lib/helpers.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/helpers.py Fri Oct 30 23:44:18 2020 +0100 @@ -28,6 +28,7 @@ from beaker.cache import cache_region from pygments import highlight as code_highlight from pygments.formatters.html import HtmlFormatter +from tg import session from tg.i18n import ugettext as _ from webhelpers2.html import HTML, escape, literal from webhelpers2.html.tags import NotGiven, Option, Options, _input, _make_safe_id_component, checkbox, end_form @@ -38,23 +39,25 @@ from webhelpers2.number import format_byte_size from webhelpers2.text import chop_at, truncate, wrap_paragraphs -from kallithea.config.routing import url +import kallithea from kallithea.lib.annotate import annotate_highlight #============================================================================== # PERMS #============================================================================== from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel +from kallithea.lib.diffs import BIN_FILENODE, CHMOD_FILENODE, DEL_FILENODE, MOD_FILENODE, NEW_FILENODE, RENAMED_FILENODE from kallithea.lib.markup_renderer import url_re from kallithea.lib.pygmentsutils import get_custom_lexer -from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict -from kallithea.lib.utils2 import age as _age -from kallithea.lib.utils2 import credentials_filter, safe_bytes, safe_int, safe_str, str2bool, time_to_datetime +from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict, age, asbool, credentials_filter, 
safe_bytes, safe_int, safe_str, time_to_datetime from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError #============================================================================== # SCM FILTERS available via h. #============================================================================== from kallithea.lib.vcs.utils import author_email, author_name +from kallithea.lib.webutils import url +from kallithea.model import db +from kallithea.model.changeset_status import ChangesetStatusModel # mute pyflakes "imported but unused" @@ -72,6 +75,7 @@ assert HasPermissionAny assert HasRepoGroupPermissionLevel assert HasRepoPermissionLevel +assert age assert time_to_datetime assert EmptyChangeset @@ -82,9 +86,8 @@ def canonical_url(*args, **kargs): '''Like url(x, qualified=True), but returns url that not only is qualified but also canonical, as configured in canonical_url''' - from kallithea import CONFIG try: - parts = CONFIG.get('canonical_url', '').split('://', 1) + parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1) kargs['host'] = parts[1] kargs['protocol'] = parts[0] except IndexError: @@ -94,9 +97,8 @@ def canonical_hostname(): '''Return canonical hostname of system''' - from kallithea import CONFIG try: - parts = CONFIG.get('canonical_url', '').split('://', 1) + parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1) return parts[1].split('/', 1)[0] except IndexError: parts = url('home', qualified=True).split('://', 1) @@ -213,12 +215,48 @@ """ Creates a unique ID for filenode based on it's hash of path and revision it's safe to use in urls + """ + return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12]) - :param raw_id: - :param path: - """ + +def get_ignore_whitespace_diff(GET): + """Return true if URL requested whitespace to be ignored""" + return bool(GET.get('ignorews')) + - return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12]) +def ignore_whitespace_link(GET, anchor=None): + """Return snippet with link to current URL with whitespace ignoring toggled""" + params = dict(GET) # ignoring duplicates + if get_ignore_whitespace_diff(GET): + params.pop('ignorews') + title = _("Show whitespace changes") + else: + params['ignorews'] = '1' + title = _("Ignore whitespace changes") + params['anchor'] = anchor + return link_to( + literal(''), + url.current(**params), + title=title, + **{'data-toggle': 'tooltip'}) + + +def get_diff_context_size(GET): + """Return effective context size requested in URL""" + return safe_int(GET.get('context'), default=3) + + +def increase_context_link(GET, anchor=None): + """Return snippet with link to current URL with double context size""" + context = get_diff_context_size(GET) * 2 + params = dict(GET) # ignoring duplicates + params['context'] = str(context) + params['anchor'] = anchor + return link_to( + literal(''), + url.current(**params), + title=_('Increase diff context to %(num)s lines') % {'num': context}, + **{'data-toggle': 'tooltip'}) class _FilesBreadCrumbs(object): @@ -450,7 +488,6 @@ """Manage a message queue in tg.session: return the current message queue after appending the given message, and possibly clearing the queue.""" key = 'flash' - from tg import session if key in session: flash_messages = session[key] else: @@ -502,9 +539,6 @@ return [_Message(category, message) for category, message in _session_flash_messages(clear=True)] -def age(x, y=False): - return _age(x, y) - def 
capitalize(x): return x.capitalize() @@ -524,9 +558,8 @@ :param cs: changeset instance """ - from kallithea import CONFIG - def_len = safe_int(CONFIG.get('show_sha_length', 12)) - show_rev = str2bool(CONFIG.get('show_revision_number', False)) + def_len = safe_int(kallithea.CONFIG.get('show_sha_length', 12)) + show_rev = asbool(kallithea.CONFIG.get('show_revision_number', False)) raw_id = cs.raw_id[:def_len] if show_rev: @@ -567,8 +600,7 @@ - or return None if user not found""" email = author_email(author) if email: - from kallithea.model.db import User - user = User.get_by_email(email) + user = db.User.get_by_email(email) if user is not None: return getattr(user, show_attr) return None @@ -595,9 +627,8 @@ def person(author, show_attr="username"): """Find the user identified by 'author', return one of the users attributes, default to the username attribute, None if there is no user""" - from kallithea.model.db import User # if author is already an instance use it for extraction - if isinstance(author, User): + if isinstance(author, db.User): return getattr(author, show_attr) value = user_attr_or_none(author, show_attr) @@ -609,11 +640,10 @@ def person_by_id(id_, show_attr="username"): - from kallithea.model.db import User # maybe it's an ID ? if str(id_).isdigit() or isinstance(id_, int): id_ = int(id_) - user = User.get(id_) + user = db.User.get(id_) if user is not None: return getattr(user, show_attr) return id_ @@ -791,9 +821,8 @@ return group_name def get_pull_request(): - from kallithea.model.db import PullRequest pull_request_id = action_params - nice_id = PullRequest.make_nice_id(pull_request_id) + nice_id = db.PullRequest.make_nice_id(pull_request_id) deleted = user_log.repository is None if deleted: @@ -932,16 +961,14 @@ else: # if src is empty then there was no gravatar, so we use a font icon html = ("""""" - .format(cls=cls, size=size, src=src)) + .format(cls=cls, size=size)) return literal(html) def gravatar_url(email_address, size=30, default=''): - # doh, we need to re-import those to mock it later - from kallithea.config.routing import url - from kallithea.model.db import User from tg import tmpl_context as c + if not c.visual.use_gravatar: return "" @@ -952,13 +979,12 @@ return default parsed_url = urllib.parse.urlparse(url.current(qualified=True)) - url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \ + return (c.visual.gravatar_url or db.User.DEFAULT_GRAVATAR_URL) \ .replace('{email}', email_address) \ .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \ .replace('{netloc}', parsed_url.netloc) \ .replace('{scheme}', parsed_url.scheme) \ .replace('{size}', str(size)) - return url def changed_tooltip(nodes): @@ -986,8 +1012,6 @@ :param stats: two element list of added/deleted lines of code """ - from kallithea.lib.diffs import NEW_FILENODE, DEL_FILENODE, \ - MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE a, d = stats['added'], stats['deleted'] width = 100 @@ -1088,15 +1112,14 @@ """ def _replace(match_obj): - url = match_obj.group('url') - if url is not None: - return '%(url)s' % {'url': url} + match_url = match_obj.group('url') + if match_url is not None: + return '%(url)s' % {'url': match_url} mention = match_obj.group('mention') if mention is not None: return '%s' % mention hash_ = match_obj.group('hash') if hash_ is not None and repo_name is not None: - from kallithea.config.routing import url # doh, we need to re-import url to mock it later return '%(hash)s' % { 'url': url('changeset_home', repo_name=repo_name, 
revision=hash_), 'hash': hash_, @@ -1173,25 +1196,23 @@ """Urlify issue references according to .ini configuration""" global _urlify_issues_f if _urlify_issues_f is None: - from kallithea import CONFIG - from kallithea.model.db import URL_SEP - assert CONFIG['sqlalchemy.url'] # make sure config has been loaded + assert kallithea.CONFIG['sqlalchemy.url'] # make sure config has been loaded # Build chain of urlify functions, starting with not doing any transformation def tmp_urlify_issues_f(s): return s issue_pat_re = re.compile(r'issue_pat(.*)') - for k in CONFIG: + for k in kallithea.CONFIG: # Find all issue_pat* settings that also have corresponding server_link and prefix configuration m = issue_pat_re.match(k) if m is None: continue suffix = m.group(1) - issue_pat = CONFIG.get(k) - issue_server_link = CONFIG.get('issue_server_link%s' % suffix) - issue_sub = CONFIG.get('issue_sub%s' % suffix) - issue_prefix = CONFIG.get('issue_prefix%s' % suffix) + issue_pat = kallithea.CONFIG.get(k) + issue_server_link = kallithea.CONFIG.get('issue_server_link%s' % suffix) + issue_sub = kallithea.CONFIG.get('issue_sub%s' % suffix) + issue_prefix = kallithea.CONFIG.get('issue_prefix%s' % suffix) if issue_prefix: log.error('found unsupported issue_prefix%s = %r - use issue_sub%s instead', suffix, issue_prefix, suffix) if not issue_pat: @@ -1221,7 +1242,7 @@ log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e)) issue_url = issue_server_link issue_url = issue_url.replace('{repo}', repo_name) - issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1]) + issue_url = issue_url.replace('{repo_name}', repo_name.split(kallithea.URL_SEP)[-1]) # if issue_sub is empty use the matched issue reference verbatim if not issue_sub: issue_text = match_obj.group() @@ -1285,18 +1306,15 @@ def changeset_status(repo, revision): - from kallithea.model.changeset_status import ChangesetStatusModel return ChangesetStatusModel().get_status(repo, revision) def changeset_status_lbl(changeset_status): - from kallithea.model.db import ChangesetStatus - return ChangesetStatus.get_status_lbl(changeset_status) + return db.ChangesetStatus.get_status_lbl(changeset_status) def get_permission_name(key): - from kallithea.model.db import Permission - return dict(Permission.PERMS).get(key) + return dict(db.Permission.PERMS).get(key) def journal_filter_help(): @@ -1327,8 +1345,7 @@ def ip_range(ip_addr): - from kallithea.model.db import UserIpMap - s, e = UserIpMap._get_ip_range(ip_addr) + s, e = db.UserIpMap._get_ip_range(ip_addr) return '%s - %s' % (s, e) @@ -1336,7 +1353,6 @@ def session_csrf_secret_token(): """Return (and create) the current session's CSRF protection token.""" - from tg import session if not session_csrf_secret_name in session: session[session_csrf_secret_name] = str(random.getrandbits(128)) session.save() diff -r c387989f868f -r 3669e58f3002 kallithea/lib/hooks.py --- a/kallithea/lib/hooks.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/hooks.py Fri Oct 30 23:44:18 2020 +0100 @@ -31,12 +31,13 @@ import mercurial.scmutil +import kallithea from kallithea.lib import helpers as h from kallithea.lib.exceptions import UserCreationError from kallithea.lib.utils import action_logger, make_ui from kallithea.lib.utils2 import HookEnvironmentError, ascii_str, get_hook_environment, safe_bytes, safe_str from kallithea.lib.vcs.backends.base import EmptyChangeset -from kallithea.model.db import Repository, User +from kallithea.model import db def 
_get_scm_size(alias, root_path): @@ -90,19 +91,16 @@ """ ex = get_hook_environment() - user = User.get_by_username(ex.username) + user = db.User.get_by_username(ex.username) action = 'pull' action_logger(user, action, ex.repository, ex.ip, commit=True) # extension hook call - from kallithea import EXTENSIONS - callback = getattr(EXTENSIONS, 'PULL_HOOK', None) + callback = getattr(kallithea.EXTENSIONS, 'PULL_HOOK', None) if callable(callback): kw = {} kw.update(ex) callback(**kw) - return 0 - def log_push_action(ui, repo, node, node_last, **kwargs): """ @@ -116,7 +114,6 @@ """ revs = [ascii_str(repo[r].hex()) for r in mercurial.scmutil.revrange(repo, [b'%s:%s' % (node, node_last)])] process_pushed_raw_ids(revs) - return 0 def process_pushed_raw_ids(revs): @@ -136,8 +133,7 @@ ScmModel().mark_for_invalidation(ex.repository) # extension hook call - from kallithea import EXTENSIONS - callback = getattr(EXTENSIONS, 'PUSH_HOOK', None) + callback = getattr(kallithea.EXTENSIONS, 'PUSH_HOOK', None) if callable(callback): kw = {'pushed_revs': revs} kw.update(ex) @@ -167,22 +163,18 @@ 'repo_name' """ - from kallithea import EXTENSIONS - callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None) + callback = getattr(kallithea.EXTENSIONS, 'CREATE_REPO_HOOK', None) if callable(callback): kw = {} kw.update(repository_dict) kw.update({'created_by': created_by}) kw.update(kwargs) - return callback(**kw) - - return 0 + callback(**kw) def check_allowed_create_user(user_dict, created_by, **kwargs): # pre create hooks - from kallithea import EXTENSIONS - callback = getattr(EXTENSIONS, 'PRE_CREATE_USER_HOOK', None) + callback = getattr(kallithea.EXTENSIONS, 'PRE_CREATE_USER_HOOK', None) if callable(callback): allowed, reason = callback(created_by=created_by, **user_dict) if not allowed: @@ -217,14 +209,23 @@ 'emails', """ - from kallithea import EXTENSIONS - callback = getattr(EXTENSIONS, 'CREATE_USER_HOOK', None) + callback = getattr(kallithea.EXTENSIONS, 'CREATE_USER_HOOK', None) if callable(callback): - return callback(created_by=created_by, **user_dict) + callback(created_by=created_by, **user_dict) + + +def log_create_pullrequest(pullrequest_dict, created_by, **kwargs): + """ + Post create pull request hook. + + :param pullrequest_dict: dict dump of pull request object + """ + callback = getattr(kallithea.EXTENSIONS, 'CREATE_PULLREQUEST_HOOK', None) + if callable(callback): + return callback(created_by=created_by, **pullrequest_dict) return 0 - def log_delete_repository(repository_dict, deleted_by, **kwargs): """ Post delete repository Hook. @@ -248,17 +249,14 @@ 'repo_name' """ - from kallithea import EXTENSIONS - callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None) + callback = getattr(kallithea.EXTENSIONS, 'DELETE_REPO_HOOK', None) if callable(callback): kw = {} kw.update(repository_dict) kw.update({'deleted_by': deleted_by, 'deleted_on': time.time()}) kw.update(kwargs) - return callback(**kw) - - return 0 + callback(**kw) def log_delete_user(user_dict, deleted_by, **kwargs): @@ -289,12 +287,9 @@ 'emails', """ - from kallithea import EXTENSIONS - callback = getattr(EXTENSIONS, 'DELETE_USER_HOOK', None) + callback = getattr(kallithea.EXTENSIONS, 'DELETE_USER_HOOK', None) if callable(callback): - return callback(deleted_by=deleted_by, **user_dict) - - return 0 + callback(deleted_by=deleted_by, **user_dict) def _hook_environment(repo_path): @@ -307,20 +302,21 @@ connect to the database. 
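All of the hooks above share the same optional-callback pattern: look the hook up on the rcextensions namespace, call it only if it is callable, and (after this patch) ignore its return value. A self-contained sketch with a stand-in extensions object, not the real kallithea.EXTENSIONS module::

    class _Extensions(object):
        # stand-in for kallithea.EXTENSIONS / rcextensions; only for illustration
        def PUSH_HOOK(self, pushed_revs, **kwargs):
            print('pushed:', pushed_revs)

    EXTENSIONS = _Extensions()
    callback = getattr(EXTENSIONS, 'PUSH_HOOK', None)
    if callable(callback):
        callback(pushed_revs=['abc123'], repository='myrepo')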
""" import paste.deploy - import kallithea.config.middleware + + import kallithea.config.application extras = get_hook_environment() path_to_ini_file = extras['config'] - kallithea.CONFIG = paste.deploy.appconfig('config:' + path_to_ini_file) + config = paste.deploy.appconfig('config:' + path_to_ini_file) #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging - kallithea.config.middleware.make_app(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf) + kallithea.config.application.make_app(config.global_conf, **config.local_conf) # fix if it's not a bare repo if repo_path.endswith(os.sep + '.git'): repo_path = repo_path[:-5] - repo = Repository.get_by_full_path(repo_path) + repo = db.Repository.get_by_full_path(repo_path) if not repo: raise OSError('Repository %s not found in database' % repo_path) @@ -329,13 +325,17 @@ def handle_git_pre_receive(repo_path, git_stdin_lines): - """Called from Git pre-receive hook""" + """Called from Git pre-receive hook. + The returned value is used as hook exit code and must be 0. + """ # Currently unused. TODO: remove? return 0 def handle_git_post_receive(repo_path, git_stdin_lines): - """Called from Git post-receive hook""" + """Called from Git post-receive hook. + The returned value is used as hook exit code and must be 0. + """ try: baseui, repo = _hook_environment(repo_path) except HookEnvironmentError as e: @@ -398,7 +398,9 @@ # Almost exactly like Mercurial contrib/hg-ssh: def rejectpush(ui, **kwargs): - """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos""" + """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos. + Return value 1 will make the hook fail and reject the push. + """ ex = get_hook_environment() ui.warn(safe_bytes("Push access to %r denied\n" % ex.repository)) return 1 diff -r c387989f868f -r 3669e58f3002 kallithea/lib/indexers/daemon.py --- a/kallithea/lib/indexers/daemon.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/indexers/daemon.py Fri Oct 30 23:44:18 2020 +0100 @@ -37,11 +37,11 @@ from whoosh.index import create_in, exists_in, open_dir from whoosh.qparser import QueryParser -from kallithea.config.conf import INDEX_EXTENSIONS, INDEX_FILENAMES +from kallithea.lib.conf import INDEX_EXTENSIONS, INDEX_FILENAMES from kallithea.lib.indexers import CHGSET_IDX_NAME, CHGSETS_SCHEMA, IDX_NAME, SCHEMA from kallithea.lib.utils2 import safe_str from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, NodeDoesNotExistError, RepositoryError -from kallithea.model.db import Repository +from kallithea.model import db from kallithea.model.scm import ScmModel @@ -109,7 +109,7 @@ self.initial = False def _get_index_revision(self, repo): - db_repo = Repository.get_by_repo_name(repo.name) + db_repo = db.Repository.get_by_repo_name(repo.name) landing_rev = 'tip' if db_repo: _rev_type, _rev = db_repo.landing_rev diff -r c387989f868f -r 3669e58f3002 kallithea/lib/inifile.py --- a/kallithea/lib/inifile.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/inifile.py Fri Oct 30 23:44:18 2020 +0100 @@ -32,7 +32,7 @@ template_file = os.path.join( os.path.dirname(os.path.dirname(os.path.dirname(__file__))), - 'kallithea/lib/paster_commands/template.ini.mako') + 'kallithea/templates/ini/template.ini.mako') default_variables = { 'database_engine': 'sqlite', @@ -119,9 +119,6 @@ #variable7 = 7.1 #variable8 = 8.0 - variable8 = None - variable9 = None - [fourth-section] fourth = "four" fourth_extra = 4 @@ -180,7 
+177,7 @@ new_value = section_settings[key] if new_value == line_value: line = line.lstrip('#') - else: + elif new_value is not None: line += '\n%s = %s' % (key, new_value) section_settings.pop(key) return line @@ -189,8 +186,12 @@ # 3rd pass: # settings that haven't been consumed yet at is appended to section - if section_settings: - lines += '\n' + ''.join('%s = %s\n' % (key, value) for key, value in sorted(section_settings.items())) + append_lines = ''.join( + '%s = %s\n' % (key, value) + for key, value in sorted(section_settings.items()) + if value is not None) + if append_lines: + lines += '\n' + append_lines return sectionname + '\n' + re.sub('[ \t]+\n', '\n', lines) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/markup_renderer.py --- a/kallithea/lib/markup_renderer.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/markup_renderer.py Fri Oct 30 23:44:18 2020 +0100 @@ -26,12 +26,15 @@ """ +import hashlib import logging import re import traceback import bleach import markdown as markdown_mod +from docutils.core import publish_parts +from docutils.parsers.rst import directives from kallithea.lib.utils2 import MENTIONS_REGEX, safe_str @@ -74,13 +77,12 @@ :param text: """ - from hashlib import md5 # Extract pre blocks. extractions = {} def pre_extraction_callback(matchobj): - digest = md5(matchobj.group(0)).hexdigest() + digest = hashlib.sha1(matchobj.group(0)).hexdigest() extractions[digest] = matchobj.group(0) return "{gfm-extraction-%s}" % digest pattern = re.compile(r'
<pre>.*?</pre>
', re.MULTILINE | re.DOTALL) @@ -215,8 +217,6 @@ def rst(cls, source, safe=True): source = safe_str(source) try: - from docutils.core import publish_parts - from docutils.parsers.rst import directives docutils_settings = dict([(alias, None) for alias in cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES]) @@ -231,9 +231,6 @@ settings_overrides=docutils_settings) return parts['html_title'] + parts["fragment"] - except ImportError: - log.warning('Install docutils to use this function') - return cls.plain(source) except Exception: log.error(traceback.format_exc()) if safe: diff -r c387989f868f -r 3669e58f3002 kallithea/lib/middleware/__init__.py --- a/kallithea/lib/middleware/__init__.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,13 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . diff -r c387989f868f -r 3669e58f3002 kallithea/lib/middleware/appenlight.py --- a/kallithea/lib/middleware/appenlight.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -kallithea.lib.middleware.appenlight -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -middleware to handle appenlight publishing of errors - -This file was forked by the Kallithea project in July 2014. -Original author and date, and relevant copyright and licensing information is below: -:created_on: October 18, 2012 -:author: marcink -:copyright: (c) 2013 RhodeCode GmbH, and others. -:license: GPLv3, see LICENSE.md for more details. -""" - - -try: - from appenlight_client import make_appenlight_middleware -except ImportError: - AppEnlight = None -else: - AppEnlight = make_appenlight_middleware diff -r c387989f868f -r 3669e58f3002 kallithea/lib/middleware/https_fixup.py --- a/kallithea/lib/middleware/https_fixup.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
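The digest-placeholder trick used in the Markdown renderer above can be shown in isolation. The following is a simplified sketch, not part of this changeset (the real code works on the renderer's text and uses pre_extraction_callback as above); pre blocks are swapped for a sha1-keyed token before further processing and can be restored afterwards::

    import hashlib
    import re

    def protect_pre_blocks(text):
        """Replace <pre>...</pre> spans with a sha1-keyed placeholder."""
        extractions = {}

        def pre_extraction_callback(matchobj):
            # hashlib needs bytes, so encode the matched span before hashing
            digest = hashlib.sha1(matchobj.group(0).encode('utf-8')).hexdigest()
            extractions[digest] = matchobj.group(0)
            return "{gfm-extraction-%s}" % digest

        pattern = re.compile(r'<pre>.*?</pre>', re.MULTILINE | re.DOTALL)
        return pattern.sub(pre_extraction_callback, text), extractions

    protected, saved = protect_pre_blocks('x <pre>a_b_c</pre> y')
    print(protected)
    # 'x {gfm-extraction-<digest>} y' - the pre block is kept safe in 'saved'
    # while the rest of the text is processed, and can be re-inserted later.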
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -kallithea.lib.middleware.https_fixup -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -middleware to handle https correctly - -This file was forked by the Kallithea project in July 2014. -Original author and date, and relevant copyright and licensing information is below: -:created_on: May 23, 2010 -:author: marcink -:copyright: (c) 2013 RhodeCode GmbH, and others. -:license: GPLv3, see LICENSE.md for more details. -""" - - -from kallithea.lib.utils2 import str2bool - - -class HttpsFixup(object): - - def __init__(self, app, config): - self.application = app - self.config = config - - def __call__(self, environ, start_response): - self.__fixup(environ) - debug = str2bool(self.config.get('debug')) - is_ssl = environ['wsgi.url_scheme'] == 'https' - - def custom_start_response(status, headers, exc_info=None): - if is_ssl and str2bool(self.config.get('use_htsts')) and not debug: - headers.append(('Strict-Transport-Security', - 'max-age=8640000; includeSubDomains')) - return start_response(status, headers, exc_info) - - return self.application(environ, custom_start_response) - - def __fixup(self, environ): - """ - Function to fixup the environ as needed. In order to use this - middleware you should set this header inside your - proxy ie. nginx, apache etc. - """ - # DETECT PROTOCOL ! - if 'HTTP_X_URL_SCHEME' in environ: - proto = environ.get('HTTP_X_URL_SCHEME') - elif 'HTTP_X_FORWARDED_SCHEME' in environ: - proto = environ.get('HTTP_X_FORWARDED_SCHEME') - elif 'HTTP_X_FORWARDED_PROTO' in environ: - proto = environ.get('HTTP_X_FORWARDED_PROTO') - else: - proto = 'http' - org_proto = proto - - # if we have force, just override - if str2bool(self.config.get('force_https')): - proto = 'https' - - environ['wsgi.url_scheme'] = proto - environ['wsgi._org_proto'] = org_proto diff -r c387989f868f -r 3669e58f3002 kallithea/lib/middleware/permanent_repo_url.py --- a/kallithea/lib/middleware/permanent_repo_url.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -kallithea.lib.middleware.permanent_repo_url -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -middleware to handle permanent repo URLs, replacing PATH_INFO '/_123/yada' with -'/name/of/repo/yada' after looking 123 up in the database. 
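The PATH_INFO rewrite described above is easier to follow with concrete values. Below is a toy illustration, not Kallithea code, where a plain dict stands in for the database lookup performed by fix_repo_id_name::

    # hypothetical id-to-name mapping standing in for the Repository table
    repo_id_to_name = {'123': 'name/of/repo'}

    def fix_repo_id_name(path):
        first, slash, rest = path.partition('/')
        if first.startswith('_') and first[1:] in repo_id_to_name:
            return repo_id_to_name[first[1:]] + slash + rest
        return path

    print('/' + fix_repo_id_name('_123/yada'))           # /name/of/repo/yada
    print('/' + fix_repo_id_name('name/of/repo/yada'))   # unchanged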
-""" - - -from kallithea.lib.utils import fix_repo_id_name -from kallithea.lib.utils2 import safe_bytes, safe_str - - -class PermanentRepoUrl(object): - - def __init__(self, app, config): - self.application = app - self.config = config - - def __call__(self, environ, start_response): - # Extract path_info as get_path_info does, but do it explicitly because - # we also have to do the reverse operation when patching it back in - path_info = safe_str(environ['PATH_INFO'].encode('latin1')) - if path_info.startswith('/'): # it must - path_info = '/' + fix_repo_id_name(path_info[1:]) - environ['PATH_INFO'] = safe_bytes(path_info).decode('latin1') - - return self.application(environ, start_response) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/middleware/pygrack.py --- a/kallithea/lib/middleware/pygrack.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,228 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -kallithea.lib.middleware.pygrack -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Python implementation of git-http-backend's Smart HTTP protocol - -Based on original code from git_http_backend.py project. - -Copyright (c) 2010 Daniel Dotsenko -Copyright (c) 2012 Marcin Kuzminski - -This file was forked by the Kallithea project in July 2014. 
-""" - -import logging -import os -import socket -import traceback - -from webob import Request, Response, exc - -import kallithea -from kallithea.lib.utils2 import ascii_bytes -from kallithea.lib.vcs import subprocessio - - -log = logging.getLogger(__name__) - - -class FileWrapper(object): - - def __init__(self, fd, content_length): - self.fd = fd - self.content_length = content_length - self.remain = content_length - - def read(self, size): - if size <= self.remain: - try: - data = self.fd.read(size) - except socket.error: - raise IOError(self) - self.remain -= size - elif self.remain: - data = self.fd.read(self.remain) - self.remain = 0 - else: - data = None - return data - - def __repr__(self): - return '' % ( - self.fd, self.content_length, self.content_length - self.remain - ) - - -class GitRepository(object): - git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs']) - commands = ['git-upload-pack', 'git-receive-pack'] - - def __init__(self, repo_name, content_path): - files = set([f.lower() for f in os.listdir(content_path)]) - if not (self.git_folder_signature.intersection(files) - == self.git_folder_signature): - raise OSError('%s missing git signature' % content_path) - self.content_path = content_path - self.valid_accepts = ['application/x-%s-result' % - c for c in self.commands] - self.repo_name = repo_name - - def _get_fixedpath(self, path): - """ - Small fix for repo_path - - :param path: - """ - assert path.startswith('/' + self.repo_name + '/') - return path[len(self.repo_name) + 2:].strip('/') - - def inforefs(self, req, environ): - """ - WSGI Response producer for HTTP GET Git Smart - HTTP /info/refs request. - """ - - git_command = req.GET.get('service') - if git_command not in self.commands: - log.debug('command %s not allowed', git_command) - return exc.HTTPMethodNotAllowed() - - # From Documentation/technical/http-protocol.txt shipped with Git: - # - # Clients MUST verify the first pkt-line is `# service=$servicename`. - # Servers MUST set $servicename to be the request parameter value. - # Servers SHOULD include an LF at the end of this line. - # Clients MUST ignore an LF at the end of the line. - # - # smart_reply = PKT-LINE("# service=$servicename" LF) - # ref_list - # "0000" - server_advert = '# service=%s\n' % git_command - packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower() - _git_path = kallithea.CONFIG.get('git_path', 'git') - cmd = [_git_path, git_command[4:], - '--stateless-rpc', '--advertise-refs', self.content_path] - log.debug('handling cmd %s', cmd) - try: - out = subprocessio.SubprocessIOChunker(cmd, - starting_values=[ascii_bytes(packet_len + server_advert + '0000')] - ) - except EnvironmentError as e: - log.error(traceback.format_exc()) - raise exc.HTTPExpectationFailed() - resp = Response() - resp.content_type = 'application/x-%s-advertisement' % git_command - resp.charset = None - resp.app_iter = out - return resp - - def backend(self, req, environ): - """ - WSGI Response producer for HTTP POST Git Smart HTTP requests. - Reads commands and data from HTTP POST's body. 
- returns an iterator obj with contents of git command's - response to stdout - """ - _git_path = kallithea.CONFIG.get('git_path', 'git') - git_command = self._get_fixedpath(req.path_info) - if git_command not in self.commands: - log.debug('command %s not allowed', git_command) - return exc.HTTPMethodNotAllowed() - - if 'CONTENT_LENGTH' in environ: - inputstream = FileWrapper(environ['wsgi.input'], - req.content_length) - else: - inputstream = environ['wsgi.input'] - - gitenv = dict(os.environ) - # forget all configs - gitenv['GIT_CONFIG_NOGLOBAL'] = '1' - cmd = [_git_path, git_command[4:], '--stateless-rpc', self.content_path] - log.debug('handling cmd %s', cmd) - try: - out = subprocessio.SubprocessIOChunker( - cmd, - inputstream=inputstream, - env=gitenv, - cwd=self.content_path, - ) - except EnvironmentError as e: - log.error(traceback.format_exc()) - raise exc.HTTPExpectationFailed() - - if git_command in ['git-receive-pack']: - # updating refs manually after each push. - # Needed for pre-1.7.0.4 git clients using regular HTTP mode. - from kallithea.lib.vcs import get_repo - from dulwich.server import update_server_info - repo = get_repo(self.content_path) - if repo: - update_server_info(repo._repo) - - resp = Response() - resp.content_type = 'application/x-%s-result' % git_command - resp.charset = None - resp.app_iter = out - return resp - - def __call__(self, environ, start_response): - req = Request(environ) - _path = self._get_fixedpath(req.path_info) - if _path.startswith('info/refs'): - app = self.inforefs - elif req.accept.acceptable_offers(self.valid_accepts): - app = self.backend - try: - resp = app(req, environ) - except exc.HTTPException as e: - resp = e - log.error(traceback.format_exc()) - except Exception as e: - log.error(traceback.format_exc()) - resp = exc.HTTPInternalServerError() - return resp(environ, start_response) - - -class GitDirectory(object): - - def __init__(self, repo_root, repo_name): - repo_location = os.path.join(repo_root, repo_name) - if not os.path.isdir(repo_location): - raise OSError(repo_location) - - self.content_path = repo_location - self.repo_name = repo_name - self.repo_location = repo_location - - def __call__(self, environ, start_response): - content_path = self.content_path - try: - app = GitRepository(self.repo_name, content_path) - except (AssertionError, OSError): - content_path = os.path.join(content_path, '.git') - if os.path.isdir(content_path): - app = GitRepository(self.repo_name, content_path) - else: - return exc.HTTPNotFound()(environ, start_response) - return app(environ, start_response) - - -def make_wsgi_app(repo_name, repo_root): - from dulwich.web import LimitedInputFilter, GunzipFilter - app = GitDirectory(repo_root, repo_name) - return GunzipFilter(LimitedInputFilter(app)) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/middleware/sentry.py --- a/kallithea/lib/middleware/sentry.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -kallithea.lib.middleware.sentry -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -middleware to handle sentry/raven publishing of errors - -This file was forked by the Kallithea project in July 2014. -Original author and date, and relevant copyright and licensing information is below: -:created_on: September 18, 2012 -:author: marcink -:copyright: (c) 2013 RhodeCode GmbH, and others. -:license: GPLv3, see LICENSE.md for more details. -""" - - -try: - from raven.base import Client - from raven.contrib.pylons import list_from_setting - from raven.middleware import Sentry as Middleware -except ImportError: - Sentry = None -else: - class Sentry(Middleware): - def __init__(self, app, config, client_cls=Client): - client = client_cls( - dsn=config.get('sentry.dsn'), - servers=list_from_setting(config, 'sentry.servers'), - name=config.get('sentry.name'), - key=config.get('sentry.key'), - public_key=config.get('sentry.public_key'), - secret_key=config.get('sentry.secret_key'), - project=config.get('sentry.project'), - site=config.get('sentry.site'), - include_paths=list_from_setting(config, 'sentry.include_paths'), - exclude_paths=list_from_setting(config, 'sentry.exclude_paths'), - ) - super(Sentry, self).__init__(app, client) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/middleware/simplegit.py --- a/kallithea/lib/middleware/simplegit.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -kallithea.lib.middleware.simplegit -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -SimpleGit middleware for handling Git protocol requests (push/clone etc.) -It's implemented with basic auth function - -This file was forked by the Kallithea project in July 2014. -Original author and date, and relevant copyright and licensing information is below: -:created_on: Apr 28, 2010 -:author: marcink -:copyright: (c) 2013 RhodeCode GmbH, and others. -:license: GPLv3, see LICENSE.md for more details. 
- -""" - - -import logging -import re - -from kallithea.lib.base import BaseVCSController, get_path_info -from kallithea.lib.hooks import log_pull_action -from kallithea.lib.middleware.pygrack import make_wsgi_app -from kallithea.lib.utils import make_ui -from kallithea.model.db import Repository - - -log = logging.getLogger(__name__) - - -GIT_PROTO_PAT = re.compile(r'^/(.+)/(info/refs|git-upload-pack|git-receive-pack)$') - - -cmd_mapping = { - 'git-receive-pack': 'push', - 'git-upload-pack': 'pull', -} - - -class SimpleGit(BaseVCSController): - - scm_alias = 'git' - - @classmethod - def parse_request(cls, environ): - path_info = get_path_info(environ) - m = GIT_PROTO_PAT.match(path_info) - if m is None: - return None - - class parsed_request(object): - # See https://git-scm.com/book/en/v2/Git-Internals-Transfer-Protocols#_the_smart_protocol - repo_name = m.group(1).rstrip('/') - cmd = m.group(2) - - query_string = environ['QUERY_STRING'] - if cmd == 'info/refs' and query_string.startswith('service='): - service = query_string.split('=', 1)[1] - action = cmd_mapping.get(service) - else: - service = None - action = cmd_mapping.get(cmd) - - return parsed_request - - def _make_app(self, parsed_request): - """ - Return a pygrack wsgi application. - """ - pygrack_app = make_wsgi_app(parsed_request.repo_name, self.basepath) - - def wrapper_app(environ, start_response): - if (parsed_request.cmd == 'info/refs' and - parsed_request.service == 'git-upload-pack' - ): - baseui = make_ui() - repo = Repository.get_by_repo_name(parsed_request.repo_name) - scm_repo = repo.scm_instance - # Run hooks, like Mercurial outgoing.pull_logger does - log_pull_action(ui=baseui, repo=scm_repo._repo) - # Note: push hooks are handled by post-receive hook - - return pygrack_app(environ, start_response) - - return wrapper_app diff -r c387989f868f -r 3669e58f3002 kallithea/lib/middleware/simplehg.py --- a/kallithea/lib/middleware/simplehg.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -kallithea.lib.middleware.simplehg -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -SimpleHg middleware for handling Mercurial protocol requests (push/clone etc.). -It's implemented with basic auth function - -This file was forked by the Kallithea project in July 2014. -Original author and date, and relevant copyright and licensing information is below: -:created_on: Apr 28, 2010 -:author: marcink -:copyright: (c) 2013 RhodeCode GmbH, and others. -:license: GPLv3, see LICENSE.md for more details. 
- -""" - - -import logging -import os -import urllib.parse - -import mercurial.hgweb - -from kallithea.lib.base import BaseVCSController, get_path_info -from kallithea.lib.utils import make_ui -from kallithea.lib.utils2 import safe_bytes - - -log = logging.getLogger(__name__) - - -def get_header_hgarg(environ): - """Decode the special Mercurial encoding of big requests over multiple headers. - >>> get_header_hgarg({}) - '' - >>> get_header_hgarg({'HTTP_X_HGARG_0': ' ', 'HTTP_X_HGARG_1': 'a','HTTP_X_HGARG_2': '','HTTP_X_HGARG_3': 'b+c %20'}) - 'ab+c %20' - """ - chunks = [] - i = 1 - while True: - v = environ.get('HTTP_X_HGARG_%d' % i) - if v is None: - break - chunks.append(v) - i += 1 - return ''.join(chunks) - - -cmd_mapping = { - # 'batch' is not in this list - it is handled explicitly - 'between': 'pull', - 'branches': 'pull', - 'branchmap': 'pull', - 'capabilities': 'pull', - 'changegroup': 'pull', - 'changegroupsubset': 'pull', - 'changesetdata': 'pull', - 'clonebundles': 'pull', - 'debugwireargs': 'pull', - 'filedata': 'pull', - 'getbundle': 'pull', - 'getlfile': 'pull', - 'heads': 'pull', - 'hello': 'pull', - 'known': 'pull', - 'lheads': 'pull', - 'listkeys': 'pull', - 'lookup': 'pull', - 'manifestdata': 'pull', - 'narrow_widen': 'pull', - 'protocaps': 'pull', - 'statlfile': 'pull', - 'stream_out': 'pull', - 'pushkey': 'push', - 'putlfile': 'push', - 'unbundle': 'push', - } - - -class SimpleHg(BaseVCSController): - - scm_alias = 'hg' - - @classmethod - def parse_request(cls, environ): - http_accept = environ.get('HTTP_ACCEPT', '') - if not http_accept.startswith('application/mercurial'): - return None - path_info = get_path_info(environ) - if not path_info.startswith('/'): # it must! - return None - - class parsed_request(object): - repo_name = path_info[1:].rstrip('/') - - query_string = environ['QUERY_STRING'] - - action = None - for qry in query_string.split('&'): - parts = qry.split('=', 1) - if len(parts) == 2 and parts[0] == 'cmd': - cmd = parts[1] - if cmd == 'batch': - hgarg = get_header_hgarg(environ) - if not hgarg.startswith('cmds='): - action = 'push' # paranoid and safe - break - action = 'pull' - for cmd_arg in hgarg[5:].split(';'): - cmd, _args = urllib.parse.unquote_plus(cmd_arg).split(' ', 1) - op = cmd_mapping.get(cmd, 'push') - if op != 'pull': - assert op == 'push' - action = 'push' - break - else: - action = cmd_mapping.get(cmd, 'push') - break # only process one cmd - - return parsed_request - - def _make_app(self, parsed_request): - """ - Make an hgweb wsgi application. - """ - repo_name = parsed_request.repo_name - repo_path = os.path.join(self.basepath, repo_name) - baseui = make_ui(repo_path=repo_path) - hgweb_app = mercurial.hgweb.hgweb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui) - - def wrapper_app(environ, start_response): - environ['REPO_NAME'] = repo_name # used by mercurial.hgweb.hgweb - return hgweb_app(environ, start_response) - - return wrapper_app diff -r c387989f868f -r 3669e58f3002 kallithea/lib/middleware/wrapper.py --- a/kallithea/lib/middleware/wrapper.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -kallithea.lib.middleware.wrapper -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Wrap app to measure request and response time ... all the way to the response -WSGI iterator has been closed. - -This file was forked by the Kallithea project in July 2014. -Original author and date, and relevant copyright and licensing information is below: -:created_on: May 23, 2013 -:author: marcink -:copyright: (c) 2013 RhodeCode GmbH, and others. -:license: GPLv3, see LICENSE.md for more details. -""" - -import logging -import time - -from kallithea.lib.base import _get_ip_addr, get_path_info - - -log = logging.getLogger(__name__) - - -class Meter: - - def __init__(self, start_response): - self._start_response = start_response - self._start = time.time() - self.status = None - self._size = 0 - - def duration(self): - return time.time() - self._start - - def start_response(self, status, response_headers, exc_info=None): - self.status = status - write = self._start_response(status, response_headers, exc_info) - def metered_write(s): - self.measure(s) - write(s) - return metered_write - - def measure(self, chunk): - self._size += len(chunk) - - def size(self): - return self._size - - -class ResultIter: - - def __init__(self, result, meter, description): - self._result_close = getattr(result, 'close', None) or (lambda: None) - self._next = iter(result).__next__ - self._meter = meter - self._description = description - - def __iter__(self): - return self - - def __next__(self): - chunk = self._next() - self._meter.measure(chunk) - return chunk - - def close(self): - self._result_close() - log.info("%s responded %r after %.3fs with %s bytes", self._description, self._meter.status, self._meter.duration(), self._meter.size()) - - -class RequestWrapper(object): - - def __init__(self, app, config): - self.application = app - self.config = config - - def __call__(self, environ, start_response): - meter = Meter(start_response) - description = "Request from %s for %s" % ( - _get_ip_addr(environ), - get_path_info(environ), - ) - log.info("%s received", description) - try: - result = self.application(environ, meter.start_response) - finally: - log.info("%s responding %r after %.3fs", description, meter.status, meter.duration()) - return ResultIter(result, meter, description) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/page.py --- a/kallithea/lib/page.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/page.py Fri Oct 30 23:44:18 2020 +0100 @@ -21,7 +21,7 @@ import sqlalchemy.orm from webhelpers2.html import literal -from kallithea.config.routing import url +from kallithea.lib.webutils import url log = logging.getLogger(__name__) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/paster_commands/template.ini.mako --- a/kallithea/lib/paster_commands/template.ini.mako Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,617 +0,0 @@ -## -*- coding: utf-8 -*- -<%text>################################################################################### -<%text>################################################################################### -<%text>## Kallithea config file generated with kallithea-cli ${'%-27s' % version }## -<%text>## ## 
-<%text>## The %(here)s variable will generally be replaced with the parent directory of ## -<%text>## this file. Other use of % must be escaped as %% . ## -<%text>################################################################################### -<%text>################################################################################### - -[DEFAULT] - -<%text>################################################################################ -<%text>## Email settings ## -<%text>## ## -<%text>## Refer to the documentation ("Email settings") for more details. ## -<%text>## ## -<%text>## It is recommended to use a valid sender address that passes access ## -<%text>## validation and spam filtering in mail servers. ## -<%text>################################################################################ - -<%text>## 'From' header for application emails. You can optionally add a name. -<%text>## Default: -#app_email_from = Kallithea -<%text>## Examples: -#app_email_from = Kallithea -#app_email_from = kallithea-noreply@example.com - -<%text>## Subject prefix for application emails. -<%text>## A space between this prefix and the real subject is automatically added. -<%text>## Default: -#email_prefix = -<%text>## Example: -#email_prefix = [Kallithea] - -<%text>## Recipients for error emails and fallback recipients of application mails. -<%text>## Multiple addresses can be specified, comma-separated. -<%text>## Only addresses are allowed, do not add any name part. -<%text>## Default: -#email_to = -<%text>## Examples: -#email_to = admin@example.com -#email_to = admin@example.com,another_admin@example.com -email_to = - -<%text>## 'From' header for error emails. You can optionally add a name. -<%text>## Default: (none) -<%text>## Examples: -#error_email_from = Kallithea Errors -#error_email_from = kallithea_errors@example.com -error_email_from = - -<%text>## SMTP server settings -<%text>## If specifying credentials, make sure to use secure connections. -<%text>## Default: Send unencrypted unauthenticated mails to the specified smtp_server. -<%text>## For "SSL", use smtp_use_ssl = true and smtp_port = 465. -<%text>## For "STARTTLS", use smtp_use_tls = true and smtp_port = 587. -smtp_server = -smtp_username = -smtp_password = -smtp_port = -smtp_use_ssl = false -smtp_use_tls = false - -%if http_server != 'uwsgi': -<%text>## Entry point for 'gearbox serve' -[server:main] -host = ${host} -port = ${port} - -%if http_server == 'gearbox': -<%text>## Gearbox default web server ## -use = egg:gearbox#wsgiref -<%text>## nr of worker threads to spawn -threadpool_workers = 1 -<%text>## max request before thread respawn -threadpool_max_requests = 100 -<%text>## option to use threads of process -use_threadpool = true - -%elif http_server == 'gevent': -<%text>## Gearbox gevent web server ## -use = egg:gearbox#gevent - -%elif http_server == 'waitress': -<%text>## WAITRESS ## -use = egg:waitress#main -<%text>## number of worker threads -threads = 1 -<%text>## MAX BODY SIZE 100GB -max_request_body_size = 107374182400 -<%text>## use poll instead of select, fixes fd limits, may not work on old -<%text>## windows systems. -#asyncore_use_poll = True - -%elif http_server == 'gunicorn': -<%text>## GUNICORN ## -use = egg:gunicorn#main -<%text>## number of process workers. 
You must set `instance_id = *` when this option -<%text>## is set to more than one worker -workers = 4 -<%text>## process name -proc_name = kallithea -<%text>## type of worker class, one of sync, eventlet, gevent, tornado -<%text>## recommended for bigger setup is using of of other than sync one -worker_class = sync -max_requests = 1000 -<%text>## amount of time a worker can handle request before it gets killed and -<%text>## restarted -timeout = 3600 - -%endif -%else: -<%text>## UWSGI ## -[uwsgi] -<%text>## Note: this section is parsed by the uWSGI .ini parser when run as: -<%text>## uwsgi --venv /srv/kallithea/venv --ini-paste-logged my.ini -<%text>## Note: in uWSGI 2.0.18 or older, pastescript needs to be installed to -<%text>## get correct application logging. In later versions this is not necessary. -<%text>## pip install pastescript - -<%text>## HTTP Basics: -http-socket = ${host}:${port} -buffer-size = 65535 ; Mercurial will use huge GET headers for discovery - -<%text>## Scaling: -master = true ; Use separate master and worker processes -auto-procname = true ; Name worker processes accordingly -lazy = true ; App *must* be loaded in workers - db connections can't be shared -workers = 4 ; On demand scaling up to this many worker processes -cheaper = 1 ; Initial and on demand scaling down to this many worker processes -max-requests = 1000 ; Graceful reload of worker processes to avoid leaks - -<%text>## Tweak defaults: -strict = true ; Fail on unknown config directives -enable-threads = true ; Enable Python threads (not threaded workers) -vacuum = true ; Delete sockets during shutdown -single-interpreter = true -die-on-term = true ; Shutdown when receiving SIGTERM (default is respawn) -need-app = true ; Exit early if no app can be loaded. -reload-on-exception = true ; Don't assume that the application worker can process more requests after a severe error - -%endif -<%text>## middleware for hosting the WSGI application under a URL prefix -#[filter:proxy-prefix] -#use = egg:PasteDeploy#prefix -#prefix = / - -[app:main] -use = egg:kallithea -<%text>## enable proxy prefix middleware -#filter-with = proxy-prefix - -full_stack = true -static_files = true - -<%text>## Internationalization (see setup documentation for details) -<%text>## By default, the languages requested by the browser are used if available, with English as default. -<%text>## Set i18n.enabled=false to disable automatic language choice. -#i18n.enabled = true -<%text>## To Force a language, set i18n.enabled=false and specify the language in i18n.lang. -<%text>## Valid values are the names of subdirectories in kallithea/i18n with a LC_MESSAGES/kallithea.mo -#i18n.lang = en - -cache_dir = %(here)s/data -index_dir = %(here)s/data/index - -<%text>## uncomment and set this path to use archive download cache -archive_cache_dir = %(here)s/tarballcache - -<%text>## change this to unique ID for security -app_instance_uuid = ${uuid()} - -<%text>## cut off limit for large diffs (size in bytes) -cut_off_limit = 256000 - -<%text>## force https in Kallithea, fixes https redirects, assumes it's always https -force_https = false - -<%text>## use Strict-Transport-Security headers -use_htsts = false - -<%text>## number of commits stats will parse on each iteration -commit_parse_limit = 25 - -<%text>## Path to Python executable to be used for git hooks. -<%text>## This value will be written inside the git hook scripts as the text -<%text>## after '#!' (shebang). 
When empty or not defined, the value of -<%text>## 'sys.executable' at the time of installation of the git hooks is -<%text>## used, which is correct in many cases but for example not when using uwsgi. -<%text>## If you change this setting, you should reinstall the Git hooks via -<%text>## Admin > Settings > Remap and Rescan. -#git_hook_interpreter = /srv/kallithea/venv/bin/python3 -%if git_hook_interpreter: -git_hook_interpreter = ${git_hook_interpreter} -%endif - -<%text>## path to git executable -git_path = git - -<%text>## git rev filter option, --all is the default filter, if you need to -<%text>## hide all refs in changelog switch this to --branches --tags -#git_rev_filter = --branches --tags - -<%text>## RSS feed options -rss_cut_off_limit = 256000 -rss_items_per_page = 10 -rss_include_diff = false - -<%text>## options for showing and identifying changesets -show_sha_length = 12 -show_revision_number = false - -<%text>## Canonical URL to use when creating full URLs in UI and texts. -<%text>## Useful when the site is available under different names or protocols. -<%text>## Defaults to what is provided in the WSGI environment. -#canonical_url = https://kallithea.example.com/repos - -<%text>## gist URL alias, used to create nicer urls for gist. This should be an -<%text>## url that does rewrites to _admin/gists/. -<%text>## example: http://gist.example.com/{gistid}. Empty means use the internal -<%text>## Kallithea url, ie. http[s]://kallithea.example.com/_admin/gists/ -gist_alias_url = - -<%text>## default encoding used to convert from and to unicode -<%text>## can be also a comma separated list of encoding in case of mixed encodings -default_encoding = utf-8 - -<%text>## Set Mercurial encoding, similar to setting HGENCODING before launching Kallithea -hgencoding = utf-8 - -<%text>## issue tracker for Kallithea (leave blank to disable, absent for default) -#bugtracker = https://bitbucket.org/conservancy/kallithea/issues - -<%text>## issue tracking mapping for commit messages, comments, PR descriptions, ... -<%text>## Refer to the documentation ("Integration with issue trackers") for more details. - -<%text>## regular expression to match issue references -<%text>## This pattern may/should contain parenthesized groups, that can -<%text>## be referred to in issue_server_link or issue_sub using Python backreferences -<%text>## (e.g. \1, \2, ...). You can also create named groups with '(?P)'. -<%text>## To require mandatory whitespace before the issue pattern, use: -<%text>## (?:^|(?<=\s)) before the actual pattern, and for mandatory whitespace -<%text>## behind the issue pattern, use (?:$|(?=\s)) after the actual pattern. - -issue_pat = #(\d+) - -<%text>## server url to the issue -<%text>## This pattern may/should contain backreferences to parenthesized groups in issue_pat. -<%text>## A backreference can be \1, \2, ... or \g if you specified a named group -<%text>## called 'groupname' in issue_pat. -<%text>## The special token {repo} is replaced with the full repository name -<%text>## including repository groups, while {repo_name} is replaced with just -<%text>## the name of the repository. - -issue_server_link = https://issues.example.com/{repo}/issue/\1 - -<%text>## substitution pattern to use as the link text -<%text>## If issue_sub is empty, the text matched by issue_pat is retained verbatim -<%text>## for the link text. Otherwise, the link text is that of issue_sub, with any -<%text>## backreferences to groups in issue_pat replaced. 
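How issue_pat, issue_server_link and issue_sub combine can be shown with plain Python re, since the backreference semantics are the standard ones. A rough sketch using the example settings shown here; this is not Kallithea's actual rendering code::

    import re

    issue_pat = r'#(\d+)'
    issue_server_link = r'https://issues.example.com/{repo}/issue/\1'
    issue_sub = ''   # empty: keep the matched text as the link text

    def link_issues(text, repo='group/repo'):
        def repl(m):
            url = m.expand(issue_server_link).replace('{repo}', repo)
            label = m.expand(issue_sub) if issue_sub else m.group(0)
            return '%s <%s>' % (label, url)
        return re.sub(issue_pat, repl, text)

    print(link_issues('fixes #42'))
    # fixes #42 <https://issues.example.com/group/repo/issue/42>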
- -issue_sub = - -<%text>## issue_pat, issue_server_link and issue_sub can have suffixes to specify -<%text>## multiple patterns, to other issues server, wiki or others -<%text>## below an example how to create a wiki pattern -<%text>## wiki-some-id -> https://wiki.example.com/some-id - -#issue_pat_wiki = wiki-(\S+) -#issue_server_link_wiki = https://wiki.example.com/\1 -#issue_sub_wiki = WIKI-\1 - -<%text>## alternative return HTTP header for failed authentication. Default HTTP -<%text>## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble with -<%text>## handling that. Set this variable to 403 to return HTTPForbidden -auth_ret_code = - -<%text>## allows to change the repository location in settings page -allow_repo_location_change = True - -<%text>## allows to setup custom hooks in settings page -allow_custom_hooks_settings = True - -<%text>## extra extensions for indexing, space separated and without the leading '.'. -#index.extensions = -# gemfile -# lock - -<%text>## extra filenames for indexing, space separated -#index.filenames = -# .dockerignore -# .editorconfig -# INSTALL -# CHANGELOG - -<%text>#################################### -<%text>## SSH CONFIG ## -<%text>#################################### - -<%text>## SSH is disabled by default, until an Administrator decides to enable it. -ssh_enabled = false - -<%text>## File where users' SSH keys will be stored *if* ssh_enabled is true. -#ssh_authorized_keys = /home/kallithea/.ssh/authorized_keys -%if user_home_path: -ssh_authorized_keys = ${user_home_path}/.ssh/authorized_keys -%endif - -<%text>## Path to be used in ssh_authorized_keys file to invoke kallithea-cli with ssh-serve. -#kallithea_cli_path = /srv/kallithea/venv/bin/kallithea-cli -%if kallithea_cli_path: -kallithea_cli_path = ${kallithea_cli_path} -%endif - -<%text>## Locale to be used in the ssh-serve command. -<%text>## This is needed because an SSH client may try to use its own locale -<%text>## settings, which may not be available on the server. -<%text>## See `locale -a` for valid values on this system. -#ssh_locale = C.UTF-8 -%if ssh_locale: -ssh_locale = ${ssh_locale} -%endif - -<%text>#################################### -<%text>## CELERY CONFIG ## -<%text>#################################### - -<%text>## Note: Celery doesn't support Windows. -use_celery = false - -<%text>## Celery config settings from https://docs.celeryproject.org/en/4.4.0/userguide/configuration.html prefixed with 'celery.'. - -<%text>## Example: use the message queue on the local virtual host 'kallitheavhost' as the RabbitMQ user 'kallithea': -celery.broker_url = amqp://kallithea:thepassword@localhost:5672/kallitheavhost - -celery.result.backend = db+sqlite:///celery-results.db - -#celery.amqp.task.result.expires = 18000 - -celery.worker_concurrency = 2 -celery.worker_max_tasks_per_child = 1 - -<%text>## If true, tasks will never be sent to the queue, but executed locally instead. 
-celery.task_always_eager = false - -<%text>#################################### -<%text>## BEAKER CACHE ## -<%text>#################################### - -beaker.cache.data_dir = %(here)s/data/cache/data -beaker.cache.lock_dir = %(here)s/data/cache/lock - -beaker.cache.regions = long_term,long_term_file - -beaker.cache.long_term.type = memory -beaker.cache.long_term.expire = 36000 -beaker.cache.long_term.key_length = 256 - -beaker.cache.long_term_file.type = file -beaker.cache.long_term_file.expire = 604800 -beaker.cache.long_term_file.key_length = 256 - -<%text>#################################### -<%text>## BEAKER SESSION ## -<%text>#################################### - -<%text>## Name of session cookie. Should be unique for a given host and path, even when running -<%text>## on different ports. Otherwise, cookie sessions will be shared and messed up. -session.key = kallithea -<%text>## Sessions should always only be accessible by the browser, not directly by JavaScript. -session.httponly = true -<%text>## Session lifetime. 2592000 seconds is 30 days. -session.timeout = 2592000 - -<%text>## Server secret used with HMAC to ensure integrity of cookies. -session.secret = ${uuid()} -<%text>## Further, encrypt the data with AES. -#session.encrypt_key = -#session.validate_key = - -<%text>## Type of storage used for the session, current types are -<%text>## dbm, file, memcached, database, and memory. - -<%text>## File system storage of session data. (default) -#session.type = file - -<%text>## Cookie only, store all session data inside the cookie. Requires secure secrets. -#session.type = cookie - -<%text>## Database storage of session data. -#session.type = ext:database -#session.sa.url = postgresql://postgres:qwe@localhost/kallithea -#session.table_name = db_session - -<%text>#################################### -<%text>## ERROR HANDLING ## -<%text>#################################### - -<%text>## Show a nice error page for application HTTP errors and exceptions (default true) -#errorpage.enabled = true - -<%text>## Enable Backlash client-side interactive debugger (default false) -<%text>## WARNING: *THIS MUST BE false IN PRODUCTION ENVIRONMENTS!!!* -<%text>## This debug mode will allow all visitors to execute malicious code. -#debug = false - -<%text>## Enable Backlash server-side error reporting (unless debug mode handles it client-side) (default true) -#trace_errors.enable = true -<%text>## Errors will be reported by mail if trace_errors.error_email is set. 
- -<%text>## Propagate email settings to ErrorReporter of TurboGears2 -<%text>## You do not normally need to change these lines -get trace_errors.smtp_server = smtp_server -get trace_errors.smtp_port = smtp_port -get trace_errors.from_address = error_email_from -get trace_errors.error_email = email_to -get trace_errors.smtp_username = smtp_username -get trace_errors.smtp_password = smtp_password -get trace_errors.smtp_use_tls = smtp_use_tls - -%if error_aggregation_service == 'sentry': -<%text>################ -<%text>## [sentry] ## -<%text>################ - -<%text>## sentry is a alternative open source error aggregator -<%text>## you must install python packages `sentry` and `raven` to enable - -sentry.dsn = YOUR_DNS -sentry.servers = -sentry.name = -sentry.key = -sentry.public_key = -sentry.secret_key = -sentry.project = -sentry.site = -sentry.include_paths = -sentry.exclude_paths = - -%endif - -<%text>################################## -<%text>## LOGVIEW CONFIG ## -<%text>################################## - -logview.sqlalchemy = #faa -logview.pylons.templating = #bfb -logview.pylons.util = #eee - -<%text>######################### -<%text>## DB CONFIG ## -<%text>######################### - -%if database_engine == 'sqlite': -<%text>## SQLITE [default] -sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60 - -%elif database_engine == 'postgres': -<%text>## POSTGRESQL -sqlalchemy.url = postgresql://user:pass@localhost/kallithea - -%elif database_engine == 'mysql': -<%text>## MySQL -sqlalchemy.url = mysql://user:pass@localhost/kallithea?charset=utf8 -<%text>## Note: the mysql:// prefix should also be used for MariaDB - -%endif -<%text>## see sqlalchemy docs for other backends - -sqlalchemy.pool_recycle = 3600 - -<%text>################################ -<%text>## ALEMBIC CONFIGURATION ## -<%text>################################ - -[alembic] -script_location = kallithea:alembic - -<%text>################################ -<%text>## LOGGING CONFIGURATION ## -<%text>################################ - -[loggers] -keys = root, routes, kallithea, sqlalchemy, tg, gearbox, beaker, templates, whoosh_indexer, werkzeug, backlash - -[handlers] -keys = console, console_color, console_color_sql, null - -[formatters] -keys = generic, color_formatter, color_formatter_sql - -<%text>############# -<%text>## LOGGERS ## -<%text>############# - -[logger_root] -level = NOTSET -handlers = console -<%text>## For coloring based on log level: -#handlers = console_color - -[logger_routes] -level = WARN -handlers = -qualname = routes.middleware -<%text>## "level = DEBUG" logs the route matched and routing variables. 
- -[logger_beaker] -level = WARN -handlers = -qualname = beaker.container - -[logger_templates] -level = WARN -handlers = -qualname = pylons.templating - -[logger_kallithea] -level = WARN -handlers = -qualname = kallithea - -[logger_tg] -level = WARN -handlers = -qualname = tg - -[logger_gearbox] -level = WARN -handlers = -qualname = gearbox - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine -<%text>## For coloring based on log level and pretty printing of SQL: -#level = INFO -#handlers = console_color_sql -#propagate = 0 - -[logger_whoosh_indexer] -level = WARN -handlers = -qualname = whoosh_indexer - -[logger_werkzeug] -level = WARN -handlers = -qualname = werkzeug - -[logger_backlash] -level = WARN -handlers = -qualname = backlash - -<%text>############## -<%text>## HANDLERS ## -<%text>############## - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -formatter = generic - -[handler_console_color] -<%text>## ANSI color coding based on log level -class = StreamHandler -args = (sys.stderr,) -formatter = color_formatter - -[handler_console_color_sql] -<%text>## ANSI color coding and pretty printing of SQL statements -class = StreamHandler -args = (sys.stderr,) -formatter = color_formatter_sql - -[handler_null] -class = NullHandler -args = () - -<%text>################ -<%text>## FORMATTERS ## -<%text>################ - -[formatter_generic] -format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %Y-%m-%d %H:%M:%S - -[formatter_color_formatter] -class = kallithea.lib.colored_formatter.ColorFormatter -format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %Y-%m-%d %H:%M:%S - -[formatter_color_formatter_sql] -class = kallithea.lib.colored_formatter.ColorFormatterSql -format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %Y-%m-%d %H:%M:%S - -<%text>################# -<%text>## SSH LOGGING ## -<%text>################# - -<%text>## The default loggers use 'handler_console' that uses StreamHandler with -<%text>## destination 'sys.stderr'. In the context of the SSH server process, these log -<%text>## messages would be sent to the client, which is normally not what you want. -<%text>## By default, when running ssh-serve, just use NullHandler and disable logging -<%text>## completely. For other logging options, see: -<%text>## https://docs.python.org/2/library/logging.handlers.html - -[ssh_serve:logger_root] -level = CRITICAL -handlers = null - -<%text>## Note: If logging is configured with other handlers, they might need similar -<%text>## muting for ssh-serve too. 
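The [loggers]/[handlers]/[formatters] sections in the removed template follow the stdlib fileConfig layout, so the generated ini can be fed straight to logging.config. A minimal sketch, assuming the generated file is called my.ini::

    import logging.config

    # disable_existing_loggers=False keeps loggers created before this call alive
    logging.config.fileConfig('my.ini', disable_existing_loggers=False)
    logging.getLogger('kallithea').warning('logging configured from my.ini')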
diff -r c387989f868f -r 3669e58f3002 kallithea/lib/pygmentsutils.py --- a/kallithea/lib/pygmentsutils.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/pygmentsutils.py Fri Oct 30 23:44:18 2020 +0100 @@ -29,6 +29,8 @@ from pygments import lexers +import kallithea + def get_extension_descriptions(): """ @@ -69,10 +71,9 @@ def get_custom_lexer(extension): """ - returns a custom lexer if it's defined in rcextensions module, or None + returns a custom lexer if it's defined in the extensions module, or None if there's no custom lexer defined """ - import kallithea lexer_name = getattr(kallithea.EXTENSIONS, 'EXTRA_LEXERS', {}).get(extension) if lexer_name is None: return None diff -r c387989f868f -r 3669e58f3002 kallithea/lib/rcmail/__init__.py diff -r c387989f868f -r 3669e58f3002 kallithea/lib/rcmail/exceptions.py --- a/kallithea/lib/rcmail/exceptions.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,13 +0,0 @@ - - -class InvalidMessage(RuntimeError): - """ - Raised if message is missing vital headers, such - as recipients or sender address. - """ - - -class BadHeaders(RuntimeError): - """ - Raised if message contains newlines in headers. - """ diff -r c387989f868f -r 3669e58f3002 kallithea/lib/rcmail/message.py --- a/kallithea/lib/rcmail/message.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,152 +0,0 @@ -from kallithea.lib.rcmail.exceptions import BadHeaders, InvalidMessage -from kallithea.lib.rcmail.response import MailResponse - - -class Message(object): - """ - Encapsulates an email message. - - :param subject: email subject header - :param recipients: list of email addresses - :param body: plain text message - :param html: HTML message - :param sender: email sender address - :param cc: CC list - :param bcc: BCC list - :param extra_headers: dict of extra email headers - :param attachments: list of Attachment instances - :param recipients_separator: alternative separator for any of - 'From', 'To', 'Delivered-To', 'Cc', 'Bcc' fields - """ - - def __init__(self, - subject=None, - recipients=None, - body=None, - html=None, - sender=None, - cc=None, - bcc=None, - extra_headers=None, - attachments=None, - recipients_separator="; "): - - self.subject = subject or '' - self.sender = sender - self.body = body - self.html = html - - self.recipients = recipients or [] - self.attachments = attachments or [] - self.cc = cc or [] - self.bcc = bcc or [] - self.extra_headers = extra_headers or {} - - self.recipients_separator = recipients_separator - - @property - def send_to(self): - return set(self.recipients) | set(self.bcc or ()) | set(self.cc or ()) - - def to_message(self): - """ - Returns raw email.Message instance.Validates message first. - """ - - self.validate() - - return self.get_response().to_message() - - def get_response(self): - """ - Creates a Lamson MailResponse instance - """ - - response = MailResponse(Subject=self.subject, - To=self.recipients, - From=self.sender, - Body=self.body, - Html=self.html, - separator=self.recipients_separator) - - if self.cc: - response.base['Cc'] = self.cc - - for attachment in self.attachments: - - response.attach(attachment.filename, - attachment.content_type, - attachment.data, - attachment.disposition) - - response.update(self.extra_headers) - - return response - - def is_bad_headers(self): - """ - Checks for bad headers i.e. newlines in subject, sender or recipients. 
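get_custom_lexer() above reads EXTRA_LEXERS from the site's extensions module. A hypothetical rcextensions snippet (the mappings below are made up) showing one plausible shape: a file extension mapped to the name of a Pygments lexer::

    # in rcextensions/__init__.py (hypothetical example)
    EXTRA_LEXERS = {
        'tt': 'vbnet',           # highlight *.tt files with the vbnet lexer
        'jinja': 'html+django',  # treat *.jinja files as Django/Jinja templates
    }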
- """ - - headers = [self.subject, self.sender] - headers += list(self.send_to) - headers += self.extra_headers.values() - - for val in headers: - for c in '\r\n': - if c in val: - return True - return False - - def validate(self): - """ - Checks if message is valid and raises appropriate exception. - """ - - if not self.recipients: - raise InvalidMessage("No recipients have been added") - - if not self.body and not self.html: - raise InvalidMessage("No body has been set") - - if not self.sender: - raise InvalidMessage("No sender address has been set") - - if self.is_bad_headers(): - raise BadHeaders - - def add_recipient(self, recipient): - """ - Adds another recipient to the message. - - :param recipient: email address of recipient. - """ - - self.recipients.append(recipient) - - def add_cc(self, recipient): - """ - Adds an email address to the CC list. - - :param recipient: email address of recipient. - """ - - self.cc.append(recipient) - - def add_bcc(self, recipient): - """ - Adds an email address to the BCC list. - - :param recipient: email address of recipient. - """ - - self.bcc.append(recipient) - - def attach(self, attachment): - """ - Adds an attachment to the message. - - :param attachment: an **Attachment** instance. - """ - - self.attachments.append(attachment) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/rcmail/response.py --- a/kallithea/lib/rcmail/response.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,456 +0,0 @@ -# The code in this module is entirely lifted from the Lamson project -# (http://lamsonproject.org/). Its copyright is: - -# Copyright (c) 2008, Zed A. Shaw -# All rights reserved. - -# It is provided under this license: - -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: - -# * Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. - -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. - -# * Neither the name of the Zed A. Shaw nor the names of its contributors may -# be used to endorse or promote products derived from this software without -# specific prior written permission. - -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, -# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
- -import mimetypes -import os -import string -from email import encoders -from email.charset import Charset -from email.mime.base import MIMEBase -from email.utils import parseaddr - - -ADDRESS_HEADERS_WHITELIST = ['From', 'To', 'Delivered-To', 'Cc'] -DEFAULT_ENCODING = "utf-8" - -def VALUE_IS_EMAIL_ADDRESS(v): - return '@' in v - - -def normalize_header(header): - return string.capwords(header.lower(), '-') - - -class EncodingError(Exception): - """Thrown when there is an encoding error.""" - pass - - -class MailBase(object): - """MailBase is used as the basis of lamson.mail and contains the basics of - encoding an email. You actually can do all your email processing with this - class, but it's more raw. - """ - def __init__(self, items=()): - self.headers = dict(items) - self.parts = [] - self.body = None - self.content_encoding = {'Content-Type': (None, {}), - 'Content-Disposition': (None, {}), - 'Content-Transfer-Encoding': (None, {})} - - def __getitem__(self, key): - return self.headers.get(normalize_header(key), None) - - def __len__(self): - return len(self.headers) - - def __iter__(self): - return iter(self.headers) - - def __contains__(self, key): - return normalize_header(key) in self.headers - - def __setitem__(self, key, value): - self.headers[normalize_header(key)] = value - - def __delitem__(self, key): - del self.headers[normalize_header(key)] - - def __bool__(self): - return self.body is not None or len(self.headers) > 0 or len(self.parts) > 0 - - def keys(self): - """Returns the sorted keys.""" - return sorted(self.headers.keys()) - - def attach_file(self, filename, data, ctype, disposition): - """ - A file attachment is a raw attachment with a disposition that - indicates the file name. - """ - assert filename, "You can't attach a file without a filename." - ctype = ctype.lower() - - part = MailBase() - part.body = data - part.content_encoding['Content-Type'] = (ctype, {'name': filename}) - part.content_encoding['Content-Disposition'] = (disposition, - {'filename': filename}) - self.parts.append(part) - - def attach_text(self, data, ctype): - """ - This attaches a simpler text encoded part, which doesn't have a - filename. - """ - ctype = ctype.lower() - - part = MailBase() - part.body = data - part.content_encoding['Content-Type'] = (ctype, {}) - self.parts.append(part) - - def walk(self): - for p in self.parts: - yield p - for x in p.walk(): - yield x - - -class MailResponse(object): - """ - You are given MailResponse objects from the lamson.view methods, and - whenever you want to generate an email to send to someone. It has the - same basic functionality as MailRequest, but it is designed to be written - to, rather than read from (although you can do both). - - You can easily set a Body or Html during creation or after by passing it - as __init__ parameters, or by setting those attributes. - - You can initially set the From, To, and Subject, but they are headers so - use the dict notation to change them: msg['From'] = 'joe@example.com'. - - The message is not fully crafted until right when you convert it with - MailResponse.to_message. This lets you change it and work with it, then - send it out when it's ready. 
- """ - def __init__(self, To=None, From=None, Subject=None, Body=None, Html=None, - separator="; "): - self.Body = Body - self.Html = Html - self.base = MailBase([('To', To), ('From', From), ('Subject', Subject)]) - self.multipart = self.Body and self.Html - self.attachments = [] - self.separator = separator - - def __contains__(self, key): - return self.base.__contains__(key) - - def __getitem__(self, key): - return self.base.__getitem__(key) - - def __setitem__(self, key, val): - return self.base.__setitem__(key, val) - - def __delitem__(self, name): - del self.base[name] - - def attach(self, filename=None, content_type=None, data=None, - disposition=None): - """ - - Simplifies attaching files from disk or data as files. To attach - simple text simple give data and a content_type. To attach a file, - give the data/content_type/filename/disposition combination. - - For convenience, if you don't give data and only a filename, then it - will read that file's contents when you call to_message() later. If - you give data and filename then it will assume you've filled data - with what the file's contents are and filename is just the name to - use. - """ - - assert filename or data, ("You must give a filename or some data to " - "attach.") - assert data or os.path.exists(filename), ("File doesn't exist, and no " - "data given.") - - self.multipart = True - - if filename and not content_type: - content_type, encoding = mimetypes.guess_type(filename) - - assert content_type, ("No content type given, and couldn't guess " - "from the filename: %r" % filename) - - self.attachments.append({'filename': filename, - 'content_type': content_type, - 'data': data, - 'disposition': disposition}) - - def attach_part(self, part): - """ - Attaches a raw MailBase part from a MailRequest (or anywhere) - so that you can copy it over. - """ - self.multipart = True - - self.attachments.append({'filename': None, - 'content_type': None, - 'data': None, - 'disposition': None, - 'part': part, - }) - - def attach_all_parts(self, mail_request): - """ - Used for copying the attachment parts of a mail.MailRequest - object for mailing lists that need to maintain attachments. - """ - for part in mail_request.all_parts(): - self.attach_part(part) - - self.base.content_encoding = mail_request.base.content_encoding.copy() - - def clear(self): - """ - Clears out the attachments so you can redo them. Use this to keep the - headers for a series of different messages with different attachments. - """ - del self.attachments[:] - del self.base.parts[:] - self.multipart = False - - def update(self, message): - """ - Used to easily set a bunch of heading from another dict - like object. - """ - for k in message.keys(): - self.base[k] = message[k] - - def __str__(self): - """ - Converts to a string. - """ - return self.to_message().as_string() - - def _encode_attachment(self, filename=None, content_type=None, data=None, - disposition=None, part=None): - """ - Used internally to take the attachments mentioned in self.attachments - and do the actual encoding in a lazy way when you call to_message. 
- """ - if part: - self.base.parts.append(part) - elif filename: - if not data: - data = open(filename).read() - - self.base.attach_file(filename, data, content_type, - disposition or 'attachment') - else: - self.base.attach_text(data, content_type) - - ctype = self.base.content_encoding['Content-Type'][0] - - if ctype and not ctype.startswith('multipart'): - self.base.content_encoding['Content-Type'] = ('multipart/mixed', {}) - - def to_message(self): - """ - Figures out all the required steps to finally craft the - message you need and return it. The resulting message - is also available as a self.base attribute. - - What is returned is a Python email API message you can - use with those APIs. The self.base attribute is the raw - lamson.encoding.MailBase. - """ - del self.base.parts[:] - - if self.Body and self.Html: - self.multipart = True - self.base.content_encoding['Content-Type'] = ( - 'multipart/alternative', {}) - - if self.multipart: - self.base.body = None - if self.Body: - self.base.attach_text(self.Body, 'text/plain') - - if self.Html: - self.base.attach_text(self.Html, 'text/html') - - for args in self.attachments: - self._encode_attachment(**args) - - elif self.Body: - self.base.body = self.Body - self.base.content_encoding['Content-Type'] = ('text/plain', {}) - - elif self.Html: - self.base.body = self.Html - self.base.content_encoding['Content-Type'] = ('text/html', {}) - - return to_message(self.base, separator=self.separator) - - def all_parts(self): - """ - Returns all the encoded parts. Only useful for debugging - or inspecting after calling to_message(). - """ - return self.base.parts - - def keys(self): - return self.base.keys() - - -def to_message(mail, separator="; "): - """ - Given a MailBase message, this will construct a MIMEPart - that is canonicalized for use with the Python email API. - """ - ctype, params = mail.content_encoding['Content-Type'] - - if not ctype: - if mail.parts: - ctype = 'multipart/mixed' - else: - ctype = 'text/plain' - else: - if mail.parts: - assert ctype.startswith(("multipart", "message")), \ - "Content type should be multipart or message, not %r" % ctype - - # adjust the content type according to what it should be now - mail.content_encoding['Content-Type'] = (ctype, params) - - try: - out = MIMEPart(ctype, **params) - except TypeError as e: # pragma: no cover - raise EncodingError("Content-Type malformed, not allowed: %r; " - "%r (Python ERROR: %s)" % - (ctype, params, e.args[0])) - - for k in mail.keys(): - if k in ADDRESS_HEADERS_WHITELIST: - out[k] = header_to_mime_encoding( - mail[k], - not_email=False, - separator=separator - ) - else: - out[k] = header_to_mime_encoding( - mail[k], - not_email=True - ) - - out.extract_payload(mail) - - # go through the children - for part in mail.parts: - out.attach(to_message(part)) - - return out - - -class MIMEPart(MIMEBase): - """ - A reimplementation of nearly everything in email.mime to be more useful - for actually attaching things. Rather than one class for every type of - thing you'd encode, there's just this one, and it figures out how to - encode what you ask it. 
- """ - def __init__(self, type, **params): - self.maintype, self.subtype = type.split('/') - MIMEBase.__init__(self, self.maintype, self.subtype, **params) - - def add_text(self, content): - # this is text, so encode it in canonical form - try: - encoded = content.encode('ascii') - charset = 'ascii' - except UnicodeError: - encoded = content.encode('utf-8') - charset = 'utf-8' - - self.set_payload(encoded, charset=charset) - - def extract_payload(self, mail): - if mail.body is None: - return # only None, '' is still ok - - ctype, _ctype_params = mail.content_encoding['Content-Type'] - cdisp, cdisp_params = mail.content_encoding['Content-Disposition'] - - assert ctype, ("Extract payload requires that mail.content_encoding " - "have a valid Content-Type.") - - if ctype.startswith("text/"): - self.add_text(mail.body) - else: - if cdisp: - # replicate the content-disposition settings - self.add_header('Content-Disposition', cdisp, **cdisp_params) - - self.set_payload(mail.body) - encoders.encode_base64(self) - - def __repr__(self): - return "" % ( - self.subtype, - self.maintype, - self['Content-Type'], - self['Content-Disposition'], - self.is_multipart()) - - -def header_to_mime_encoding(value, not_email=False, separator=", "): - if not value: - return "" - - encoder = Charset(DEFAULT_ENCODING) - if isinstance(value, list): - return separator.join(properly_encode_header( - v, encoder, not_email) for v in value) - else: - return properly_encode_header(value, encoder, not_email) - - -def properly_encode_header(value, encoder, not_email): - """ - The only thing special (weird) about this function is that it tries - to do a fast check to see if the header value has an email address in - it. Since random headers could have an email address, and email addresses - have weird special formatting rules, we have to check for it. - - Normally this works fine, but in Librelist, we need to "obfuscate" email - addresses by changing the '@' to '-AT-'. This is where - VALUE_IS_EMAIL_ADDRESS exists. It's a simple lambda returning True/False - to check if a header value has an email address. If you need to make this - check different, then change this. - """ - try: - value.encode("ascii") - return value - except UnicodeError: - if not not_email and VALUE_IS_EMAIL_ADDRESS(value): - # this could have an email address, make sure we don't screw it up - name, address = parseaddr(value) - return '"%s" <%s>' % (encoder.header_encode(name), address) - - return encoder.header_encode(value) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/rcmail/smtp_mailer.py --- a/kallithea/lib/rcmail/smtp_mailer.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,106 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -""" -kallithea.lib.rcmail.smtp_mailer -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Simple smtp mailer used in Kallithea - -This file was forked by the Kallithea project in July 2014. 
-Original author and date, and relevant copyright and licensing information is below: -:created_on: Sep 13, 2010 -:author: marcink -:copyright: (c) 2013 RhodeCode GmbH, and others. -:license: GPLv3, see LICENSE.md for more details. -""" - -import logging -import smtplib -import time -from email.utils import formatdate -from ssl import SSLError - -from kallithea.lib.rcmail.message import Message -from kallithea.lib.rcmail.utils import DNS_NAME - - -class SmtpMailer(object): - """SMTP mailer class - - mailer = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth - mail_port, ssl, tls) - mailer.send(recipients, subject, body, attachment_files) - - :param recipients might be a list of string or single string - :param attachment_files is a dict of {filename:location} - it tries to guess the mimetype and attach the file - - """ - - def __init__(self, mail_from, user, passwd, mail_server, smtp_auth=None, - mail_port=None, ssl=False, tls=False, debug=False): - - self.mail_from = mail_from - self.mail_server = mail_server - self.mail_port = mail_port - self.user = user - self.passwd = passwd - self.ssl = ssl - self.tls = tls - self.debug = debug - self.auth = smtp_auth - - def send(self, recipients=None, subject='', body='', html='', - attachment_files=None, headers=None): - recipients = recipients or [] - if isinstance(recipients, str): - recipients = [recipients] - if headers is None: - headers = {} - headers.setdefault('Date', formatdate(time.time())) - msg = Message(subject, recipients, body, html, self.mail_from, - recipients_separator=", ", extra_headers=headers) - raw_msg = msg.to_message() - - if self.ssl: - smtp_serv = smtplib.SMTP_SSL(self.mail_server, self.mail_port, - local_hostname=DNS_NAME.get_fqdn()) - else: - smtp_serv = smtplib.SMTP(self.mail_server, self.mail_port, - local_hostname=DNS_NAME.get_fqdn()) - - if self.tls: - smtp_serv.ehlo() - smtp_serv.starttls() - - if self.debug: - smtp_serv.set_debuglevel(1) - - smtp_serv.ehlo() - if self.auth: - smtp_serv.esmtp_features["auth"] = self.auth - - # if server requires authorization you must provide login and password - # but only if we have them - if self.user and self.passwd: - smtp_serv.login(self.user, self.passwd) - - smtp_serv.sendmail(msg.sender, msg.send_to, raw_msg.as_string()) - logging.info('MAIL SENT TO: %s' % recipients) - - try: - smtp_serv.quit() - except SSLError: - # SSL error might sometimes be raised in tls connections on closing - smtp_serv.close() diff -r c387989f868f -r 3669e58f3002 kallithea/lib/rcmail/utils.py --- a/kallithea/lib/rcmail/utils.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -""" -Email message and email sending related helper functions. -""" - -import socket - - -# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of -# seconds, which slows down the restart of the server. -class CachedDnsName(object): - def __str__(self): - return self.get_fqdn() - - def get_fqdn(self): - if not hasattr(self, '_fqdn'): - self._fqdn = socket.getfqdn() - return self._fqdn - - -DNS_NAME = CachedDnsName() diff -r c387989f868f -r 3669e58f3002 kallithea/lib/ssh.py --- a/kallithea/lib/ssh.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/ssh.py Fri Oct 30 23:44:18 2020 +0100 @@ -22,12 +22,14 @@ # along with this program. If not, see . 
import base64 +import binascii import logging import re +import struct from tg.i18n import ugettext as _ -from kallithea.lib.utils2 import ascii_bytes, ascii_str +from kallithea.lib.utils2 import ascii_str log = logging.getLogger(__name__) @@ -36,6 +38,14 @@ class SshKeyParseError(Exception): """Exception raised by parse_pub_key""" +algorithm_types = { # mapping name to number of data strings in key + # https://tools.ietf.org/html/rfc4253#section-6.6 + 'ssh-rsa': 2, # e, n + 'ssh-dss': 4, # p, q, g, y + # https://tools.ietf.org/html/rfc8709 + 'ssh-ed25519': 1, + 'ssh-ed448': 1, +} def parse_pub_key(ssh_key): r"""Parse SSH public key string, raise SshKeyParseError or return decoded keytype, data and comment @@ -48,26 +58,38 @@ >>> parse_pub_key('''AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''') Traceback (most recent call last): ... - kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp=' + kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp=' >>> parse_pub_key('''abc AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''') Traceback (most recent call last): ... - kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)' + kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - it must start with key type 'ssh-rsa', 'ssh-dss', 'ssh-ed448', or 'ssh-ed25519' >>> parse_pub_key('''ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''') Traceback (most recent call last): ... - kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - failed to decode base64 part 'AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ' + kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - base64 part 'AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ' seems truncated (it can't be decoded) >>> parse_pub_key('''ssh-rsa AAAAB2NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==''') Traceback (most recent call last): ... - kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - base64 part is not 'ssh-rsa' as claimed but 'csh-rsa' + kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - base64 part 'AAAAB2NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==' seems truncated (it contains a partial string length) + >>> parse_pub_key('''ssh-rsa AAAAB2NzaC1yc2EAAAANVGhpcyBpcyE=''') + Traceback (most recent call last): + ... + kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - base64 part 'AAAAB2NzaC1yc2EAAAANVGhpcyBpcyE=' seems truncated (it is too short for declared string length 13) + >>> parse_pub_key('''ssh-rsa AAAAB2NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQ==''') + Traceback (most recent call last): + ... + kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - base64 part 'AAAAB2NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQ==' seems truncated (it contains too few strings for a ssh-rsa key) + >>> parse_pub_key('''ssh-rsa AAAAB2NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQAAAANieWU=''') + Traceback (most recent call last): + ... + kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - it is a ssh-rsa key but the base64 part contains 'csh-rsa' >>> parse_pub_key('''ssh-rsa AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ''') Traceback (most recent call last): ... 
- kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - unexpected characters in base64 part "AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ" - >>> parse_pub_key(''' ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ== and a comment + kallithea.lib.ssh.SshKeyParseError: Invalid SSH key - unexpected characters in base64 part "AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ" + >>> parse_pub_key(''' ssh-rsa AAAAB3NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQAAAANieWU= and a comment ... ''') - ('ssh-rsa', b'\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x0bThis is fake!', 'and a comment\n') + ('ssh-rsa', b'\x00\x00\x00\x07ssh-rsa\x00\x00\x00\rThis is fake!\x00\x00\x00\x03bye', 'and a comment\n') >>> parse_pub_key('''ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIP1NA2kBQIKe74afUXmIWD9ByDYQJqUwW44Y4gJOBRuo''') ('ssh-ed25519', b'\x00\x00\x00\x0bssh-ed25519\x00\x00\x00 \xfdM\x03i\x01@\x82\x9e\xef\x86\x9fQy\x88X?A\xc86\x10&\xa50[\x8e\x18\xe2\x02N\x05\x1b\xa8', '') """ @@ -76,22 +98,40 @@ parts = ssh_key.split(None, 2) if len(parts) < 2: - raise SshKeyParseError(_("Incorrect SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='")) + raise SshKeyParseError(_("Invalid SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='")) keytype, keyvalue, comment = (parts + [''])[:3] - if keytype not in ('ssh-rsa', 'ssh-dss', 'ssh-ed25519'): - raise SshKeyParseError(_("Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'")) + keytype_data_size = algorithm_types.get(keytype) + if keytype_data_size is None: + raise SshKeyParseError(_("Invalid SSH key - it must start with key type 'ssh-rsa', 'ssh-dss', 'ssh-ed448', or 'ssh-ed25519'")) - if re.search(r'[^a-zA-Z0-9+/=]', keyvalue): - raise SshKeyParseError(_("Incorrect SSH key - unexpected characters in base64 part %r") % keyvalue) + if re.search(r'[^a-zA-Z0-9+/=]', keyvalue): # make sure b64decode doesn't stop at the first invalid character and skip the rest + raise SshKeyParseError(_("Invalid SSH key - unexpected characters in base64 part %r") % keyvalue) try: key_bytes = base64.b64decode(keyvalue) - except base64.binascii.Error: - raise SshKeyParseError(_("Incorrect SSH key - failed to decode base64 part %r") % keyvalue) + except binascii.Error: # Must be caused by truncation - either "Invalid padding" or "Invalid base64-encoded string: number of data characters (x) cannot be 1 more than a multiple of 4" + raise SshKeyParseError(_("Invalid SSH key - base64 part %r seems truncated (it can't be decoded)") % keyvalue) - if not key_bytes.startswith(b'\x00\x00\x00%c%s\x00' % (len(keytype), ascii_bytes(keytype))): - raise SshKeyParseError(_("Incorrect SSH key - base64 part is not %r as claimed but %r") % (keytype, ascii_str(key_bytes[4:].split(b'\0', 1)[0]))) + # Check key internals to make sure the key wasn't truncated in a way that base64 can decode: + # Parse and verify key according to https://tools.ietf.org/html/rfc4253#section-6.6 + strings = [] + offset = 0 + while offset < len(key_bytes): + try: + string_length, = struct.unpack_from('!I', key_bytes, offset) + except struct.error: # unpack_from requires a buffer of at least 283 bytes for unpacking 4 bytes at offset 279 (actual buffer size is 280) + raise SshKeyParseError(_("Invalid SSH key - base64 part %r seems truncated (it contains a partial string length)") % keyvalue) + offset += 4 + string = key_bytes[offset:offset + string_length] + if len(string) != string_length: + raise SshKeyParseError(_("Invalid SSH key - base64 part %r seems truncated (it is too short for declared 
string length %s)") % (keyvalue, string_length)) + strings.append(string) + offset += string_length + if len(strings) != keytype_data_size + 1: + raise SshKeyParseError(_("Invalid SSH key - base64 part %r seems truncated (it contains too few strings for a %s key)") % (keyvalue, keytype)) + if ascii_str(strings[0]) != keytype: + raise SshKeyParseError(_("Invalid SSH key - it is a %s key but the base64 part contains %r") % (keytype, ascii_str(strings[0]))) return keytype, key_bytes, comment @@ -112,15 +152,16 @@ def authorized_keys_line(kallithea_cli_path, config_file, key): - """ + r""" Return a line as it would appear in .authorized_keys - >>> from kallithea.model.db import UserSshKeys, User - >>> user = User(user_id=7, username='uu') - >>> key = UserSshKeys(user_ssh_key_id=17, user=user, description='test key') - >>> key.public_key='''ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ== and a comment''' + >>> getfixture('doctest_mock_ugettext') + >>> from kallithea.model import db + >>> user = db.User(user_id=7, username='uu') + >>> key = db.UserSshKeys(user_ssh_key_id=17, user=user, description='test key') + >>> key.public_key='''ssh-rsa AAAAB3NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQAAAANieWU= and a comment''' >>> authorized_keys_line('/srv/kallithea/venv/bin/kallithea-cli', '/srv/kallithea/my.ini', key) - 'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="/srv/kallithea/venv/bin/kallithea-cli ssh-serve -c /srv/kallithea/my.ini 7 17" ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==\\n' + 'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="/srv/kallithea/venv/bin/kallithea-cli ssh-serve -c /srv/kallithea/my.ini 7 17" ssh-rsa AAAAB3NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQAAAANieWU=\n' """ try: keytype, key_bytes, comment = parse_pub_key(key.public_key) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/timerproxy.py --- a/kallithea/lib/timerproxy.py Wed Oct 28 14:58:18 2020 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
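To make the RFC 4253 layout that the reworked parse_pub_key() walks easier to follow, here is a standalone sketch that decodes the length-prefixed strings of a key blob; the sample value is the fake doctest key used above, not a real key.

    import base64
    import struct

    # fake ssh-rsa body from the doctests above: 'ssh-rsa', 'This is fake!', 'bye'
    blob = base64.b64decode('AAAAB3NzaC1yc2EAAAANVGhpcyBpcyBmYWtlIQAAAANieWU=')
    strings = []
    offset = 0
    while offset < len(blob):
        length, = struct.unpack_from('!I', blob, offset)  # 4-byte big-endian length
        offset += 4
        strings.append(blob[offset:offset + length])
        offset += length
    print(strings)  # strings[0] is the key type, here b'ssh-rsa'

The algorithm_types table introduced above records how many data strings each key type must carry after the leading type string, which is what the truncation checks compare against.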
- -import logging -import time - -from sqlalchemy.interfaces import ConnectionProxy - - -log = logging.getLogger('timerproxy') - -BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(30, 38) - - -def color_sql(sql): - COLOR_SEQ = "\033[1;%dm" - COLOR_SQL = YELLOW - normal = '\x1b[0m' - return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal]) - - -class TimerProxy(ConnectionProxy): - - def __init__(self): - super(TimerProxy, self).__init__() - - def cursor_execute(self, execute, cursor, statement, parameters, - context, executemany): - - now = time.time() - try: - log.info(color_sql(">>>>> STARTING QUERY >>>>>")) - return execute(cursor, statement, parameters, context) - finally: - total = time.time() - now - log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total)) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/utils.py --- a/kallithea/lib/utils.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/utils.py Fri Oct 30 23:44:18 2020 +0100 @@ -38,9 +38,9 @@ import mercurial.error import mercurial.ui -import kallithea.config.conf +import kallithea.lib.conf from kallithea.lib.exceptions import InvalidCloneUriException -from kallithea.lib.utils2 import ascii_bytes, aslist, get_current_authuser, safe_bytes, safe_str +from kallithea.lib.utils2 import ascii_bytes, aslist, extract_mentioned_usernames, get_current_authuser, safe_bytes, safe_str from kallithea.lib.vcs.backends.git.repository import GitRepository from kallithea.lib.vcs.backends.hg.repository import MercurialRepository from kallithea.lib.vcs.conf import settings @@ -48,7 +48,6 @@ from kallithea.lib.vcs.utils.fakemod import create_module from kallithea.lib.vcs.utils.helpers import get_scm from kallithea.model import db, meta -from kallithea.model.db import RepoGroup, Repository, Setting, Ui, User, UserGroup, UserLog log = logging.getLogger(__name__) @@ -75,7 +74,7 @@ def get_user_group_slug(request): _group = request.environ['pylons.routes_dict'].get('id') - _group = UserGroup.get(_group) + _group = db.UserGroup.get(_group) if _group: return _group.users_group_name return None @@ -105,7 +104,7 @@ rest = '/' + rest_ repo_id = _get_permanent_id(first) if repo_id is not None: - repo = Repository.get(repo_id) + repo = db.Repository.get(repo_id) if repo is not None: return repo.repo_name + rest return path @@ -131,23 +130,23 @@ ipaddr = getattr(get_current_authuser(), 'ip_addr', '') if getattr(user, 'user_id', None): - user_obj = User.get(user.user_id) + user_obj = db.User.get(user.user_id) elif isinstance(user, str): - user_obj = User.get_by_username(user) + user_obj = db.User.get_by_username(user) else: raise Exception('You have to provide a user object or a username') if getattr(repo, 'repo_id', None): - repo_obj = Repository.get(repo.repo_id) + repo_obj = db.Repository.get(repo.repo_id) repo_name = repo_obj.repo_name elif isinstance(repo, str): repo_name = repo.lstrip('/') - repo_obj = Repository.get_by_repo_name(repo_name) + repo_obj = db.Repository.get_by_repo_name(repo_name) else: repo_obj = None repo_name = '' - user_log = UserLog() + user_log = db.UserLog() user_log.user_id = user_obj.user_id user_log.username = user_obj.username user_log.action = action @@ -237,12 +236,6 @@ raise InvalidCloneUriException('URI %s URLError: %s' % (url, e)) except mercurial.error.RepoError as e: raise InvalidCloneUriException('Mercurial %s: %s' % (type(e).__name__, safe_str(bytes(e)))) - elif url.startswith('svn+http'): - try: - from hgsubversion.svnrepo import svnremoterepo - except ImportError: - raise InvalidCloneUriException('URI type %s 
not supported - hgsubversion is not available' % (url,)) - svnremoterepo(ui, url).svn.uuid elif url.startswith('git+http'): raise InvalidCloneUriException('URI type %s not implemented' % (url,)) else: @@ -256,8 +249,6 @@ GitRepository._check_url(url) except urllib.error.URLError as e: raise InvalidCloneUriException('URI %s URLError: %s' % (url, e)) - elif url.startswith('svn+http'): - raise InvalidCloneUriException('URI type %s not implemented' % (url,)) elif url.startswith('hg+http'): raise InvalidCloneUriException('URI type %s not implemented' % (url,)) else: @@ -330,7 +321,7 @@ baseui._tcfg = mercurial.config.config() sa = meta.Session() - for ui_ in sa.query(Ui).order_by(Ui.ui_section, Ui.ui_key): + for ui_ in sa.query(db.Ui).order_by(db.Ui.ui_section, db.Ui.ui_key): if ui_.ui_active: log.debug('config from db: [%s] %s=%r', ui_.ui_section, ui_.ui_key, ui_.ui_value) @@ -361,10 +352,10 @@ :param config: """ - hgsettings = Setting.get_app_settings() - for k, v in hgsettings.items(): + settings = db.Setting.get_app_settings() + for k, v in settings.items(): config[k] = v - config['base_path'] = Ui.get_repos_location() + config['base_path'] = db.Ui.get_repos_location() def set_vcs_config(config): @@ -391,10 +382,10 @@ :param config: kallithea.CONFIG """ log.debug('adding extra into INDEX_EXTENSIONS') - kallithea.config.conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', ''))) + kallithea.lib.conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', ''))) log.debug('adding extra into INDEX_FILENAMES') - kallithea.config.conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', ''))) + kallithea.lib.conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', ''))) def map_groups(path): @@ -407,17 +398,17 @@ """ from kallithea.model.repo_group import RepoGroupModel sa = meta.Session() - groups = path.split(db.URL_SEP) + groups = path.split(kallithea.URL_SEP) parent = None group = None # last element is repo in nested groups structure groups = groups[:-1] rgm = RepoGroupModel() - owner = User.get_first_admin() + owner = db.User.get_first_admin() for lvl, group_name in enumerate(groups): group_name = '/'.join(groups[:lvl] + [group_name]) - group = RepoGroup.get_by_group_name(group_name) + group = db.RepoGroup.get_by_group_name(group_name) desc = '%s group' % group_name # skip folders that are now removed repos @@ -427,7 +418,7 @@ if group is None: log.debug('creating group level: %s group_name: %s', lvl, group_name) - group = RepoGroup(group_name, parent) + group = db.RepoGroup(group_name, parent) group.group_description = desc group.owner = owner sa.add(group) @@ -457,16 +448,16 @@ sa = meta.Session() repo_model = RepoModel() if user is None: - user = User.get_first_admin() + user = db.User.get_first_admin() added = [] # creation defaults - defs = Setting.get_default_repo_settings(strip_prefix=True) + defs = db.Setting.get_default_repo_settings(strip_prefix=True) enable_statistics = defs.get('repo_enable_statistics') enable_downloads = defs.get('repo_enable_downloads') private = defs.get('repo_private') - for name, repo in initial_repo_dict.items(): + for name, repo in sorted(initial_repo_dict.items()): group = map_groups(name) db_repo = repo_model.get_by_repo_name(name) # found repo that is on filesystem not in Kallithea database @@ -486,7 +477,7 @@ enable_downloads=enable_downloads, enable_statistics=enable_statistics, private=private, - state=Repository.STATE_CREATED + state=db.Repository.STATE_CREATED ) sa.commit() 
# we added that repo just now, and make sure it has githook @@ -500,11 +491,11 @@ new_repo.update_changeset_cache() elif install_git_hooks: if db_repo.repo_type == 'git': - ScmModel().install_git_hooks(db_repo.scm_instance, force_create=overwrite_git_hooks) + ScmModel().install_git_hooks(db_repo.scm_instance, force=overwrite_git_hooks) removed = [] # remove from database those repositories that are not in the filesystem - for repo in sa.query(Repository).all(): + for repo in sa.query(db.Repository).all(): if repo.repo_name not in initial_repo_dict: if remove_obsolete: log.debug("Removing non-existing repository found in db `%s`", @@ -520,32 +511,30 @@ return added, removed -def load_rcextensions(root_path): - path = os.path.join(root_path, 'rcextensions', '__init__.py') - if os.path.isfile(path): - rcext = create_module('rc', path) - EXT = kallithea.EXTENSIONS = rcext - log.debug('Found rcextensions now loading %s...', rcext) - - # Additional mappings that are not present in the pygments lexers - kallithea.config.conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) - - # OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present) +def load_extensions(root_path): + try: + ext = create_module('extensions', os.path.join(root_path, 'extensions.py')) + except FileNotFoundError: + try: + ext = create_module('rc', os.path.join(root_path, 'rcextensions', '__init__.py')) + log.warning('The name "rcextensions" is deprecated. Please use a file `extensions.py` instead of a directory `rcextensions`.') + except FileNotFoundError: + return - if getattr(EXT, 'INDEX_EXTENSIONS', []): - log.debug('settings custom INDEX_EXTENSIONS') - kallithea.config.conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', []) + log.info('Loaded Kallithea extensions from %s', ext) + kallithea.EXTENSIONS = ext + + # Additional mappings that are not present in the pygments lexers + kallithea.lib.conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(ext, 'EXTRA_MAPPINGS', {})) - # ADDITIONAL MAPPINGS - log.debug('adding extra into INDEX_EXTENSIONS') - kallithea.config.conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', [])) + # Override any INDEX_EXTENSIONS + if getattr(ext, 'INDEX_EXTENSIONS', []): + log.debug('settings custom INDEX_EXTENSIONS') + kallithea.lib.conf.INDEX_EXTENSIONS = getattr(ext, 'INDEX_EXTENSIONS', []) - # auto check if the module is not missing any data, set to default if is - # this will help autoupdate new feature of rcext module - #from kallithea.config import rcextensions - #for k in dir(rcextensions): - # if not k.startswith('_') and not hasattr(EXT, k): - # setattr(EXT, k, getattr(rcextensions, k)) + # Additional INDEX_EXTENSIONS + log.debug('adding extra into INDEX_EXTENSIONS') + kallithea.lib.conf.INDEX_EXTENSIONS.extend(getattr(ext, 'EXTRA_INDEX_EXTENSIONS', [])) #============================================================================== @@ -601,3 +590,13 @@ settings.GIT_EXECUTABLE_PATH, output) return ver + + +def extract_mentioned_users(text): + """ Returns set of actual database Users @mentioned in given text. 
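For illustration, a minimal extensions.py of the kind the new load_extensions() looks for under the given root path, falling back to a deprecated rcextensions package if the file is absent; all attribute values below are assumed examples.

    # extensions.py sketch -- attribute values are assumed examples
    # merged into kallithea.lib.conf.LANGUAGES_EXTENSIONS_MAP (mapping shape assumed)
    EXTRA_MAPPINGS = {}
    # appended to the full-text indexer's INDEX_EXTENSIONS list
    EXTRA_INDEX_EXTENSIONS = ['cfg', 'conf']
    # defining INDEX_EXTENSIONS itself would replace the built-in list instead

When the module is found it is bound to kallithea.EXTENSIONS, so the EXTRA_LEXERS lookup shown earlier in this patch reads from the same place.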
""" + result = set() + for name in extract_mentioned_usernames(text): + user = db.User.get_by_username(name, case_insensitive=True) + if user is not None and not user.is_default_user: + result.add(user) + return result diff -r c387989f868f -r 3669e58f3002 kallithea/lib/utils2.py --- a/kallithea/lib/utils2.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/utils2.py Fri Oct 30 23:44:18 2020 +0100 @@ -36,10 +36,17 @@ import urllib.parse import urlobject +from dateutil import relativedelta +from sqlalchemy.engine import url as sa_url +from sqlalchemy.exc import ArgumentError from tg.i18n import ugettext as _ from tg.i18n import ungettext +from tg.support.converters import asbool, aslist from webhelpers2.text import collapse, remove_formatting, strip_tags +import kallithea +from kallithea.lib.vcs.backends.base import BaseRepository, EmptyChangeset +from kallithea.lib.vcs.exceptions import RepositoryError from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export from kallithea.lib.vcs.utils.lazy import LazyProperty @@ -51,6 +58,8 @@ # mute pyflakes "imported but unused" +assert asbool +assert aslist assert ascii_bytes assert ascii_str assert safe_bytes @@ -58,44 +67,6 @@ assert LazyProperty -def str2bool(_str): - """ - returns True/False value from given string, it tries to translate the - string into boolean - - :param _str: string value to translate into boolean - :rtype: boolean - :returns: boolean from given string - """ - if _str is None: - return False - if _str in (True, False): - return _str - _str = str(_str).strip().lower() - return _str in ('t', 'true', 'y', 'yes', 'on', '1') - - -def aslist(obj, sep=None, strip=True): - """ - Returns given string separated by sep as list - - :param obj: - :param sep: - :param strip: - """ - if isinstance(obj, (str)): - lst = obj.split(sep) - if strip: - lst = [v.strip() for v in lst] - return lst - elif isinstance(obj, (list, tuple)): - return obj - elif obj is None: - return [] - else: - return [obj] - - def convert_line_endings(line, mode): """ Converts a given line "line end" according to given mode @@ -204,7 +175,6 @@ if future: prevdate = prevdate.replace(microsecond=0) # Get date parts deltas - from dateutil import relativedelta for part in order: d = relativedelta.relativedelta(now, prevdate) deltas[part] = getattr(d, part + 's') @@ -366,9 +336,6 @@ :param repo: :param rev: """ - from kallithea.lib.vcs.backends.base import BaseRepository - from kallithea.lib.vcs.exceptions import RepositoryError - from kallithea.lib.vcs.backends.base import EmptyChangeset if not isinstance(repo, BaseRepository): raise Exception('You must pass an Repository ' 'object as first argument got %s' % type(repo)) @@ -411,17 +378,6 @@ return MENTIONS_REGEX.findall(text) -def extract_mentioned_users(text): - """ Returns set of actual database Users @mentioned in given text. """ - from kallithea.model.db import User - result = set() - for name in extract_mentioned_usernames(text): - user = User.get_by_username(name, case_insensitive=True) - if user is not None and not user.is_default_user: - result.add(user) - return result - - class AttributeDict(dict): def __getattr__(self, attr): return self.get(attr, None) @@ -430,8 +386,6 @@ def obfuscate_url_pw(engine): - from sqlalchemy.engine import url as sa_url - from sqlalchemy.exc import ArgumentError try: _url = sa_url.make_url(engine or '') except ArgumentError: @@ -478,14 +432,13 @@ Must always be called before anything with hooks are invoked. 
""" - from kallithea import CONFIG extras = { 'ip': ip_addr, # used in log_push/pull_action action_logger 'username': username, 'action': action or 'push_local', # used in log_push_action_raw_ids action_logger 'repository': repo_name, 'scm': repo_alias, # used to pick hack in log_push_action_raw_ids - 'config': CONFIG['__file__'], # used by git hook to read config + 'config': kallithea.CONFIG['__file__'], # used by git hook to read config } os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras) @@ -502,75 +455,6 @@ return None -class OptionalAttr(object): - """ - Special Optional Option that defines other attribute. Example:: - - def test(apiuser, userid=Optional(OAttr('apiuser')): - user = Optional.extract(userid) - # calls - - """ - - def __init__(self, attr_name): - self.attr_name = attr_name - - def __repr__(self): - return '' % self.attr_name - - def __call__(self): - return self - - -# alias -OAttr = OptionalAttr - - -class Optional(object): - """ - Defines an optional parameter:: - - param = param.getval() if isinstance(param, Optional) else param - param = param() if isinstance(param, Optional) else param - - is equivalent of:: - - param = Optional.extract(param) - - """ - - def __init__(self, type_): - self.type_ = type_ - - def __repr__(self): - return '' % self.type_.__repr__() - - def __call__(self): - return self.getval() - - def getval(self): - """ - returns value from this Optional instance - """ - if isinstance(self.type_, OAttr): - # use params name - return self.type_.attr_name - return self.type_ - - @classmethod - def extract(cls, val): - """ - Extracts value from Optional() instance - - :param val: - :return: original value if it's not Optional instance else - value of instance - """ - if isinstance(val, cls): - return val.getval() - return val - - def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub): return _cleanstringsub('_', s).rstrip('_') diff -r c387989f868f -r 3669e58f3002 kallithea/lib/vcs/backends/base.py --- a/kallithea/lib/vcs/backends/base.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/vcs/backends/base.py Fri Oct 30 23:44:18 2020 +0100 @@ -12,6 +12,7 @@ import datetime import itertools +from kallithea.lib.vcs.backends import get_backend from kallithea.lib.vcs.conf import settings from kallithea.lib.vcs.exceptions import (ChangesetError, EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, RepositoryError) @@ -27,7 +28,7 @@ **Attributes** ``DEFAULT_BRANCH_NAME`` - name of default branch (i.e. "trunk" for svn, "master" for git etc. + name of default branch (i.e. "master" for git etc. ``scm`` alias of scm, i.e. *git* or *hg* @@ -169,6 +170,20 @@ """ raise NotImplementedError + def get_diff_changesets(self, org_rev, other_repo, other_rev): + """ + Returns lists of changesets that can be merged from this repo @org_rev + to other_repo @other_rev + ... and the other way + ... and the ancestors that would be used for merge + + :param org_rev: the revision we want our compare to be made + :param other_repo: repo object, most likely the fork of org_repo. 
It has + all changesets that we need to obtain + :param other_rev: revision we want out compare to be made on other_repo + """ + raise NotImplementedError + def __getitem__(self, key): if isinstance(key, slice): return (self.get_changeset(rev) for rev in self.revisions[key]) @@ -324,8 +339,7 @@ ``short_id`` shortened (if apply) version of ``raw_id``; it would be simple - shortcut for ``raw_id[:12]`` for git/mercurial backends or same - as ``raw_id`` for subversion + shortcut for ``raw_id[:12]`` for git/mercurial backends ``revision`` revision number as integer @@ -1008,12 +1022,10 @@ @LazyProperty def branch(self): - from kallithea.lib.vcs.backends import get_backend return get_backend(self.alias).DEFAULT_BRANCH_NAME @LazyProperty def branches(self): - from kallithea.lib.vcs.backends import get_backend return [get_backend(self.alias).DEFAULT_BRANCH_NAME] @LazyProperty diff -r c387989f868f -r 3669e58f3002 kallithea/lib/vcs/backends/git/changeset.py --- a/kallithea/lib/vcs/backends/git/changeset.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/vcs/backends/git/changeset.py Fri Oct 30 23:44:18 2020 +0100 @@ -5,6 +5,7 @@ from dulwich import objects from dulwich.config import ConfigFile +from dulwich.walk import Walker from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset from kallithea.lib.vcs.conf import settings @@ -294,7 +295,6 @@ """ self._get_filectx(path) - from dulwich.walk import Walker include = [self.raw_id] walker = Walker(self.repository._repo.object_store, include, paths=[path], max_entries=1) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/vcs/backends/git/repository.py --- a/kallithea/lib/vcs/backends/git/repository.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/vcs/backends/git/repository.py Fri Oct 30 23:44:18 2020 +0100 @@ -20,16 +20,18 @@ from collections import OrderedDict import mercurial.util # import url as hg_url +from dulwich.client import SubprocessGitClient from dulwich.config import ConfigFile from dulwich.objects import Tag from dulwich.repo import NotGitRepository, Repo +from dulwich.server import update_server_info from kallithea.lib.vcs import subprocessio from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator from kallithea.lib.vcs.conf import settings from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError) -from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str +from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str from kallithea.lib.vcs.utils.helpers import get_urllib_request_handlers from kallithea.lib.vcs.utils.lazy import LazyProperty from kallithea.lib.vcs.utils.paths import abspath, get_user_home @@ -543,6 +545,58 @@ return CollectionGenerator(self, revs) + def get_diff_changesets(self, org_rev, other_repo, other_rev): + """ + Returns lists of changesets that can be merged from this repo @org_rev + to other_repo @other_rev + ... and the other way + ... and the ancestors that would be used for merge + + :param org_rev: the revision we want our compare to be made + :param other_repo: repo object, most likely the fork of org_repo. 
It has + all changesets that we need to obtain + :param other_rev: revision we want out compare to be made on other_repo + """ + org_changesets = [] + ancestors = None + if org_rev == other_rev: + other_changesets = [] + elif self != other_repo: + gitrepo = Repo(self.path) + SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo) + + gitrepo_remote = Repo(other_repo.path) + SubprocessGitClient(thin_packs=False).fetch(self.path, gitrepo_remote) + + revs = [ + ascii_str(x.commit.id) + for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)], + exclude=[ascii_bytes(org_rev)]) + ] + other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)] + if other_changesets: + ancestors = [other_changesets[0].parents[0].raw_id] + else: + # no changesets from other repo, ancestor is the other_rev + ancestors = [other_rev] + + gitrepo.close() + gitrepo_remote.close() + + else: + so = self.run_git_command( + ['log', '--reverse', '--pretty=format:%H', + '-s', '%s..%s' % (org_rev, other_rev)] + ) + other_changesets = [self.get_changeset(cs) + for cs in re.findall(r'[0-9a-fA-F]{40}', so)] + so = self.run_git_command( + ['merge-base', org_rev, other_rev] + ) + ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]] + + return other_changesets, org_changesets, ancestors + def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False, context=3): """ @@ -661,7 +715,6 @@ """ runs gits update-server-info command in this repo instance """ - from dulwich.server import update_server_info try: update_server_info(self._repo) except OSError as e: diff -r c387989f868f -r 3669e58f3002 kallithea/lib/vcs/backends/hg/repository.py --- a/kallithea/lib/vcs/backends/hg/repository.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/vcs/backends/hg/repository.py Fri Oct 30 23:44:18 2020 +0100 @@ -33,12 +33,13 @@ import mercurial.sshpeer import mercurial.tags import mercurial.ui +import mercurial.unionrepo import mercurial.util from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError) -from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str +from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str from kallithea.lib.vcs.utils.helpers import get_urllib_request_handlers from kallithea.lib.vcs.utils.lazy import LazyProperty from kallithea.lib.vcs.utils.paths import abspath @@ -272,7 +273,7 @@ self.get_changeset(rev1) self.get_changeset(rev2) if path: - file_filter = mercurial.match.exact(path) + file_filter = mercurial.match.exact([safe_bytes(path)]) else: file_filter = None @@ -335,7 +336,7 @@ # means it cannot be cloned raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e)) - if not url_prefix: # skip svn+http://... (and git+... too) + if not url_prefix: # skip git+http://... etc # now check if it's a proper hg repo try: mercurial.httppeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip') @@ -545,6 +546,60 @@ return CollectionGenerator(self, revs) + def get_diff_changesets(self, org_rev, other_repo, other_rev): + """ + Returns lists of changesets that can be merged from this repo @org_rev + to other_repo @other_rev + ... and the other way + ... 
and the ancestors that would be used for merge + + :param org_rev: the revision we want our compare to be made + :param other_repo: repo object, most likely the fork of org_repo. It has + all changesets that we need to obtain + :param other_rev: revision we want out compare to be made on other_repo + """ + ancestors = None + if org_rev == other_rev: + org_changesets = [] + other_changesets = [] + + else: + # case two independent repos + if self != other_repo: + hgrepo = mercurial.unionrepo.makeunionrepository(other_repo.baseui, + safe_bytes(other_repo.path), + safe_bytes(self.path)) + # all ancestors of other_rev will be in other_repo and + # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot + + # no remote compare do it on the same repository + else: + hgrepo = other_repo._repo + + ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in + hgrepo.revs(b"id(%s) & ::id(%s)", ascii_bytes(other_rev), ascii_bytes(org_rev))] + if ancestors: + log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev) + else: + log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev) + ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in + hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive! + + other_changesets = [ + other_repo.get_changeset(rev) + for rev in hgrepo.revs( + b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", + ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev)) + ] + org_changesets = [ + self.get_changeset(ascii_str(hgrepo[rev].hex())) + for rev in hgrepo.revs( + b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", + ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev)) + ] + + return other_changesets, org_changesets, ancestors + def pull(self, url): """ Tries to pull changes from external location. diff -r c387989f868f -r 3669e58f3002 kallithea/lib/vcs/backends/ssh.py --- a/kallithea/lib/vcs/backends/ssh.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/vcs/backends/ssh.py Fri Oct 30 23:44:18 2020 +0100 @@ -25,8 +25,7 @@ from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware from kallithea.lib.utils2 import set_hook_environment -from kallithea.model.db import Repository, User, UserSshKeys -from kallithea.model.meta import Session +from kallithea.model import db, meta log = logging.getLogger(__name__) @@ -62,7 +61,7 @@ """Verify basic sanity of the repository, and that the user is valid and has access - then serve the native VCS protocol for repository access.""" - dbuser = User.get(user_id) + dbuser = db.User.get(user_id) if dbuser is None: self.exit('User %r not found' % user_id) self.authuser = AuthUser.make(dbuser=dbuser, ip_addr=client_ip) @@ -70,11 +69,11 @@ if self.authuser is None: # not ok ... but already kind of authenticated by SSH ... but not really not authorized ... 
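Both backends now expose the same three-part result, so a hedged usage sketch may help; the helper name, repository paths and revision hashes below are assumed placeholders rather than values from this patch.

    from kallithea.lib.vcs.backends.hg.repository import MercurialRepository

    def summarize_compare(org_path, fork_path, org_rev, other_rev):
        # org_rev/other_rev are full node hashes; the paths point at existing repos
        org_repo = MercurialRepository(org_path)
        fork_repo = MercurialRepository(fork_path)
        other_changesets, org_changesets, ancestors = org_repo.get_diff_changesets(
            org_rev, fork_repo, other_rev)
        # other_changesets: changesets reachable from other_rev but not from org_rev,
        #                   oldest first -- the changesets a merge would bring in
        # org_changesets:   the opposite direction (filled in by the Mercurial backend)
        # ancestors:        revision(s) such a merge would be based on
        return len(other_changesets), len(org_changesets), ancestors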
self.exit('User %s from %s cannot be authorized' % (dbuser.username, client_ip)) - ssh_key = UserSshKeys.get(key_id) + ssh_key = db.UserSshKeys.get(key_id) if ssh_key is None: self.exit('SSH key %r not found' % key_id) ssh_key.last_seen = datetime.datetime.now() - Session().commit() + meta.Session().commit() if HasPermissionAnyMiddleware('repository.write', 'repository.admin')(self.authuser, self.repo_name): @@ -84,7 +83,7 @@ else: self.exit('Access to %r denied' % self.repo_name) - self.db_repo = Repository.get_by_repo_name(self.repo_name) + self.db_repo = db.Repository.get_by_repo_name(self.repo_name) if self.db_repo is None: self.exit("Repository '%s' not found" % self.repo_name) assert self.db_repo.repo_name == self.repo_name diff -r c387989f868f -r 3669e58f3002 kallithea/lib/vcs/nodes.py --- a/kallithea/lib/vcs/nodes.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/vcs/nodes.py Fri Oct 30 23:44:18 2020 +0100 @@ -14,6 +14,8 @@ import posixpath import stat +from pygments import lexers + from kallithea.lib.vcs.backends.base import EmptyChangeset from kallithea.lib.vcs.exceptions import NodeError, RemovedFileNodeError from kallithea.lib.vcs.utils import safe_bytes, safe_str @@ -305,7 +307,6 @@ encoding = None # try with pygments - from pygments import lexers try: mt = lexers.get_lexer_for_filename(self.name).mimetypes except lexers.ClassNotFound: @@ -335,7 +336,6 @@ Returns pygment's lexer class. Would try to guess lexer taking file's content, name and mimetype. """ - from pygments import lexers try: lexer = lexers.guess_lexer_for_filename(self.name, safe_str(self.content), stripnl=False) except lexers.ClassNotFound: @@ -587,7 +587,7 @@ self.path = name.rstrip('/') self.kind = NodeKind.SUBMODULE self.alias = alias - # we have to use emptyChangeset here since this can point to svn/git/hg + # we have to use emptyChangeset here since this can point to git/hg # submodules we cannot get from repository self.changeset = EmptyChangeset(changeset, alias=alias) self.url = url diff -r c387989f868f -r 3669e58f3002 kallithea/lib/vcs/subprocessio.py --- a/kallithea/lib/vcs/subprocessio.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/vcs/subprocessio.py Fri Oct 30 23:44:18 2020 +0100 @@ -380,23 +380,23 @@ if (returncode is not None # process has terminated and returncode != 0 ): # and it failed - self.output.stop() + getattr(self.output, 'stop', lambda: None)() self.error.stop() err = ''.join(self.error) raise EnvironmentError("Subprocess exited due to an error:\n" + err) return next(self.output) def throw(self, type, value=None, traceback=None): - if self.output.length or not self.output.done_reading: + if getattr(self.output, 'length') or not getattr(self.output, 'done_reading'): raise type(value) def close(self): try: - self.process.terminate() + getattr(self.output, 'terminate', lambda: None)() except: pass try: - self.output.close() + getattr(self.output, 'close', lambda: None)() except: pass try: diff -r c387989f868f -r 3669e58f3002 kallithea/lib/vcs/utils/__init__.py --- a/kallithea/lib/vcs/utils/__init__.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/vcs/utils/__init__.py Fri Oct 30 23:44:18 2020 +0100 @@ -9,6 +9,10 @@ import re import time +import chardet + +from kallithea.lib.vcs.conf import settings + def makedate(): lt = time.localtime() @@ -81,7 +85,6 @@ if not isinstance(s, bytes): # use __str__ and don't expect UnicodeDecodeError return str(s) - from kallithea.lib.vcs.conf import settings for enc in settings.DEFAULT_ENCODINGS: try: return str(s, enc) @@ -89,11 +92,10 @@ 
pass try: - import chardet encoding = chardet.detect(s)['encoding'] if encoding is not None: return s.decode(encoding) - except (ImportError, UnicodeDecodeError): + except UnicodeDecodeError: pass return str(s, settings.DEFAULT_ENCODINGS[0], 'replace') @@ -110,7 +112,6 @@ assert isinstance(s, str), repr(s) # bytes cannot coerse with __str__ or handle None or int - from kallithea.lib.vcs.conf import settings for enc in settings.DEFAULT_ENCODINGS: try: return s.encode(enc) diff -r c387989f868f -r 3669e58f3002 kallithea/lib/vcs/utils/helpers.py --- a/kallithea/lib/vcs/utils/helpers.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/lib/vcs/utils/helpers.py Fri Oct 30 23:44:18 2020 +0100 @@ -3,12 +3,16 @@ """ import datetime +import logging import os import re import time import urllib.request import mercurial.url +from pygments import highlight +from pygments.formatters import TerminalFormatter +from pygments.lexers import ClassNotFound, guess_lexer_for_filename from kallithea.lib.vcs.exceptions import RepositoryError, VCSError from kallithea.lib.vcs.utils import safe_str @@ -105,16 +109,6 @@ then returned output is colored. Otherwise unchanged content is returned. """ - import logging - try: - import pygments - pygments - except ImportError: - return code - from pygments import highlight - from pygments.lexers import guess_lexer_for_filename, ClassNotFound - from pygments.formatters import TerminalFormatter - try: lexer = guess_lexer_for_filename(name, code) formatter = TerminalFormatter() diff -r c387989f868f -r 3669e58f3002 kallithea/lib/webutils.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/kallithea/lib/webutils.py Fri Oct 30 23:44:18 2020 +0100 @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +""" +kallithea.lib.webutils +~~~~~~~~~~~~~~~~~~~~ + +Helper functions that rely on the current WSGI request, exposed in the TG2 +thread-local "global" variables. It should have few dependencies so it can be +imported anywhere - just like the global variables can be used everywhere. +""" + +from tg import request + + +class UrlGenerator(object): + """Emulate pylons.url in providing a wrapper around routes.url + + This code was added during migration from Pylons to Turbogears2. Pylons + already provided a wrapper like this, but Turbogears2 does not. + + When the routing of Kallithea is changed to use less Routes and more + Turbogears2-style routing, this class may disappear or change. + + url() (the __call__ method) returns the URL based on a route name and + arguments. + url.current() returns the URL of the current page with arguments applied. 
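A short usage sketch of this url helper; the route name and arguments are assumed examples rather than routes defined by this change.

    from kallithea.lib.webutils import url

    # within a request, resolve a named route to a URL string, much as pylons.url did;
    # 'summary_home' and its argument are assumed examples of an existing route
    link = url('summary_home', repo_name='some/repo')
    # url.current(anchor='comments') would return the current page's URL with the
    # extra arguments applied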
+ + Refer to documentation of Routes for details: + https://routes.readthedocs.io/en/latest/generating.html#generation + """ + def __call__(self, *args, **kwargs): + return request.environ['routes.url'](*args, **kwargs) + + def current(self, *args, **kwargs): + return request.environ['routes.url'].current(*args, **kwargs) + + +url = UrlGenerator() diff -r c387989f868f -r 3669e58f3002 kallithea/model/api_key.py --- a/kallithea/model/api_key.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/api_key.py Fri Oct 30 23:44:18 2020 +0100 @@ -29,8 +29,7 @@ import time from kallithea.lib.utils2 import generate_api_key -from kallithea.model.db import User, UserApiKeys -from kallithea.model.meta import Session +from kallithea.model import db, meta log = logging.getLogger(__name__) @@ -44,14 +43,14 @@ :param description: description of ApiKey :param lifetime: expiration time in seconds """ - user = User.guess_instance(user) + user = db.User.guess_instance(user) - new_api_key = UserApiKeys() + new_api_key = db.UserApiKeys() new_api_key.api_key = generate_api_key() new_api_key.user_id = user.user_id new_api_key.description = description new_api_key.expires = time.time() + (lifetime * 60) if lifetime != -1 else -1 - Session().add(new_api_key) + meta.Session().add(new_api_key) return new_api_key @@ -60,19 +59,19 @@ Deletes given api_key, if user is set it also filters the object for deletion by given user. """ - api_key = UserApiKeys.query().filter(UserApiKeys.api_key == api_key) + api_key = db.UserApiKeys.query().filter(db.UserApiKeys.api_key == api_key) if user is not None: - user = User.guess_instance(user) - api_key = api_key.filter(UserApiKeys.user_id == user.user_id) + user = db.User.guess_instance(user) + api_key = api_key.filter(db.UserApiKeys.user_id == user.user_id) api_key = api_key.scalar() - Session().delete(api_key) + meta.Session().delete(api_key) def get_api_keys(self, user, show_expired=True): - user = User.guess_instance(user) - user_api_keys = UserApiKeys.query() \ - .filter(UserApiKeys.user_id == user.user_id) + user = db.User.guess_instance(user) + user_api_keys = db.UserApiKeys.query() \ + .filter(db.UserApiKeys.user_id == user.user_id) if not show_expired: user_api_keys = user_api_keys.filter_by(is_expired=False) return user_api_keys diff -r c387989f868f -r 3669e58f3002 kallithea/model/changeset_status.py --- a/kallithea/model/changeset_status.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/changeset_status.py Fri Oct 30 23:44:18 2020 +0100 @@ -29,7 +29,7 @@ from sqlalchemy.orm import joinedload -from kallithea.model.db import ChangesetStatus, PullRequest, Repository, Session, User +from kallithea.model import db, meta log = logging.getLogger(__name__) @@ -39,22 +39,22 @@ def _get_status_query(self, repo, revision, pull_request, with_revisions=False): - repo = Repository.guess_instance(repo) + repo = db.Repository.guess_instance(repo) - q = ChangesetStatus.query() \ - .filter(ChangesetStatus.repo == repo) + q = db.ChangesetStatus.query() \ + .filter(db.ChangesetStatus.repo == repo) if not with_revisions: # only report the latest vote across all users! TODO: be smarter! 
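The ApiKeyModel hunk above keeps the expiry convention unchanged: lifetime is given in minutes and -1 means the key never expires. A tiny stand-alone sketch of that rule:

    import time

    def api_key_expiry(lifetime_minutes):
        # -1 disables expiry; otherwise store an absolute unix timestamp
        if lifetime_minutes == -1:
            return -1
        return time.time() + lifetime_minutes * 60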
- q = q.filter(ChangesetStatus.version == 0) + q = q.filter(db.ChangesetStatus.version == 0) if revision: - q = q.filter(ChangesetStatus.revision == revision) + q = q.filter(db.ChangesetStatus.revision == revision) elif pull_request: - pull_request = PullRequest.guess_instance(pull_request) - q = q.filter(ChangesetStatus.pull_request == pull_request) + pull_request = db.PullRequest.guess_instance(pull_request) + q = q.filter(db.ChangesetStatus.pull_request == pull_request) else: raise Exception('Please specify revision or pull_request') - q = q.order_by(ChangesetStatus.version.asc()) + q = q.order_by(db.ChangesetStatus.version.asc()) return q def _calculate_status(self, statuses): @@ -64,15 +64,15 @@ """ if not statuses: - return ChangesetStatus.STATUS_UNDER_REVIEW + return db.ChangesetStatus.STATUS_UNDER_REVIEW - if all(st and st.status == ChangesetStatus.STATUS_APPROVED for st in statuses): - return ChangesetStatus.STATUS_APPROVED + if all(st and st.status == db.ChangesetStatus.STATUS_APPROVED for st in statuses): + return db.ChangesetStatus.STATUS_APPROVED - if any(st and st.status == ChangesetStatus.STATUS_REJECTED for st in statuses): - return ChangesetStatus.STATUS_REJECTED + if any(st and st.status == db.ChangesetStatus.STATUS_REJECTED for st in statuses): + return db.ChangesetStatus.STATUS_REJECTED - return ChangesetStatus.STATUS_UNDER_REVIEW + return db.ChangesetStatus.STATUS_UNDER_REVIEW def calculate_pull_request_result(self, pull_request): """ @@ -94,9 +94,9 @@ for user in pull_request.get_reviewer_users(): st = cs_statuses.get(user.username) relevant_statuses.append(st) - status = ChangesetStatus.STATUS_NOT_REVIEWED if st is None else st.status - if status in (ChangesetStatus.STATUS_NOT_REVIEWED, - ChangesetStatus.STATUS_UNDER_REVIEW): + status = db.ChangesetStatus.STATUS_NOT_REVIEWED if st is None else st.status + if status in (db.ChangesetStatus.STATUS_NOT_REVIEWED, + db.ChangesetStatus.STATUS_UNDER_REVIEW): pull_request_pending_reviewers.append(user) pull_request_reviewers.append((user, status)) @@ -130,7 +130,7 @@ # returned from pull_request status = q.first() if as_str: - return str(status.status) if status else ChangesetStatus.DEFAULT + return str(status.status) if status else db.ChangesetStatus.DEFAULT return status def set_status(self, repo, status, user, comment, revision=None, @@ -146,20 +146,20 @@ :param revision: :param pull_request: """ - repo = Repository.guess_instance(repo) + repo = db.Repository.guess_instance(repo) - q = ChangesetStatus.query() + q = db.ChangesetStatus.query() if revision is not None: assert pull_request is None - q = q.filter(ChangesetStatus.repo == repo) - q = q.filter(ChangesetStatus.revision == revision) + q = q.filter(db.ChangesetStatus.repo == repo) + q = q.filter(db.ChangesetStatus.revision == revision) revisions = [revision] else: assert pull_request is not None - pull_request = PullRequest.guess_instance(pull_request) + pull_request = db.PullRequest.guess_instance(pull_request) repo = pull_request.org_repo - q = q.filter(ChangesetStatus.repo == repo) - q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions)) + q = q.filter(db.ChangesetStatus.repo == repo) + q = q.filter(db.ChangesetStatus.revision.in_(pull_request.revisions)) revisions = pull_request.revisions cur_statuses = q.all() @@ -169,14 +169,14 @@ new_statuses = [] for rev in revisions: - new_status = ChangesetStatus() + new_status = db.ChangesetStatus() new_status.version = 0 # default - new_status.author = User.guess_instance(user) - new_status.repo = 
Repository.guess_instance(repo) + new_status.author = db.User.guess_instance(user) + new_status.repo = db.Repository.guess_instance(repo) new_status.status = status new_status.comment = comment new_status.revision = rev new_status.pull_request = pull_request new_statuses.append(new_status) - Session().add(new_status) + meta.Session().add(new_status) return new_statuses diff -r c387989f868f -r 3669e58f3002 kallithea/model/comment.py --- a/kallithea/model/comment.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/comment.py Fri Oct 30 23:44:18 2020 +0100 @@ -31,9 +31,8 @@ from tg.i18n import ugettext as _ from kallithea.lib import helpers as h -from kallithea.lib.utils2 import extract_mentioned_users -from kallithea.model.db import ChangesetComment, PullRequest, Repository, User -from kallithea.model.meta import Session +from kallithea.lib.utils import extract_mentioned_users +from kallithea.model import db, meta from kallithea.model.notification import NotificationModel @@ -41,15 +40,15 @@ def _list_changeset_commenters(revision): - return (Session().query(User) - .join(ChangesetComment.author) - .filter(ChangesetComment.revision == revision) + return (meta.Session().query(db.User) + .join(db.ChangesetComment.author) + .filter(db.ChangesetComment.revision == revision) .all()) def _list_pull_request_commenters(pull_request): - return (Session().query(User) - .join(ChangesetComment.author) - .filter(ChangesetComment.pull_request_id == pull_request.pull_request_id) + return (meta.Session().query(db.User) + .join(db.ChangesetComment.author) + .filter(db.ChangesetComment.pull_request_id == pull_request.pull_request_id) .all()) @@ -88,7 +87,7 @@ # get the current participants of this changeset recipients = _list_changeset_commenters(revision) # add changeset author if it's known locally - cs_author = User.get_from_cs_author(cs.author) + cs_author = db.User.get_from_cs_author(cs.author) if not cs_author: # use repo owner if we cannot extract the author correctly # FIXME: just use committer name even if not a user @@ -105,6 +104,7 @@ 'message': cs.message, 'message_short': h.shorter(cs.message, 50, firstline=True), 'cs_author': cs_author, + 'cs_author_username': cs_author.username, 'repo_name': repo.repo_name, 'short_id': h.short_id(revision), 'branch': cs.branch, @@ -175,9 +175,9 @@ log.warning('Missing text for comment, skipping...') return None - repo = Repository.guess_instance(repo) - author = User.guess_instance(author) - comment = ChangesetComment() + repo = db.Repository.guess_instance(repo) + author = db.User.guess_instance(author) + comment = db.ChangesetComment() comment.repo = repo comment.author = author comment.text = text @@ -187,13 +187,13 @@ if revision is not None: comment.revision = revision elif pull_request is not None: - pull_request = PullRequest.guess_instance(pull_request) + pull_request = db.PullRequest.guess_instance(pull_request) comment.pull_request = pull_request else: raise Exception('Please specify revision or pull_request_id') - Session().add(comment) - Session().flush() + meta.Session().add(comment) + meta.Session().flush() if send_email: (subj, body, recipients, notification_type, @@ -228,8 +228,8 @@ return comment def delete(self, comment): - comment = ChangesetComment.guess_instance(comment) - Session().delete(comment) + comment = db.ChangesetComment.guess_instance(comment) + meta.Session().delete(comment) return comment @@ -269,34 +269,34 @@ if inline is None and f_path is not None: raise Exception("f_path only makes sense for inline comments.") - q = 
Session().query(ChangesetComment) + q = meta.Session().query(db.ChangesetComment) if inline: if f_path is not None: # inline comments for a given file... - q = q.filter(ChangesetComment.f_path == f_path) + q = q.filter(db.ChangesetComment.f_path == f_path) if line_no is None: # ... on any line - q = q.filter(ChangesetComment.line_no != None) + q = q.filter(db.ChangesetComment.line_no != None) else: # ... on specific line - q = q.filter(ChangesetComment.line_no == line_no) + q = q.filter(db.ChangesetComment.line_no == line_no) else: # all inline comments - q = q.filter(ChangesetComment.line_no != None) \ - .filter(ChangesetComment.f_path != None) + q = q.filter(db.ChangesetComment.line_no != None) \ + .filter(db.ChangesetComment.f_path != None) else: # all general comments - q = q.filter(ChangesetComment.line_no == None) \ - .filter(ChangesetComment.f_path == None) + q = q.filter(db.ChangesetComment.line_no == None) \ + .filter(db.ChangesetComment.f_path == None) if revision is not None: - q = q.filter(ChangesetComment.revision == revision) \ - .filter(ChangesetComment.repo_id == repo_id) + q = q.filter(db.ChangesetComment.revision == revision) \ + .filter(db.ChangesetComment.repo_id == repo_id) elif pull_request is not None: - pull_request = PullRequest.guess_instance(pull_request) - q = q.filter(ChangesetComment.pull_request == pull_request) + pull_request = db.PullRequest.guess_instance(pull_request) + q = q.filter(db.ChangesetComment.pull_request == pull_request) else: raise Exception('Please specify either revision or pull_request') - return q.order_by(ChangesetComment.created_on).all() + return q.order_by(db.ChangesetComment.created_on).all() diff -r c387989f868f -r 3669e58f3002 kallithea/model/db.py --- a/kallithea/model/db.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/db.py Fri Oct 30 23:44:18 2020 +0100 @@ -44,27 +44,21 @@ from webob.exc import HTTPNotFound import kallithea -from kallithea.lib import ext_json +from kallithea.lib import ext_json, ssh from kallithea.lib.exceptions import DefaultUserException -from kallithea.lib.utils2 import (Optional, ascii_bytes, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int, safe_str, str2bool, - urlreadable) +from kallithea.lib.utils2 import asbool, ascii_bytes, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int, safe_str, urlreadable from kallithea.lib.vcs import get_backend from kallithea.lib.vcs.backends.base import EmptyChangeset from kallithea.lib.vcs.utils.helpers import get_scm -from kallithea.model.meta import Base, Session +from kallithea.model import meta -URL_SEP = '/' log = logging.getLogger(__name__) #============================================================================== # BASE CLASSES #============================================================================== -def _hash_key(k): - return hashlib.md5(safe_bytes(k)).hexdigest() - - class BaseDbModel(object): """ Base Model for all classes @@ -113,7 +107,7 @@ @classmethod def query(cls): - return Session().query(cls) + return meta.Session().query(cls) @classmethod def get(cls, id_): @@ -163,7 +157,7 @@ @classmethod def delete(cls, id_): obj = cls.query().get(id_) - Session().delete(obj) + meta.Session().delete(obj) def __repr__(self): return '' % (self.__class__.__name__) @@ -171,11 +165,10 @@ _table_args_default_dict = {'extend_existing': True, 'mysql_engine': 'InnoDB', - 'mysql_charset': 'utf8', 'sqlite_autoincrement': True, } -class Setting(Base, BaseDbModel): +class Setting(meta.Base, BaseDbModel): 
__tablename__ = 'settings' __table_args__ = ( _table_args_default_dict, @@ -185,10 +178,9 @@ 'str': safe_bytes, 'int': safe_int, 'unicode': safe_str, - 'bool': str2bool, + 'bool': asbool, 'list': functools.partial(aslist, sep=',') } - DEFAULT_UPDATE_URL = '' app_settings_id = Column(Integer(), primary_key=True) app_settings_name = Column(String(255), nullable=False, unique=True) @@ -249,10 +241,10 @@ return res @classmethod - def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')): + def create_or_update(cls, key, val=None, type=None): """ Creates or updates Kallithea setting. If updates are triggered, it will only - update parameters that are explicitly set. Optional instance will be skipped. + update parameters that are explicitly set. 'None' values will be skipped. :param key: :param val: @@ -261,16 +253,16 @@ """ res = cls.get_by_name(key) if res is None: - val = Optional.extract(val) - type = Optional.extract(type) + # new setting + val = val if val is not None else '' + type = type if type is not None else 'unicode' res = cls(key, val, type) - Session().add(res) + meta.Session().add(res) else: - res.app_settings_name = key - if not isinstance(val, Optional): + if val is not None: # update if set res.app_settings_value = val - if not isinstance(type, Optional): + if type is not None: # update if set res.app_settings_type = type return res @@ -312,8 +304,10 @@ @classmethod def get_server_info(cls): + import platform + import pkg_resources - import platform + from kallithea.lib.utils import check_git_version mods = [(p.project_name, p.version) for p in pkg_resources.working_set] info = { @@ -327,7 +321,7 @@ return info -class Ui(Base, BaseDbModel): +class Ui(meta.Base, BaseDbModel): __tablename__ = 'ui' __table_args__ = ( Index('ui_ui_section_ui_key_idx', 'ui_section', 'ui_key'), @@ -355,7 +349,7 @@ setting = cls.get_by_key(section, key) if setting is None: setting = cls(ui_section=section, ui_key=key) - Session().add(setting) + meta.Session().add(setting) return setting @classmethod @@ -390,7 +384,7 @@ self.ui_section, self.ui_key, self.ui_value) -class User(Base, BaseDbModel): +class User(meta.Base, BaseDbModel): __tablename__ = 'users' __table_args__ = ( Index('u_username_idx', 'username'), @@ -600,7 +594,8 @@ :param author: """ - from kallithea.lib.helpers import email, author_name + from kallithea.lib.helpers import author_name, email + # Valid email in the attribute passed, see if they're in the system _email = email(author) if _email: @@ -669,7 +664,7 @@ return data -class UserApiKeys(Base, BaseDbModel): +class UserApiKeys(meta.Base, BaseDbModel): __tablename__ = 'user_api_keys' __table_args__ = ( Index('uak_api_key_idx', 'api_key'), @@ -691,7 +686,7 @@ return (self.expires != -1) & (time.time() > self.expires) -class UserEmailMap(Base, BaseDbModel): +class UserEmailMap(meta.Base, BaseDbModel): __tablename__ = 'user_email_map' __table_args__ = ( Index('uem_email_idx', 'email'), @@ -706,7 +701,7 @@ @validates('_email') def validate_email(self, key, email): # check if this email is not main one - main_email = Session().query(User).filter(User.email == email).scalar() + main_email = meta.Session().query(User).filter(User.email == email).scalar() if main_email is not None: raise AttributeError('email %s is present is user table' % email) return email @@ -720,7 +715,7 @@ self._email = val.lower() if val else None -class UserIpMap(Base, BaseDbModel): +class UserIpMap(meta.Base, BaseDbModel): __tablename__ = 'user_ip_map' __table_args__ = ( UniqueConstraint('user_id', 
'ip_addr'), @@ -748,7 +743,7 @@ return "<%s %s: %s>" % (self.__class__.__name__, self.user_id, self.ip_addr) -class UserLog(Base, BaseDbModel): +class UserLog(meta.Base, BaseDbModel): __tablename__ = 'user_logs' __table_args__ = ( _table_args_default_dict, @@ -776,7 +771,7 @@ repository = relationship('Repository', cascade='') -class UserGroup(Base, BaseDbModel): +class UserGroup(meta.Base, BaseDbModel): __tablename__ = 'users_groups' __table_args__ = ( _table_args_default_dict, @@ -857,7 +852,7 @@ return data -class UserGroupMember(Base, BaseDbModel): +class UserGroupMember(meta.Base, BaseDbModel): __tablename__ = 'users_groups_members' __table_args__ = ( _table_args_default_dict, @@ -875,7 +870,7 @@ self.user_id = u_id -class RepositoryField(Base, BaseDbModel): +class RepositoryField(meta.Base, BaseDbModel): __tablename__ = 'repositories_fields' __table_args__ = ( UniqueConstraint('repository_id', 'field_key'), # no-multi field @@ -913,7 +908,7 @@ return row -class Repository(Base, BaseDbModel): +class Repository(meta.Base, BaseDbModel): __tablename__ = 'repositories' __table_args__ = ( Index('r_repo_name_idx', 'repo_name'), @@ -1029,7 +1024,7 @@ :param cls: :param repo_name: """ - return URL_SEP.join(repo_name.split(os.sep)) + return kallithea.URL_SEP.join(repo_name.split(os.sep)) @classmethod def guess_instance(cls, value): @@ -1040,9 +1035,9 @@ """Get the repo, defaulting to database case sensitivity. case_insensitive will be slower and should only be specified if necessary.""" if case_insensitive: - q = Session().query(cls).filter(sqlalchemy.func.lower(cls.repo_name) == sqlalchemy.func.lower(repo_name)) + q = meta.Session().query(cls).filter(sqlalchemy.func.lower(cls.repo_name) == sqlalchemy.func.lower(repo_name)) else: - q = Session().query(cls).filter(cls.repo_name == repo_name) + q = meta.Session().query(cls).filter(cls.repo_name == repo_name) q = q.options(joinedload(Repository.fork)) \ .options(joinedload(Repository.owner)) \ .options(joinedload(Repository.group)) @@ -1055,7 +1050,7 @@ assert repo_full_path.startswith(base_full_path + os.path.sep) repo_name = repo_full_path[len(base_full_path) + 1:] repo_name = cls.normalize_repo_name(repo_name) - return cls.get_by_repo_name(repo_name.strip(URL_SEP)) + return cls.get_by_repo_name(repo_name.strip(kallithea.URL_SEP)) @classmethod def get_repo_forks(cls, repo_id): @@ -1077,7 +1072,7 @@ @property def just_name(self): - return self.repo_name.split(URL_SEP)[-1] + return self.repo_name.split(kallithea.URL_SEP)[-1] @property def groups_with_parents(self): @@ -1100,7 +1095,7 @@ # we need to split the name by / since this is how we store the # names in the database, but that eventually needs to be converted # into a valid system path - p += self.repo_name.split(URL_SEP) + p += self.repo_name.split(kallithea.URL_SEP) return os.path.join(*p) def get_new_name(self, repo_name): @@ -1110,7 +1105,7 @@ :param group_name: """ path_prefix = self.group.full_path_splitted if self.group else [] - return URL_SEP.join(path_prefix + [repo_name]) + return kallithea.URL_SEP.join(path_prefix + [repo_name]) @property def _ui(self): @@ -1163,8 +1158,8 @@ )) if with_pullrequests: data['pull_requests'] = repo.pull_requests_other - rc_config = Setting.get_app_settings() - repository_fields = str2bool(rc_config.get('repository_fields')) + settings = Setting.get_app_settings() + repository_fields = asbool(settings.get('repository_fields')) if repository_fields: for f in self.extra_fields: data[f.field_key_prefixed] = f.field_value @@ -1253,7 +1248,7 @@ 
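The Repository hunks above replace the module-local URL_SEP with kallithea.URL_SEP; the normalization itself is unchanged. A stand-alone sketch, with the separator hard-coded here only to keep the snippet self-contained:

    import os

    URL_SEP = '/'  # mirrors kallithea.URL_SEP

    def normalize_repo_name(repo_name):
        # store repository names with '/' regardless of the OS path separator
        return URL_SEP.join(repo_name.split(os.sep))

    print(normalize_repo_name(os.path.join('mygroup', 'myrepo')))   # mygroup/myrepo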
self.repo_name, cs_cache) self.updated_on = last_change self.changeset_cache = cs_cache - Session().commit() + meta.Session().commit() else: log.debug('changeset_cache for %s already up to date with %s', self.repo_name, cs_cache['raw_id']) @@ -1362,7 +1357,7 @@ ) -class RepoGroup(Base, BaseDbModel): +class RepoGroup(meta.Base, BaseDbModel): __tablename__ = 'groups' __table_args__ = ( _table_args_default_dict, @@ -1449,7 +1444,7 @@ @property def name(self): - return self.group_name.split(URL_SEP)[-1] + return self.group_name.split(kallithea.URL_SEP)[-1] @property def full_path(self): @@ -1457,7 +1452,7 @@ @property def full_path_splitted(self): - return self.group_name.split(URL_SEP) + return self.group_name.split(kallithea.URL_SEP) @property def repositories(self): @@ -1512,7 +1507,7 @@ """ path_prefix = (self.parent_group.full_path_splitted if self.parent_group else []) - return URL_SEP.join(path_prefix + [group_name]) + return kallithea.URL_SEP.join(path_prefix + [group_name]) def get_api_data(self): """ @@ -1531,7 +1526,7 @@ return data -class Permission(Base, BaseDbModel): +class Permission(meta.Base, BaseDbModel): __tablename__ = 'permissions' __table_args__ = ( Index('p_perm_name_idx', 'permission_name'), @@ -1556,18 +1551,12 @@ ('usergroup.write', _('Default user has write access to new user groups')), ('usergroup.admin', _('Default user has admin access to new user groups')), - ('hg.repogroup.create.false', _('Only admins can create repository groups')), - ('hg.repogroup.create.true', _('Non-admins can create repository groups')), - ('hg.usergroup.create.false', _('Only admins can create user groups')), ('hg.usergroup.create.true', _('Non-admins can create user groups')), ('hg.create.none', _('Only admins can create top level repositories')), ('hg.create.repository', _('Non-admins can create top level repositories')), - ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), - ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), - ('hg.fork.none', _('Only admins can fork repositories')), ('hg.fork.repository', _('Non-admins can fork repositories')), @@ -1585,7 +1574,6 @@ 'group.read', 'usergroup.read', 'hg.create.repository', - 'hg.create.write_on_repogroup.true', 'hg.fork.repository', 'hg.register.manual_activate', 'hg.extern_activate.auto', @@ -1610,9 +1598,6 @@ 'usergroup.write': 3, 'usergroup.admin': 4, - 'hg.repogroup.create.false': 0, - 'hg.repogroup.create.true': 1, - 'hg.usergroup.create.false': 0, 'hg.usergroup.create.true': 1, @@ -1622,9 +1607,6 @@ 'hg.create.none': 0, 'hg.create.repository': 1, - 'hg.create.write_on_repogroup.false': 0, - 'hg.create.write_on_repogroup.true': 1, - 'hg.register.none': 0, 'hg.register.manual_activate': 1, 'hg.register.auto_activate': 2, @@ -1651,7 +1633,7 @@ @classmethod def get_default_perms(cls, default_user_id): - q = Session().query(UserRepoToPerm) \ + q = meta.Session().query(UserRepoToPerm) \ .options(joinedload(UserRepoToPerm.repository)) \ .options(joinedload(UserRepoToPerm.permission)) \ .filter(UserRepoToPerm.user_id == default_user_id) @@ -1660,7 +1642,7 @@ @classmethod def get_default_group_perms(cls, default_user_id): - q = Session().query(UserRepoGroupToPerm) \ + q = meta.Session().query(UserRepoGroupToPerm) \ .options(joinedload(UserRepoGroupToPerm.group)) \ .options(joinedload(UserRepoGroupToPerm.permission)) \ .filter(UserRepoGroupToPerm.user_id == default_user_id) @@ -1669,7 +1651,7 @@ @classmethod 
def get_default_user_group_perms(cls, default_user_id): - q = Session().query(UserUserGroupToPerm) \ + q = meta.Session().query(UserUserGroupToPerm) \ .options(joinedload(UserUserGroupToPerm.user_group)) \ .options(joinedload(UserUserGroupToPerm.permission)) \ .filter(UserUserGroupToPerm.user_id == default_user_id) @@ -1677,7 +1659,7 @@ return q.all() -class UserRepoToPerm(Base, BaseDbModel): +class UserRepoToPerm(meta.Base, BaseDbModel): __tablename__ = 'repo_to_perm' __table_args__ = ( UniqueConstraint('user_id', 'repository_id', 'permission_id'), @@ -1699,7 +1681,7 @@ n.user = user n.repository = repository n.permission = permission - Session().add(n) + meta.Session().add(n) return n def __repr__(self): @@ -1707,7 +1689,7 @@ self.__class__.__name__, self.user, self.repository, self.permission) -class UserUserGroupToPerm(Base, BaseDbModel): +class UserUserGroupToPerm(meta.Base, BaseDbModel): __tablename__ = 'user_user_group_to_perm' __table_args__ = ( UniqueConstraint('user_id', 'user_group_id', 'permission_id'), @@ -1729,7 +1711,7 @@ n.user = user n.user_group = user_group n.permission = permission - Session().add(n) + meta.Session().add(n) return n def __repr__(self): @@ -1737,7 +1719,7 @@ self.__class__.__name__, self.user, self.user_group, self.permission) -class UserToPerm(Base, BaseDbModel): +class UserToPerm(meta.Base, BaseDbModel): __tablename__ = 'user_to_perm' __table_args__ = ( UniqueConstraint('user_id', 'permission_id'), @@ -1756,7 +1738,7 @@ self.__class__.__name__, self.user, self.permission) -class UserGroupRepoToPerm(Base, BaseDbModel): +class UserGroupRepoToPerm(meta.Base, BaseDbModel): __tablename__ = 'users_group_repo_to_perm' __table_args__ = ( UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), @@ -1778,7 +1760,7 @@ n.users_group = users_group n.repository = repository n.permission = permission - Session().add(n) + meta.Session().add(n) return n def __repr__(self): @@ -1786,7 +1768,7 @@ self.__class__.__name__, self.users_group, self.repository, self.permission) -class UserGroupUserGroupToPerm(Base, BaseDbModel): +class UserGroupUserGroupToPerm(meta.Base, BaseDbModel): __tablename__ = 'user_group_user_group_to_perm' __table_args__ = ( UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'), @@ -1808,7 +1790,7 @@ n.target_user_group = target_user_group n.user_group = user_group n.permission = permission - Session().add(n) + meta.Session().add(n) return n def __repr__(self): @@ -1816,7 +1798,7 @@ self.__class__.__name__, self.user_group, self.target_user_group, self.permission) -class UserGroupToPerm(Base, BaseDbModel): +class UserGroupToPerm(meta.Base, BaseDbModel): __tablename__ = 'users_group_to_perm' __table_args__ = ( UniqueConstraint('users_group_id', 'permission_id',), @@ -1831,7 +1813,7 @@ permission = relationship('Permission') -class UserRepoGroupToPerm(Base, BaseDbModel): +class UserRepoGroupToPerm(meta.Base, BaseDbModel): __tablename__ = 'user_repo_group_to_perm' __table_args__ = ( UniqueConstraint('user_id', 'group_id', 'permission_id'), @@ -1853,11 +1835,11 @@ n.user = user n.group = repository_group n.permission = permission - Session().add(n) + meta.Session().add(n) return n -class UserGroupRepoGroupToPerm(Base, BaseDbModel): +class UserGroupRepoGroupToPerm(meta.Base, BaseDbModel): __tablename__ = 'users_group_repo_group_to_perm' __table_args__ = ( UniqueConstraint('users_group_id', 'group_id'), @@ -1879,11 +1861,11 @@ n.users_group = user_group n.group = repository_group n.permission = permission - Session().add(n) 
+ meta.Session().add(n) return n -class Statistics(Base, BaseDbModel): +class Statistics(meta.Base, BaseDbModel): __tablename__ = 'statistics' __table_args__ = ( _table_args_default_dict, @@ -1899,7 +1881,7 @@ repository = relationship('Repository', single_parent=True) -class UserFollowing(Base, BaseDbModel): +class UserFollowing(meta.Base, BaseDbModel): __tablename__ = 'user_followings' __table_args__ = ( UniqueConstraint('user_id', 'follows_repository_id', name='uq_user_followings_user_repo'), @@ -1923,7 +1905,7 @@ return cls.query().filter(cls.follows_repository_id == repo_id) -class ChangesetComment(Base, BaseDbModel): +class ChangesetComment(meta.Base, BaseDbModel): __tablename__ = 'changeset_comments' __table_args__ = ( Index('cc_revision_idx', 'revision'), @@ -1970,7 +1952,7 @@ return self.created_on > datetime.datetime.now() - datetime.timedelta(minutes=5) -class ChangesetStatus(Base, BaseDbModel): +class ChangesetStatus(meta.Base, BaseDbModel): __tablename__ = 'changeset_statuses' __table_args__ = ( Index('cs_revision_idx', 'revision'), @@ -2033,7 +2015,7 @@ ) -class PullRequest(Base, BaseDbModel): +class PullRequest(meta.Base, BaseDbModel): __tablename__ = 'pull_requests' __table_args__ = ( Index('pr_org_repo_id_idx', 'org_repo_id'), @@ -2155,6 +2137,8 @@ status=self.status, comments=self.comments, statuses=self.statuses, + created_on=self.created_on.replace(microsecond=0), + updated_on=self.updated_on.replace(microsecond=0), ) def url(self, **kwargs): @@ -2173,10 +2157,11 @@ pull_request_id=self.pull_request_id, **kwargs) -class PullRequestReviewer(Base, BaseDbModel): +class PullRequestReviewer(meta.Base, BaseDbModel): __tablename__ = 'pull_request_reviewers' __table_args__ = ( Index('pull_request_reviewers_user_id_idx', 'user_id'), + UniqueConstraint('pull_request_id', 'user_id'), _table_args_default_dict, ) @@ -2204,7 +2189,7 @@ __tablename__ = 'user_to_notification' -class Gist(Base, BaseDbModel): +class Gist(meta.Base, BaseDbModel): __tablename__ = 'gists' __table_args__ = ( Index('g_gist_access_id_idx', 'gist_access_id'), @@ -2291,7 +2276,7 @@ return get_repo(os.path.join(gist_base_path, self.gist_access_id)) -class UserSshKeys(Base, BaseDbModel): +class UserSshKeys(meta.Base, BaseDbModel): __tablename__ = 'user_ssh_keys' __table_args__ = ( Index('usk_fingerprint_idx', 'fingerprint'), @@ -2315,8 +2300,12 @@ @public_key.setter def public_key(self, full_key): - # the full public key is too long to be suitable as database key - instead, - # use fingerprints similar to 'ssh-keygen -E sha256 -lf ~/.ssh/id_rsa.pub' + """The full public key is too long to be suitable as database key. + Instead, as a side-effect of setting the public key string, compute the + fingerprints according to https://tools.ietf.org/html/rfc4716#section-4 + BUT using sha256 instead of md5, similar to 'ssh-keygen -E sha256 -lf + ~/.ssh/id_rsa.pub' . 
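For reference, the fingerprint format described in the docstring above can be reproduced stand-alone as sketched below; Kallithea itself now parses the key via kallithea.lib.ssh instead of naive string splitting:

    import base64
    import hashlib

    def sha256_fingerprint(pub_key_line):
        # pub_key_line is e.g. "ssh-rsa AAAA... comment"; the fingerprint is the
        # unpadded base64 of the sha256 digest of the decoded key blob, matching
        # 'ssh-keygen -E sha256 -lf <file>' output without the "SHA256:" prefix.
        key_b64 = pub_key_line.split(None, 2)[1]
        key_bytes = base64.b64decode(key_b64)
        return base64.b64encode(hashlib.sha256(key_bytes).digest()).rstrip(b'=').decode()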
+ """ + keytype, key_bytes, comment = ssh.parse_pub_key(full_key) self._public_key = full_key - enc_key = safe_bytes(full_key.split(" ")[1]) - self.fingerprint = base64.b64encode(hashlib.sha256(base64.b64decode(enc_key)).digest()).replace(b'\n', b'').rstrip(b'=').decode() + self.fingerprint = base64.b64encode(hashlib.sha256(key_bytes).digest()).replace(b'\n', b'').rstrip(b'=').decode() diff -r c387989f868f -r 3669e58f3002 kallithea/model/forms.py --- a/kallithea/model/forms.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/forms.py Fri Oct 30 23:44:18 2020 +0100 @@ -39,7 +39,7 @@ from formencode import All from tg.i18n import ugettext as _ -from kallithea import BACKENDS +import kallithea from kallithea.model import validators as v @@ -238,7 +238,7 @@ return _PasswordResetConfirmationForm -def RepoForm(edit=False, old_data=None, supported_backends=BACKENDS, +def RepoForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS, repo_groups=None, landing_revs=None): old_data = old_data or {} repo_groups = repo_groups or [] @@ -315,7 +315,7 @@ return _RepoFieldForm -def RepoForkForm(edit=False, old_data=None, supported_backends=BACKENDS, +def RepoForkForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS, repo_groups=None, landing_revs=None): old_data = old_data or {} repo_groups = repo_groups or [] @@ -388,7 +388,6 @@ hooks_changegroup_repo_size = v.StringBoolean(if_missing=False) extensions_largefiles = v.StringBoolean(if_missing=False) - extensions_hgsubversion = v.StringBoolean(if_missing=False) extensions_hggit = v.StringBoolean(if_missing=False) return _ApplicationUiSettingsForm @@ -396,7 +395,6 @@ def DefaultPermissionsForm(repo_perms_choices, group_perms_choices, user_group_perms_choices, create_choices, - create_on_write_choices, repo_group_create_choices, user_group_create_choices, fork_choices, register_choices, extern_activate_choices): class _DefaultPermissionsForm(formencode.Schema): @@ -411,9 +409,7 @@ default_user_group_perm = v.OneOf(user_group_perms_choices) default_repo_create = v.OneOf(create_choices) - create_on_write = v.OneOf(create_on_write_choices) default_user_group_create = v.OneOf(user_group_create_choices) - #default_repo_group_create = v.OneOf(repo_group_create_choices) #not impl. 
yet default_fork = v.OneOf(fork_choices) default_register = v.OneOf(register_choices) @@ -435,7 +431,7 @@ return _CustomDefaultPermissionsForm -def DefaultsForm(edit=False, old_data=None, supported_backends=BACKENDS): +def DefaultsForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS): class _DefaultsForm(formencode.Schema): allow_extra_fields = True filter_extra_fields = True diff -r c387989f868f -r 3669e58f3002 kallithea/model/gist.py --- a/kallithea/model/gist.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/gist.py Fri Oct 30 23:44:18 2020 +0100 @@ -34,7 +34,7 @@ from kallithea.lib import ext_json from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, time_to_datetime -from kallithea.model.db import Gist, Session, User +from kallithea.model import db, meta from kallithea.model.repo import RepoModel from kallithea.model.scm import ScmModel @@ -85,7 +85,7 @@ f.write(ascii_bytes(ext_json.dumps(metadata))) def get_gist(self, gist): - return Gist.guess_instance(gist) + return db.Gist.guess_instance(gist) def get_gist_files(self, gist_access_id, revision=None): """ @@ -93,12 +93,12 @@ :param gist_access_id: """ - repo = Gist.get_by_access_id(gist_access_id) + repo = db.Gist.get_by_access_id(gist_access_id) cs = repo.scm_instance.get_changeset(revision) return cs, [n for n in cs.get_node('/')] def create(self, description, owner, ip_addr, gist_mapping, - gist_type=Gist.GIST_PUBLIC, lifetime=-1): + gist_type=db.Gist.GIST_PUBLIC, lifetime=-1): """ :param description: description of the gist @@ -107,7 +107,7 @@ :param gist_type: type of gist private/public :param lifetime: in minutes, -1 == forever """ - owner = User.guess_instance(owner) + owner = db.User.guess_instance(owner) gist_access_id = make_gist_access_id() lifetime = safe_int(lifetime, -1) gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1 @@ -115,15 +115,15 @@ time_to_datetime(gist_expires) if gist_expires != -1 else 'forever') # create the Database version - gist = Gist() + gist = db.Gist() gist.gist_description = description gist.gist_access_id = gist_access_id gist.owner_id = owner.user_id gist.gist_expires = gist_expires gist.gist_type = gist_type - Session().add(gist) - Session().flush() # make database assign gist.gist_id - if gist_type == Gist.GIST_PUBLIC: + meta.Session().add(gist) + meta.Session().flush() # make database assign gist.gist_id + if gist_type == db.Gist.GIST_PUBLIC: # use DB ID for easy to use GIST ID gist.gist_access_id = str(gist.gist_id) @@ -170,9 +170,9 @@ return gist def delete(self, gist, fs_remove=True): - gist = Gist.guess_instance(gist) + gist = db.Gist.guess_instance(gist) try: - Session().delete(gist) + meta.Session().delete(gist) if fs_remove: self.__delete_gist(gist) else: @@ -183,7 +183,7 @@ def update(self, gist, description, owner, ip_addr, gist_mapping, gist_type, lifetime): - gist = Gist.guess_instance(gist) + gist = db.Gist.guess_instance(gist) gist_repo = gist.scm_instance lifetime = safe_int(lifetime, -1) diff -r c387989f868f -r 3669e58f3002 kallithea/model/notification.py --- a/kallithea/model/notification.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/notification.py Fri Oct 30 23:44:18 2020 +0100 @@ -34,7 +34,7 @@ from tg.i18n import ugettext as _ from kallithea.lib import helpers as h -from kallithea.model.db import User +from kallithea.model import db log = logging.getLogger(__name__) @@ -71,12 +71,12 @@ if recipients and not getattr(recipients, '__iter__', False): raise Exception('recipients must be a list or iterable') 
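A hypothetical call against the updated GistModel above; the gist_mapping structure shown is an assumption for illustration, owner may be a User object, user_id or username thanks to db.User.guess_instance(), and lifetime is in minutes with -1 meaning the gist never expires:

    from kallithea.model import db, meta
    from kallithea.model.gist import GistModel

    gist = GistModel().create(
        description='example snippet',
        owner='admin',                     # resolved via db.User.guess_instance()
        ip_addr='127.0.0.1',
        gist_mapping={'snippet.py': {'content': 'print("hello")\n'}},  # assumed shape
        gist_type=db.Gist.GIST_PRIVATE,
        lifetime=60,
    )
    meta.Session().commit()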
- created_by_obj = User.guess_instance(created_by) + created_by_obj = db.User.guess_instance(created_by) recipients_objs = set() if recipients: for u in recipients: - obj = User.guess_instance(u) + obj = db.User.guess_instance(u) if obj is not None: recipients_objs.add(obj) else: @@ -87,7 +87,7 @@ ) elif recipients is None: # empty recipients means to all admins - recipients_objs = User.query().filter(User.admin == True).all() + recipients_objs = db.User.query().filter(db.User.admin == True).all() log.debug('sending notifications %s to admins: %s', type_, recipients_objs ) @@ -126,12 +126,18 @@ email_html_body = EmailNotificationModel() \ .get_email_tmpl(type_, 'html', **html_kwargs) - # don't send email to person who created this comment - rec_objs = set(recipients_objs).difference(set([created_by_obj])) + # don't send email to the person who caused the notification, except for + # notifications about new pull requests where the author is explicitly + # added. + rec_mails = set(obj.email for obj in recipients_objs) + if type_ == NotificationModel.TYPE_PULL_REQUEST: + rec_mails.add(created_by_obj.email) + else: + rec_mails.discard(created_by_obj.email) - # send email with notification to all other participants - for rec in rec_objs: - tasks.send_email([rec.email], email_subject, email_txt_body, + # send email with notification to participants + for rec_mail in sorted(rec_mails): + tasks.send_email([rec_mail], email_subject, email_txt_body, email_html_body, headers, from_name=created_by_obj.full_name_or_username) @@ -159,7 +165,7 @@ self.TYPE_PULL_REQUEST_COMMENT: 'pull_request_comment', } self._subj_map = { - self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s'), + self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s by %(cs_author_username)s'), self.TYPE_MESSAGE: 'Test Message', # self.TYPE_PASSWORD_RESET self.TYPE_REGISTRATION: _('New user %(new_username)s registered'), @@ -198,7 +204,7 @@ return generated template for email based on given type """ - base = 'email_templates/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' + content_type + base = 'email/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' 
+ content_type email_template = self._tmpl_lookup.get_template(base) # translator and helpers inject _kwargs = {'_': _, diff -r c387989f868f -r 3669e58f3002 kallithea/model/permission.py --- a/kallithea/model/permission.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/permission.py Fri Oct 30 23:44:18 2020 +0100 @@ -31,8 +31,8 @@ from sqlalchemy.exc import DatabaseError -from kallithea.lib.utils2 import str2bool -from kallithea.model.db import Permission, Session, User, UserRepoGroupToPerm, UserRepoToPerm, UserToPerm, UserUserGroupToPerm +from kallithea.lib.utils2 import asbool +from kallithea.model import db, meta log = logging.getLogger(__name__) @@ -47,11 +47,11 @@ """ Create permissions for whole system """ - for p in Permission.PERMS: - if not Permission.get_by_key(p[0]): - new_perm = Permission() + for p in db.Permission.PERMS: + if not db.Permission.get_by_key(p[0]): + new_perm = db.Permission() new_perm.permission_name = p[0] - Session().add(new_perm) + meta.Session().add(new_perm) def create_default_permissions(self, user, force=False): """ @@ -61,80 +61,78 @@ :param user: """ - user = User.guess_instance(user) + user = db.User.guess_instance(user) def _make_perm(perm): - new_perm = UserToPerm() + new_perm = db.UserToPerm() new_perm.user = user - new_perm.permission = Permission.get_by_key(perm) + new_perm.permission = db.Permission.get_by_key(perm) return new_perm def _get_group(perm_name): return '.'.join(perm_name.split('.')[:1]) - perms = UserToPerm.query().filter(UserToPerm.user == user).all() + perms = db.UserToPerm.query().filter(db.UserToPerm.user == user).all() defined_perms_groups = set(_get_group(x.permission.permission_name) for x in perms) log.debug('GOT ALREADY DEFINED:%s', perms) if force: for perm in perms: - Session().delete(perm) - Session().commit() + meta.Session().delete(perm) + meta.Session().commit() defined_perms_groups = [] # For every default permission that needs to be created, we check if # its group is already defined. If it's not, we create default permission. 
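The comment above refers to the small _get_group() helper earlier in create_default_permissions(): a permission's "group" is simply everything before the first dot. Worked examples of what that helper computes:

    def _get_group(perm_name):
        return '.'.join(perm_name.split('.')[:1])

    print(_get_group('repository.read'))       # repository
    print(_get_group('usergroup.admin'))       # usergroup
    print(_get_group('hg.create.repository'))  # hg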
- for perm_name in Permission.DEFAULT_USER_PERMISSIONS: + for perm_name in db.Permission.DEFAULT_USER_PERMISSIONS: gr = _get_group(perm_name) if gr not in defined_perms_groups: log.debug('GR:%s not found, creating permission %s', gr, perm_name) new_perm = _make_perm(perm_name) - Session().add(new_perm) + meta.Session().add(new_perm) def update(self, form_result): - perm_user = User.get_by_username(username=form_result['perm_user_name']) + perm_user = db.User.get_by_username(username=form_result['perm_user_name']) try: # stage 1 set anonymous access if perm_user.is_default_user: - perm_user.active = str2bool(form_result['anonymous']) + perm_user.active = asbool(form_result['anonymous']) # stage 2 reset defaults and set them from form data def _make_new(usr, perm_name): log.debug('Creating new permission:%s', perm_name) - new = UserToPerm() + new = db.UserToPerm() new.user = usr - new.permission = Permission.get_by_key(perm_name) + new.permission = db.Permission.get_by_key(perm_name) return new # clear current entries, to make this function idempotent # it will fix even if we define more permissions or permissions # are somehow missing - u2p = UserToPerm.query() \ - .filter(UserToPerm.user == perm_user) \ + u2p = db.UserToPerm.query() \ + .filter(db.UserToPerm.user == perm_user) \ .all() for p in u2p: - Session().delete(p) + meta.Session().delete(p) # create fresh set of permissions for def_perm_key in ['default_repo_perm', 'default_group_perm', 'default_user_group_perm', 'default_repo_create', - 'create_on_write', # special case for create repos on write access to group - #'default_repo_group_create', # not implemented yet 'default_user_group_create', 'default_fork', 'default_register', 'default_extern_activate']: p = _make_new(perm_user, form_result[def_perm_key]) - Session().add(p) + meta.Session().add(p) # stage 3 update all default permissions for repos if checked if form_result['overwrite_default_repo']: _def_name = form_result['default_repo_perm'].split('repository.')[-1] - _def = Permission.get_by_key('repository.' + _def_name) + _def = db.Permission.get_by_key('repository.' + _def_name) # repos - for r2p in UserRepoToPerm.query() \ - .filter(UserRepoToPerm.user == perm_user) \ + for r2p in db.UserRepoToPerm.query() \ + .filter(db.UserRepoToPerm.user == perm_user) \ .all(): # don't reset PRIVATE repositories @@ -144,23 +142,23 @@ if form_result['overwrite_default_group']: _def_name = form_result['default_group_perm'].split('group.')[-1] # groups - _def = Permission.get_by_key('group.' + _def_name) - for g2p in UserRepoGroupToPerm.query() \ - .filter(UserRepoGroupToPerm.user == perm_user) \ + _def = db.Permission.get_by_key('group.' + _def_name) + for g2p in db.UserRepoGroupToPerm.query() \ + .filter(db.UserRepoGroupToPerm.user == perm_user) \ .all(): g2p.permission = _def if form_result['overwrite_default_user_group']: _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1] # groups - _def = Permission.get_by_key('usergroup.' + _def_name) - for g2p in UserUserGroupToPerm.query() \ - .filter(UserUserGroupToPerm.user == perm_user) \ + _def = db.Permission.get_by_key('usergroup.' 
+ _def_name) + for g2p in db.UserUserGroupToPerm.query() \ + .filter(db.UserUserGroupToPerm.user == perm_user) \ .all(): g2p.permission = _def - Session().commit() + meta.Session().commit() except (DatabaseError,): log.error(traceback.format_exc()) - Session().rollback() + meta.Session().rollback() raise diff -r c387989f868f -r 3669e58f3002 kallithea/model/pull_request.py --- a/kallithea/model/pull_request.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/pull_request.py Fri Oct 30 23:44:18 2020 +0100 @@ -33,9 +33,10 @@ from tg.i18n import ugettext as _ from kallithea.lib import helpers as h -from kallithea.lib.utils2 import ascii_bytes, extract_mentioned_users -from kallithea.model.db import ChangesetStatus, PullRequest, PullRequestReviewer, User -from kallithea.model.meta import Session +from kallithea.lib.hooks import log_create_pullrequest +from kallithea.lib.utils import extract_mentioned_users +from kallithea.lib.utils2 import ascii_bytes +from kallithea.model import db, meta from kallithea.model.notification import NotificationModel @@ -58,10 +59,21 @@ mention_recipients = set(mention_recipients) - reviewers _assert_valid_reviewers(mention_recipients) - # members + redundant_reviewers = set(db.User.query() \ + .join(db.PullRequestReviewer) \ + .filter(db.PullRequestReviewer.pull_request == pr) \ + .filter(db.PullRequestReviewer.user_id.in_(r.user_id for r in reviewers)) + .all()) + + if redundant_reviewers: + log.debug('Following reviewers were already part of pull request %s: %s', pr.pull_request_id, redundant_reviewers) + + reviewers -= redundant_reviewers + + log.debug('Adding reviewers to pull request %s: %s', pr.pull_request_id, reviewers) for reviewer in reviewers: - prr = PullRequestReviewer(reviewer, pr) - Session().add(prr) + prr = db.PullRequestReviewer(reviewer, pr) + meta.Session().add(prr) # notification to reviewers pr_url = pr.url(canonical=True) @@ -116,6 +128,8 @@ type_=NotificationModel.TYPE_PULL_REQUEST, email_kwargs=email_kwargs) + return reviewers, redundant_reviewers + def mention_from_description(self, user, pr, old_description=''): mention_recipients = (extract_mentioned_users(pr.description) - extract_mentioned_users(old_description)) @@ -128,14 +142,14 @@ if not reviewers: return # avoid SQLAlchemy warning about empty sequence for IN-predicate - PullRequestReviewer.query() \ + db.PullRequestReviewer.query() \ .filter_by(pull_request=pull_request) \ - .filter(PullRequestReviewer.user_id.in_(r.user_id for r in reviewers)) \ + .filter(db.PullRequestReviewer.user_id.in_(r.user_id for r in reviewers)) \ .delete(synchronize_session='fetch') # the default of 'evaluate' is not available def delete(self, pull_request): - pull_request = PullRequest.guess_instance(pull_request) - Session().delete(pull_request) + pull_request = db.PullRequest.guess_instance(pull_request) + meta.Session().delete(pull_request) if pull_request.org_repo.scm_instance.alias == 'git': # remove a ref under refs/pull/ so that commits can be garbage-collected try: @@ -144,8 +158,8 @@ pass def close_pull_request(self, pull_request): - pull_request = PullRequest.guess_instance(pull_request) - pull_request.status = PullRequest.STATUS_CLOSED + pull_request = db.PullRequest.guess_instance(pull_request) + pull_request.status = db.PullRequest.STATUS_CLOSED pull_request.updated_on = datetime.datetime.now() @@ -177,7 +191,6 @@ return False def __init__(self, org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers): - from kallithea.controllers.compare import CompareController 
reviewers = set(reviewers) _assert_valid_reviewers(reviewers) @@ -199,10 +212,7 @@ other_display = h.short_ref(other_ref_type, other_ref_name) cs_ranges, _cs_ranges_not, ancestor_revs = \ - CompareController._get_changesets(org_repo.scm_instance.alias, - other_repo.scm_instance, other_rev, # org and other "swapped" - org_repo.scm_instance, org_rev, - ) + org_repo.scm_instance.get_diff_changesets(other_rev, org_repo.scm_instance, org_rev) # org and other "swapped" if not cs_ranges: raise self.Empty(_('Cannot create empty pull request')) @@ -243,9 +253,9 @@ raise self.Unauthorized(_('You are not authorized to create the pull request')) def execute(self): - created_by = User.get(request.authuser.user_id) + created_by = db.User.get(request.authuser.user_id) - pr = PullRequest() + pr = db.PullRequest() pr.org_repo = self.org_repo pr.org_ref = self.org_ref pr.other_repo = self.other_repo @@ -254,8 +264,8 @@ pr.title = self.title pr.description = self.description pr.owner = self.owner - Session().add(pr) - Session().flush() # make database assign pull_request_id + meta.Session().add(pr) + meta.Session().flush() # make database assign pull_request_id if self.org_repo.scm_instance.alias == 'git': # create a ref under refs/pull/ so that commits don't get garbage-collected @@ -270,11 +280,11 @@ author=created_by, pull_request=pr, send_email=False, - status_change=ChangesetStatus.STATUS_UNDER_REVIEW, + status_change=db.ChangesetStatus.STATUS_UNDER_REVIEW, ) ChangesetStatusModel().set_status( self.org_repo, - ChangesetStatus.STATUS_UNDER_REVIEW, + db.ChangesetStatus.STATUS_UNDER_REVIEW, created_by, comment, pull_request=pr, @@ -283,6 +293,8 @@ mention_recipients = extract_mentioned_users(self.description) PullRequestModel().add_reviewers(created_by, pr, self.reviewers, mention_recipients) + log_create_pullrequest(pr.get_dict(), created_by) + return pr diff -r c387989f868f -r 3669e58f3002 kallithea/model/repo.py --- a/kallithea/model/repo.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/repo.py Fri Oct 30 23:44:18 2020 +0100 @@ -40,8 +40,7 @@ from kallithea.lib.utils import is_valid_repo_uri, make_ui from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix from kallithea.lib.vcs.backends import get_backend -from kallithea.model.db import (URL_SEP, Permission, RepoGroup, Repository, RepositoryField, Session, Statistics, Ui, User, UserGroup, UserGroupRepoGroupToPerm, - UserGroupRepoToPerm, UserRepoGroupToPerm, UserRepoToPerm) +from kallithea.model import db, meta log = logging.getLogger(__name__) @@ -49,12 +48,10 @@ class RepoModel(object): - URL_SEPARATOR = URL_SEP - def _create_default_perms(self, repository, private): # create default permission default = 'repository.read' - def_user = User.get_default_user() + def_user = db.User.get_default_user() for p in def_user.user_perms: if p.permission.permission_name.startswith('repository.'): default = p.permission.permission_name @@ -62,12 +59,12 @@ default_perm = 'repository.none' if private else default - repo_to_perm = UserRepoToPerm() - repo_to_perm.permission = Permission.get_by_key(default_perm) + repo_to_perm = db.UserRepoToPerm() + repo_to_perm.permission = db.Permission.get_by_key(default_perm) repo_to_perm.repository = repository repo_to_perm.user_id = def_user.user_id - Session().add(repo_to_perm) + meta.Session().add(repo_to_perm) return repo_to_perm @@ -77,20 +74,20 @@ Gets the repositories root path from database """ - q = Ui.query().filter(Ui.ui_key == '/').one() + q = 
db.Ui.query().filter(db.Ui.ui_key == '/').one() return q.ui_value def get(self, repo_id): - repo = Repository.query() \ - .filter(Repository.repo_id == repo_id) + repo = db.Repository.query() \ + .filter(db.Repository.repo_id == repo_id) return repo.scalar() def get_repo(self, repository): - return Repository.guess_instance(repository) + return db.Repository.guess_instance(repository) def get_by_repo_name(self, repo_name): - repo = Repository.query() \ - .filter(Repository.repo_name == repo_name) + repo = db.Repository.query() \ + .filter(db.Repository.repo_name == repo_name) return repo.scalar() def get_all_user_repos(self, user): @@ -100,16 +97,17 @@ :param user: """ from kallithea.lib.auth import AuthUser - auth_user = AuthUser(dbuser=User.guess_instance(user)) + auth_user = AuthUser(dbuser=db.User.guess_instance(user)) repos = [repo_name - for repo_name, perm in auth_user.permissions['repositories'].items() + for repo_name, perm in auth_user.repository_permissions.items() if perm in ['repository.read', 'repository.write', 'repository.admin'] ] - return Repository.query().filter(Repository.repo_name.in_(repos)) + return db.Repository.query().filter(db.Repository.repo_name.in_(repos)) @classmethod def _render_datatable(cls, tmpl, *args, **kwargs): - from tg import tmpl_context as c, request, app_globals + from tg import app_globals, request + from tg import tmpl_context as c from tg.i18n import ugettext as _ _tmpl_lookup = app_globals.mako_lookup @@ -128,7 +126,9 @@ admin: return data for action column. """ _render = self._render_datatable - from tg import tmpl_context as c, request + from tg import request + from tg import tmpl_context as c + from kallithea.model.scm import ScmModel def repo_lnk(name, rtype, rstate, private, fork_of): @@ -218,7 +218,7 @@ :param repo_name: """ - repo_info = Repository.get_by_repo_name(repo_name) + repo_info = db.Repository.get_by_repo_name(repo_name) if repo_info is None: return None @@ -246,7 +246,7 @@ if repo_info.owner: defaults.update({'owner': repo_info.owner.username}) else: - replacement_user = User.query().filter(User.admin == + replacement_user = db.User.query().filter(db.User.admin == True).first().username defaults.update({'owner': replacement_user}) @@ -264,14 +264,14 @@ def update(self, repo, **kwargs): try: - cur_repo = Repository.guess_instance(repo) + cur_repo = db.Repository.guess_instance(repo) org_repo_name = cur_repo.repo_name if 'owner' in kwargs: - cur_repo.owner = User.get_by_username(kwargs['owner']) + cur_repo.owner = db.User.get_by_username(kwargs['owner']) if 'repo_group' in kwargs: assert kwargs['repo_group'] != '-1', kwargs # RepoForm should have converted to None - cur_repo.group = RepoGroup.get(kwargs['repo_group']) + cur_repo.group = db.RepoGroup.get(kwargs['repo_group']) cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name) log.debug('Updating repo %s with params:%s', cur_repo, kwargs) for k in ['repo_enable_downloads', @@ -303,9 +303,9 @@ repo=cur_repo, user='default', perm=EMPTY_PERM ) # handle extra fields - for field in [k for k in kwargs if k.startswith(RepositoryField.PREFIX)]: - k = RepositoryField.un_prefix_key(field) - ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo) + for field in [k for k in kwargs if k.startswith(db.RepositoryField.PREFIX)]: + k = db.RepositoryField.un_prefix_key(field) + ex_field = db.RepositoryField.get_by_key_name(key=k, repo=cur_repo) if ex_field: ex_field.field_value = kwargs[field] @@ -323,7 +323,7 @@ landing_rev='rev:tip', fork_of=None, 
copy_fork_permissions=False, enable_statistics=False, enable_downloads=False, - copy_group_permissions=False, state=Repository.STATE_PENDING): + copy_group_permissions=False, state=db.Repository.STATE_PENDING): """ Create repository inside database with PENDING state. This should only be executed by create() repo, with exception of importing existing repos. @@ -331,9 +331,9 @@ """ from kallithea.model.scm import ScmModel - owner = User.guess_instance(owner) - fork_of = Repository.guess_instance(fork_of) - repo_group = RepoGroup.guess_instance(repo_group) + owner = db.User.guess_instance(owner) + fork_of = db.Repository.guess_instance(fork_of) + repo_group = db.RepoGroup.guess_instance(repo_group) try: repo_name = repo_name description = description @@ -341,11 +341,11 @@ # while repo_name_full is a full qualified name that is combined # with name and path of group repo_name_full = repo_name - repo_name = repo_name.split(URL_SEP)[-1] + repo_name = repo_name.split(kallithea.URL_SEP)[-1] if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name: raise Exception('invalid repo name %s' % repo_name) - new_repo = Repository() + new_repo = db.Repository() new_repo.repo_state = state new_repo.enable_statistics = False new_repo.repo_name = repo_name_full @@ -367,39 +367,39 @@ parent_repo = fork_of new_repo.fork = parent_repo - Session().add(new_repo) + meta.Session().add(new_repo) if fork_of and copy_fork_permissions: repo = fork_of - user_perms = UserRepoToPerm.query() \ - .filter(UserRepoToPerm.repository == repo).all() - group_perms = UserGroupRepoToPerm.query() \ - .filter(UserGroupRepoToPerm.repository == repo).all() + user_perms = db.UserRepoToPerm.query() \ + .filter(db.UserRepoToPerm.repository == repo).all() + group_perms = db.UserGroupRepoToPerm.query() \ + .filter(db.UserGroupRepoToPerm.repository == repo).all() for perm in user_perms: - UserRepoToPerm.create(perm.user, new_repo, perm.permission) + db.UserRepoToPerm.create(perm.user, new_repo, perm.permission) for perm in group_perms: - UserGroupRepoToPerm.create(perm.users_group, new_repo, + db.UserGroupRepoToPerm.create(perm.users_group, new_repo, perm.permission) elif repo_group and copy_group_permissions: - user_perms = UserRepoGroupToPerm.query() \ - .filter(UserRepoGroupToPerm.group == repo_group).all() + user_perms = db.UserRepoGroupToPerm.query() \ + .filter(db.UserRepoGroupToPerm.group == repo_group).all() - group_perms = UserGroupRepoGroupToPerm.query() \ - .filter(UserGroupRepoGroupToPerm.group == repo_group).all() + group_perms = db.UserGroupRepoGroupToPerm.query() \ + .filter(db.UserGroupRepoGroupToPerm.group == repo_group).all() for perm in user_perms: perm_name = perm.permission.permission_name.replace('group.', 'repository.') - perm_obj = Permission.get_by_key(perm_name) - UserRepoToPerm.create(perm.user, new_repo, perm_obj) + perm_obj = db.Permission.get_by_key(perm_name) + db.UserRepoToPerm.create(perm.user, new_repo, perm_obj) for perm in group_perms: perm_name = perm.permission.permission_name.replace('group.', 'repository.') - perm_obj = Permission.get_by_key(perm_name) - UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj) + perm_obj = db.Permission.get_by_key(perm_name) + db.UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj) else: self._create_default_perms(new_repo, private) @@ -408,7 +408,7 @@ ScmModel().toggle_following_repo(new_repo.repo_id, owner.user_id) # we need to flush here, in order to check if database won't # throw any exceptions, create filesystem dirs at the very end - 
Session().flush() + meta.Session().flush() return new_repo except Exception: log.error(traceback.format_exc()) @@ -479,7 +479,7 @@ """ if not cur_user: cur_user = getattr(get_current_authuser(), 'username', None) - repo = Repository.guess_instance(repo) + repo = db.Repository.guess_instance(repo) if repo is not None: if forks == 'detach': for r in repo.forks: @@ -492,7 +492,7 @@ old_repo_dict = repo.get_dict() try: - Session().delete(repo) + meta.Session().delete(repo) if fs_remove: self._delete_filesystem_repo(repo) else: @@ -512,19 +512,19 @@ :param user: Instance of User, user_id or username :param perm: Instance of Permission, or permission_name """ - user = User.guess_instance(user) - repo = Repository.guess_instance(repo) - permission = Permission.guess_instance(perm) + user = db.User.guess_instance(user) + repo = db.Repository.guess_instance(repo) + permission = db.Permission.guess_instance(perm) # check if we have that permission already - obj = UserRepoToPerm.query() \ - .filter(UserRepoToPerm.user == user) \ - .filter(UserRepoToPerm.repository == repo) \ + obj = db.UserRepoToPerm.query() \ + .filter(db.UserRepoToPerm.user == user) \ + .filter(db.UserRepoToPerm.repository == repo) \ .scalar() if obj is None: # create new ! - obj = UserRepoToPerm() - Session().add(obj) + obj = db.UserRepoToPerm() + meta.Session().add(obj) obj.repository = repo obj.user = user obj.permission = permission @@ -539,15 +539,15 @@ :param user: Instance of User, user_id or username """ - user = User.guess_instance(user) - repo = Repository.guess_instance(repo) + user = db.User.guess_instance(user) + repo = db.Repository.guess_instance(repo) - obj = UserRepoToPerm.query() \ - .filter(UserRepoToPerm.repository == repo) \ - .filter(UserRepoToPerm.user == user) \ + obj = db.UserRepoToPerm.query() \ + .filter(db.UserRepoToPerm.repository == repo) \ + .filter(db.UserRepoToPerm.user == user) \ .scalar() if obj is not None: - Session().delete(obj) + meta.Session().delete(obj) log.debug('Revoked perm on %s on %s', repo, user) def grant_user_group_permission(self, repo, group_name, perm): @@ -560,20 +560,20 @@ or user group name :param perm: Instance of Permission, or permission_name """ - repo = Repository.guess_instance(repo) - group_name = UserGroup.guess_instance(group_name) - permission = Permission.guess_instance(perm) + repo = db.Repository.guess_instance(repo) + group_name = db.UserGroup.guess_instance(group_name) + permission = db.Permission.guess_instance(perm) # check if we have that permission already - obj = UserGroupRepoToPerm.query() \ - .filter(UserGroupRepoToPerm.users_group == group_name) \ - .filter(UserGroupRepoToPerm.repository == repo) \ + obj = db.UserGroupRepoToPerm.query() \ + .filter(db.UserGroupRepoToPerm.users_group == group_name) \ + .filter(db.UserGroupRepoToPerm.repository == repo) \ .scalar() if obj is None: # create new - obj = UserGroupRepoToPerm() - Session().add(obj) + obj = db.UserGroupRepoToPerm() + meta.Session().add(obj) obj.repository = repo obj.users_group = group_name @@ -589,15 +589,15 @@ :param group_name: Instance of UserGroup, users_group_id, or user group name """ - repo = Repository.guess_instance(repo) - group_name = UserGroup.guess_instance(group_name) + repo = db.Repository.guess_instance(repo) + group_name = db.UserGroup.guess_instance(group_name) - obj = UserGroupRepoToPerm.query() \ - .filter(UserGroupRepoToPerm.repository == repo) \ - .filter(UserGroupRepoToPerm.users_group == group_name) \ + obj = db.UserGroupRepoToPerm.query() \ + 
.filter(db.UserGroupRepoToPerm.repository == repo) \ + .filter(db.UserGroupRepoToPerm.users_group == group_name) \ .scalar() if obj is not None: - Session().delete(obj) + meta.Session().delete(obj) log.debug('Revoked perm to %s on %s', repo, group_name) def delete_stats(self, repo_name): @@ -606,12 +606,12 @@ :param repo_name: """ - repo = Repository.guess_instance(repo_name) + repo = db.Repository.guess_instance(repo_name) try: - obj = Statistics.query() \ - .filter(Statistics.repository == repo).scalar() + obj = db.Statistics.query() \ + .filter(db.Statistics.repository == repo).scalar() if obj is not None: - Session().delete(obj) + meta.Session().delete(obj) except Exception: log.error(traceback.format_exc()) raise @@ -630,7 +630,7 @@ if '/' in repo_name: raise ValueError('repo_name must not contain groups got `%s`' % repo_name) - if isinstance(repo_group, RepoGroup): + if isinstance(repo_group, db.RepoGroup): new_parent_path = os.sep.join(repo_group.full_path_splitted) else: new_parent_path = repo_group or '' @@ -666,7 +666,7 @@ elif repo_type == 'git': repo = backend(repo_path, create=True, src_url=clone_uri, bare=True) # add kallithea hook into this repo - ScmModel().install_git_hooks(repo=repo) + ScmModel().install_git_hooks(repo) else: raise Exception('Not supported repo_type %s expected hg/git' % repo_type) diff -r c387989f868f -r 3669e58f3002 kallithea/model/repo_group.py --- a/kallithea/model/repo_group.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/repo_group.py Fri Oct 30 23:44:18 2020 +0100 @@ -34,8 +34,7 @@ import kallithea.lib.utils2 from kallithea.lib.utils2 import LazyProperty -from kallithea.model import db -from kallithea.model.db import Permission, RepoGroup, Repository, Session, Ui, User, UserGroup, UserGroupRepoGroupToPerm, UserRepoGroupToPerm +from kallithea.model import db, meta log = logging.getLogger(__name__) @@ -49,24 +48,24 @@ Gets the repositories root path from database """ - q = Ui.get_by_key('paths', '/') + q = db.Ui.get_by_key('paths', '/') return q.ui_value def _create_default_perms(self, new_group): # create default permission default_perm = 'group.read' - def_user = User.get_default_user() + def_user = db.User.get_default_user() for p in def_user.user_perms: if p.permission.permission_name.startswith('group.'): default_perm = p.permission.permission_name break - repo_group_to_perm = UserRepoGroupToPerm() - repo_group_to_perm.permission = Permission.get_by_key(default_perm) + repo_group_to_perm = db.UserRepoGroupToPerm() + repo_group_to_perm.permission = db.Permission.get_by_key(default_perm) repo_group_to_perm.group = new_group repo_group_to_perm.user_id = def_user.user_id - Session().add(repo_group_to_perm) + meta.Session().add(repo_group_to_perm) return repo_group_to_perm def _create_group(self, group_name): @@ -116,7 +115,7 @@ :param group: instance of group from database :param force_delete: use shutil rmtree to remove all objects """ - paths = group.full_path.split(db.URL_SEP) + paths = group.full_path.split(kallithea.URL_SEP) paths = os.sep.join(paths) rm_path = os.path.join(self.repos_path, paths) @@ -139,15 +138,15 @@ if kallithea.lib.utils2.repo_name_slug(group_name) != group_name: raise Exception('invalid repo group name %s' % group_name) - owner = User.guess_instance(owner) - parent_group = RepoGroup.guess_instance(parent) - new_repo_group = RepoGroup() + owner = db.User.guess_instance(owner) + parent_group = db.RepoGroup.guess_instance(parent) + new_repo_group = db.RepoGroup() new_repo_group.owner = owner 
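The _create_default_perms helper above seeds a new repository group with whatever 'group.*' permission the special default user carries, falling back to read access. Here is a rough, self-contained sketch of just that selection step, with plain strings standing in for the permission objects::

    def pick_default_group_perm(default_user_perms, fallback='group.read'):
        """Return the first 'group.*' permission granted to the default user."""
        for perm_name in default_user_perms:
            if perm_name.startswith('group.'):
                return perm_name
        return fallback

    # the default user typically carries one default per permission family
    assert pick_default_group_perm(
        ['repository.read', 'group.write', 'usergroup.read']) == 'group.write'
    assert pick_default_group_perm(['repository.read']) == 'group.read'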
new_repo_group.group_description = group_description or group_name new_repo_group.parent_group = parent_group new_repo_group.group_name = new_repo_group.get_new_name(group_name) - Session().add(new_repo_group) + meta.Session().add(new_repo_group) # create an ADMIN permission for owner except if we're super admin, # later owner should go into the owner field of groups @@ -157,28 +156,28 @@ if parent_group and copy_permissions: # copy permissions from parent - user_perms = UserRepoGroupToPerm.query() \ - .filter(UserRepoGroupToPerm.group == parent_group).all() + user_perms = db.UserRepoGroupToPerm.query() \ + .filter(db.UserRepoGroupToPerm.group == parent_group).all() - group_perms = UserGroupRepoGroupToPerm.query() \ - .filter(UserGroupRepoGroupToPerm.group == parent_group).all() + group_perms = db.UserGroupRepoGroupToPerm.query() \ + .filter(db.UserGroupRepoGroupToPerm.group == parent_group).all() for perm in user_perms: # don't copy over the permission for user who is creating # this group, if he is not super admin he get's admin # permission set above if perm.user != owner or owner.is_admin: - UserRepoGroupToPerm.create(perm.user, new_repo_group, perm.permission) + db.UserRepoGroupToPerm.create(perm.user, new_repo_group, perm.permission) for perm in group_perms: - UserGroupRepoGroupToPerm.create(perm.users_group, new_repo_group, perm.permission) + db.UserGroupRepoGroupToPerm.create(perm.users_group, new_repo_group, perm.permission) else: self._create_default_perms(new_repo_group) if not just_db: # we need to flush here, in order to check if database won't # throw any exceptions, create filesystem dirs at the very end - Session().flush() + meta.Session().flush() self._create_group(new_repo_group.group_name) return new_repo_group @@ -189,8 +188,8 @@ def _update_permissions(self, repo_group, perms_new=None, perms_updates=None, recursive=None, check_perms=True): + from kallithea.lib.auth import HasUserGroupPermissionLevel from kallithea.model.repo import RepoModel - from kallithea.lib.auth import HasUserGroupPermissionLevel if not perms_new: perms_new = [] @@ -198,10 +197,10 @@ perms_updates = [] def _set_perm_user(obj, user, perm): - if isinstance(obj, RepoGroup): + if isinstance(obj, db.RepoGroup): self.grant_user_permission(repo_group=obj, user=user, perm=perm) - elif isinstance(obj, Repository): - user = User.guess_instance(user) + elif isinstance(obj, db.Repository): + user = db.User.guess_instance(user) # private repos will not allow to change the default permissions # using recursive mode @@ -216,11 +215,11 @@ ) def _set_perm_group(obj, users_group, perm): - if isinstance(obj, RepoGroup): + if isinstance(obj, db.RepoGroup): self.grant_user_group_permission(repo_group=obj, group_name=users_group, perm=perm) - elif isinstance(obj, Repository): + elif isinstance(obj, db.Repository): # we set group permission but we have to switch to repo # permission perm = perm.replace('group.', 'repository.') @@ -240,11 +239,11 @@ pass elif recursive == 'repos': # skip groups, other than this one - if isinstance(obj, RepoGroup) and not obj == repo_group: + if isinstance(obj, db.RepoGroup) and not obj == repo_group: continue elif recursive == 'groups': # skip repos - if isinstance(obj, Repository): + if isinstance(obj, db.Repository): continue else: # recursive == 'none': # DEFAULT don't apply to iterated objects obj = repo_group @@ -279,7 +278,7 @@ def update(self, repo_group, repo_group_args): try: - repo_group = RepoGroup.guess_instance(repo_group) + repo_group = 
db.RepoGroup.guess_instance(repo_group) old_path = repo_group.full_path # change properties @@ -290,14 +289,14 @@ if 'parent_group_id' in repo_group_args: assert repo_group_args['parent_group_id'] != '-1', repo_group_args # RepoGroupForm should have converted to None - repo_group.parent_group = RepoGroup.get(repo_group_args['parent_group_id']) + repo_group.parent_group = db.RepoGroup.get(repo_group_args['parent_group_id']) if 'group_name' in repo_group_args: group_name = repo_group_args['group_name'] if kallithea.lib.utils2.repo_name_slug(group_name) != group_name: raise Exception('invalid repo group name %s' % group_name) repo_group.group_name = repo_group.get_new_name(group_name) new_path = repo_group.full_path - Session().add(repo_group) + meta.Session().add(repo_group) # iterate over all members of this groups and do fixes # if obj is a repoGroup also fix the name of the group according @@ -306,12 +305,12 @@ # this can be potentially heavy operation for obj in repo_group.recursive_groups_and_repos(): # set the value from it's parent - if isinstance(obj, RepoGroup): + if isinstance(obj, db.RepoGroup): new_name = obj.get_new_name(obj.name) log.debug('Fixing group %s to new name %s' % (obj.group_name, new_name)) obj.group_name = new_name - elif isinstance(obj, Repository): + elif isinstance(obj, db.Repository): # we need to get all repositories from this new group and # rename them accordingly to new group path new_name = obj.get_new_name(obj.just_name) @@ -327,9 +326,9 @@ raise def delete(self, repo_group, force_delete=False): - repo_group = RepoGroup.guess_instance(repo_group) + repo_group = db.RepoGroup.guess_instance(repo_group) try: - Session().delete(repo_group) + meta.Session().delete(repo_group) self._delete_group(repo_group, force_delete) except Exception: log.error('Error removing repo_group %s', repo_group) @@ -337,8 +336,8 @@ def add_permission(self, repo_group, obj, obj_type, perm, recursive): from kallithea.model.repo import RepoModel - repo_group = RepoGroup.guess_instance(repo_group) - perm = Permission.guess_instance(perm) + repo_group = db.RepoGroup.guess_instance(repo_group) + perm = db.Permission.guess_instance(perm) for el in repo_group.recursive_groups_and_repos(): # iterated obj is an instance of a repos group or repository in @@ -347,24 +346,24 @@ pass elif recursive == 'repos': # skip groups, other than this one - if isinstance(el, RepoGroup) and not el == repo_group: + if isinstance(el, db.RepoGroup) and not el == repo_group: continue elif recursive == 'groups': # skip repos - if isinstance(el, Repository): + if isinstance(el, db.Repository): continue else: # recursive == 'none': # DEFAULT don't apply to iterated objects el = repo_group # also we do a break at the end of this loop. 
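The recursive argument in the permission code above decides how far a change cascades: 'all' touches every child, 'repos' skips child groups, 'groups' skips repositories, and anything else applies only to the group itself. A small sketch of the same dispatch, with throwaway classes standing in for RepoGroup and Repository::

    class Group:
        """Stand-in for a repository group."""
        def __init__(self, name):
            self.name = name

    class Repo:
        """Stand-in for a repository."""
        def __init__(self, name):
            self.name = name

    def targets(root, children, recursive):
        """Yield the objects a permission change should touch."""
        if recursive not in ('all', 'repos', 'groups'):
            yield root                  # 'none': only the group itself
            return
        for obj in children:
            if recursive == 'repos' and isinstance(obj, Group) and obj is not root:
                continue                # skip any group other than this one
            if recursive == 'groups' and isinstance(obj, Repo):
                continue                # skip repositories
            yield obj

    root = Group('top')
    kids = [Group('top/docs'), Repo('top/docs/manual'), Repo('top/tools')]
    print([o.name for o in targets(root, kids, 'repos')])   # the two repositories
    print([o.name for o in targets(root, kids, 'groups')])  # only 'top/docs'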
- if isinstance(el, RepoGroup): + if isinstance(el, db.RepoGroup): if obj_type == 'user': RepoGroupModel().grant_user_permission(el, user=obj, perm=perm) elif obj_type == 'user_group': RepoGroupModel().grant_user_group_permission(el, group_name=obj, perm=perm) else: raise Exception('undefined object type %s' % obj_type) - elif isinstance(el, Repository): + elif isinstance(el, db.Repository): # for repos we need to hotfix the name of permission _perm = perm.permission_name.replace('group.', 'repository.') if obj_type == 'user': @@ -393,7 +392,7 @@ :param recursive: recurse to all children of group """ from kallithea.model.repo import RepoModel - repo_group = RepoGroup.guess_instance(repo_group) + repo_group = db.RepoGroup.guess_instance(repo_group) for el in repo_group.recursive_groups_and_repos(): # iterated obj is an instance of a repos group or repository in @@ -402,24 +401,24 @@ pass elif recursive == 'repos': # skip groups, other than this one - if isinstance(el, RepoGroup) and not el == repo_group: + if isinstance(el, db.RepoGroup) and not el == repo_group: continue elif recursive == 'groups': # skip repos - if isinstance(el, Repository): + if isinstance(el, db.Repository): continue else: # recursive == 'none': # DEFAULT don't apply to iterated objects el = repo_group # also we do a break at the end of this loop. - if isinstance(el, RepoGroup): + if isinstance(el, db.RepoGroup): if obj_type == 'user': RepoGroupModel().revoke_user_permission(el, user=obj) elif obj_type == 'user_group': RepoGroupModel().revoke_user_group_permission(el, group_name=obj) else: raise Exception('undefined object type %s' % obj_type) - elif isinstance(el, Repository): + elif isinstance(el, db.Repository): if obj_type == 'user': RepoModel().revoke_user_permission(el, user=obj) elif obj_type == 'user_group': @@ -446,19 +445,19 @@ :param perm: Instance of Permission, or permission_name """ - repo_group = RepoGroup.guess_instance(repo_group) - user = User.guess_instance(user) - permission = Permission.guess_instance(perm) + repo_group = db.RepoGroup.guess_instance(repo_group) + user = db.User.guess_instance(user) + permission = db.Permission.guess_instance(perm) # check if we have that permission already - obj = UserRepoGroupToPerm.query() \ - .filter(UserRepoGroupToPerm.user == user) \ - .filter(UserRepoGroupToPerm.group == repo_group) \ + obj = db.UserRepoGroupToPerm.query() \ + .filter(db.UserRepoGroupToPerm.user == user) \ + .filter(db.UserRepoGroupToPerm.group == repo_group) \ .scalar() if obj is None: # create new ! 
- obj = UserRepoGroupToPerm() - Session().add(obj) + obj = db.UserRepoGroupToPerm() + meta.Session().add(obj) obj.group = repo_group obj.user = user obj.permission = permission @@ -474,15 +473,15 @@ :param user: Instance of User, user_id or username """ - repo_group = RepoGroup.guess_instance(repo_group) - user = User.guess_instance(user) + repo_group = db.RepoGroup.guess_instance(repo_group) + user = db.User.guess_instance(user) - obj = UserRepoGroupToPerm.query() \ - .filter(UserRepoGroupToPerm.user == user) \ - .filter(UserRepoGroupToPerm.group == repo_group) \ + obj = db.UserRepoGroupToPerm.query() \ + .filter(db.UserRepoGroupToPerm.user == user) \ + .filter(db.UserRepoGroupToPerm.group == repo_group) \ .scalar() if obj is not None: - Session().delete(obj) + meta.Session().delete(obj) log.debug('Revoked perm on %s on %s', repo_group, user) def grant_user_group_permission(self, repo_group, group_name, perm): @@ -496,20 +495,20 @@ or user group name :param perm: Instance of Permission, or permission_name """ - repo_group = RepoGroup.guess_instance(repo_group) - group_name = UserGroup.guess_instance(group_name) - permission = Permission.guess_instance(perm) + repo_group = db.RepoGroup.guess_instance(repo_group) + group_name = db.UserGroup.guess_instance(group_name) + permission = db.Permission.guess_instance(perm) # check if we have that permission already - obj = UserGroupRepoGroupToPerm.query() \ - .filter(UserGroupRepoGroupToPerm.group == repo_group) \ - .filter(UserGroupRepoGroupToPerm.users_group == group_name) \ + obj = db.UserGroupRepoGroupToPerm.query() \ + .filter(db.UserGroupRepoGroupToPerm.group == repo_group) \ + .filter(db.UserGroupRepoGroupToPerm.users_group == group_name) \ .scalar() if obj is None: # create new - obj = UserGroupRepoGroupToPerm() - Session().add(obj) + obj = db.UserGroupRepoGroupToPerm() + meta.Session().add(obj) obj.group = repo_group obj.users_group = group_name @@ -526,13 +525,13 @@ :param group_name: Instance of UserGroup, users_group_id, or user group name """ - repo_group = RepoGroup.guess_instance(repo_group) - group_name = UserGroup.guess_instance(group_name) + repo_group = db.RepoGroup.guess_instance(repo_group) + group_name = db.UserGroup.guess_instance(group_name) - obj = UserGroupRepoGroupToPerm.query() \ - .filter(UserGroupRepoGroupToPerm.group == repo_group) \ - .filter(UserGroupRepoGroupToPerm.users_group == group_name) \ + obj = db.UserGroupRepoGroupToPerm.query() \ + .filter(db.UserGroupRepoGroupToPerm.group == repo_group) \ + .filter(db.UserGroupRepoGroupToPerm.users_group == group_name) \ .scalar() if obj is not None: - Session().delete(obj) + meta.Session().delete(obj) log.debug('Revoked perm to %s on %s', repo_group, group_name) diff -r c387989f868f -r 3669e58f3002 kallithea/model/scm.py --- a/kallithea/model/scm.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/scm.py Fri Oct 30 23:44:18 2020 +0100 @@ -36,7 +36,6 @@ from tg.i18n import ugettext as _ import kallithea -from kallithea import BACKENDS from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel, HasUserGroupPermissionLevel from kallithea.lib.exceptions import IMCCommitError, NonRelativePathError from kallithea.lib.hooks import process_pushed_raw_ids @@ -47,7 +46,7 @@ from kallithea.lib.vcs.exceptions import RepositoryError from kallithea.lib.vcs.nodes import FileNode from kallithea.lib.vcs.utils.lazy import LazyProperty -from kallithea.model.db import PullRequest, RepoGroup, Repository, Session, Ui, User, UserFollowing, UserLog 
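Most hunks in this changeset trade name imports from kallithea.model.db for a single module import, from kallithea.model import db, meta, with every use written out as db.Repository, meta.Session() and so on. The pattern itself is purely stylistic; shown here with os.path standing in for the Kallithea modules, both spellings call the same code, but the qualified form keeps import lists short and call sites easy to grep::

    # name imports: every symbol has to be listed and maintained individually
    from os.path import dirname, join

    # module import: one line, and each use stays qualified
    import os.path

    print(join(dirname('/srv/repos/group/repo'), 'hooks'))
    print(os.path.join(os.path.dirname('/srv/repos/group/repo'), 'hooks'))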
+from kallithea.model import db, meta log = logging.getLogger(__name__) @@ -136,7 +135,7 @@ """ def __get_repo(self, instance): - cls = Repository + cls = db.Repository if isinstance(instance, cls): return instance elif isinstance(instance, int): @@ -145,9 +144,8 @@ if instance.isdigit(): return cls.get(int(instance)) return cls.get_by_repo_name(instance) - elif instance is not None: - raise Exception('given object must be int, basestr or Instance' - ' of %s got %s' % (type(cls), type(instance))) + raise Exception('given object must be int, basestr or Instance' + ' of %s got %s' % (type(cls), type(instance))) @LazyProperty def repos_path(self): @@ -155,7 +153,7 @@ Gets the repositories root path from database """ - q = Ui.query().filter(Ui.ui_key == '/').one() + q = db.Ui.query().filter(db.Ui.ui_key == '/').one() return q.ui_value @@ -179,7 +177,7 @@ for name, path in get_filesystem_repos(repos_path): # name need to be decomposed and put back together using the / # since this is internal storage separator for kallithea - name = Repository.normalize_repo_name(name) + name = db.Repository.normalize_repo_name(name) try: if name in repos: @@ -189,10 +187,10 @@ klass = get_backend(path[0]) - if path[0] == 'hg' and path[0] in BACKENDS: + if path[0] == 'hg' and path[0] in kallithea.BACKENDS: repos[name] = klass(path[1], baseui=baseui) - if path[0] == 'git' and path[0] in BACKENDS: + if path[0] == 'git' and path[0] in kallithea.BACKENDS: repos[name] = klass(path[1]) except OSError: continue @@ -208,8 +206,8 @@ If no groups are specified, use top level groups. """ if groups is None: - groups = RepoGroup.query() \ - .filter(RepoGroup.parent_group_id == None).all() + groups = db.RepoGroup.query() \ + .filter(db.RepoGroup.parent_group_id == None).all() return RepoGroupList(groups, perm_level='read') def mark_for_invalidation(self, repo_name): @@ -219,20 +217,20 @@ :param repo_name: the repo for which caches should be marked invalid """ log.debug("Marking %s as invalidated and update cache", repo_name) - repo = Repository.get_by_repo_name(repo_name) + repo = db.Repository.get_by_repo_name(repo_name) if repo is not None: repo.set_invalidate() repo.update_changeset_cache() def toggle_following_repo(self, follow_repo_id, user_id): - f = UserFollowing.query() \ - .filter(UserFollowing.follows_repository_id == follow_repo_id) \ - .filter(UserFollowing.user_id == user_id).scalar() + f = db.UserFollowing.query() \ + .filter(db.UserFollowing.follows_repository_id == follow_repo_id) \ + .filter(db.UserFollowing.user_id == user_id).scalar() if f is not None: try: - Session().delete(f) + meta.Session().delete(f) action_logger(UserTemp(user_id), 'stopped_following_repo', RepoTemp(follow_repo_id)) @@ -242,10 +240,10 @@ raise try: - f = UserFollowing() + f = db.UserFollowing() f.user_id = user_id f.follows_repository_id = follow_repo_id - Session().add(f) + meta.Session().add(f) action_logger(UserTemp(user_id), 'started_following_repo', @@ -255,62 +253,62 @@ raise def toggle_following_user(self, follow_user_id, user_id): - f = UserFollowing.query() \ - .filter(UserFollowing.follows_user_id == follow_user_id) \ - .filter(UserFollowing.user_id == user_id).scalar() + f = db.UserFollowing.query() \ + .filter(db.UserFollowing.follows_user_id == follow_user_id) \ + .filter(db.UserFollowing.user_id == user_id).scalar() if f is not None: try: - Session().delete(f) + meta.Session().delete(f) return except Exception: log.error(traceback.format_exc()) raise try: - f = UserFollowing() + f = db.UserFollowing() f.user_id = 
user_id f.follows_user_id = follow_user_id - Session().add(f) + meta.Session().add(f) except Exception: log.error(traceback.format_exc()) raise def is_following_repo(self, repo_name, user_id): - r = Repository.query() \ - .filter(Repository.repo_name == repo_name).scalar() + r = db.Repository.query() \ + .filter(db.Repository.repo_name == repo_name).scalar() - f = UserFollowing.query() \ - .filter(UserFollowing.follows_repository == r) \ - .filter(UserFollowing.user_id == user_id).scalar() + f = db.UserFollowing.query() \ + .filter(db.UserFollowing.follows_repository == r) \ + .filter(db.UserFollowing.user_id == user_id).scalar() return f is not None def is_following_user(self, username, user_id): - u = User.get_by_username(username) + u = db.User.get_by_username(username) - f = UserFollowing.query() \ - .filter(UserFollowing.follows_user == u) \ - .filter(UserFollowing.user_id == user_id).scalar() + f = db.UserFollowing.query() \ + .filter(db.UserFollowing.follows_user == u) \ + .filter(db.UserFollowing.user_id == user_id).scalar() return f is not None def get_followers(self, repo): - repo = Repository.guess_instance(repo) + repo = db.Repository.guess_instance(repo) - return UserFollowing.query() \ - .filter(UserFollowing.follows_repository == repo).count() + return db.UserFollowing.query() \ + .filter(db.UserFollowing.follows_repository == repo).count() def get_forks(self, repo): - repo = Repository.guess_instance(repo) - return Repository.query() \ - .filter(Repository.fork == repo).count() + repo = db.Repository.guess_instance(repo) + return db.Repository.query() \ + .filter(db.Repository.fork == repo).count() def get_pull_requests(self, repo): - repo = Repository.guess_instance(repo) - return PullRequest.query() \ - .filter(PullRequest.other_repo == repo) \ - .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() + repo = db.Repository.guess_instance(repo) + return db.PullRequest.query() \ + .filter(db.PullRequest.other_repo == repo) \ + .filter(db.PullRequest.status != db.PullRequest.STATUS_CLOSED).count() def mark_as_fork(self, repo, fork, user): repo = self.__get_repo(repo) @@ -394,7 +392,7 @@ :param repo: a db_repo.scm_instance """ - user = User.guess_instance(user) + user = db.User.guess_instance(user) IMC = self._get_IMC_module(repo.alias) imc = IMC(repo) imc.change(FileNode(f_path, content, mode=cs.get_file_mode(f_path))) @@ -466,7 +464,7 @@ :returns: new committed changeset """ - user = User.guess_instance(user) + user = db.User.guess_instance(user) scm_instance = repo.scm_instance_no_cache() processed_nodes = [] @@ -518,7 +516,7 @@ """ Commits specified nodes to repo. Again. 
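toggle_following_repo and toggle_following_user above share one shape: look the row up, delete it when present, insert it otherwise. Reduced to a sketch without SQLAlchemy, with a set standing in for the UserFollowing table::

    def toggle_following(following, user_id, repo_id):
        """Return True if the user follows the repository after the call."""
        key = (user_id, repo_id)
        if key in following:
            following.discard(key)   # was following: stop
            return False
        following.add(key)           # was not following: start
        return True

    follows = set()
    assert toggle_following(follows, 7, 42) is True    # starts following
    assert toggle_following(follows, 7, 42) is False   # toggles back off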
""" - user = User.guess_instance(user) + user = db.User.guess_instance(user) scm_instance = repo.scm_instance_no_cache() message = message @@ -591,7 +589,7 @@ :returns: new committed changeset after deletion """ - user = User.guess_instance(user) + user = db.User.guess_instance(user) scm_instance = repo.scm_instance_no_cache() processed_nodes = [] @@ -639,7 +637,7 @@ return tip def get_unread_journal(self): - return UserLog.query().count() + return db.UserLog.query().count() def get_repo_landing_revs(self, repo=None): """ @@ -651,12 +649,12 @@ hist_l = [] choices = [] - repo = self.__get_repo(repo) hist_l.append(('rev:tip', _('latest tip'))) choices.append('rev:tip') if repo is None: return choices, hist_l + repo = self.__get_repo(repo) repo = repo.scm_instance branches_group = ([('branch:%s' % k, k) for k, v in @@ -691,77 +689,73 @@ or sys.executable or '/usr/bin/env python3') - def install_git_hooks(self, repo, force_create=False): + def install_git_hooks(self, repo, force=False): """ Creates a kallithea hook inside a git repository :param repo: Instance of VCS repo - :param force_create: Create even if same name hook exists + :param force: Overwrite existing non-Kallithea hooks """ - loc = os.path.join(repo.path, 'hooks') + hooks_path = os.path.join(repo.path, 'hooks') if not repo.bare: - loc = os.path.join(repo.path, '.git', 'hooks') - if not os.path.isdir(loc): - os.makedirs(loc) + hooks_path = os.path.join(repo.path, '.git', 'hooks') + if not os.path.isdir(hooks_path): + os.makedirs(hooks_path) tmpl_post = b"#!%s\n" % safe_bytes(self._get_git_hook_interpreter()) tmpl_post += pkg_resources.resource_string( - 'kallithea', os.path.join('config', 'post_receive_tmpl.py') + 'kallithea', os.path.join('templates', 'py', 'git_post_receive_hook.py') ) tmpl_pre = b"#!%s\n" % safe_bytes(self._get_git_hook_interpreter()) tmpl_pre += pkg_resources.resource_string( - 'kallithea', os.path.join('config', 'pre_receive_tmpl.py') + 'kallithea', os.path.join('templates', 'py', 'git_pre_receive_hook.py') ) for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]: - _hook_file = os.path.join(loc, '%s-receive' % h_type) - has_hook = False + hook_file = os.path.join(hooks_path, '%s-receive' % h_type) + other_hook = False log.debug('Installing git hook in repo %s', repo) - if os.path.exists(_hook_file): + if os.path.exists(hook_file): # let's take a look at this hook, maybe it's kallithea ? 
log.debug('hook exists, checking if it is from kallithea') - with open(_hook_file, 'rb') as f: + with open(hook_file, 'rb') as f: data = f.read() matches = re.search(br'^KALLITHEA_HOOK_VER\s*=\s*(.*)$', data, flags=re.MULTILINE) if matches: - try: - ver = matches.groups()[0] - log.debug('Found Kallithea hook - it has KALLITHEA_HOOK_VER %r', ver) - has_hook = True - except Exception: - log.error(traceback.format_exc()) + ver = matches.groups()[0] + log.debug('Found Kallithea hook - it has KALLITHEA_HOOK_VER %r', ver) + else: + log.debug('Found non-Kallithea hook at %s', hook_file) + other_hook = True + + if other_hook and not force: + log.warning('skipping overwriting hook file %s', hook_file) else: - # there is no hook in this dir, so we want to create one - has_hook = True - - if has_hook or force_create: log.debug('writing %s hook file !', h_type) try: - with open(_hook_file, 'wb') as f: + with open(hook_file, 'wb') as f: tmpl = tmpl.replace(b'_TMPL_', safe_bytes(kallithea.__version__)) f.write(tmpl) - os.chmod(_hook_file, 0o755) + os.chmod(hook_file, 0o755) except IOError as e: - log.error('error writing %s: %s', _hook_file, e) - else: - log.debug('skipping writing hook file') + log.error('error writing hook %s: %s', hook_file, e) -def AvailableRepoGroupChoices(top_perms, repo_group_perm_level, extras=()): +def AvailableRepoGroupChoices(repo_group_perm_level, extras=()): """Return group_id,string tuples with choices for all the repo groups where the user has the necessary permissions. Top level is -1. """ - groups = RepoGroup.query().all() + groups = db.RepoGroup.query().all() if HasPermissionAny('hg.admin')('available repo groups'): groups.append(None) else: groups = list(RepoGroupList(groups, perm_level=repo_group_perm_level)) - if top_perms and HasPermissionAny(*top_perms)('available repo groups'): + if HasPermissionAny('hg.create.repository')('available repo groups'): groups.append(None) for extra in extras: if not any(rg == extra for rg in groups): groups.append(extra) - return RepoGroup.groups_choices(groups=groups) + return db.RepoGroup.groups_choices(groups=groups) diff -r c387989f868f -r 3669e58f3002 kallithea/model/ssh_key.py --- a/kallithea/model/ssh_key.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/ssh_key.py Fri Oct 30 23:44:18 2020 +0100 @@ -29,10 +29,9 @@ from tg.i18n import ugettext as _ from kallithea.lib import ssh -from kallithea.lib.utils2 import str2bool +from kallithea.lib.utils2 import asbool from kallithea.lib.vcs.exceptions import RepositoryError -from kallithea.model.db import User, UserSshKeys -from kallithea.model.meta import Session +from kallithea.model import db, meta log = logging.getLogger(__name__) @@ -58,18 +57,18 @@ if not description.strip(): description = comment.strip() - user = User.guess_instance(user) + user = db.User.guess_instance(user) - new_ssh_key = UserSshKeys() + new_ssh_key = db.UserSshKeys() new_ssh_key.user_id = user.user_id new_ssh_key.description = description new_ssh_key.public_key = public_key - for ssh_key in UserSshKeys.query().filter(UserSshKeys.fingerprint == new_ssh_key.fingerprint).all(): + for ssh_key in db.UserSshKeys.query().filter(db.UserSshKeys.fingerprint == new_ssh_key.fingerprint).all(): raise SshKeyModelException(_('SSH key %s is already used by %s') % (new_ssh_key.fingerprint, ssh_key.user.username)) - Session().add(new_ssh_key) + meta.Session().add(new_ssh_key) return new_ssh_key @@ -78,24 +77,24 @@ Deletes ssh key with given fingerprint for the given user. 
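The SSH key create method above refuses to store a public key whose fingerprint is already registered to any user. With a dict standing in for the UserSshKeys table, the check reduces to a single lookup keyed on the fingerprint::

    class SshKeyError(Exception):
        pass

    def add_ssh_key(keys_by_fingerprint, fingerprint, username, public_key):
        """Register a key, rejecting fingerprints that are already in use."""
        existing = keys_by_fingerprint.get(fingerprint)
        if existing is not None:
            raise SshKeyError('SSH key %s is already used by %s'
                              % (fingerprint, existing[0]))
        keys_by_fingerprint[fingerprint] = (username, public_key)

    keys = {}
    add_ssh_key(keys, 'SHA256:abc...', 'alice', 'ssh-rsa AAAA...')
    try:
        add_ssh_key(keys, 'SHA256:abc...', 'bob', 'ssh-rsa AAAA...')
    except SshKeyError as e:
        print(e)   # SSH key SHA256:abc... is already used by alice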
Will raise SshKeyModelException on errors """ - ssh_key = UserSshKeys.query().filter(UserSshKeys.fingerprint == fingerprint) + ssh_key = db.UserSshKeys.query().filter(db.UserSshKeys.fingerprint == fingerprint) - user = User.guess_instance(user) - ssh_key = ssh_key.filter(UserSshKeys.user_id == user.user_id) + user = db.User.guess_instance(user) + ssh_key = ssh_key.filter(db.UserSshKeys.user_id == user.user_id) ssh_key = ssh_key.scalar() if ssh_key is None: raise SshKeyModelException(_('SSH key with fingerprint %r found') % fingerprint) - Session().delete(ssh_key) + meta.Session().delete(ssh_key) def get_ssh_keys(self, user): - user = User.guess_instance(user) - user_ssh_keys = UserSshKeys.query() \ - .filter(UserSshKeys.user_id == user.user_id).all() + user = db.User.guess_instance(user) + user_ssh_keys = db.UserSshKeys.query() \ + .filter(db.UserSshKeys.user_id == user.user_id).all() return user_ssh_keys def write_authorized_keys(self): - if not str2bool(config.get('ssh_enabled', False)): + if not asbool(config.get('ssh_enabled', False)): log.error("Will not write SSH authorized_keys file - ssh_enabled is not configured") return authorized_keys = config.get('ssh_authorized_keys') @@ -131,7 +130,7 @@ fh, tmp_authorized_keys = tempfile.mkstemp('.authorized_keys', dir=os.path.dirname(authorized_keys)) with os.fdopen(fh, 'w') as f: f.write("# WARNING: This .ssh/authorized_keys file is managed by Kallithea. Manual editing or adding new entries will make Kallithea back off.\n") - for key in UserSshKeys.query().join(UserSshKeys.user).filter(User.active == True): + for key in db.UserSshKeys.query().join(db.UserSshKeys.user).filter(db.User.active == True): f.write(ssh.authorized_keys_line(kallithea_cli_path, config['__file__'], key)) os.chmod(tmp_authorized_keys, stat.S_IRUSR | stat.S_IWUSR) # Note: simple overwrite / rename isn't enough to replace the file on Windows diff -r c387989f868f -r 3669e58f3002 kallithea/model/user.py --- a/kallithea/model/user.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/user.py Fri Oct 30 23:44:18 2020 +0100 @@ -38,8 +38,7 @@ from kallithea.lib.exceptions import DefaultUserException, UserOwnsReposException from kallithea.lib.utils2 import generate_api_key, get_current_authuser -from kallithea.model.db import Permission, User, UserEmailMap, UserIpMap, UserToPerm -from kallithea.model.meta import Session +from kallithea.model import db, meta log = logging.getLogger(__name__) @@ -49,18 +48,17 @@ password_reset_token_lifetime = 86400 # 24 hours def get(self, user_id): - user = User.query() + user = db.User.query() return user.get(user_id) def get_user(self, user): - return User.guess_instance(user) + return db.User.guess_instance(user) def create(self, form_data, cur_user=None): if not cur_user: cur_user = getattr(get_current_authuser(), 'username', None) - from kallithea.lib.hooks import log_create_user, \ - check_allowed_create_user + from kallithea.lib.hooks import check_allowed_create_user, log_create_user _fd = form_data user_data = { 'username': _fd['username'], @@ -75,7 +73,7 @@ check_allowed_create_user(user_data, cur_user) from kallithea.lib.auth import get_crypt_password - new_user = User() + new_user = db.User() for k, v in form_data.items(): if k == 'password': v = get_crypt_password(v) @@ -84,8 +82,8 @@ setattr(new_user, k, v) new_user.api_key = generate_api_key() - Session().add(new_user) - Session().flush() # make database assign new_user.user_id + meta.Session().add(new_user) + meta.Session().flush() # make database assign new_user.user_id 
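write_authorized_keys above builds the new file beside the target with tempfile.mkstemp, restricts its mode, and only then moves it into place, so a crash mid-write never leaves a truncated authorized_keys behind. The same pattern in isolation, as a sketch with a made-up demo path rather than the real configuration::

    import os
    import stat
    import tempfile

    def replace_file_atomically(path, lines):
        """Write lines to a sibling temp file, chmod 600, then swap it into place."""
        fh, tmp_path = tempfile.mkstemp('.tmp', dir=os.path.dirname(path) or '.')
        with os.fdopen(fh, 'w') as f:
            f.writelines(lines)
        os.chmod(tmp_path, stat.S_IRUSR | stat.S_IWUSR)
        os.replace(tmp_path, path)   # also overwrites an existing file on Windows

    replace_file_atomically('authorized_keys.demo',
                            ['# managed file - do not edit by hand\n'])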
log_create_user(new_user.get_dict(), cur_user) return new_user @@ -111,9 +109,8 @@ if not cur_user: cur_user = getattr(get_current_authuser(), 'username', None) - from kallithea.lib.auth import get_crypt_password, check_password - from kallithea.lib.hooks import log_create_user, \ - check_allowed_create_user + from kallithea.lib.auth import check_password, get_crypt_password + from kallithea.lib.hooks import check_allowed_create_user, log_create_user user_data = { 'username': username, 'password': password, 'email': email, 'firstname': firstname, 'lastname': lastname, @@ -123,10 +120,10 @@ check_allowed_create_user(user_data, cur_user) log.debug('Checking for %s account in Kallithea database', username) - user = User.get_by_username(username, case_insensitive=True) + user = db.User.get_by_username(username, case_insensitive=True) if user is None: log.debug('creating new user %s', username) - new_user = User() + new_user = db.User() edit = False else: log.debug('updating user %s', username) @@ -156,8 +153,8 @@ if password else '' if user is None: - Session().add(new_user) - Session().flush() # make database assign new_user.user_id + meta.Session().add(new_user) + meta.Session().flush() # make database assign new_user.user_id if not edit: log_create_user(new_user.get_dict(), cur_user) @@ -168,11 +165,11 @@ raise def create_registration(self, form_data): + import kallithea.lib.helpers as h from kallithea.model.notification import NotificationModel - import kallithea.lib.helpers as h form_data['admin'] = False - form_data['extern_type'] = User.DEFAULT_AUTH_TYPE + form_data['extern_type'] = db.User.DEFAULT_AUTH_TYPE form_data['extern_name'] = '' new_user = self.create(form_data) @@ -220,7 +217,7 @@ def update_user(self, user, **kwargs): from kallithea.lib.auth import get_crypt_password - user = User.guess_instance(user) + user = db.User.guess_instance(user) if user.is_default_user: raise DefaultUserException( _("You can't edit this user since it's" @@ -237,7 +234,7 @@ def delete(self, user, cur_user=None): if cur_user is None: cur_user = getattr(get_current_authuser(), 'username', None) - user = User.guess_instance(user) + user = db.User.guess_instance(user) if user.is_default_user: raise DefaultUserException( @@ -261,7 +258,7 @@ _('User "%s" still owns %s user groups and cannot be ' 'removed. Switch owners or remove those user groups: %s') % (user.username, len(usergroups), ', '.join(usergroups))) - Session().delete(user) + meta.Session().delete(user) from kallithea.lib.hooks import log_delete_user log_delete_user(user.get_dict(), cur_user) @@ -303,8 +300,8 @@ guaranteed not to occur in any of the values. """ app_secret = config.get('app_instance_uuid') - return hmac.HMAC( - key='\0'.join([app_secret, user.password]).encode('utf-8'), + return hmac.new( + '\0'.join([app_secret, user.password]).encode('utf-8'), msg='\0'.join([session_id, str(user.user_id), user.email, str(timestamp)]).encode('utf-8'), digestmod=hashlib.sha1, ).hexdigest() @@ -317,12 +314,12 @@ allowing users to copy-paste or manually enter the token from the email. 
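The reset-token derivation above switches from instantiating hmac.HMAC directly to hmac.new, keeping the same inputs: the application secret plus the current password hash as key, and session id, user id, email and timestamp as the message, NUL-joined so field boundaries stay unambiguous. A self-contained version of that derivation with made-up values::

    import hashlib
    import hmac

    def reset_token(app_secret, password_hash, session_id, user_id, email, timestamp):
        """Derive a password-reset token bound to the user's current state."""
        key = '\0'.join([app_secret, password_hash]).encode('utf-8')
        msg = '\0'.join([session_id, str(user_id), email,
                         str(timestamp)]).encode('utf-8')
        return hmac.new(key, msg, digestmod=hashlib.sha1).hexdigest()

    print(reset_token('app-uuid-1234', '$2a$10$examplehash', 'session-1', 7,
                      'alice@example.com', 1604097858))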
""" + import kallithea.lib.helpers as h from kallithea.lib.celerylib import tasks from kallithea.model.notification import EmailNotificationModel - import kallithea.lib.helpers as h user_email = data['email'] - user = User.get_by_email(user_email) + user = db.User.get_by_email(user_email) timestamp = int(time.time()) if user is not None: if self.can_change_password(user): @@ -363,7 +360,7 @@ def verify_reset_password_token(self, email, timestamp, token): import kallithea.lib.helpers as h - user = User.get_by_email(email) + user = db.User.get_by_email(email) if user is None: log.debug("user with email %s not found", email) return False @@ -386,14 +383,14 @@ return expected_token == token def reset_password(self, user_email, new_passwd): + from kallithea.lib import auth from kallithea.lib.celerylib import tasks - from kallithea.lib import auth - user = User.get_by_email(user_email) + user = db.User.get_by_email(user_email) if user is not None: if not self.can_change_password(user): raise Exception('trying to change password for external user') user.password = auth.get_crypt_password(new_passwd) - Session().commit() + meta.Session().commit() log.info('change password for %s', user_email) if new_passwd is None: raise Exception('unable to set new password') @@ -406,11 +403,11 @@ return True def has_perm(self, user, perm): - perm = Permission.guess_instance(perm) - user = User.guess_instance(user) + perm = db.Permission.guess_instance(perm) + user = db.User.guess_instance(user) - return UserToPerm.query().filter(UserToPerm.user == user) \ - .filter(UserToPerm.permission == perm).scalar() is not None + return db.UserToPerm.query().filter(db.UserToPerm.user == user) \ + .filter(db.UserToPerm.permission == perm).scalar() is not None def grant_perm(self, user, perm): """ @@ -419,19 +416,19 @@ :param user: :param perm: """ - user = User.guess_instance(user) - perm = Permission.guess_instance(perm) + user = db.User.guess_instance(user) + perm = db.Permission.guess_instance(perm) # if this permission is already granted skip it - _perm = UserToPerm.query() \ - .filter(UserToPerm.user == user) \ - .filter(UserToPerm.permission == perm) \ + _perm = db.UserToPerm.query() \ + .filter(db.UserToPerm.user == user) \ + .filter(db.UserToPerm.permission == perm) \ .scalar() if _perm: return - new = UserToPerm() + new = db.UserToPerm() new.user = user new.permission = perm - Session().add(new) + meta.Session().add(new) return new def revoke_perm(self, user, perm): @@ -441,12 +438,12 @@ :param user: :param perm: """ - user = User.guess_instance(user) - perm = Permission.guess_instance(perm) + user = db.User.guess_instance(user) + perm = db.Permission.guess_instance(perm) - UserToPerm.query().filter( - UserToPerm.user == user, - UserToPerm.permission == perm, + db.UserToPerm.query().filter( + db.UserToPerm.user == user, + db.UserToPerm.permission == perm, ).delete() def add_extra_email(self, user, email): @@ -459,12 +456,12 @@ from kallithea.model import forms form = forms.UserExtraEmailForm()() data = form.to_python(dict(email=email)) - user = User.guess_instance(user) + user = db.User.guess_instance(user) - obj = UserEmailMap() + obj = db.UserEmailMap() obj.user = user obj.email = data['email'] - Session().add(obj) + meta.Session().add(obj) return obj def delete_extra_email(self, user, email_id): @@ -474,10 +471,10 @@ :param user: :param email_id: """ - user = User.guess_instance(user) - obj = UserEmailMap.query().get(email_id) + user = db.User.guess_instance(user) + obj = db.UserEmailMap.query().get(email_id) if 
obj is not None: - Session().delete(obj) + meta.Session().delete(obj) def add_extra_ip(self, user, ip): """ @@ -489,12 +486,12 @@ from kallithea.model import forms form = forms.UserExtraIpForm()() data = form.to_python(dict(ip=ip)) - user = User.guess_instance(user) + user = db.User.guess_instance(user) - obj = UserIpMap() + obj = db.UserIpMap() obj.user = user obj.ip_addr = data['ip'] - Session().add(obj) + meta.Session().add(obj) return obj def delete_extra_ip(self, user, ip_id): @@ -504,7 +501,7 @@ :param user: :param ip_id: """ - user = User.guess_instance(user) - obj = UserIpMap.query().get(ip_id) + user = db.User.guess_instance(user) + obj = db.UserIpMap.query().get(ip_id) if obj: - Session().delete(obj) + meta.Session().delete(obj) diff -r c387989f868f -r 3669e58f3002 kallithea/model/user_group.py --- a/kallithea/model/user_group.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/user_group.py Fri Oct 30 23:44:18 2020 +0100 @@ -28,8 +28,7 @@ import traceback from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException -from kallithea.model.db import (Permission, Session, User, UserGroup, UserGroupMember, UserGroupRepoToPerm, UserGroupToPerm, UserGroupUserGroupToPerm, - UserUserGroupToPerm) +from kallithea.model import db, meta log = logging.getLogger(__name__) @@ -40,18 +39,18 @@ def _create_default_perms(self, user_group): # create default permission default_perm = 'usergroup.read' - def_user = User.get_default_user() + def_user = db.User.get_default_user() for p in def_user.user_perms: if p.permission.permission_name.startswith('usergroup.'): default_perm = p.permission.permission_name break - user_group_to_perm = UserUserGroupToPerm() - user_group_to_perm.permission = Permission.get_by_key(default_perm) + user_group_to_perm = db.UserUserGroupToPerm() + user_group_to_perm.permission = db.Permission.get_by_key(default_perm) user_group_to_perm.user_group = user_group user_group_to_perm.user_id = def_user.user_id - Session().add(user_group_to_perm) + meta.Session().add(user_group_to_perm) return user_group_to_perm def _update_permissions(self, user_group, perms_new=None, @@ -89,24 +88,24 @@ ) def get(self, user_group_id): - return UserGroup.get(user_group_id) + return db.UserGroup.get(user_group_id) def get_group(self, user_group): - return UserGroup.guess_instance(user_group) + return db.UserGroup.guess_instance(user_group) def get_by_name(self, name, case_insensitive=False): - return UserGroup.get_by_group_name(name, case_insensitive=case_insensitive) + return db.UserGroup.get_by_group_name(name, case_insensitive=case_insensitive) def create(self, name, description, owner, active=True, group_data=None): try: - new_user_group = UserGroup() - new_user_group.owner = User.guess_instance(owner) + new_user_group = db.UserGroup() + new_user_group.owner = db.User.guess_instance(owner) new_user_group.users_group_name = name new_user_group.user_group_description = description new_user_group.users_group_active = active if group_data: new_user_group.group_data = group_data - Session().add(new_user_group) + meta.Session().add(new_user_group) self._create_default_perms(new_user_group) self.grant_user_permission(user_group=new_user_group, @@ -120,7 +119,7 @@ def update(self, user_group, form_data): try: - user_group = UserGroup.guess_instance(user_group) + user_group = db.UserGroup.guess_instance(user_group) for k, v in form_data.items(): if k == 'users_group_members': @@ -128,15 +127,15 @@ if v: v = [v] if isinstance(v, str) else v for u_id in set(v): - member 
= UserGroupMember(user_group.users_group_id, u_id) + member = db.UserGroupMember(user_group.users_group_id, u_id) members_list.append(member) - Session().add(member) + meta.Session().add(member) user_group.members = members_list setattr(user_group, k, v) # Flush to make db assign users_group_member_id to newly # created UserGroupMembers. - Session().flush() + meta.Session().flush() except Exception: log.error(traceback.format_exc()) raise @@ -150,25 +149,25 @@ :param user_group: :param force: """ - user_group = UserGroup.guess_instance(user_group) + user_group = db.UserGroup.guess_instance(user_group) try: # check if this group is not assigned to repo - assigned_groups = UserGroupRepoToPerm.query() \ - .filter(UserGroupRepoToPerm.users_group == user_group).all() + assigned_groups = db.UserGroupRepoToPerm.query() \ + .filter(db.UserGroupRepoToPerm.users_group == user_group).all() assigned_groups = [x.repository.repo_name for x in assigned_groups] if assigned_groups and not force: raise UserGroupsAssignedException( 'User Group assigned to %s' % ", ".join(assigned_groups)) - Session().delete(user_group) + meta.Session().delete(user_group) except Exception: log.error(traceback.format_exc()) raise def add_user_to_group(self, user_group, user): """Return True if user already is in the group - else return the new UserGroupMember""" - user_group = UserGroup.guess_instance(user_group) - user = User.guess_instance(user) + user_group = db.UserGroup.guess_instance(user_group) + user = db.User.guess_instance(user) for m in user_group.members: u = m.user @@ -177,22 +176,22 @@ return True try: - user_group_member = UserGroupMember() + user_group_member = db.UserGroupMember() user_group_member.user = user user_group_member.users_group = user_group user_group.members.append(user_group_member) user.group_member.append(user_group_member) - Session().add(user_group_member) + meta.Session().add(user_group_member) return user_group_member except Exception: log.error(traceback.format_exc()) raise def remove_user_from_group(self, user_group, user): - user_group = UserGroup.guess_instance(user_group) - user = User.guess_instance(user) + user_group = db.UserGroup.guess_instance(user_group) + user = db.User.guess_instance(user) user_group_member = None for m in user_group.members: @@ -203,7 +202,7 @@ if user_group_member: try: - Session().delete(user_group_member) + meta.Session().delete(user_group_member) return True except Exception: log.error(traceback.format_exc()) @@ -213,40 +212,40 @@ return False def has_perm(self, user_group, perm): - user_group = UserGroup.guess_instance(user_group) - perm = Permission.guess_instance(perm) + user_group = db.UserGroup.guess_instance(user_group) + perm = db.Permission.guess_instance(perm) - return UserGroupToPerm.query() \ - .filter(UserGroupToPerm.users_group == user_group) \ - .filter(UserGroupToPerm.permission == perm).scalar() is not None + return db.UserGroupToPerm.query() \ + .filter(db.UserGroupToPerm.users_group == user_group) \ + .filter(db.UserGroupToPerm.permission == perm).scalar() is not None def grant_perm(self, user_group, perm): - user_group = UserGroup.guess_instance(user_group) - perm = Permission.guess_instance(perm) + user_group = db.UserGroup.guess_instance(user_group) + perm = db.Permission.guess_instance(perm) # if this permission is already granted skip it - _perm = UserGroupToPerm.query() \ - .filter(UserGroupToPerm.users_group == user_group) \ - .filter(UserGroupToPerm.permission == perm) \ + _perm = db.UserGroupToPerm.query() \ + 
.filter(db.UserGroupToPerm.users_group == user_group) \ + .filter(db.UserGroupToPerm.permission == perm) \ .scalar() if _perm: return - new = UserGroupToPerm() + new = db.UserGroupToPerm() new.users_group = user_group new.permission = perm - Session().add(new) + meta.Session().add(new) return new def revoke_perm(self, user_group, perm): - user_group = UserGroup.guess_instance(user_group) - perm = Permission.guess_instance(perm) + user_group = db.UserGroup.guess_instance(user_group) + perm = db.Permission.guess_instance(perm) - obj = UserGroupToPerm.query() \ - .filter(UserGroupToPerm.users_group == user_group) \ - .filter(UserGroupToPerm.permission == perm).scalar() + obj = db.UserGroupToPerm.query() \ + .filter(db.UserGroupToPerm.users_group == user_group) \ + .filter(db.UserGroupToPerm.permission == perm).scalar() if obj is not None: - Session().delete(obj) + meta.Session().delete(obj) def grant_user_permission(self, user_group, user, perm): """ @@ -259,19 +258,19 @@ :param perm: Instance of Permission, or permission_name """ - user_group = UserGroup.guess_instance(user_group) - user = User.guess_instance(user) - permission = Permission.guess_instance(perm) + user_group = db.UserGroup.guess_instance(user_group) + user = db.User.guess_instance(user) + permission = db.Permission.guess_instance(perm) # check if we have that permission already - obj = UserUserGroupToPerm.query() \ - .filter(UserUserGroupToPerm.user == user) \ - .filter(UserUserGroupToPerm.user_group == user_group) \ + obj = db.UserUserGroupToPerm.query() \ + .filter(db.UserUserGroupToPerm.user == user) \ + .filter(db.UserUserGroupToPerm.user_group == user_group) \ .scalar() if obj is None: # create new ! - obj = UserUserGroupToPerm() - Session().add(obj) + obj = db.UserUserGroupToPerm() + meta.Session().add(obj) obj.user_group = user_group obj.user = user obj.permission = permission @@ -287,15 +286,15 @@ :param user: Instance of User, user_id or username """ - user_group = UserGroup.guess_instance(user_group) - user = User.guess_instance(user) + user_group = db.UserGroup.guess_instance(user_group) + user = db.User.guess_instance(user) - obj = UserUserGroupToPerm.query() \ - .filter(UserUserGroupToPerm.user == user) \ - .filter(UserUserGroupToPerm.user_group == user_group) \ + obj = db.UserUserGroupToPerm.query() \ + .filter(db.UserUserGroupToPerm.user == user) \ + .filter(db.UserUserGroupToPerm.user_group == user_group) \ .scalar() if obj is not None: - Session().delete(obj) + meta.Session().delete(obj) log.debug('Revoked perm on %s on %s', user_group, user) def grant_user_group_permission(self, target_user_group, user_group, perm): @@ -306,23 +305,23 @@ :param user_group: :param perm: """ - target_user_group = UserGroup.guess_instance(target_user_group) - user_group = UserGroup.guess_instance(user_group) - permission = Permission.guess_instance(perm) + target_user_group = db.UserGroup.guess_instance(target_user_group) + user_group = db.UserGroup.guess_instance(user_group) + permission = db.Permission.guess_instance(perm) # forbid assigning same user group to itself if target_user_group == user_group: raise RepoGroupAssignmentError('target repo:%s cannot be ' 'assigned to itself' % target_user_group) # check if we have that permission already - obj = UserGroupUserGroupToPerm.query() \ - .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group) \ - .filter(UserGroupUserGroupToPerm.user_group == user_group) \ + obj = db.UserGroupUserGroupToPerm.query() \ + .filter(db.UserGroupUserGroupToPerm.target_user_group == 
target_user_group) \ + .filter(db.UserGroupUserGroupToPerm.user_group == user_group) \ .scalar() if obj is None: # create new ! - obj = UserGroupUserGroupToPerm() - Session().add(obj) + obj = db.UserGroupUserGroupToPerm() + meta.Session().add(obj) obj.user_group = user_group obj.target_user_group = target_user_group obj.permission = permission @@ -336,19 +335,19 @@ :param target_user_group: :param user_group: """ - target_user_group = UserGroup.guess_instance(target_user_group) - user_group = UserGroup.guess_instance(user_group) + target_user_group = db.UserGroup.guess_instance(target_user_group) + user_group = db.UserGroup.guess_instance(user_group) - obj = UserGroupUserGroupToPerm.query() \ - .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group) \ - .filter(UserGroupUserGroupToPerm.user_group == user_group) \ + obj = db.UserGroupUserGroupToPerm.query() \ + .filter(db.UserGroupUserGroupToPerm.target_user_group == target_user_group) \ + .filter(db.UserGroupUserGroupToPerm.user_group == user_group) \ .scalar() if obj is not None: - Session().delete(obj) + meta.Session().delete(obj) log.debug('Revoked perm on %s on %s', target_user_group, user_group) def enforce_groups(self, user, groups, extern_type=None): - user = User.guess_instance(user) + user = db.User.guess_instance(user) log.debug('Enforcing groups %s on user %s', user, groups) current_groups = user.group_member # find the external created groups @@ -363,9 +362,9 @@ self.remove_user_from_group(gr, user) # now we calculate in which groups user should be == groups params - owner = User.get_first_admin().username + owner = db.User.get_first_admin().username for gr in set(groups): - existing_group = UserGroup.get_by_group_name(gr) + existing_group = db.UserGroup.get_by_group_name(gr) if not existing_group: desc = 'Automatically created from plugin:%s' % extern_type # we use first admin account to set the owner of the group diff -r c387989f868f -r 3669e58f3002 kallithea/model/validators.py --- a/kallithea/model/validators.py Wed Oct 28 14:58:18 2020 +0100 +++ b/kallithea/model/validators.py Fri Oct 30 23:44:18 2020 +0100 @@ -27,14 +27,13 @@ from sqlalchemy import func from tg.i18n import ugettext as _ -from kallithea.config.routing import ADMIN_PREFIX +import kallithea from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel from kallithea.lib.compat import OrderedSet from kallithea.lib.exceptions import InvalidCloneUriException, LdapImportError from kallithea.lib.utils import is_valid_repo_uri -from kallithea.lib.utils2 import aslist, repo_name_slug, str2bool +from kallithea.lib.utils2 import asbool, aslist, repo_name_slug from kallithea.model import db -from kallithea.model.db import RepoGroup, Repository, User, UserGroup # silence warnings and pylint @@ -86,10 +85,10 @@ # check if user is unique old_un = None if edit: - old_un = User.get(old_data.get('user_id')).username + old_un = db.User.get(old_data.get('user_id')).username if old_un != value or not edit: - if User.get_by_username(value, case_insensitive=True): + if db.User.get_by_username(value, case_insensitive=True): msg = self.message('username_exists', state, username=value) raise formencode.Invalid(msg, value, state) @@ -113,8 +112,8 @@ def _validate_python(self, value, state): try: - User.query().filter(User.active == True) \ - .filter(User.username == value).one() + db.User.query().filter(db.User.active == True) \ + .filter(db.User.username == value).one() except sqlalchemy.exc.InvalidRequestError: # NoResultFound/MultipleResultsFound 
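The username validator above calls .one() and catches sqlalchemy.exc.InvalidRequestError, the common base of NoResultFound and MultipleResultsFound, so a missing active user and an ambiguous match are rejected the same way. The contract in miniature, with stand-in exception classes instead of a real query::

    class LookupFailed(Exception):
        """Plays the role of sqlalchemy.exc.InvalidRequestError here."""

    class NoResult(LookupFailed):
        pass

    class MultipleResults(LookupFailed):
        pass

    def exactly_one(rows):
        """Mimic Query.one(): exactly one row, or raise a LookupFailed subclass."""
        if not rows:
            raise NoResult()
        if len(rows) > 1:
            raise MultipleResults()
        return rows[0]

    active_users = ['alice', 'bob']
    try:
        exactly_one([u for u in active_users if u == 'mallory'])
    except LookupFailed:
        print('invalid username')   # one except clause covers both failure modes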
                msg = self.message('invalid_username', state, username=value)
                raise formencode.Invalid(msg, value, state,
@@ -147,10 +146,10 @@
             old_ugname = None
             if edit:
                 old_id = old_data.get('users_group_id')
-                old_ugname = UserGroup.get(old_id).users_group_name
+                old_ugname = db.UserGroup.get(old_id).users_group_name
 
             if old_ugname != value or not edit:
-                is_existing_group = UserGroup.get_by_group_name(value,
+                is_existing_group = db.UserGroup.get_by_group_name(value,
                                                                  case_insensitive=True)
                 if is_existing_group:
                     msg = self.message('group_exist', state, usergroup=value)
@@ -195,14 +194,14 @@
 
             old_gname = None
             if edit:
-                old_gname = RepoGroup.get(old_data.get('group_id')).group_name
+                old_gname = db.RepoGroup.get(old_data.get('group_id')).group_name
 
             if old_gname != group_name or not edit:
 
                 # check group
-                gr = RepoGroup.query() \
-                    .filter(func.lower(RepoGroup.group_name) == func.lower(slug)) \
-                    .filter(RepoGroup.parent_group_id == parent_group_id) \
+                gr = db.RepoGroup.query() \
+                    .filter(func.lower(db.RepoGroup.group_name) == func.lower(slug)) \
+                    .filter(db.RepoGroup.parent_group_id == parent_group_id) \
                     .scalar()
                 if gr is not None:
                     msg = self.message('group_exists', state, group_name=slug)
@@ -211,8 +210,8 @@
                     )
 
                 # check for same repo
-                repo = Repository.query() \
-                    .filter(func.lower(Repository.repo_name) == func.lower(slug)) \
+                repo = db.Repository.query() \
+                    .filter(func.lower(db.Repository.repo_name) == func.lower(slug)) \
                     .scalar()
                 if repo is not None:
                     msg = self.message('repo_exists', state, group_name=slug)
@@ -285,7 +284,7 @@
             # authenticate returns unused dict but has called
             # plugin._authenticate which has create_or_update'ed the username user in db
             if auth_modules.authenticate(username, password) is None:
-                user = User.get_by_username_or_email(username)
+                user = db.User.get_by_username_or_email(username)
                 if user and not user.active:
                     log.warning('user %s is disabled', username)
                     msg = self.message('invalid_auth', state)
@@ -320,13 +319,13 @@
             repo_name = repo_name_slug(value.get('repo_name', ''))
             repo_group = value.get('repo_group')
             if repo_group:
-                gr = RepoGroup.get(repo_group)
+                gr = db.RepoGroup.get(repo_group)
                 group_path = gr.full_path
                 group_name = gr.group_name
                 # value needs to be aware of group name in order to check
                 # db key This is an actual just the name to store in the
                 # database
-                repo_name_full = group_path + db.URL_SEP + repo_name
+                repo_name_full = group_path + kallithea.URL_SEP + repo_name
             else:
                 group_name = group_path = ''
                 repo_name_full = repo_name
@@ -343,7 +342,7 @@
             group_path = value.get('group_path')
             group_name = value.get('group_name')
 
-            if repo_name in [ADMIN_PREFIX, '']:
+            if repo_name in [kallithea.ADMIN_PREFIX, '']:
                 msg = self.message('invalid_repo_name', state, repo=repo_name)
                 raise formencode.Invalid(msg, value, state,
                                          error_dict=dict(repo_name=msg)
@@ -352,8 +351,8 @@
             rename = old_data.get('repo_name') != repo_name_full
             create = not edit
             if rename or create:
-                repo = Repository.get_by_repo_name(repo_name_full, case_insensitive=True)
-                repo_group = RepoGroup.get_by_group_name(repo_name_full, case_insensitive=True)
+                repo = db.Repository.get_by_repo_name(repo_name_full, case_insensitive=True)
+                repo_group = db.RepoGroup.get_by_group_name(repo_name_full, case_insensitive=True)
                 if group_path != '':
                     if repo is not None:
                         msg = self.message('repository_in_group_exists', state,
@@ -400,7 +399,7 @@
         messages = {
             'clone_uri': _('Invalid repository URL'),
             'invalid_clone_uri': _('Invalid repository URL. It must be a '
-                                   'valid http, https, ssh, svn+http or svn+https URL'),
+                                   'valid http, https, or ssh URL'),
         }
 
         def _validate_python(self, value, state):
@@ -452,16 +451,15 @@
             return value
 
         def _validate_python(self, value, state):
-            gr = RepoGroup.get(value)
+            gr = db.RepoGroup.get(value)
             gr_name = gr.group_name if gr is not None else None # None means ROOT location
 
             # create repositories with write permission on group is set to true
-            create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')()
             group_admin = HasRepoGroupPermissionLevel('admin')(gr_name,
                                                                'can write into group validator')
             group_write = HasRepoGroupPermissionLevel('write')(gr_name,
                                                                'can write into group validator')
-            forbidden = not (group_admin or (group_write and create_on_write))
+            forbidden = not (group_admin or group_write)
             can_create_repos = HasPermissionAny('hg.admin', 'hg.create.repository')
             gid = (old_data['repo_group'].get('group_id')
                    if (old_data and 'repo_group' in old_data) else None)
@@ -502,7 +500,7 @@
             return value
 
         def _validate_python(self, value, state):
-            gr = RepoGroup.get(value)
+            gr = db.RepoGroup.get(value)
             gr_name = gr.group_name if gr is not None else None # None means ROOT location
 
             if can_create_in_root and gr is None:
@@ -567,8 +565,8 @@
                     t = {'u': 'user',
                          'g': 'users_group'
                     }[k[0]]
-                    if member_name == User.DEFAULT_USER_NAME:
-                        if str2bool(value.get('repo_private')):
+                    if member_name == db.User.DEFAULT_USER_NAME:
+                        if asbool(value.get('repo_private')):
                             # set none for default when updating to
                             # private repo protects against form manipulation
                             v = EMPTY_PERM
@@ -581,13 +579,13 @@
             for k, v, t in perms_new:
                 try:
                     if t == 'user':
-                        _user_db = User.query() \
-                            .filter(User.active == True) \
-                            .filter(User.username == k).one()
+                        _user_db = db.User.query() \
+                            .filter(db.User.active == True) \
+                            .filter(db.User.username == k).one()
                     if t == 'users_group':
-                        _user_db = UserGroup.query() \
-                            .filter(UserGroup.users_group_active == True) \
-                            .filter(UserGroup.users_group_name == k).one()
+                        _user_db = db.UserGroup.query() \
+                            .filter(db.UserGroup.users_group_active == True) \
+                            .filter(db.UserGroup.users_group_name == k).one()
                 except Exception as e:
                     log.warning('Error validating %s permission %s', t, k)
 
@@ -649,7 +647,7 @@
 
         def _validate_python(self, value, state):
             if (old_data.get('email') or '').lower() != value:
-                user = User.get_by_email(value)
+                user = db.User.get_by_email(value)
                 if user is not None:
                     msg = self.message('email_taken', state)
                     raise formencode.Invalid(msg, value, state,
@@ -668,7 +666,7 @@
             return value.lower()
 
         def _validate_python(self, value, state):
-            user = User.get_by_email(value)
+            user = db.User.get_by_email(value)
             if user is None:
                 msg = self.message('non_existing_email', state, email=value)
                 raise formencode.Invalid(msg, value, state,
diff -r c387989f868f -r 3669e58f3002 kallithea/templates/admin/admin.html
--- a/kallithea/templates/admin/admin.html Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/templates/admin/admin.html Fri Oct 30 23:44:18 2020 +0100
@@ -27,7 +27,8 @@
-
diff -r c387989f868f -r 3669e58f3002 kallithea/templates/admin/user_groups/user_groups.html
--- a/kallithea/templates/admin/user_groups/user_groups.html Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/templates/admin/user_groups/user_groups.html Fri Oct 30 23:44:18 2020 +0100
@@ -29,7 +29,8 @@
''' % {
-                    'a_id': anchor_old_id,
-                    'olc': no_lineno_class if no_lineno else old_lineno_class,
-                    'colspan': 'colspan="2"' if no_lineno else ''
-                })
-
-                _html.append('''%(link)s''' % {
-                    'link': _link_to_if(not no_lineno, change['old_lineno'],
-                                        '#%s' % anchor_old)
-                })
-                _html.append('''''' % {
+                    'a_id': anchor_old_id,
+                    'olc': old_lineno_class,
+                })
+                _html.append('''''' % {
+                    'label': change['old_lineno'],
+                    'url': '#%s' % anchor_old,
+                })
+                _html.append('''''' % {
                     'a_id': anchor_new_id,
                     'nlc': new_lineno_class
                 })
-
-                _html.append('''%(link)s''' % {
-                    'link': _link_to_if(True, change['new_lineno'],
-                                        '#%s' % anchor_new)
+                _html.append('''''' % {
+                    'label': change['new_lineno'],
+                    'url': '#%s' % anchor_new,
+                })
+                _html.append('''''' % {
+                    'anchor': anchor,
+                    'olc': no_lineno_class,
                 })
                 _html.append('''
- ## EXTRA FOR JS <%block name="js_extra"/>
-
-
@@ -47,8 +48,8 @@
-                ${c.ignorews_url(request.GET)}
-                ${c.context_url(request.GET)}
+                ${h.ignore_whitespace_link(request.GET)}
+                ${h.increase_context_link(request.GET)}
@@ -138,8 +139,8 @@
     % else:
         ${ungettext('%s file changed with %s insertions and %s deletions', '%s files changed with %s insertions and %s deletions', len(file_diff_data)) % (len(file_diff_data), c.lines_added, c.lines_deleted)}:
     %endif
-
-
+
+
%for fid, url_fid, op, a_path, path, diff, stats in file_diff_data:
@@ -181,10 +182,11 @@
     ## main comment form and it status
     ${comment.comments()}
-
+
 ## FORM FOR MAKING JS ACTION AS CHANGESET COMMENTS
- - +
diff -r c387989f868f -r 3669e58f3002 kallithea/templates/changeset/changeset_file_comment.html
--- a/kallithea/templates/changeset/changeset_file_comment.html Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/templates/changeset/changeset_file_comment.html Fri Oct 30 23:44:18 2020 +0100
@@ -192,7 +192,8 @@
- %endif - - - - - - - + + + +
diff -r c387989f868f -r 3669e58f3002 kallithea/templates/search/search.html
--- a/kallithea/templates/search/search.html Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/templates/search/search.html Fri Oct 30 23:44:18 2020 +0100
@@ -56,7 +56,6 @@
             ${h.select('type',c.cur_type,[('content',_('File contents')),
                                           ('commit',_('Commit messages')),
                                           ('path',_('File names')),
-                                          ##('repository',_('Repository names')),
                                          ], class_='form-control')}
@@ -81,8 +80,6 @@
         <%include file='search_path.html'/>
     %elif c.cur_type == 'commit':
         <%include file='search_commit.html'/>
-    %elif c.cur_type == 'repository':
-        <%include file='search_repository.html'/>
     %endif
diff -r c387989f868f -r 3669e58f3002 kallithea/templates/search/search_repository.html
diff -r c387989f868f -r 3669e58f3002 kallithea/templates/summary/statistics.html
--- a/kallithea/templates/summary/statistics.html Wed Oct 28 14:58:18 2020 +0100
+++ b/kallithea/templates/summary/statistics.html Fri Oct 30 23:44:18 2020 +0100
@@ -22,7 +22,7 @@
 <%def name="main()">
 ${self.repo_context_bar('summary')}
-
+
${self.breadcrumbs()}
@@ -51,7 +51,8 @@
- - %if c.show_stats: -