changeset 499:ca41d544dbdf rhodecode-0.0.0.8.3

Merge with 6aa7db1c083a1384ebff5c2bb3c943a035bb310d - celery branch
author Marcin Kuzminski <marcin@python-works.com>
date Thu, 23 Sep 2010 01:23:13 +0200
parents e94f4e54dc03 (current diff) 6aa7db1c083a (diff)
children 5a650ddd50ab
files pylons_app/lib/indexers/pidlock.py tests.ini
diffstat 64 files changed, 2332 insertions(+), 749 deletions(-)
--- a/README.rst	Sat Sep 11 03:35:33 2010 +0200
+++ b/README.rst	Thu Sep 23 01:23:13 2010 +0200
@@ -11,9 +11,12 @@
 - full permissions per project read/write/admin access even on mercurial request
 - mako templates let you customize the look and feel of the application.
 - diffs annotations and source code all colored by pygments.
-- mercurial branch graph and yui-flot powered graphs
+- mercurial branch graph and yui-flot powered graphs with zooming
 - admin interface for performing user/permission management as well as repository
  management. 
+- full text search of source code with indexing daemons using whoosh
+  (no external search server required, all in one application)
+- async tasks for speed and performance using celery (works without them too)
 - Additional settings for mercurial web, (hooks editable from admin
   panel !) also manage paths, archive, remote messages  
 - backup scripts can do backup of whole app and send it over scp to desired location
@@ -27,11 +30,11 @@
 **Incoming**
 
 - code review based on hg-review (when it's stable)
-- git support (when vcs can handle it)
-- full text search of source codes with indexing daemons using whoosh
-  (no external search servers required all in one application)
-- manage hg ui() per repo, add hooks settings, per repo, and not globally
-- other cools stuff that i can figure out
+- git support (when vcs can handle it - almost there!)
+- commit-based wikis
+- in-server forks
+- cloning from remote repositories into hg-app
+- other cool stuff that I can figure out (or you can help me figure out)
 
 .. note::
    This software is still in beta mode. 
@@ -47,10 +50,10 @@
    
 - create new virtualenv and activate it - highly recommend that you use separate
   virtual-env for whole application
-- download hg app from default (not demo) branch from bitbucket and run 
+- download hg-app from the default branch on bitbucket and run 
   'python setup.py install' this will install all required dependencies needed
 - run paster setup-app production.ini it should create all needed tables 
-  and an admin account. 
+  and an admin account; make sure you specify the correct path to your repositories. 
 - remember that the given path for mercurial repositories must be write 
   accessible for the application
 - run paster serve development.ini - or you can use manage-hg_app script.
@@ -58,4 +61,9 @@
 - use admin account you created to login.
 - default permissions on each repository is read, and owner is admin. So remember
   to update these.
+- in order to use the full power of async tasks, you must install a message broker,
+  preferably rabbitmq, and start the celeryd daemon. The app should then gain some
+  speed. For installation instructions you can visit:
+  http://ask.github.com/celery/getting-started/index.html. All needed configs are
+  inside hg-app, i.e. celeryconfig.py
      
\ No newline at end of file
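
For reference, the broker bootstrap mentioned in the new install step above appears verbatim as comments at the bottom of the new celeryconfig.py later in this changeset; condensed here, with the worker start command being an assumption about the celery CLI of that era (run it from the hg-app root so celeryconfig.py is importable):

    rabbitmqctl add_user rabbitmq qweqwe
    rabbitmqctl add_vhost rabbitmqhost
    rabbitmqctl set_permissions -p rabbitmqhost rabbitmq ".*" ".*" ".*"
    celeryd --loglevel=INFO
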
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/celeryconfig.py	Thu Sep 23 01:23:13 2010 +0200
@@ -0,0 +1,74 @@
+# List of modules to import when celery starts.
+import sys
+import os
+import ConfigParser
+root = os.getcwd()
+
+PYLONS_CONFIG_NAME = 'development.ini'
+
+sys.path.append(root)
+config = ConfigParser.ConfigParser({'here':root})
+config.read('%s/%s' % (root, PYLONS_CONFIG_NAME))
+PYLONS_CONFIG = config
+
+CELERY_IMPORTS = ("pylons_app.lib.celerylib.tasks",)
+
+## Result store settings.
+CELERY_RESULT_BACKEND = "database"
+CELERY_RESULT_DBURI = dict(config.items('app:main'))['sqlalchemy.db1.url']
+CELERY_RESULT_SERIALIZER = 'json'
+
+
+BROKER_CONNECTION_MAX_RETRIES = 30
+
+## Broker settings.
+BROKER_HOST = "localhost"
+BROKER_PORT = 5672
+BROKER_VHOST = "rabbitmqhost"
+BROKER_USER = "rabbitmq"
+BROKER_PASSWORD = "qweqwe"
+
+## Worker settings
+## If you're doing mostly I/O you can have more processes,
+## but if mostly spending CPU, try to keep it close to the
+## number of CPUs on your machine. If not set, the number of CPUs/cores
+## available will be used.
+CELERYD_CONCURRENCY = 2
+# CELERYD_LOG_FILE = "celeryd.log"
+CELERYD_LOG_LEVEL = "DEBUG"
+CELERYD_MAX_TASKS_PER_CHILD = 1
+
+#Tasks will never be sent to the queue, but executed locally instead.
+CELERY_ALWAYS_EAGER = False
+
+#===============================================================================
+# EMAIL SETTINGS
+#===============================================================================
+pylons_email_config = dict(config.items('DEFAULT'))
+
+CELERY_SEND_TASK_ERROR_EMAILS = True
+
+#List of (name, email_address) tuples for the admins that should receive error e-mails.
+ADMINS = [('Administrator', pylons_email_config.get('email_to'))]
+
+#The e-mail address this worker sends e-mails from. Default is "celery@localhost".
+SERVER_EMAIL = pylons_email_config.get('error_email_from')
+
+#The mail server to use. Default is "localhost".
+MAIL_HOST = pylons_email_config.get('smtp_server')
+
+#Username (if required) to log on to the mail server with.
+MAIL_HOST_USER = pylons_email_config.get('smtp_username')
+
+#Password (if required) to log on to the mail server with.
+MAIL_HOST_PASSWORD = pylons_email_config.get('smtp_password')
+
+MAIL_PORT = pylons_email_config.get('smtp_port')
+
+
+#===============================================================================
+# INSTRUCTIONS FOR RABBITMQ
+#===============================================================================
+# rabbitmqctl add_user rabbitmq qweqwe
+# rabbitmqctl add_vhost rabbitmqhost
+# rabbitmqctl set_permissions -p rabbitmqhost rabbitmq ".*" ".*" ".*"
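
The new celeryconfig.py above couples celery to the Pylons ini by reading it with ConfigParser relative to the current working directory, so the worker has to be started from the hg-app root. A minimal sanity check, not part of the changeset, that mirrors the same lookups:

import os
import ConfigParser

root = os.getcwd()  # must be the hg-app root, just as when celeryd starts
config = ConfigParser.ConfigParser({'here': root})
config.read('%s/%s' % (root, 'development.ini'))

# the same keys celeryconfig.py resolves for the result store and error mails
print dict(config.items('app:main'))['sqlalchemy.db1.url']
print dict(config.items('DEFAULT')).get('email_to')
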
--- a/development.ini	Sat Sep 11 03:35:33 2010 +0200
+++ b/development.ini	Thu Sep 23 01:23:13 2010 +0200
@@ -1,32 +1,37 @@
 ################################################################################
 ################################################################################
-# pylons_app - Pylons environment configuration                                #
+# hg-app - Pylons environment configuration                                    #
 #                                                                              # 
 # The %(here)s variable will be replaced with the parent directory of this file#
 ################################################################################
 
 [DEFAULT]
 debug = true
-############################################
-## Uncomment and replace with the address ##
-## which should receive any error reports ##
-############################################
+################################################################################
+## Uncomment and replace with the address which should receive                ## 
+## any error reports after application crash                                  ##
+## Additionally those settings will be used by hg-app mailing system          ##
+################################################################################
 #email_to = admin@localhost
+#error_email_from = paste_error@localhost
+#app_email_from = hg-app-noreply@localhost
+#error_message =
+
 #smtp_server = mail.server.com
-#error_email_from = paste_error@localhost
 #smtp_username = 
-#smtp_password = 
-#error_message = 'mercurial crash !'
+#smtp_password =
+#smtp_port = 
+#smtp_use_tls = 
 
 [server:main]
 ##nr of threads to spawn
 threadpool_workers = 5
 
 ##max request before
-threadpool_max_requests = 2
+threadpool_max_requests = 6
 
 ##option to use threads of process
-use_threadpool = true
+use_threadpool = false
 
 use = egg:Paste#http
 host = 127.0.0.1
@@ -56,7 +61,7 @@
 ###       BEAKER SESSION        ####
 ####################################
 ## Type of storage used for the session, current types are 
-## “dbm”, “file”, “memcached”, “database”, and “memory”. 
+## "dbm", "file", "memcached", "database", and "memory". 
 ## The storage uses the Container API 
 ##that is also used by the cache system.
 beaker.session.type = file
--- a/production.ini	Sat Sep 11 03:35:33 2010 +0200
+++ b/production.ini	Thu Sep 23 01:23:13 2010 +0200
@@ -1,28 +1,33 @@
 ################################################################################
 ################################################################################
-# pylons_app - Pylons environment configuration                                #
+# hg-app - Pylons environment configuration                                    #
 #                                                                              # 
 # The %(here)s variable will be replaced with the parent directory of this file#
 ################################################################################
 
 [DEFAULT]
 debug = true
-############################################
-## Uncomment and replace with the address ##
-## which should receive any error reports ##
-############################################
+################################################################################
+## Uncomment and replace with the address which should receive                ## 
+## any error reports after application crash                                  ##
+## Additionally those settings will be used by hg-app mailing system          ##
+################################################################################
 #email_to = admin@localhost
+#error_email_from = paste_error@localhost
+#app_email_from = hg-app-noreply@localhost
+#error_message =
+
 #smtp_server = mail.server.com
-#error_email_from = paste_error@localhost
 #smtp_username = 
 #smtp_password = 
-#error_message = 'mercurial crash !'
+#smtp_port = 
+#smtp_use_tls = false
 
 [server:main]
 ##nr of threads to spawn
 threadpool_workers = 5
 
-##max request before
+##max request before thread respawn
 threadpool_max_requests = 2
 
 ##option to use threads of process
--- a/pylons_app/__init__.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/__init__.py	Thu Sep 23 01:23:13 2010 +0200
@@ -20,10 +20,11 @@
 """
 Created on April 9, 2010
 Hg app, a web based mercurial repository managment based on pylons
+versioning implementation: http://semver.org/
 @author: marcink
 """
 
-VERSION = (0, 8, 2, 'beta')
+VERSION = (0, 8, 3, 'beta')
 
 __version__ = '.'.join((str(each) for each in VERSION[:4]))
 
--- a/pylons_app/config/deployment.ini_tmpl	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/config/deployment.ini_tmpl	Thu Sep 23 01:23:13 2010 +0200
@@ -7,16 +7,21 @@
 
 [DEFAULT]
 debug = true
-############################################
-## Uncomment and replace with the address ##
-## which should receive any error reports ##
-############################################
+################################################################################
+## Uncomment and replace with the address which should receive                ## 
+## any error reports after application crash                                  ##
+## Additionally those settings will be used by hg-app mailing system          ##
+################################################################################
 #email_to = admin@localhost
+#error_email_from = paste_error@localhost
+#app_email_from = hg-app-noreply@localhost
+#error_message =
+
 #smtp_server = mail.server.com
-#error_email_from = paste_error@localhost
 #smtp_username = 
 #smtp_password = 
-#error_message = 'hp-app crash !'
+#smtp_port = 
+#smtp_use_tls = false
 
 [server:main]
 ##nr of threads to spawn
--- a/pylons_app/config/environment.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/config/environment.py	Thu Sep 23 01:23:13 2010 +0200
@@ -49,7 +49,12 @@
 
     #sets the c attribute access when don't existing attribute are accessed
     config['pylons.strict_tmpl_context'] = True
-    test = os.path.split(config['__file__'])[-1] == 'tests.ini'
+    test = os.path.split(config['__file__'])[-1] == 'test.ini'
+    if test:
+        from pylons_app.lib.utils import create_test_env, create_test_index
+        create_test_env('/tmp', config)
+        create_test_index('/tmp/*', True)
+        
     #MULTIPLE DB configs
     # Setup the SQLAlchemy database engine
     if config['debug'] and not test:
--- a/pylons_app/config/routing.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/config/routing.py	Thu Sep 23 01:23:13 2010 +0200
@@ -110,10 +110,11 @@
     #SEARCH
     map.connect('search', '/_admin/search', controller='search')
     
-    #LOGIN/LOGOUT
+    #LOGIN/LOGOUT/REGISTER/PASSWORD RESET
     map.connect('login_home', '/_admin/login', controller='login')
     map.connect('logout_home', '/_admin/logout', controller='login', action='logout')
     map.connect('register', '/_admin/register', controller='login', action='register')
+    map.connect('reset_password', '/_admin/password_reset', controller='login', action='password_reset')
         
     #FEEDS
     map.connect('rss_feed_home', '/{repo_name:.*}/feed/rss',
@@ -129,7 +130,7 @@
                 controller='changeset', revision='tip',
                 conditions=dict(function=check_repo))
     map.connect('raw_changeset_home', '/{repo_name:.*}/raw-changeset/{revision}',
-                controller='changeset',action='raw_changeset', revision='tip',
+                controller='changeset', action='raw_changeset', revision='tip',
                 conditions=dict(function=check_repo))
     map.connect('summary_home', '/{repo_name:.*}/summary',
                 controller='summary', conditions=dict(function=check_repo))
@@ -147,9 +148,12 @@
     map.connect('files_diff_home', '/{repo_name:.*}/diff/{f_path:.*}',
                 controller='files', action='diff', revision='tip', f_path='',
                 conditions=dict(function=check_repo))
-    map.connect('files_raw_home', '/{repo_name:.*}/rawfile/{revision}/{f_path:.*}',
+    map.connect('files_rawfile_home', '/{repo_name:.*}/rawfile/{revision}/{f_path:.*}',
                 controller='files', action='rawfile', revision='tip', f_path='',
                 conditions=dict(function=check_repo))
+    map.connect('files_raw_home', '/{repo_name:.*}/raw/{revision}/{f_path:.*}',
+                controller='files', action='raw', revision='tip', f_path='',
+                conditions=dict(function=check_repo))
     map.connect('files_annotate_home', '/{repo_name:.*}/annotate/{revision}/{f_path:.*}',
                 controller='files', action='annotate', revision='tip', f_path='',
                 conditions=dict(function=check_repo))    
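
The hunk above splits raw file access into two routes: the renamed files_rawfile_home keeps the old /rawfile/ attachment behaviour, while the new files_raw_home serves content inline via the new raw action added to files.py below. An illustrative sketch of the URLs they generate, using the same url() helper the controllers in this changeset import from pylons (repository and file names are made up):

from pylons import url

# -> '/myrepo/rawfile/tip/setup.py'  (rawfile action: sent as an attachment)
url('files_rawfile_home', repo_name='myrepo', revision='tip', f_path='setup.py')

# -> '/myrepo/raw/tip/setup.py'      (raw action: returned inline as text/plain)
url('files_raw_home', repo_name='myrepo', revision='tip', f_path='setup.py')
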
--- a/pylons_app/controllers/admin/settings.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/controllers/admin/settings.py	Thu Sep 23 01:23:13 2010 +0200
@@ -38,6 +38,7 @@
     ApplicationUiSettingsForm
 from pylons_app.model.hg_model import HgModel
 from pylons_app.model.user_model import UserModel
+from pylons_app.lib.celerylib import tasks, run_task
 import formencode
 import logging
 import traceback
@@ -102,6 +103,12 @@
             invalidate_cache('cached_repo_list')
             h.flash(_('Repositories sucessfully rescanned'), category='success')            
         
+        if setting_id == 'whoosh':
+            repo_location = get_hg_ui_settings()['paths_root_path']
+            full_index = request.POST.get('full_index', False)
+            task = run_task(tasks.whoosh_index, repo_location, full_index)
+            
+            h.flash(_('Whoosh reindex task scheduled'), category='success')
         if setting_id == 'global':
             
             application_form = ApplicationSettingsForm()()
@@ -253,7 +260,8 @@
         # url('admin_settings_my_account_update', id=ID)
         user_model = UserModel()
         uid = c.hg_app_user.user_id
-        _form = UserForm(edit=True, old_data={'user_id':uid})()
+        _form = UserForm(edit=True, old_data={'user_id':uid,
+                                              'email':c.hg_app_user.email})()
         form_result = {}
         try:
             form_result = _form.to_python(dict(request.POST))
@@ -262,7 +270,11 @@
                     category='success')
                            
         except formencode.Invalid as errors:
-            #c.user = self.sa.query(User).get(c.hg_app_user.user_id)
+            c.user = self.sa.query(User).get(c.hg_app_user.user_id)
+            c.user_repos = []
+            for repo in c.cached_repo_list.values():
+                if repo.dbrepo.user.username == c.user.username:
+                    c.user_repos.append(repo)            
             return htmlfill.render(
                 render('admin/users/user_edit_my_account.html'),
                 defaults=errors.value,
--- a/pylons_app/controllers/admin/users.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/controllers/admin/users.py	Thu Sep 23 01:23:13 2010 +0200
@@ -98,7 +98,10 @@
         #           method='put')
         # url('user', id=ID)
         user_model = UserModel()
-        _form = UserForm(edit=True, old_data={'user_id':id})()
+        c.user = user_model.get_user(id)
+        
+        _form = UserForm(edit=True, old_data={'user_id':id,
+                                              'email':c.user.email})()
         form_result = {}
         try:
             form_result = _form.to_python(dict(request.POST))
@@ -106,7 +109,6 @@
             h.flash(_('User updated succesfully'), category='success')
                            
         except formencode.Invalid as errors:
-            c.user = user_model.get_user(id)
             return htmlfill.render(
                 render('admin/users/user_edit.html'),
                 defaults=errors.value,
@@ -148,6 +150,8 @@
         """GET /users/id/edit: Form to edit an existing item"""
         # url('edit_user', id=ID)
         c.user = self.sa.query(User).get(id)
+        if not c.user:
+            return redirect(url('users'))
         if c.user.username == 'default':
             h.flash(_("You can't edit this user since it's" 
               " crucial for entire application"), category='warning')
--- a/pylons_app/controllers/files.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/controllers/files.py	Thu Sep 23 01:23:13 2010 +0200
@@ -45,6 +45,7 @@
                                    'repository.admin')       
     def __before__(self):
         super(FilesController, self).__before__()
+        c.file_size_limit = 250 * 1024 #limit of file size to display
 
     def index(self, repo_name, revision, f_path):
         hg_model = HgModel()
@@ -76,7 +77,6 @@
                              revision=next_rev, f_path=f_path)   
                     
             c.changeset = repo.get_changeset(revision)
-
                         
             c.cur_rev = c.changeset.raw_id
             c.rev_nr = c.changeset.revision
@@ -96,6 +96,14 @@
         response.content_disposition = 'attachment; filename=%s' \
                                                     % f_path.split('/')[-1] 
         return file_node.content
+
+    def raw(self, repo_name, revision, f_path):
+        hg_model = HgModel()
+        c.repo = hg_model.get_repo(c.repo_name)
+        file_node = c.repo.get_changeset(revision).get_node(f_path)
+        response.content_type = 'text/plain'
+        
+        return file_node.content
     
     def annotate(self, repo_name, revision, f_path):
         hg_model = HgModel()
--- a/pylons_app/controllers/login.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/controllers/login.py	Thu Sep 23 01:23:13 2010 +0200
@@ -28,7 +28,9 @@
 from pylons.controllers.util import abort, redirect
 from pylons_app.lib.auth import AuthUser, HasPermissionAnyDecorator
 from pylons_app.lib.base import BaseController, render
-from pylons_app.model.forms import LoginForm, RegisterForm
+import pylons_app.lib.helpers as h 
+from pylons.i18n.translation import _
+from pylons_app.model.forms import LoginForm, RegisterForm, PasswordResetForm
 from pylons_app.model.user_model import UserModel
 import formencode
 import logging
@@ -42,7 +44,7 @@
 
     def index(self):
         #redirect if already logged in
-        c.came_from = request.GET.get('came_from',None)
+        c.came_from = request.GET.get('came_from', None)
         
         if c.hg_app_user.is_authenticated:
             return redirect(url('hg_home'))
@@ -82,7 +84,7 @@
                         
         return render('/login.html')
     
-    @HasPermissionAnyDecorator('hg.admin', 'hg.register.auto_activate', 
+    @HasPermissionAnyDecorator('hg.admin', 'hg.register.auto_activate',
                                'hg.register.manual_activate')
     def register(self):
         user_model = UserModel()
@@ -99,6 +101,8 @@
                 form_result = register_form.to_python(dict(request.POST))
                 form_result['active'] = c.auto_active
                 user_model.create_registration(form_result)
+                h.flash(_('You have successfully registered into hg-app'),
+                            category='success')                
                 return redirect(url('login_home'))
                                
             except formencode.Invalid as errors:
@@ -110,7 +114,29 @@
                     encoding="UTF-8")
         
         return render('/register.html')
-    
+
+    def password_reset(self):
+        user_model = UserModel()
+        if request.POST:
+                
+            password_reset_form = PasswordResetForm()()
+            try:
+                form_result = password_reset_form.to_python(dict(request.POST))
+                user_model.reset_password(form_result)
+                h.flash(_('Your new password was sent'),
+                            category='success')                 
+                return redirect(url('login_home'))
+                               
+            except formencode.Invalid as errors:
+                return htmlfill.render(
+                    render('/password_reset.html'),
+                    defaults=errors.value,
+                    errors=errors.error_dict or {},
+                    prefix_error=False,
+                    encoding="UTF-8")
+        
+        return render('/password_reset.html')
+        
     def logout(self):
         session['hg_app_user'] = AuthUser()
         session.save()
--- a/pylons_app/controllers/search.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/controllers/search.py	Thu Sep 23 01:23:13 2010 +0200
@@ -26,10 +26,9 @@
 from pylons.controllers.util import abort, redirect
 from pylons_app.lib.auth import LoginRequired
 from pylons_app.lib.base import BaseController, render
-from pylons_app.lib.indexers import ANALYZER, IDX_LOCATION, SCHEMA, IDX_NAME
-from webhelpers.html.builder import escape
-from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter, \
-    ContextFragmenter
+from pylons_app.lib.indexers import IDX_LOCATION, SCHEMA, IDX_NAME, ResultWrapper
+from webhelpers.paginate import Page
+from webhelpers.util import update_params
 from pylons.i18n.translation import _
 from whoosh.index import open_dir, EmptyIndexError
 from whoosh.qparser import QueryParser, QueryParserError
@@ -45,69 +44,55 @@
     def __before__(self):
         super(SearchController, self).__before__()    
 
-
     def index(self):
         c.formated_results = []
         c.runtime = ''
-        search_items = set()
         c.cur_query = request.GET.get('q', None)
         if c.cur_query:
             cur_query = c.cur_query.lower()
         
-        
         if c.cur_query:
+            p = int(request.params.get('page', 1))
+            highlight_items = set()
             try:
                 idx = open_dir(IDX_LOCATION, indexname=IDX_NAME)
                 searcher = idx.searcher()
-            
+
                 qp = QueryParser("content", schema=SCHEMA)
                 try:
                     query = qp.parse(unicode(cur_query))
                     
                     if isinstance(query, Phrase):
-                        search_items.update(query.words)
+                        highlight_items.update(query.words)
                     else:
                         for i in query.all_terms():
-                            search_items.add(i[1])
-                        
-                    log.debug(query)
-                    log.debug(search_items)
-                    results = searcher.search(query)
-                    c.runtime = '%s results (%.3f seconds)' \
-                    % (len(results), results.runtime)
+                            if i[0] == 'content':
+                                highlight_items.add(i[1])
 
-                    analyzer = ANALYZER
-                    formatter = HtmlFormatter('span',
-                        between='\n<span class="break">...</span>\n') 
-                    
-                    #how the parts are splitted within the same text part
-                    fragmenter = SimpleFragmenter(200)
-                    #fragmenter = ContextFragmenter(search_items)
+                    matcher = query.matcher(searcher)
                     
-                    for res in results:
-                        d = {}
-                        d.update(res)
-                        hl = highlight(escape(res['content']), search_items,
-                                                         analyzer=analyzer,
-                                                         fragmenter=fragmenter,
-                                                         formatter=formatter,
-                                                         top=5)
-                        f_path = res['path'][res['path'].find(res['repository']) \
-                                             + len(res['repository']):].lstrip('/')
-                        d.update({'content_short':hl,
-                                  'f_path':f_path})
-                        #del d['content']
-                        c.formated_results.append(d)
-                                                    
+                    log.debug(query)
+                    log.debug(highlight_items)
+                    results = searcher.search(query)
+                    res_ln = len(results)
+                    c.runtime = '%s results (%.3f seconds)' \
+                    % (res_ln, results.runtime)
+                    
+                    def url_generator(**kw):
+                        return update_params("?q=%s" % c.cur_query, **kw)
+
+                    c.formated_results = Page(
+                                ResultWrapper(searcher, matcher, highlight_items),
+                                page=p, item_count=res_ln,
+                                items_per_page=10, url=url_generator)
+                           
                 except QueryParserError:
                     c.runtime = _('Invalid search query. Try quoting it.')
-
+                searcher.close()
             except (EmptyIndexError, IOError):
                 log.error(traceback.format_exc())
                 log.error('Empty Index data')
                 c.runtime = _('There is no index to search in. Please run whoosh indexer')
-            
-
-                
+                        
         # Return a rendered template
         return render('/search/search.html')
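
The rewritten controller hands result pagination to webhelpers' Page, and url_generator keeps the current query on every page link through update_params. A tiny illustration of what that helper is expected to produce (exact encoding and parameter order are an assumption):

from webhelpers.util import update_params

print update_params("?q=import os", page=2)   # -> something like '?q=import+os&page=2'
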
--- a/pylons_app/controllers/summary.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/controllers/summary.py	Thu Sep 23 01:23:13 2010 +0200
@@ -22,15 +22,17 @@
 summary controller for pylons
 @author: marcink
 """
-from datetime import datetime, timedelta
-from pylons import tmpl_context as c, request
+from pylons import tmpl_context as c, request, url
 from pylons_app.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 from pylons_app.lib.base import BaseController, render
-from pylons_app.lib.helpers import person
 from pylons_app.lib.utils import OrderedDict
 from pylons_app.model.hg_model import HgModel
+from pylons_app.model.db import Statistics
+from webhelpers.paginate import Page
+from pylons_app.lib.celerylib import run_task
+from pylons_app.lib.celerylib.tasks import get_commits_stats
+from datetime import datetime, timedelta
 from time import mktime
-from webhelpers.paginate import Page
 import calendar
 import logging
 
@@ -62,78 +64,33 @@
         c.repo_branches = OrderedDict()
         for name, hash in c.repo_info.branches.items()[:10]:
             c.repo_branches[name] = c.repo_info.get_changeset(hash)
+        
+        td = datetime.today() + timedelta(days=1) 
+        y, m, d = td.year, td.month, td.day
+        
+        ts_min_y = mktime((y - 1, (td - timedelta(days=calendar.mdays[m])).month,
+                            d, 0, 0, 0, 0, 0, 0,))
+        ts_min_m = mktime((y, (td - timedelta(days=calendar.mdays[m])).month,
+                            d, 0, 0, 0, 0, 0, 0,))
+        
+        ts_max_y = mktime((y, m, d, 0, 0, 0, 0, 0, 0,))
+            
+        run_task(get_commits_stats, c.repo_info.name, ts_min_y, ts_max_y)
+        c.ts_min = ts_min_m
+        c.ts_max = ts_max_y
+        
+        
+        stats = self.sa.query(Statistics)\
+            .filter(Statistics.repository == c.repo_info.dbrepo)\
+            .scalar()
 
-        c.commit_data = self.__get_commit_stats(c.repo_info)
+        if stats:
+            c.commit_data = stats.commit_activity
+            c.overview_data = stats.commit_activity_combined
+        else:
+            import json
+            c.commit_data = json.dumps({})
+            c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 0] ])
         
         return render('summary/summary.html')
 
-
-
-    def __get_commit_stats(self, repo):
-        aggregate = OrderedDict()
-        
-        #graph range
-        td = datetime.today() + timedelta(days=1) 
-        y, m, d = td.year, td.month, td.day
-        c.ts_min = mktime((y, (td - timedelta(days=calendar.mdays[m])).month,
-                            d, 0, 0, 0, 0, 0, 0,))
-        c.ts_max = mktime((y, m, d, 0, 0, 0, 0, 0, 0,))
-        
-        def author_key_cleaner(k):
-            k = person(k)
-            k = k.replace('"', "'") #for js data compatibilty
-            return k
-                
-        for cs in repo[:200]:#added limit 200 until fix #29 is made
-            k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
-                              cs.date.timetuple()[2])
-            timetupple = [int(x) for x in k.split('-')]
-            timetupple.extend([0 for _ in xrange(6)])
-            k = mktime(timetupple)
-            if aggregate.has_key(author_key_cleaner(cs.author)):
-                if aggregate[author_key_cleaner(cs.author)].has_key(k):
-                    aggregate[author_key_cleaner(cs.author)][k]["commits"] += 1
-                    aggregate[author_key_cleaner(cs.author)][k]["added"] += len(cs.added)
-                    aggregate[author_key_cleaner(cs.author)][k]["changed"] += len(cs.changed)
-                    aggregate[author_key_cleaner(cs.author)][k]["removed"] += len(cs.removed)
-                    
-                else:
-                    #aggregate[author_key_cleaner(cs.author)].update(dates_range)
-                    if k >= c.ts_min and k <= c.ts_max:
-                        aggregate[author_key_cleaner(cs.author)][k] = {}
-                        aggregate[author_key_cleaner(cs.author)][k]["commits"] = 1
-                        aggregate[author_key_cleaner(cs.author)][k]["added"] = len(cs.added)
-                        aggregate[author_key_cleaner(cs.author)][k]["changed"] = len(cs.changed)
-                        aggregate[author_key_cleaner(cs.author)][k]["removed"] = len(cs.removed) 
-                                            
-            else:
-                if k >= c.ts_min and k <= c.ts_max:
-                    aggregate[author_key_cleaner(cs.author)] = OrderedDict()
-                    #aggregate[author_key_cleaner(cs.author)].update(dates_range)
-                    aggregate[author_key_cleaner(cs.author)][k] = {}
-                    aggregate[author_key_cleaner(cs.author)][k]["commits"] = 1
-                    aggregate[author_key_cleaner(cs.author)][k]["added"] = len(cs.added)
-                    aggregate[author_key_cleaner(cs.author)][k]["changed"] = len(cs.changed)
-                    aggregate[author_key_cleaner(cs.author)][k]["removed"] = len(cs.removed)                 
-        
-        d = ''
-        tmpl0 = u""""%s":%s"""
-        tmpl1 = u"""{label:"%s",data:%s,schema:["commits"]},"""
-        for author in aggregate:
-            
-            d += tmpl0 % (author,
-                          tmpl1 \
-                          % (author,
-                        [{"time":x,
-                          "commits":aggregate[author][x]['commits'],
-                          "added":aggregate[author][x]['added'],
-                          "changed":aggregate[author][x]['changed'],
-                          "removed":aggregate[author][x]['removed'],
-                          } for x in aggregate[author]]))
-        if d == '':
-            d = '"%s":{label:"%s",data:[[0,1],]}' \
-                % (author_key_cleaner(repo.contact),
-                   author_key_cleaner(repo.contact))
-        return d
-
-
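
With the inline __get_commit_stats gone, the controller above only derives a time window, schedules the get_commits_stats task defined later in this changeset, and renders whatever the Statistics row already holds. A worked sketch of the window arithmetic, assuming "today" is the commit date (2010-09-23):

import calendar
from datetime import datetime, timedelta
from time import mktime

td = datetime(2010, 9, 23) + timedelta(days=1)   # "tomorrow": 2010-09-24
y, m, d = td.year, td.month, td.day

# lower bound of the flot graph: about one month back (calendar.mdays[9] == 30), 2010-08-24
ts_min_m = mktime((y, (td - timedelta(days=calendar.mdays[m])).month, d, 0, 0, 0, 0, 0, 0))
# lower bound handed to get_commits_stats: the same day a year earlier, 2009-08-24
ts_min_y = mktime((y - 1, (td - timedelta(days=calendar.mdays[m])).month, d, 0, 0, 0, 0, 0, 0))
# shared upper bound: tomorrow at midnight
ts_max_y = mktime((y, m, d, 0, 0, 0, 0, 0, 0))
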
--- a/pylons_app/lib/auth.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/lib/auth.py	Thu Sep 23 01:23:13 2010 +0200
@@ -34,9 +34,36 @@
 import bcrypt
 from decorator import decorator
 import logging
+import random
 
 log = logging.getLogger(__name__) 
 
+class PasswordGenerator(object):
+    """This is a simple class for generating password from
+        different sets of characters
+        usage:
+        passwd_gen = PasswordGenerator()
+        #print 8-letter password containing only big and small letters of alphabet
+        print passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)        
+    """
+    ALPHABETS_NUM = r'''1234567890'''#[0]
+    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''#[1]
+    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''#[2]
+    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''    #[3]
+    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM + ALPHABETS_SPECIAL#[4]
+    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM#[5]
+    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
+    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM#[6]
+    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM#[7]
+            
+    def __init__(self, passwd=''):
+        self.passwd = passwd
+
+    def gen_password(self, len, type):
+        self.passwd = ''.join([random.choice(type) for _ in xrange(len)])
+        return self.passwd
+
+    
 def get_crypt_password(password):
     """Cryptographic function used for password hashing based on sha1
     @param password: password to hash
@@ -231,9 +258,9 @@
 
             p = request.environ.get('PATH_INFO')
             if request.environ.get('QUERY_STRING'):
-                p+='?'+request.environ.get('QUERY_STRING')
-            log.debug('redirecting to login page with %s',p)                
-            return redirect(url('login_home',came_from=p))
+                p += '?' + request.environ.get('QUERY_STRING')
+            log.debug('redirecting to login page with %s', p)                
+            return redirect(url('login_home', came_from=p))
 
 class PermsDecorator(object):
     """Base class for decorators"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pylons_app/lib/celerylib/__init__.py	Thu Sep 23 01:23:13 2010 +0200
@@ -0,0 +1,66 @@
+from pylons_app.lib.pidlock import DaemonLock, LockHeld
+from vcs.utils.lazy import LazyProperty
+from decorator import decorator
+import logging
+import os
+import sys
+import traceback
+from hashlib import md5
+log = logging.getLogger(__name__)
+
+class ResultWrapper(object):
+    def __init__(self, task):
+        self.task = task
+        
+    @LazyProperty
+    def result(self):
+        return self.task
+
+def run_task(task, *args, **kwargs):
+    try:
+        t = task.delay(*args, **kwargs)
+        log.info('running task %s', t.task_id)
+        return t
+    except Exception, e:
+        print e
+        if getattr(e, 'errno', None) == 111:
+            log.debug('Unable to connect. Sync execution')
+        else:
+            log.error(traceback.format_exc())
+        #pure sync version
+        return ResultWrapper(task(*args, **kwargs))
+
+
+class LockTask(object):
+    """LockTask decorator"""
+    
+    def __init__(self, func):
+        self.func = func
+        
+    def __call__(self, func):
+        return decorator(self.__wrapper, func)
+    
+    def __wrapper(self, func, *fargs, **fkwargs):
+        params = []
+        params.extend(fargs)
+        params.extend(fkwargs.values())
+        lockkey = 'task_%s' % \
+           md5(str(self.func) + '-' + '-'.join(map(str, params))).hexdigest()
+        log.info('running task with lockkey %s', lockkey)
+        try:
+            l = DaemonLock(lockkey)
+            ret = func(*fargs, **fkwargs)
+            l.release()
+            return ret
+        except LockHeld:
+            log.info('LockHeld')
+            return 'Task with key %s already running' % lockkey   
+
+            
+            
+
+        
+        
+    
+    
+    
+  
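
run_task above is the seam that lets the rest of this changeset stay broker-agnostic: the call sites (the whoosh reindex button in settings.py, the commit stats in summary.py) look identical whether or not a worker is reachable, because a refused broker connection falls back to running the task in-process and wrapping the return value. A usage sketch mirroring those call sites (the repositories path is illustrative):

from pylons_app.lib.celerylib import run_task, tasks

task = run_task(tasks.whoosh_index, '/srv/repos/*', False)
# 'task' is a celery AsyncResult when the broker answered, or the ResultWrapper
# defined above when execution fell back to sync mode; both expose .result.
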
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pylons_app/lib/celerylib/tasks.py	Thu Sep 23 01:23:13 2010 +0200
@@ -0,0 +1,270 @@
+from celery.decorators import task
+from celery.task.sets import subtask
+from celeryconfig import PYLONS_CONFIG as config
+from pylons.i18n.translation import _
+from pylons_app.lib.celerylib import run_task, LockTask
+from pylons_app.lib.helpers import person
+from pylons_app.lib.smtp_mailer import SmtpMailer
+from pylons_app.lib.utils import OrderedDict
+from operator import itemgetter
+from vcs.backends.hg import MercurialRepository
+from time import mktime
+import traceback
+import json
+
+__all__ = ['whoosh_index', 'get_commits_stats',
+           'reset_user_password', 'send_email']
+
+def get_session():
+    from sqlalchemy import engine_from_config
+    from sqlalchemy.orm import sessionmaker, scoped_session
+    engine = engine_from_config(dict(config.items('app:main')), 'sqlalchemy.db1.')
+    sa = scoped_session(sessionmaker(bind=engine))
+    return sa
+
+def get_hg_settings():
+    from pylons_app.model.db import HgAppSettings
+    try:
+        sa = get_session()
+        ret = sa.query(HgAppSettings).all()
+    finally:
+        sa.remove()
+        
+    if not ret:
+        raise Exception('Could not get application settings !')
+    settings = {}
+    for each in ret:
+        settings['hg_app_' + each.app_settings_name] = each.app_settings_value    
+    
+    return settings
+
+def get_hg_ui_settings():
+    from pylons_app.model.db import HgAppUi
+    try:
+        sa = get_session()
+        ret = sa.query(HgAppUi).all()
+    finally:
+        sa.remove()
+        
+    if not ret:
+        raise Exception('Could not get application ui settings !')
+    settings = {}
+    for each in ret:
+        k = each.ui_key
+        v = each.ui_value
+        if k == '/':
+            k = 'root_path'
+        
+        if k.find('.') != -1:
+            k = k.replace('.', '_')
+        
+        if each.ui_section == 'hooks':
+            v = each.ui_active
+        
+        settings[each.ui_section + '_' + k] = v  
+    
+    return settings   
+
+@task
+def whoosh_index(repo_location, full_index):
+    log = whoosh_index.get_logger()
+    from pylons_app.lib.pidlock import DaemonLock
+    from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon, LockHeld
+    try:
+        l = DaemonLock()
+        WhooshIndexingDaemon(repo_location=repo_location)\
+            .run(full_index=full_index)
+        l.release()
+        return 'Done'
+    except LockHeld:
+        log.info('LockHeld')
+        return 'LockHeld'    
+
+
+@task
+@LockTask('get_commits_stats')
+def get_commits_stats(repo_name, ts_min_y, ts_max_y):
+    author_key_cleaner = lambda k: person(k).replace('"', "") #for js data compatibility
+        
+    from pylons_app.model.db import Statistics, Repository
+    log = get_commits_stats.get_logger()
+    commits_by_day_author_aggregate = {}
+    commits_by_day_aggregate = {}
+    repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
+    repo = MercurialRepository(repos_path + repo_name)
+
+    skip_date_limit = True
+    parse_limit = 350 #limit for single task changeset parsing
+    last_rev = 0
+    last_cs = None
+    timegetter = itemgetter('time')
+    
+    sa = get_session()
+    
+    dbrepo = sa.query(Repository)\
+        .filter(Repository.repo_name == repo_name).scalar()
+    cur_stats = sa.query(Statistics)\
+        .filter(Statistics.repository == dbrepo).scalar()
+    if cur_stats:
+        last_rev = cur_stats.stat_on_revision
+    
+    if last_rev == repo.revisions[-1]:
+        #pass silently without any work
+        return True
+    
+    if cur_stats:
+        commits_by_day_aggregate = OrderedDict(
+                                       json.loads(
+                                        cur_stats.commit_activity_combined))
+        commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
+    
+    for cnt, rev in enumerate(repo.revisions[last_rev:]):
+        last_cs = cs = repo.get_changeset(rev)
+        k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
+                          cs.date.timetuple()[2])
+        timetupple = [int(x) for x in k.split('-')]
+        timetupple.extend([0 for _ in xrange(6)])
+        k = mktime(timetupple)
+        if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
+            try:
+                l = [timegetter(x) for x in commits_by_day_author_aggregate\
+                        [author_key_cleaner(cs.author)]['data']]
+                time_pos = l.index(k)
+            except ValueError:
+                time_pos = False
+                
+            if time_pos >= 0 and time_pos is not False:
+                
+                datadict = commits_by_day_author_aggregate\
+                    [author_key_cleaner(cs.author)]['data'][time_pos]
+                
+                datadict["commits"] += 1
+                datadict["added"] += len(cs.added)
+                datadict["changed"] += len(cs.changed)
+                datadict["removed"] += len(cs.removed)
+                #print datadict
+                
+            else:
+                #print 'ELSE !!!!'
+                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
+                    
+                    datadict = {"time":k,
+                                "commits":1,
+                                "added":len(cs.added),
+                                "changed":len(cs.changed),
+                                "removed":len(cs.removed),
+                               }
+                    commits_by_day_author_aggregate\
+                        [author_key_cleaner(cs.author)]['data'].append(datadict)
+                                        
+        else:
+            #print k, 'nokey ADDING'
+            if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
+                commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
+                                    "label":author_key_cleaner(cs.author),
+                                    "data":[{"time":k,
+                                             "commits":1,
+                                             "added":len(cs.added),
+                                             "changed":len(cs.changed),
+                                             "removed":len(cs.removed),
+                                             }],
+                                    "schema":["commits"],
+                                    }               
+    
+#        #gather all data by day
+        if commits_by_day_aggregate.has_key(k):
+            commits_by_day_aggregate[k] += 1
+        else:
+            commits_by_day_aggregate[k] = 1
+        
+        if cnt >= parse_limit:
+            #don't fetch too much data since we could freeze the application
+            break
+
+    overview_data = []
+    for k, v in commits_by_day_aggregate.items():
+        overview_data.append([k, v])
+    overview_data = sorted(overview_data, key=itemgetter(0))
+        
+    if not commits_by_day_author_aggregate:
+        commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
+            "label":author_key_cleaner(repo.contact),
+            "data":[0, 1],
+            "schema":["commits"],
+        }
+
+    stats = cur_stats if cur_stats else Statistics()
+    stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
+    stats.commit_activity_combined = json.dumps(overview_data)
+    stats.repository = dbrepo
+    stats.stat_on_revision = last_cs.revision
+    stats.languages = json.dumps({'_TOTAL_':0, '':0})
+    
+    try:
+        sa.add(stats)
+        sa.commit()    
+    except:
+        log.error(traceback.format_exc())
+        sa.rollback()
+        return False
+    
+    run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
+                            
+    return True
+
+@task
+def reset_user_password(user_email):
+    log = reset_user_password.get_logger()
+    from pylons_app.lib import auth
+    from pylons_app.model.db import User
+    
+    try:
+        try:
+            sa = get_session()
+            user = sa.query(User).filter(User.email == user_email).scalar()
+            new_passwd = auth.PasswordGenerator().gen_password(8,
+                             auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
+            if user:
+                user.password = auth.get_crypt_password(new_passwd)
+                sa.add(user)
+                sa.commit()
+                log.info('change password for %s', user_email)
+            if new_passwd is None:
+                raise Exception('unable to generate new password')
+            
+        except:
+            log.error(traceback.format_exc())
+            sa.rollback()
+        
+        run_task(send_email, user_email,
+                 "Your new hg-app password",
+                 'Your new hg-app password:%s' % (new_passwd))
+        log.info('send new password mail to %s', user_email)
+        
+        
+    except:
+        log.error('Failed to update user password')
+        log.error(traceback.format_exc())
+    return True
+
+@task    
+def send_email(recipients, subject, body):
+    log = send_email.get_logger()
+    email_config = dict(config.items('DEFAULT')) 
+    mail_from = email_config.get('app_email_from')
+    user = email_config.get('smtp_username')
+    passwd = email_config.get('smtp_password')
+    mail_server = email_config.get('smtp_server')
+    mail_port = email_config.get('smtp_port')
+    tls = email_config.get('smtp_use_tls')
+    ssl = False
+    
+    try:
+        m = SmtpMailer(mail_from, user, passwd, mail_server,
+                       mail_port, ssl, tls)
+        m.send(recipients, subject, body)  
+    except:
+        log.error('Mail sending failed')
+        log.error(traceback.format_exc())
+        return False
+    return True
--- a/pylons_app/lib/db_manage.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/lib/db_manage.py	Thu Sep 23 01:23:13 2010 +0200
@@ -43,7 +43,7 @@
 log = logging.getLogger(__name__)
 
 class DbManage(object):
-    def __init__(self, log_sql, dbname,tests=False):
+    def __init__(self, log_sql, dbname, tests=False):
         self.dbname = dbname
         self.tests = tests
         dburi = 'sqlite:////%s' % jn(ROOT, self.dbname)
@@ -68,7 +68,7 @@
         if override:
             log.info("database exisist and it's going to be destroyed")
             if self.tests:
-                destroy=True
+                destroy = True
             else:
                 destroy = ask_ok('Are you sure to destroy old database ? [y/n]')
             if not destroy:
@@ -84,15 +84,17 @@
             import getpass
             username = raw_input('Specify admin username:')
             password = getpass.getpass('Specify admin password:')
-            self.create_user(username, password, True)
+            email = raw_input('Specify admin email:')
+            self.create_user(username, password, email, True)
         else:
             log.info('creating admin and regular test users')
-            self.create_user('test_admin', 'test', True)
-            self.create_user('test_regular', 'test', False)
+            self.create_user('test_admin', 'test', 'test_admin@mail.com', True)
+            self.create_user('test_regular', 'test', 'test_regular@mail.com', False)
+            self.create_user('test_regular2', 'test', 'test_regular2@mail.com', False)
             
         
     
-    def config_prompt(self,test_repo_path=''):
+    def config_prompt(self, test_repo_path=''):
         log.info('Setting up repositories config')
         
         if not self.tests and not test_repo_path:
@@ -102,7 +104,7 @@
             path = test_repo_path
             
         if not os.path.isdir(path):
-            log.error('You entered wrong path: %s',path)
+            log.error('You entered wrong path: %s', path)
             sys.exit()
         
         hooks1 = HgAppUi()
@@ -166,14 +168,14 @@
             raise        
         log.info('created ui config')
                     
-    def create_user(self, username, password, admin=False):
+    def create_user(self, username, password, email='', admin=False):
         log.info('creating administrator user %s', username)
         new_user = User()
         new_user.username = username
         new_user.password = get_crypt_password(password)
         new_user.name = 'Hg'
         new_user.lastname = 'Admin'
-        new_user.email = 'admin@localhost'
+        new_user.email = email
         new_user.admin = admin
         new_user.active = True
         
--- a/pylons_app/lib/helpers.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/lib/helpers.py	Thu Sep 23 01:23:13 2010 +0200
@@ -277,13 +277,17 @@
     return literal(annotate_highlight(filenode, url_func, **kwargs))
       
 def repo_name_slug(value):
-    """
-    Return slug of name of repository
+    """Return slug of name of repository
+    This function is called on each creation/modification
+    of a repository to prevent bad names in the repo
     """
-    slug = urlify(value)
-    for c in """=[]\;'"<>,/~!@#$%^&*()+{}|:""":
+    slug = remove_formatting(value)
+    slug = strip_tags(slug)
+    
+    for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
         slug = slug.replace(c, '-')
     slug = recursive_replace(slug, '-')
+    slug = collapse(slug, '-')
     return slug
 
 def get_changeset_safe(repo, rev):
@@ -321,6 +325,7 @@
 isodatesec = lambda  x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2')
 localdate = lambda  x: (x[0], util.makedate()[1])
 rfc822date = lambda  x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2")
+rfc822date_notz = lambda  x: util.datestr(x, "%a, %d %b %Y %H:%M:%S")
 rfc3339date = lambda  x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2")
 time_ago = lambda x: util.datestr(_age(x), "%a, %d %b %Y %H:%M:%S %1%2")
 
--- a/pylons_app/lib/indexers/__init__.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/lib/indexers/__init__.py	Thu Sep 23 01:23:13 2010 +0200
@@ -1,41 +1,139 @@
-import sys
+from os.path import dirname as dn, join as jn
+from pylons_app.config.environment import load_environment
+from pylons_app.model.hg_model import HgModel
+from shutil import rmtree
+from webhelpers.html.builder import escape
+from vcs.utils.lazy import LazyProperty
+
+from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
+from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
+from whoosh.index import create_in, open_dir
+from whoosh.formats import Characters
+from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter   
+
 import os
-from pidlock import LockHeld, DaemonLock
+import sys
 import traceback
 
-from os.path import dirname as dn
-from os.path import join as jn
-
 #to get the pylons_app import
 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
 
-from pylons_app.config.environment import load_environment
-from pylons_app.model.hg_model import HgModel
-from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
-from whoosh.fields import TEXT, ID, STORED, Schema
-from whoosh.index import create_in, open_dir
-from shutil import rmtree
 
 #LOCATION WE KEEP THE INDEX
 IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')
 
 #EXTENSIONS WE WANT TO INDEX CONTENT OFF
-INDEX_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx', 'aspx', 'asx', 'axd', 'c', 
-                    'cfm', 'cpp', 'cs', 'css', 'diff', 'do', 'el', 'erl', 'h', 
-                    'htm', 'html', 'ini', 'java', 'js', 'jsp', 'jspx', 'lisp', 
-                    'lua', 'm', 'mako', 'ml', 'pas', 'patch', 'php', 'php3', 
-                    'php4', 'phtml', 'pm', 'py', 'rb', 'rst', 's', 'sh', 'sql', 
-                    'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml','xsl','xslt', 
+INDEX_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx', 'aspx', 'asx', 'axd', 'c',
+                    'cfg', 'cfm', 'cpp', 'cs', 'css', 'diff', 'do', 'el', 'erl',
+                    'h', 'htm', 'html', 'ini', 'java', 'js', 'jsp', 'jspx', 'lisp',
+                    'lua', 'm', 'mako', 'ml', 'pas', 'patch', 'php', 'php3',
+                    'php4', 'phtml', 'pm', 'py', 'rb', 'rst', 's', 'sh', 'sql',
+                    'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt',
                     'yaws']
 
 #CUSTOM ANALYZER wordsplit + lowercase filter
 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
 
+
 #INDEX SCHEMA DEFINITION
 SCHEMA = Schema(owner=TEXT(),
                 repository=TEXT(stored=True),
                 path=ID(stored=True, unique=True),
-                content=TEXT(stored=True, analyzer=ANALYZER),
-                modtime=STORED(),extension=TEXT(stored=True))
+                content=FieldType(format=Characters(ANALYZER),
+                             scorable=True, stored=True),
+                modtime=STORED(), extension=TEXT(stored=True))
+
+
+IDX_NAME = 'HG_INDEX'
+FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n') 
+FRAGMENTER = SimpleFragmenter(200)
+                            
+class ResultWrapper(object):
+    def __init__(self, searcher, matcher, highlight_items):
+        self.searcher = searcher
+        self.matcher = matcher
+        self.highlight_items = highlight_items
+        self.fragment_size = 200 / 2
+    
+    @LazyProperty
+    def doc_ids(self):
+        docs_id = []
+        while self.matcher.is_active():
+            docnum = self.matcher.id()
+            chunks = [offsets for offsets in self.get_chunks()]
+            docs_id.append([docnum, chunks])
+            self.matcher.next()
+        return docs_id   
+        
+    def __str__(self):
+        return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
+
+    def __repr__(self):
+        return self.__str__()
+
+    def __len__(self):
+        return len(self.doc_ids)
+
+    def __iter__(self):
+        """
+        Allows iteration over results, and lazily generates content
+
+        *Requires* implementation of ``__getitem__`` method.
+        """
+        for docid in self.doc_ids:
+            yield self.get_full_content(docid)
 
-IDX_NAME = 'HG_INDEX'
\ No newline at end of file
+    def __getslice__(self, i, j):
+        """
+        Slicing of resultWrapper
+        """
+        slice = []
+        for docid in self.doc_ids[i:j]:
+            slice.append(self.get_full_content(docid))
+        return slice   
+                            
+
+    def get_full_content(self, docid):
+        res = self.searcher.stored_fields(docid[0])
+        f_path = res['path'][res['path'].find(res['repository']) \
+                             + len(res['repository']):].lstrip('/')
+        
+        content_short = self.get_short_content(res, docid[1])
+        res.update({'content_short':content_short,
+                    'content_short_hl':self.highlight(content_short),
+                    'f_path':f_path})
+        
+        return res        
+    
+    def get_short_content(self, res, chunks):
+        
+        return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
+    
+    def get_chunks(self):
+        """
+        Smart function that implements chunking of the content,
+        but does not overlap chunks, so it doesn't highlight the same
+        close occurrences twice.
+        @param matcher:
+        @param size:
+        """
+        memory = [(0, 0)]
+        for span in self.matcher.spans():
+            start = span.startchar or 0
+            end = span.endchar or 0
+            start_offseted = max(0, start - self.fragment_size)
+            end_offseted = end + self.fragment_size
+            
+            if start_offseted < memory[-1][1]:
+                start_offseted = memory[-1][1]
+            memory.append((start_offseted, end_offseted,))    
+            yield (start_offseted, end_offseted,)  
+        
+    def highlight(self, content, top=5):
+        hl = highlight(escape(content),
+                 self.highlight_items,
+                 analyzer=ANALYZER,
+                 fragmenter=FRAGMENTER,
+                 formatter=FORMATTER,
+                 top=top)
+        return hl 
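
The chunking in get_chunks() above keeps fragments from overlapping: each match is
widened by fragment_size characters on both sides, and the start of a chunk is
clamped to the end of the previous one, so two nearby hits never yield the same
highlighted text twice. A minimal standalone sketch of that windowing logic (the
span offsets below are hypothetical; a real Whoosh matcher yields them via spans()):

    #sketch of the non-overlapping chunk windowing used by ResultWrapper.get_chunks()
    def chunk_offsets(match_spans, fragment_size=100):
        last_end = 0
        for start, end in match_spans:
            chunk_start = max(0, start - fragment_size)
            #never start before the end of the previous chunk
            chunk_start = max(chunk_start, last_end)
            chunk_end = end + fragment_size
            last_end = chunk_end
            yield (chunk_start, chunk_end)

    print list(chunk_offsets([(10, 14), (60, 66), (500, 508)]))
    #-> [(0, 114), (114, 166), (400, 608)]
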
--- a/pylons_app/lib/indexers/daemon.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/lib/indexers/daemon.py	Thu Sep 23 01:23:13 2010 +0200
@@ -32,20 +32,31 @@
 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
 sys.path.append(project_path)
 
-from pidlock import LockHeld, DaemonLock
-import traceback
-from pylons_app.config.environment import load_environment
+from pylons_app.lib.pidlock import LockHeld, DaemonLock
 from pylons_app.model.hg_model import HgModel
 from pylons_app.lib.helpers import safe_unicode
 from whoosh.index import create_in, open_dir
 from shutil import rmtree
-from pylons_app.lib.indexers import ANALYZER, INDEX_EXTENSIONS, IDX_LOCATION, \
-SCHEMA, IDX_NAME
+from pylons_app.lib.indexers import INDEX_EXTENSIONS, IDX_LOCATION, SCHEMA, IDX_NAME
 
 import logging
-import logging.config
-logging.config.fileConfig(jn(project_path, 'development.ini'))
+
 log = logging.getLogger('whooshIndexer')
+# create logger
+log.setLevel(logging.DEBUG)
+log.propagate = False
+# create console handler and set level to debug
+ch = logging.StreamHandler()
+ch.setLevel(logging.DEBUG)
+
+# create formatter
+formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+
+# add formatter to ch
+ch.setFormatter(formatter)
+
+# add ch to logger
+log.addHandler(ch)
 
 def scan_paths(root_location):
     return HgModel.repo_scan('/', root_location, None, True)
@@ -221,6 +232,7 @@
         WhooshIndexingDaemon(repo_location=repo_location)\
             .run(full_index=full_index)
         l.release()
+        reload(logging)
     except LockHeld:
         sys.exit(1)
 
--- a/pylons_app/lib/indexers/pidlock.py	Sat Sep 11 03:35:33 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,127 +0,0 @@
-import os, time
-import sys
-from warnings import warn
-
-class LockHeld(Exception):pass
-
-
-class DaemonLock(object):
-    '''daemon locking
-    USAGE:
-    try:
-        l = lock()
-        main()
-        l.release()
-    except LockHeld:
-        sys.exit(1)
-    '''
-
-    def __init__(self, file=None, callbackfn=None,
-                 desc='daemon lock', debug=False):
-
-        self.pidfile = file if file else os.path.join(os.path.dirname(__file__),
-                                                      'running.lock')
-        self.callbackfn = callbackfn
-        self.desc = desc
-        self.debug = debug
-        self.held = False
-        #run the lock automatically !
-        self.lock()
-
-    def __del__(self):
-        if self.held:
-
-#            warn("use lock.release instead of del lock",
-#                    category = DeprecationWarning,
-#                    stacklevel = 2)
-
-            # ensure the lock will be removed
-            self.release()
-
-
-    def lock(self):
-        '''
-        locking function, if lock is present it will raise LockHeld exception
-        '''
-        lockname = '%s' % (os.getpid())
-
-        self.trylock()
-        self.makelock(lockname, self.pidfile)
-        return True
-
-    def trylock(self):
-        running_pid = False
-        try:
-            pidfile = open(self.pidfile, "r")
-            pidfile.seek(0)
-            running_pid = pidfile.readline()
-            if self.debug:
-                print 'lock file present running_pid: %s, checking for execution'\
-                % running_pid
-            # Now we check the PID from lock file matches to the current
-            # process PID
-            if running_pid:
-                if os.path.exists("/proc/%s" % running_pid):
-                        print "You already have an instance of the program running"
-                        print "It is running as process %s" % running_pid
-                        raise LockHeld
-                else:
-                        print "Lock File is there but the program is not running"
-                        print "Removing lock file for the: %s" % running_pid
-                        self.release()
-        except IOError, e:
-            if e.errno != 2:
-                raise
-
-
-    def release(self):
-        '''
-        releases the pid by removing the pidfile
-        '''
-        if self.callbackfn:
-            #execute callback function on release
-            if self.debug:
-                print 'executing callback function %s' % self.callbackfn
-            self.callbackfn()
-        try:
-            if self.debug:
-                print 'removing pidfile %s' % self.pidfile
-            os.remove(self.pidfile)
-            self.held = False
-        except OSError, e:
-            if self.debug:
-                print 'removing pidfile failed %s' % e
-            pass
-
-    def makelock(self, lockname, pidfile):
-        '''
-        this function will make an actual lock
-        @param lockname: acctual pid of file
-        @param pidfile: the file to write the pid in
-        '''
-        if self.debug:
-            print 'creating a file %s and pid: %s' % (pidfile, lockname)
-        pidfile = open(self.pidfile, "wb")
-        pidfile.write(lockname)
-        pidfile.close
-        self.held = True
-
-
-def main():
-    print 'func is running'
-    cnt = 20
-    while 1:
-        print cnt
-        if cnt == 0:
-            break
-        time.sleep(1)
-        cnt -= 1
-
-
-if __name__ == "__main__":
-    try:
-        l = DaemonLock(desc='test lock')
-        main()
-        l.release()
-    except LockHeld:
-        sys.exit(1)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pylons_app/lib/pidlock.py	Thu Sep 23 01:23:13 2010 +0200
@@ -0,0 +1,127 @@
+import os, time
+import sys
+from warnings import warn
+
+class LockHeld(Exception):pass
+
+
+class DaemonLock(object):
+    """daemon locking
+    USAGE:
+    try:
+        l = lock()
+        main()
+        l.release()
+    except LockHeld:
+        sys.exit(1)
+    """
+
+    def __init__(self, file=None, callbackfn=None,
+                 desc='daemon lock', debug=False):
+
+        self.pidfile = file if file else os.path.join(os.path.dirname(__file__),
+                                                      'running.lock')
+        self.callbackfn = callbackfn
+        self.desc = desc
+        self.debug = debug
+        self.held = False
+        #run the lock automatically !
+        self.lock()
+
+    def __del__(self):
+        if self.held:
+
+#            warn("use lock.release instead of del lock",
+#                    category = DeprecationWarning,
+#                    stacklevel = 2)
+
+            # ensure the lock will be removed
+            self.release()
+
+
+    def lock(self):
+        """
+        locking function, if lock is present it will raise LockHeld exception
+        """
+        lockname = '%s' % (os.getpid())
+
+        self.trylock()
+        self.makelock(lockname, self.pidfile)
+        return True
+
+    def trylock(self):
+        running_pid = False
+        try:
+            pidfile = open(self.pidfile, "r")
+            pidfile.seek(0)
+            running_pid = pidfile.readline()
+            if self.debug:
+                print 'lock file present running_pid: %s, checking for execution'\
+                % running_pid
+            # Now we check the PID from lock file matches to the current
+            # process PID
+            if running_pid:
+                if os.path.exists("/proc/%s" % running_pid):
+                        print "You already have an instance of the program running"
+                        print "It is running as process %s" % running_pid
+                        raise LockHeld
+                else:
+                        print "Lock File is there but the program is not running"
+                        print "Removing lock file for the: %s" % running_pid
+                        self.release()
+        except IOError, e:
+            if e.errno != 2:
+                raise
+
+
+    def release(self):
+        """
+        releases the pid by removing the pidfile
+        """
+        if self.callbackfn:
+            #execute callback function on release
+            if self.debug:
+                print 'executing callback function %s' % self.callbackfn
+            self.callbackfn()
+        try:
+            if self.debug:
+                print 'removing pidfile %s' % self.pidfile
+            os.remove(self.pidfile)
+            self.held = False
+        except OSError, e:
+            if self.debug:
+                print 'removing pidfile failed %s' % e
+            pass
+
+    def makelock(self, lockname, pidfile):
+        """
+        Creates the actual lock file.
+        @param lockname: the pid (as a string) to write into the lock file
+        @param pidfile: the file to write the pid in
+        """
+        if self.debug:
+            print 'creating a file %s and pid: %s' % (pidfile, lockname)
+        pidfile = open(self.pidfile, "wb")
+        pidfile.write(lockname)
+        pidfile.close()
+        self.held = True
+
+
+def main():
+    print 'func is running'
+    cnt = 20
+    while 1:
+        print cnt
+        if cnt == 0:
+            break
+        time.sleep(1)
+        cnt -= 1
+
+
+if __name__ == "__main__":
+    try:
+        l = DaemonLock(desc='test lock')
+        main()
+        l.release()
+    except LockHeld:
+        sys.exit(1)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pylons_app/lib/smtp_mailer.py	Thu Sep 23 01:23:13 2010 +0200
@@ -0,0 +1,118 @@
+import logging
+import smtplib
+import mimetypes
+from email.mime.multipart import MIMEMultipart
+from email.mime.image import MIMEImage
+from email.mime.audio import MIMEAudio
+from email.mime.base import MIMEBase
+from email.mime.text import MIMEText
+from email.utils import formatdate
+from email import encoders
+
+class SmtpMailer(object):
+    """simple smtp mailer class
+    
+    mailer = SmtpMailer(mail_from, user, passwd, mail_server, mail_port, ssl, tls)
+    mailer.send(recipients, subject, body, attachment_files)    
+    
+    :param recipients: a list of strings or a single string
+    :param attachment_files: a dict of {filename: location};
+    the mimetype of each file is guessed and the file is attached
+    """
+
+    def __init__(self, mail_from, user, passwd, mail_server,
+                    mail_port=None, ssl=False, tls=False):
+        
+        self.mail_from = mail_from
+        self.mail_server = mail_server
+        self.mail_port = mail_port
+        self.user = user
+        self.passwd = passwd
+        self.ssl = ssl
+        self.tls = tls
+        self.debug = False
+        
+    def send(self, recipients=[], subject='', body='', attachment_files={}):
+
+        if isinstance(recipients, basestring):
+            recipients = [recipients]
+        if self.ssl:
+            smtp_serv = smtplib.SMTP_SSL(self.mail_server, self.mail_port)
+        else:
+            smtp_serv = smtplib.SMTP(self.mail_server, self.mail_port)
+
+        if self.tls:
+            smtp_serv.starttls()
+         
+        if self.debug:    
+            smtp_serv.set_debuglevel(1)
+
+        smtp_serv.ehlo("mailer")
+
+        #if server requires authorization you must provide login and password
+        smtp_serv.login(self.user, self.passwd)
+
+        date_ = formatdate(localtime=True)
+        msg = MIMEMultipart()
+        msg['From'] = self.mail_from
+        msg['To'] = ','.join(recipients)
+        msg['Date'] = date_
+        msg['Subject'] = subject
+        msg.preamble = 'You will not see this in a MIME-aware mail reader.\n'
+
+        msg.attach(MIMEText(body))
+
+        if attachment_files:
+            self.__attach_files(msg, attachment_files)
+
+        smtp_serv.sendmail(self.mail_from, recipients, msg.as_string())
+        logging.info('MAIL SENT TO: %s', recipients)
+        smtp_serv.quit()
+
+
+    def __attach_files(self, msg, attachment_files):
+        if isinstance(attachment_files, dict):
+            for f_name, msg_file in attachment_files.items():
+                ctype, encoding = mimetypes.guess_type(f_name)
+                logging.info("guessing file %s type based on %s" , ctype, f_name)
+                if ctype is None or encoding is not None:
+                    # No guess could be made, or the file is encoded (compressed), so
+                    # use a generic bag-of-bits type.
+                    ctype = 'application/octet-stream'
+                maintype, subtype = ctype.split('/', 1)
+                if maintype == 'text':
+                    # Note: we should handle calculating the charset
+                    file_part = MIMEText(self.get_content(msg_file), 
+                                         _subtype=subtype)
+                elif maintype == 'image':
+                    file_part = MIMEImage(self.get_content(msg_file), 
+                                          _subtype=subtype)
+                elif maintype == 'audio':
+                    file_part = MIMEAudio(self.get_content(msg_file), 
+                                          _subtype=subtype)
+                else:
+                    file_part = MIMEBase(maintype, subtype)
+                    file_part.set_payload(self.get_content(msg_file))
+                    # Encode the payload using Base64
+                    encoders.encode_base64(file_part)
+                # Set the filename parameter
+                file_part.add_header('Content-Disposition', 'attachment', 
+                                     filename=f_name)
+                file_part.add_header('Content-Type', ctype, name=f_name)
+                msg.attach(file_part)
+        else:
+            raise Exception('Attachment files should be '
+                            'a dict in format {"filename":"filepath"}')
+
+    def get_content(self, msg_file):
+        '''
+        Get content based on type: if msg_file is a string, open and read the
+        file; otherwise just read it, because it is probably an already open
+        file object.
+        @param msg_file: file path or an open file-like object
+        '''
+        if isinstance(msg_file, str):
+            return open(msg_file, "rb").read()
+        else:
+            #just for safe seek to 0
+            msg_file.seek(0)
+            return msg_file.read()
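
A hedged usage sketch of SmtpMailer, following the constructor and send()
signatures from the class docstring; the server, credentials, addresses and
attachment path below are placeholders, not values taken from this changeset:

    #all values below are hypothetical
    mailer = SmtpMailer('hg-app@example.com', 'smtp_user', 'smtp_secret',
                        'smtp.example.com', mail_port=587, ssl=False, tls=True)
    mailer.send(recipients=['admin@example.com'],
                subject='hg-app test mail',
                body='plain text body',
                attachment_files={'backup.tar.gz': '/tmp/backup.tar.gz'})
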
--- a/pylons_app/lib/timerproxy.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/lib/timerproxy.py	Thu Sep 23 01:23:13 2010 +0200
@@ -1,7 +1,6 @@
 from sqlalchemy.interfaces import ConnectionProxy
 import time
-import logging
-log = logging.getLogger('timerproxy')
+from sqlalchemy import log
 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
 
 def color_sql(sql):
@@ -22,7 +21,7 @@
     sql = sql.replace('\n', '')
     sql = one_space_trim(sql)
     sql = sql\
-        .replace(',',',\n\t')\
+        .replace(',', ',\n\t')\
         .replace('SELECT', '\n\tSELECT \n\t')\
         .replace('UPDATE', '\n\tUPDATE \n\t')\
         .replace('DELETE', '\n\tDELETE \n\t')\
@@ -39,19 +38,22 @@
 
 
 class TimerProxy(ConnectionProxy):
+    
+    def __init__(self):
+        super(TimerProxy, self).__init__()
+        self.logging_name = 'timerProxy'
+        self.log = log.instance_logger(self, True)
+        
     def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+        
         now = time.time()
         try:
-            log.info(">>>>> STARTING QUERY >>>>>")
+            self.log.info(">>>>> STARTING QUERY >>>>>")
             return execute(cursor, statement, parameters, context)
         finally:
             total = time.time() - now
             try:
-                log.info(format_sql("Query: %s" % statement % parameters))
+                self.log.info(format_sql("Query: %s" % statement % parameters))
             except TypeError:
-                log.info(format_sql("Query: %s %s" % (statement, parameters)))
-            log.info("<<<<< TOTAL TIME: %f <<<<<" % total)
-
-
-
-
+                self.log.info(format_sql("Query: %s %s" % (statement, parameters)))
+            self.log.info("<<<<< TOTAL TIME: %f <<<<<" % total)
--- a/pylons_app/lib/utils.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/lib/utils.py	Thu Sep 23 01:23:13 2010 +0200
@@ -31,6 +31,7 @@
 from vcs.utils.lazy import LazyProperty
 import logging
 import os
+
 log = logging.getLogger(__name__)
 
 
@@ -218,6 +219,7 @@
     
     revision = -1
     message = ''
+    author = ''
     
     @LazyProperty
     def raw_id(self):
@@ -362,3 +364,75 @@
 
     def __ne__(self, other):
         return not self == other
+
+
+#===============================================================================
+# TEST FUNCTIONS
+#===============================================================================
+def create_test_index(repo_location, full_index):
+    """Makes default test index
+    @param repo_location:
+    @param full_index:
+    """
+    from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon
+    from pylons_app.lib.pidlock import DaemonLock, LockHeld
+    from pylons_app.lib.indexers import IDX_LOCATION
+    import shutil
+    
+    if os.path.exists(IDX_LOCATION):
+        shutil.rmtree(IDX_LOCATION)
+         
+    try:
+        l = DaemonLock()
+        WhooshIndexingDaemon(repo_location=repo_location)\
+            .run(full_index=full_index)
+        l.release()
+    except LockHeld:
+        pass    
+    
+def create_test_env(repos_test_path, config):
+    """Makes a fresh database and 
+    install test repository into tmp dir
+    """
+    from pylons_app.lib.db_manage import DbManage
+    import tarfile
+    import shutil
+    from os.path import dirname as dn, join as jn, abspath
+    
+    log = logging.getLogger('TestEnvCreator')
+    # create logger
+    log.setLevel(logging.DEBUG)
+    log.propagate = True
+    # create console handler and set level to debug
+    ch = logging.StreamHandler()
+    ch.setLevel(logging.DEBUG)
+    
+    # create formatter
+    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+    
+    # add formatter to ch
+    ch.setFormatter(formatter)
+    
+    # add ch to logger
+    log.addHandler(ch)
+    
+    #PART ONE create db
+    log.debug('making test db')
+    dbname = config['sqlalchemy.db1.url'].split('/')[-1]
+    dbmanage = DbManage(log_sql=True, dbname=dbname, tests=True)
+    dbmanage.create_tables(override=True)
+    dbmanage.config_prompt(repos_test_path)
+    dbmanage.create_default_user()
+    dbmanage.admin_prompt()
+    dbmanage.create_permissions()
+    dbmanage.populate_default_permissions()
+    
+    #PART TWO make test repo
+    log.debug('making test vcs repo')
+    if os.path.isdir('/tmp/vcs_test'):
+        shutil.rmtree('/tmp/vcs_test')
+        
+    cur_dir = dn(dn(abspath(__file__)))
+    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test.tar.gz"))
+    tar.extractall('/tmp')
+    tar.close()
--- a/pylons_app/model/__init__.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/model/__init__.py	Thu Sep 23 01:23:13 2010 +0200
@@ -1,15 +1,8 @@
 """The application's model objects"""
 import logging
-import sqlalchemy as sa
-from sqlalchemy import orm
 from pylons_app.model import meta
-from pylons_app.model.meta import Session
 log = logging.getLogger(__name__)
 
-# Add these two imports:
-import datetime
-from sqlalchemy import schema, types
-
 def init_model(engine):
     """Call me before using any of the tables or classes in the model"""
     log.info("INITIALIZING DB MODELS")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pylons_app/model/caching_query.py	Thu Sep 23 01:23:13 2010 +0200
@@ -0,0 +1,267 @@
+"""caching_query.py
+
+Represent persistence structures which allow the usage of
+Beaker caching with SQLAlchemy.
+
+The new concepts introduced here are:
+
+ * CachingQuery - a Query subclass that caches and
+   retrieves results in/from Beaker.
+ * FromCache - a query option that establishes caching
+   parameters on a Query
+ * RelationshipCache - a variant of FromCache which is specific
+   to a query invoked during a lazy load.
+ * _params_from_query - extracts value parameters from 
+   a Query.
+
+The rest of what's here are standard SQLAlchemy and
+Beaker constructs.
+   
+"""
+from sqlalchemy.orm.interfaces import MapperOption
+from sqlalchemy.orm.query import Query
+from sqlalchemy.sql import visitors
+
+class CachingQuery(Query):
+    """A Query subclass which optionally loads full results from a Beaker 
+    cache region.
+    
+    The CachingQuery stores additional state that allows it to consult
+    a Beaker cache before accessing the database:
+    
+    * A "region", which is a cache region argument passed to a 
+      Beaker CacheManager, specifies a particular cache configuration
+      (including backend implementation, expiration times, etc.)
+    * A "namespace", which is a qualifying name that identifies a
+      group of keys within the cache.  A query that filters on a name 
+      might use the name "by_name", a query that filters on a date range 
+      to a joined table might use the name "related_date_range".
+      
+    When the above state is present, a Beaker cache is retrieved.
+    
+    The "namespace" name is first concatenated with 
+    a string composed of the individual entities and columns the Query 
+    requests, i.e. such as ``Query(User.id, User.name)``.
+    
+    The Beaker cache is then loaded from the cache manager based
+    on the region and composed namespace.  The key within the cache
+    itself is then constructed against the bind parameters specified
+    by this query, which are usually literals defined in the 
+    WHERE clause.
+
+    The FromCache and RelationshipCache mapper options below represent
+    the "public" method of configuring this state upon the CachingQuery.
+    
+    """
+    
+    def __init__(self, manager, *args, **kw):
+        self.cache_manager = manager
+        Query.__init__(self, *args, **kw)
+        
+    def __iter__(self):
+        """override __iter__ to pull results from Beaker
+           if particular attributes have been configured.
+           
+           Note that this approach does *not* detach the loaded objects from
+           the current session. If the cache backend is an in-process cache
+           (like "memory") and lives beyond the scope of the current session's
+           transaction, those objects may be expired. The method here can be
+           modified to first expunge() each loaded item from the current
+           session before returning the list of items, so that the items
+           in the cache are not the same ones in the current Session.
+           
+        """
+        if hasattr(self, '_cache_parameters'):
+            return self.get_value(createfunc=lambda: list(Query.__iter__(self)))
+        else:
+            return Query.__iter__(self)
+
+    def invalidate(self):
+        """Invalidate the value represented by this Query."""
+
+        cache, cache_key = _get_cache_parameters(self)
+        cache.remove(cache_key)
+
+    def get_value(self, merge=True, createfunc=None):
+        """Return the value from the cache for this query.
+
+        Raise KeyError if no value present and no
+        createfunc specified.
+
+        """
+        cache, cache_key = _get_cache_parameters(self)
+        ret = cache.get_value(cache_key, createfunc=createfunc)
+        if merge:
+            ret = self.merge_result(ret, load=False)
+        return ret
+
+    def set_value(self, value):
+        """Set the value in the cache for this query."""
+
+        cache, cache_key = _get_cache_parameters(self)
+        cache.put(cache_key, value)        
+
+def query_callable(manager):
+    def query(*arg, **kw):
+        return CachingQuery(manager, *arg, **kw)
+    return query
+    
+def _get_cache_parameters(query):
+    """For a query with cache_region and cache_namespace configured,
+    return the corresponding Cache instance and cache key, based
+    on this query's current criterion and parameter values.
+
+    """
+    if not hasattr(query, '_cache_parameters'):
+        raise ValueError("This Query does not have caching parameters configured.")
+
+    region, namespace, cache_key = query._cache_parameters
+    
+    namespace = _namespace_from_query(namespace, query)
+
+    if cache_key is None:
+        # cache key - the value arguments from this query's parameters.
+        args = _params_from_query(query)
+        cache_key = " ".join([str(x) for x in args])
+
+    # get cache
+    cache = query.cache_manager.get_cache_region(namespace, region)
+
+    # optional - hash the cache_key too for consistent length
+    # import uuid
+    # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
+
+    return cache, cache_key
+
+def _namespace_from_query(namespace, query):
+    # cache namespace - the token handed in by the 
+    # option + class we're querying against
+    namespace = " ".join([namespace] + [str(x) for x in query._entities])
+
+    # memcached wants this
+    namespace = namespace.replace(' ', '_')
+
+    return namespace
+
+def _set_cache_parameters(query, region, namespace, cache_key):
+    
+    if hasattr(query, '_cache_parameters'):
+        region, namespace, cache_key = query._cache_parameters
+        raise ValueError("This query is already configured "
+                        "for region %r namespace %r" % 
+                        (region, namespace)
+                    )
+    query._cache_parameters = region, namespace, cache_key
+    
+class FromCache(MapperOption):
+    """Specifies that a Query should load results from a cache."""
+
+    propagate_to_loaders = False
+
+    def __init__(self, region, namespace, cache_key=None):
+        """Construct a new FromCache.
+        
+        :param region: the cache region.  Should be a
+        region configured in the Beaker CacheManager.
+        
+        :param namespace: the cache namespace.  Should
+        be a name uniquely describing the target Query's
+        lexical structure.
+        
+        :param cache_key: optional.  A string cache key 
+        that will serve as the key to the query.   Use this
+        if your query has a huge amount of parameters (such
+        as when using in_()) which correspond more simply to 
+        some other identifier.
+
+        """
+        self.region = region
+        self.namespace = namespace
+        self.cache_key = cache_key
+    
+    def process_query(self, query):
+        """Process a Query during normal loading operation."""
+        
+        _set_cache_parameters(query, self.region, self.namespace, self.cache_key)
+
+class RelationshipCache(MapperOption):
+    """Specifies that a Query as called within a "lazy load" 
+       should load results from a cache."""
+
+    propagate_to_loaders = True
+
+    def __init__(self, region, namespace, attribute):
+        """Construct a new RelationshipCache.
+        
+        :param region: the cache region.  Should be a
+        region configured in the Beaker CacheManager.
+        
+        :param namespace: the cache namespace.  Should
+        be a name uniquely describing the target Query's
+        lexical structure.
+        
+        :param attribute: A Class.attribute which
+        indicates a particular class relationship() whose
+        lazy loader should be pulled from the cache.
+        
+        """
+        self.region = region
+        self.namespace = namespace
+        self._relationship_options = {
+            (attribute.property.parent.class_, attribute.property.key) : self
+        }
+
+    def process_query_conditionally(self, query):
+        """Process a Query that is used within a lazy loader.
+
+        (the process_query_conditionally() method is a SQLAlchemy
+        hook invoked only within lazyload.)
+
+        """
+        if query._current_path:
+            mapper, key = query._current_path[-2:]
+
+            for cls in mapper.class_.__mro__:
+                if (cls, key) in self._relationship_options:
+                    relationship_option = self._relationship_options[(cls, key)]
+                    _set_cache_parameters(
+                            query,
+                            relationship_option.region,
+                            relationship_option.namespace,
+                            None)
+
+    def and_(self, option):
+        """Chain another RelationshipCache option to this one.
+        
+        While many RelationshipCache objects can be specified on a single
+        Query separately, chaining them together allows for a more efficient
+        lookup during load.
+        
+        """
+        self._relationship_options.update(option._relationship_options)
+        return self
+
+
+def _params_from_query(query):
+    """Pull the bind parameter values from a query.
+    
+    This takes into account any scalar attribute bindparam set up.
+    
+    E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7)))
+    would return [5, 7].
+    
+    """
+    v = []
+    def visit_bindparam(bind):
+        value = query._params.get(bind.key, bind.value)
+        
+        # lazyloader may dig a callable in here, intended
+        # to late-evaluate params after autoflush is called.
+        # convert to a scalar value.
+        if callable(value):
+            value = value()
+            
+        v.append(value)
+    if query._criterion is not None:
+        visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam})
+    return v
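
A hedged usage sketch of the options defined above, mirroring the docstrings:
the query, region name and cache key are illustrative only (the concrete Beaker
regions, including 'sql_cache_short', are configured in pylons_app/model/meta.py
further down in this changeset):

    #illustrative only - assumes the Session from pylons_app.model.meta
    #and the User model from pylons_app.model.db
    from pylons_app.model.meta import Session
    from pylons_app.model.db import User
    from pylons_app.model.caching_query import FromCache

    q = Session.query(User).filter(User.username == 'marcink')\
            .options(FromCache('sql_cache_short', 'get_user_by_name'))
    user = q.scalar()   #first call hits the database, later calls hit Beaker
    q.invalidate()      #drops the cached value for this query
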
--- a/pylons_app/model/db.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/model/db.py	Thu Sep 23 01:23:13 2010 +0200
@@ -26,7 +26,7 @@
     
 class User(Base): 
     __tablename__ = 'users'
-    __table_args__ = (UniqueConstraint('username'), {'useexisting':True})
+    __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'useexisting':True})
     user_id = Column("user_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
     username = Column("username", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
     password = Column("password", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
@@ -56,7 +56,7 @@
             self.last_login = datetime.datetime.now()
             session.add(self)
             session.commit()
-            log.debug('updated user %s lastlogin',self.username)
+            log.debug('updated user %s lastlogin', self.username)
         except Exception:
             session.rollback()        
     
@@ -120,6 +120,15 @@
     user = relation('User')
     permission = relation('Permission')
 
-
+class Statistics(Base):
+    __tablename__ = 'statistics'
+    __table_args__ = (UniqueConstraint('repository_id'), {'useexisting':True})
+    stat_id = Column("stat_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
+    repository_id = Column("repository_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=True, default=None)
+    stat_on_revision = Column("stat_on_revision", INTEGER(), nullable=False)
+    commit_activity = Column("commit_activity", BLOB(), nullable=False)#JSON data
+    commit_activity_combined = Column("commit_activity_combined", BLOB(), nullable=False)#JSON data
+    languages = Column("languages", BLOB(), nullable=False)#JSON data
+    
+    repository = relation('Repository')
 
-
--- a/pylons_app/model/forms.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/model/forms.py	Thu Sep 23 01:23:13 2010 +0200
@@ -102,7 +102,7 @@
                                      error_dict=self.e_dict)            
         if user:
             if user.active:
-                if user.username == username and check_password(password, 
+                if user.username == username and check_password(password,
                                                                 user.password):
                     return value
                 else:
@@ -208,7 +208,37 @@
         
         raise formencode.Invalid(msg, value, state,
                                      error_dict={'paths_root_path':msg})            
-                       
+
+def UniqSystemEmail(old_data):
+    class _UniqSystemEmail(formencode.validators.FancyValidator):
+        def to_python(self, value, state):
+            if old_data.get('email') != value:
+                sa = meta.Session
+                try:
+                    user = sa.query(User).filter(User.email == value).scalar()
+                    if user:
+                        raise formencode.Invalid(_("That e-mail address is already taken") ,
+                                                 value, state)
+                finally:
+                    meta.Session.remove()
+                
+            return value
+        
+    return _UniqSystemEmail
+    
+class ValidSystemEmail(formencode.validators.FancyValidator):
+    def to_python(self, value, state):
+        sa = meta.Session
+        try:
+            user = sa.query(User).filter(User.email == value).scalar()
+            if  user is None:
+                raise formencode.Invalid(_("That e-mail address doesn't exist.") ,
+                                         value, state)
+        finally:
+            meta.Session.remove()
+            
+        return value     
+
 #===============================================================================
 # FORMS        
 #===============================================================================
@@ -250,13 +280,19 @@
         active = StringBoolean(if_missing=False)
         name = UnicodeString(strip=True, min=3, not_empty=True)
         lastname = UnicodeString(strip=True, min=3, not_empty=True)
-        email = Email(not_empty=True)
+        email = All(Email(not_empty=True), UniqSystemEmail(old_data))
         
     return _UserForm
 
 RegisterForm = UserForm
-    
-    
+
+def PasswordResetForm():
+    class _PasswordResetForm(formencode.Schema):
+        allow_extra_fields = True
+        filter_extra_fields = True
+        email = All(ValidSystemEmail(), Email(not_empty=True))             
+    return _PasswordResetForm
+
 def RepoForm(edit=False, old_data={}):
     class _RepoForm(formencode.Schema):
         allow_extra_fields = True
--- a/pylons_app/model/hg_model.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/model/hg_model.py	Thu Sep 23 01:23:13 2010 +0200
@@ -43,16 +43,14 @@
     raise Exception('Unable to import vcs')
 
 def _get_repos_cached_initial(app_globals, initial):
-    """
-    return cached dict with repos
+    """return cached dict with repos
     """
     g = app_globals
     return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui, initial)
 
 @cache_region('long_term', 'cached_repo_list')
 def _get_repos_cached():
-    """
-    return cached dict with repos
+    """return cached dict with repos
     """
     log.info('getting all repositories list')
     from pylons import app_globals as g
@@ -61,11 +59,12 @@
 @cache_region('super_short_term', 'cached_repos_switcher_list')
 def _get_repos_switcher_cached(cached_repo_list):
     repos_lst = []
-    for repo in sorted(x.name.lower() for x in cached_repo_list.values()):
-        if HasRepoPermissionAny('repository.write', 'repository.read', 'repository.admin')(repo, 'main page check'):
-            repos_lst.append(repo)
+    for repo in [x for x in cached_repo_list.values()]:
+        if HasRepoPermissionAny('repository.write', 'repository.read',
+                    'repository.admin')(repo.name.lower(), 'main page check'):
+            repos_lst.append((repo.name, repo.dbrepo.private,))
     
-    return repos_lst
+    return sorted(repos_lst, key=lambda k:k[0])
 
 @cache_region('long_term', 'full_changelog')
 def _full_changelog_cached(repo_name):
@@ -73,14 +72,11 @@
     return list(reversed(list(HgModel().get_repo(repo_name))))
 
 class HgModel(object):
-    """
-    Mercurial Model
+    """Mercurial Model
     """
 
     def __init__(self):
-        """
-        Constructor
-        """
+        pass
     
     @staticmethod
     def repo_scan(repos_prefix, repos_path, baseui, initial=False):
@@ -92,8 +88,7 @@
         """
         sa = meta.Session()
         def check_repo_dir(path):
-            """
-            Checks the repository
+            """Checks the repository
             :param path:
             """
             repos_path = path.split('/')
@@ -102,7 +97,7 @@
             if repos_path[0] != '/':
                 repos_path[0] = '/'
             if not os.path.isdir(os.path.join(*repos_path)):
-                raise RepositoryError('Not a valid repository in %s' % path[0][1])        
+                raise RepositoryError('Not a valid repository in %s' % path)        
         if not repos_path.endswith('*'):
             raise VCSError('You need to specify * or ** at the end of path '
                             'for recursive scanning')
--- a/pylons_app/model/meta.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/model/meta.py	Thu Sep 23 01:23:13 2010 +0200
@@ -1,15 +1,58 @@
 """SQLAlchemy Metadata and Session object"""
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import scoped_session, sessionmaker
+from pylons_app.model import caching_query
+from beaker import cache
+import os
+from os.path import join as jn, dirname as dn, abspath
+import time
+
+# Beaker CacheManager.  A home base for cache configurations.
+cache_manager = cache.CacheManager()
 
 __all__ = ['Base', 'Session']
 #
 # SQLAlchemy session manager. Updated by model.init_model()
 #
-Session = scoped_session(sessionmaker())
-#
+Session = scoped_session(
+                sessionmaker(
+                    query_cls=caching_query.query_callable(cache_manager)
+                )
+          )
 
 # The declarative Base
 Base = declarative_base()
 #For another db...
 #Base2 = declarative_base()
+
+#===============================================================================
+# CACHE OPTIONS
+#===============================================================================
+cache_dir = jn(dn(dn(dn(abspath(__file__)))), 'data', 'cache')
+if not os.path.isdir(cache_dir):
+    os.mkdir(cache_dir)
+# set start_time to current time
+# to re-cache everything
+# upon application startup
+start_time = time.time()
+# configure the "sqlalchemy" cache region.
+cache_manager.regions['sql_cache_short'] = {
+        'type':'memory',
+        'data_dir':cache_dir,
+        'expire':10,
+        'start_time':start_time
+    }
+cache_manager.regions['sql_cache_med'] = {
+        'type':'memory',
+        'data_dir':cache_dir,
+        'expire':360,
+        'start_time':start_time
+    }
+cache_manager.regions['sql_cache_long'] = {
+        'type':'file',
+        'data_dir':cache_dir,
+        'expire':3600,
+        'start_time':start_time
+    }
+#to use the cache, add one of the regions above to a query, e.g.
+#.options(FromCache("sql_cache_short", "cachekey"))
--- a/pylons_app/model/user_model.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/model/user_model.py	Thu Sep 23 01:23:13 2010 +0200
@@ -2,7 +2,7 @@
 # encoding: utf-8
 # Model for users
 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
- 
+# 
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU General Public License
 # as published by the Free Software Foundation; version 2
@@ -23,10 +23,12 @@
 Model for users
 @author: marcink
 """
-
+from pylons_app.lib import auth
+from pylons.i18n.translation import _
+from pylons_app.lib.celerylib import tasks, run_task
 from pylons_app.model.db import User
 from pylons_app.model.meta import Session
-from pylons.i18n.translation import _
+import traceback
 import logging
 log = logging.getLogger(__name__)
 
@@ -43,7 +45,7 @@
     def get_user(self, id):
         return self.sa.query(User).get(id)
     
-    def get_user_by_name(self,name):
+    def get_user_by_name(self, name):
         return self.sa.query(User).filter(User.username == name).scalar()
     
     def create(self, form_data):
@@ -54,8 +56,8 @@
                 
             self.sa.add(new_user)
             self.sa.commit()
-        except Exception as e:
-            log.error(e)
+        except:
+            log.error(traceback.format_exc())
             self.sa.rollback()
             raise      
     
@@ -68,8 +70,8 @@
                 
             self.sa.add(new_user)
             self.sa.commit()
-        except Exception as e:
-            log.error(e)
+        except:
+            log.error(traceback.format_exc())
             self.sa.rollback()
             raise      
     
@@ -88,8 +90,8 @@
                 
             self.sa.add(new_user)
             self.sa.commit()
-        except Exception as e:
-            log.error(e)
+        except:
+            log.error(traceback.format_exc())
             self.sa.rollback()
             raise      
         
@@ -109,13 +111,12 @@
                 
             self.sa.add(new_user)
             self.sa.commit()
-        except Exception as e:
-            log.error(e)
+        except:
+            log.error(traceback.format_exc())
             self.sa.rollback()
             raise 
                 
     def delete(self, id):
-        
         try:
             
             user = self.sa.query(User).get(id)
@@ -125,7 +126,10 @@
                                   " crucial for entire application"))
             self.sa.delete(user)
             self.sa.commit()            
-        except Exception as e:
-            log.error(e)
+        except:
+            log.error(traceback.format_exc())
             self.sa.rollback()
             raise        
+
+    def reset_password(self, data):
+        run_task(tasks.reset_user_password, data['email'])
--- a/pylons_app/public/css/style.css	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/public/css/style.css	Thu Sep 23 01:23:13 2010 +0200
@@ -505,6 +505,33 @@
 
 
 /*ICONS*/
+#header #header-inner #quick li ul li a.journal,
+#header #header-inner #quick li ul li a.journal:hover
+{
+    background:url("../images/icons/book.png") no-repeat scroll 4px 9px #FFFFFF;
+    margin:0;
+    padding:12px 9px 7px 24px;
+    width:167px;
+
+}
+#header #header-inner #quick li ul li a.private_repo,
+#header #header-inner #quick li ul li a.private_repo:hover
+{
+    background:url("../images/icons/lock.png") no-repeat scroll 4px 9px #FFFFFF;
+    margin:0;
+    padding:12px 9px 7px 24px;
+    width:167px;
+
+}
+#header #header-inner #quick li ul li a.public_repo,
+#header #header-inner #quick li ul li a.public_repo:hover
+{
+    background:url("../images/icons/lock_open.png") no-repeat scroll 4px 9px #FFFFFF;
+    margin:0;
+    padding:12px 9px 7px 24px;
+    width:167px;
+
+}
 
 #header #header-inner #quick li ul li a.repos,
 #header #header-inner #quick li ul li a.repos:hover
@@ -2877,7 +2904,7 @@
 #register div.form div.fields div.buttons
 {
 	margin: 0;
-	padding: 10px 0 0 97px;
+	padding: 10px 0 0 114px;
 	clear: both;
 	overflow: hidden;
 	border-top: 1px solid #DDDDDD;
--- a/pylons_app/templates/admin/admin_log.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/admin/admin_log.html	Thu Sep 23 01:23:13 2010 +0200
@@ -11,8 +11,8 @@
 
 	%for cnt,l in enumerate(c.users_log):
 	<tr class="parity${cnt%2}">
-		<td>${l.user.username}</td>
-		<td>${l.repository}</td>
+		<td>${h.link_to(l.user.username,h.url('edit_user', id=l.user.user_id))}</td>
+		<td>${h.link_to(l.repository,h.url('summary_home',repo_name=l.repository))}</td>
 		<td>${l.action}</td>
 		<td>${l.action_date}</td>
 		<td>${l.user_ip}</td>
--- a/pylons_app/templates/admin/permissions/permissions.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/admin/permissions/permissions.html	Thu Sep 23 01:23:13 2010 +0200
@@ -29,7 +29,7 @@
         
 			<div class="field">
 				<div class="label">
-					<label for="default_perm">${_('Default repository permission')}:</label>
+					<label for="default_perm">${_('Repository permission')}:</label>
 				</div>
 				<div class="select">
 					${h.select('default_perm','',c.perms_choices)}
@@ -51,7 +51,7 @@
 			</div> 		
              <div class="field">
                 <div class="label">
-                    <label for="default_create">${_('Allow repository creation')}:</label>
+                    <label for="default_create">${_('Repository creation')}:</label>
                 </div>
 				<div class="select">
 					${h.select('default_create','',c.create_choices)}
--- a/pylons_app/templates/admin/settings/settings.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/admin/settings/settings.html	Thu Sep 23 01:23:13 2010 +0200
@@ -47,7 +47,32 @@
         </div>
     </div>  
     ${h.end_form()}
-     
+    
+    <h3>${_('Whoosh indexing')}</h3>
+    ${h.form(url('admin_setting', setting_id='whoosh'),method='put')}
+    <div class="form">
+        <!-- fields -->
+        
+        <div class="fields">
+            <div class="field">
+                <div class="label label-checkbox">
+                    <label for="destroy">${_('index build option')}:</label>
+                </div>
+                <div class="checkboxes">
+                    <div class="checkbox">
+                        ${h.checkbox('full_index',True)}
+                        <label for="checkbox-1">${_('build from scratch')}</label>
+                    </div>
+                </div>
+            </div>
+                            
+            <div class="buttons">
+            ${h.submit('reindex','reindex',class_="ui-button ui-widget ui-state-default ui-corner-all")}
+            </div>                                                          
+        </div>
+    </div>  
+    ${h.end_form()}
+         
     <h3>${_('Global application settings')}</h3> 
     ${h.form(url('admin_setting', setting_id='global'),method='put')}
     <div class="form">
--- a/pylons_app/templates/base/base.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/base/base.html	Thu Sep 23 01:23:13 2010 +0200
@@ -97,8 +97,12 @@
                     <span>&darr;</span>					
 					</a>
 					<ul class="repo_switcher">
-                        %for repo in c.repo_switcher_list:
-                            <li>${h.link_to(repo,h.url('summary_home',repo_name=repo))}</li>
+                        %for repo,private in c.repo_switcher_list:
+                          %if private:
+                             <li>${h.link_to(repo,h.url('summary_home',repo_name=repo),class_="private_repo")}</li>
+                          %else:
+                             <li>${h.link_to(repo,h.url('summary_home',repo_name=repo),class_="public_repo")}</li>
+                          %endif  
                         %endfor					
 					</ul>			
 				</li>
@@ -203,6 +207,7 @@
                    <span>${_('Admin')}</span>                 
                    </a>    
 				    <ul>
+				        <li>${h.link_to(_('journal'),h.url('admin_home'),class_='journal')}</li>
 				        <li>${h.link_to(_('repositories'),h.url('repos'),class_='repos')}</li>
 				        <li>${h.link_to(_('users'),h.url('users'),class_='users')}</li>
 				        <li>${h.link_to(_('permissions'),h.url('edit_permission',id='default'),class_='permissions')}</li>
--- a/pylons_app/templates/files/files_annotate.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/files/files_annotate.html	Thu Sep 23 01:23:13 2010 +0200
@@ -23,18 +23,22 @@
     </div>
     <div class="table">
 		<div id="files_data">
-			<h2>${_('Location')}: ${h.files_breadcrumbs(c.repo_name,c.cur_rev,c.file.path)}</h2>
+			<h3 class="files_location">${_('Location')}: ${h.files_breadcrumbs(c.repo_name,c.cur_rev,c.file.path)}</h3>
 			<dl class="overview">
 				<dt>${_('Last revision')}</dt>
 				<dd>${h.link_to("r%s:%s" % (c.file.last_changeset.revision,c.file.last_changeset._short),
 						h.url('files_annotate_home',repo_name=c.repo_name,revision=c.file.last_changeset._short,f_path=c.f_path))} </dd>
 				<dt>${_('Size')}</dt>
 				<dd>${h.format_byte_size(c.file.size,binary=True)}</dd>
+    			<dt>${_('Mimetype')}</dt>
+				<dd>${c.file.mimetype}</dd>				
 				<dt>${_('Options')}</dt>
 				<dd>${h.link_to(_('show source'),
 						h.url('files_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}  
+					/ ${h.link_to(_('show as raw'),
+						h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}
 					/ ${h.link_to(_('download as raw'),
-						h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}
+						h.url('files_rawfile_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}
 				</dd>				
 			</dl>
 			<div id="body" class="codeblock">
@@ -43,7 +47,12 @@
 					<div class="commit">"${c.file_msg}"</div>
 				</div>
 				<div class="code-body">
-					${h.pygmentize_annotation(c.file,linenos=True,anchorlinenos=True,lineanchors='S',cssclass="code-highlight")}
+					% if c.file.size < c.file_size_limit:
+						${h.pygmentize_annotation(c.file,linenos=True,anchorlinenos=True,lineanchors='S',cssclass="code-highlight")}
+					%else:
+						${_('File is too big to display')} ${h.link_to(_('show as raw'),
+						h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}
+					%endif				
 				</div>
 			</div>
 		</div>    
--- a/pylons_app/templates/files/files_browser.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/files/files_browser.html	Thu Sep 23 01:23:13 2010 +0200
@@ -23,31 +23,38 @@
 		             <tr>
 		                 <th>${_('Name')}</th>
 		                 <th>${_('Size')}</th>
+		                 <th>${_('Mimetype')}</th>
 		                 <th>${_('Revision')}</th>
 		                 <th>${_('Last modified')}</th>
 		                 <th>${_('Last commiter')}</th>
 		             </tr>
 		         </thead>
-		         	<tr class="parity0">
-		          		<td>
-		          		% if c.files_list.parent:
-		          			${h.link_to('..',h.url('files_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.files_list.parent.path),class_="browser-dir")}
-		          		%endif
-		          		</td>
-		          		<td></td>
-		          		<td></td>
-		          		<td></td>
-		          		<td></td>
-		         	</tr>
+
+          		% if c.files_list.parent:
+         		<tr class="parity0">
+	          		<td>		          		
+	          			${h.link_to('..',h.url('files_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.files_list.parent.path),class_="browser-dir")}
+	          		</td>
+	          		<td></td>
+	          		<td></td>
+	          		<td></td>
+	          		<td></td>
+	          		<td></td>
+				</tr>	          		
+          		%endif
+		         	
 		    %for cnt,node in enumerate(c.files_list,1):
 				<tr class="parity${cnt%2}">
 		             <td>
 						${h.link_to(node.name,h.url('files_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=node.path),class_=file_class(node))}
 		             </td>
 		             <td>
-		                %if node.is_file():
-		             		${h.format_byte_size(node.size,binary=True)}
-		             	%endif
+		             	${h.format_byte_size(node.size,binary=True)}
+		             </td>
+		             <td>
+		              %if node.is_file():
+		                  ${node.mimetype}
+		              %endif
 		             </td>
 		             <td>
 		             	%if node.is_file():
--- a/pylons_app/templates/files/files_source.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/files/files_source.html	Thu Sep 23 01:23:13 2010 +0200
@@ -6,11 +6,15 @@
 	</dd>
 	<dt>${_('Size')}</dt>
 	<dd>${h.format_byte_size(c.files_list.size,binary=True)}</dd>
+	<dt>${_('Mimetype')}</dt>
+	<dd>${c.files_list.mimetype}</dd>
 	<dt>${_('Options')}</dt>
 	<dd>${h.link_to(_('show annotation'),
-			h.url('files_annotate_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}  
+			h.url('files_annotate_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}
+		 / ${h.link_to(_('show as raw'),
+			h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}			
 		 / ${h.link_to(_('download as raw'),
-			h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}
+			h.url('files_rawfile_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}
 	</dd>
 	<dt>${_('History')}</dt>
 	<dd>
@@ -32,7 +36,12 @@
 		<div class="commit">"${c.files_list.last_changeset.message}"</div>
 	</div>
 	<div class="code-body">
-		${h.pygmentize(c.files_list,linenos=True,anchorlinenos=True,lineanchors='S',cssclass="code-highlight")}
+		% if c.files_list.size < c.file_size_limit:
+			${h.pygmentize(c.files_list,linenos=True,anchorlinenos=True,lineanchors='S',cssclass="code-highlight")}
+		%else:
+			${_('File is too big to display')} ${h.link_to(_('show as raw'),
+			h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))}
+		%endif
 	</div>
 </div>
 
--- a/pylons_app/templates/login.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/login.html	Thu Sep 23 01:23:13 2010 +0200
@@ -60,7 +60,7 @@
                     <!-- end fields -->
                     <!-- links -->
                     <div class="links">
-                        ${h.link_to(_('Forgot your password ?'),h.url('#'))}
+                        ${h.link_to(_('Forgot your password ?'),h.url('reset_password'))}
                         %if h.HasPermissionAny('hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')():
 	                         / 
 	                        ${h.link_to(_("Don't have an account ?"),h.url('register'))}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pylons_app/templates/password_reset.html	Thu Sep 23 01:23:13 2010 +0200
@@ -0,0 +1,54 @@
+## -*- coding: utf-8 -*-
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" id="mainhtml">
+    <head>
+        <title>${_('Reset your password to hg-app')}</title>
+        <meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
+        <link rel="icon" href="/images/hgicon.png" type="image/png" />
+        <meta name="robots" content="index, nofollow"/>
+            
+        <!-- stylesheets -->
+        <link rel="stylesheet" type="text/css" href="/css/reset.css" />
+        <link rel="stylesheet" type="text/css" href="/css/style.css" media="screen" />
+        <link id="color" rel="stylesheet" type="text/css" href="/css/colors/blue.css" />
+
+        <!-- scripts -->
+
+    </head>
+    <body>
+		<div id="register">
+			
+			<div class="title">
+				<h5>${_('Reset your password to hg-app')}</h5>
+                <div class="corner tl"></div>
+                <div class="corner tr"></div>				
+			</div>
+			<div class="inner">
+			    ${h.form(url('password_reset'))}
+			    <div class="form">
+			        <!-- fields -->
+			        <div class="fields">
+			            
+			             <div class="field">
+			                <div class="label">
+			                    <label for="email">${_('Email address')}:</label>
+			                </div>
+			                <div class="input">
+			                    ${h.text('email')}
+			                </div>
+			             </div>
+			                        
+			            <div class="buttons">
+				            <div class="nohighlight">
+				              ${h.submit('send','Reset my password',class_="ui-button ui-widget ui-state-default ui-corner-all")}
+								<div class="activation_msg">${_('Your new password will be sent to the matching email address')}</div>
+				            </div>
+			            </div>             
+			    	</div>
+			    </div>
+			    ${h.end_form()}
+			</div>    
+	    </div>
+    </body>
+</html>
+
--- a/pylons_app/templates/search/search.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/search/search.html	Thu Sep 23 01:23:13 2010 +0200
@@ -46,7 +46,7 @@
 					h.url('files_home',repo_name=sr['repository'],revision='tip',f_path=sr['f_path']))}</div>
 				</div>
 				<div class="code-body">
-					<pre>${h.literal(sr['content_short'])}</pre>
+					<pre>${h.literal(sr['content_short_hl'])}</pre>
 				</div>
 			</div>
 		</div>
@@ -59,11 +59,13 @@
 			</div>		
 			%endif
 			
-		%endif
+		%endif		
 	%endfor
-
-	
-	
+	%if c.cur_query:
+	<div class="pagination-wh pagination-left">
+		${c.formated_results.pager('$link_previous ~2~ $link_next')}
+	</div>	
+	%endif
 </div>
 
 </%def>    
--- a/pylons_app/templates/shortlog/shortlog_data.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/shortlog/shortlog_data.html	Thu Sep 23 01:23:13 2010 +0200
@@ -13,7 +13,7 @@
 	</tr>
 %for cnt,cs in enumerate(c.repo_changesets):
 	<tr class="parity${cnt%2}">
-		<td>${h.age(cs._ctx.date())}</td>
+		<td>${h.age(cs._ctx.date())} - ${h.rfc822date_notz(cs._ctx.date())} </td>
 		<td title="${cs.author}">${h.person(cs.author)}</td>
 		<td>r${cs.revision}:${cs.raw_id}</td>
 		<td>
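Note: the shortlog row now also renders h.rfc822date_notz(cs._ctx.date()) next to the relative age. The helper itself lives in pylons_app/lib/helpers.py and is not part of this hunk; a rough sketch of what it presumably does, assuming Mercurial's ctx.date() returns a (unixtime, tz_offset) tuple:

    from email.utils import formatdate

    def rfc822date_notz(date_tuple):
        """Hypothetical sketch: format a Mercurial (unixtime, tz_offset) date
        tuple as an RFC 2822 string, ignoring the stored timezone offset."""
        unixtime, tz_offset = date_tuple
        return formatdate(unixtime)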
--- a/pylons_app/templates/summary/summary.html	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/templates/summary/summary.html	Thu Sep 23 01:23:13 2010 +0200
@@ -76,7 +76,9 @@
 			      <label>${_('Last change')}:</label>
 			  </div>
 			  <div class="input-short">
-			      ${h.age(c.repo_info.last_change)} - ${h.rfc822date(c.repo_info.last_change)}
+			      ${h.age(c.repo_info.last_change)} - ${h.rfc822date(c.repo_info.last_change)} 
+			      ${_('by')} ${h.get_changeset_safe(c.repo_info,'tip').author} 
+			      
 			  </div>
 			 </div>
 			
@@ -121,151 +123,356 @@
 <div class="box box-right"  style="min-height:455px">
     <!-- box / title -->
     <div class="title">
-        <h5>${_('Last month commit activity')}</h5>
+        <h5>${_('Commit activity by day / author')}</h5>
     </div>
     
     <div class="table">
         <div id="commit_history" style="width:560px;height:300px;float:left"></div>
-    	<div id="legend_data">
+        <div style="clear: both;height: 10px"></div>
+        <div id="overview" style="width:560px;height:100px;float:left"></div>
+        
+    	<div id="legend_data" style="clear:both;margin-top:10px;">
 	    	<div id="legend_container"></div>
 	    	<div id="legend_choices">
 				<table id="legend_choices_tables" style="font-size:smaller;color:#545454"></table>
 	    	</div>
     	</div>
 		<script type="text/javascript">
-		
-		(function () {
-			var datasets = {${c.commit_data|n}};
-			var i = 0;
+		/**
+		 * Plots summary graph
+		 *
+		 * @class SummaryPlot
+		 * @param {from} initial 'from' timestamp for the detailed graph
+		 * @param {to} initial 'to' timestamp for the detailed graph
+		 * @param {dataset} per-author commit data
+		 * @param {overview_dataset} data for the overview plot
+		 */
+		function SummaryPlot(from,to,dataset,overview_dataset) {
+			var initial_ranges = {
+			    "xaxis":{
+				    "from":from,
+				   	"to":to,
+				},
+			};
+		    var dataset = dataset;
+		    var overview_dataset = [overview_dataset];
 		    var choiceContainer = YAHOO.util.Dom.get("legend_choices");
 		    var choiceContainerTable = YAHOO.util.Dom.get("legend_choices_tables");
-		    for(var key in datasets) {
-		        datasets[key].color = i;
-		        i++;
-		        choiceContainerTable.innerHTML += '<tr><td>'+
-		        '<input type="checkbox" name="' + key +'" checked="checked" />'
-		        +datasets[key].label+
-		        '</td></tr>';
-		    };
+		    var plotContainer = YAHOO.util.Dom.get('commit_history');
+		    var overviewContainer = YAHOO.util.Dom.get('overview');
 		    
-
-		    function plotAccordingToChoices() {
-		        var data = [];
-
-		        var inputs = choiceContainer.getElementsByTagName("input");
-		        for(var i=0; i<inputs.length; i++) {
-		            var key = inputs[i].name;
-		            if (key && datasets[key]){
-			            if(!inputs[i].checked){
-				            data.push({label:key,data:[[0,1],]});	
-				        }
-			            else{
-			            	data.push(datasets[key]);
-			            }
-		                
-		            }
-		            
-		        };
-
-		        if (data.length > 0){
+		    var plot_options = {
+				bars: {show:true,align:'center',lineWidth:4},
+				legend: {show:true, container:"legend_container"},
+				points: {show:true,radius:0,fill:false},
+				yaxis: {tickDecimals:0,},
+				xaxis: {
+					mode: "time", 
+					timeformat: "%d/%m",
+				    min:from,
+				    max:to,	
+				}, 
+				grid: {
+					hoverable: true, 
+				    clickable: true,
+				    autoHighlight:true,
+				    color: "#999"
+				},
+				//selection: {mode: "x"}
+		    };
+		    var overview_options = {
+				legend:{show:false},
+			    bars: {show:true,barWidth: 2,},
+			    shadowSize: 0,
+			    xaxis: {mode: "time", timeformat: "%d/%m/%y",},
+			    yaxis: {ticks: 3, min: 0,},
+			    grid: {color: "#999",},
+			    selection: {mode: "x"}
+			};
 
-				    var plot = YAHOO.widget.Flot("commit_history", data,
-					        { bars: { show: true, align:'center',lineWidth:4 },
-			    			  points: { show: true, radius:0,fill:true },
-			    			  legend:{show:true, container:"legend_container"},
-			    	          selection: { mode: "xy" },
-			    	          yaxis: {tickDecimals:0},
-				              xaxis: { mode: "time", timeformat: "%d",tickSize:[1, "day"],min:${c.ts_min},max:${c.ts_max} }, 
-				              grid: { hoverable: true, clickable: true,autoHighlight:true },
-					        });
-			        
-				    function showTooltip(x, y, contents) {
-				        var div=document.getElementById('tooltip');
-				        if(!div) {
-				            div = document.createElement('div');
-				            div.id="tooltip";
-				            div.style.position="absolute";
-				            div.style.border='1px solid #fdd';
-				            div.style.padding='2px';
-				            div.style.backgroundColor='#fee';
-				            document.body.appendChild(div);
-				        }
-				        YAHOO.util.Dom.setStyle(div, 'opacity', 0);
-				        div.innerHTML = contents;
-				        div.style.top=(y + 5) + "px";
-				        div.style.left=(x + 5) + "px";
-		
-				        var anim = new YAHOO.util.Anim(div, {opacity: {to: 0.8}}, 0.2);
-				        anim.animate();
+			/**
+			* get dummy data needed in a few places
+			*/
+		    function getDummyData(label){
+		    	return {"label":label,
+               	 "data":[{"time":0,
+               		 "commits":0,
+	                     "added":0,
+	                     "changed":0,
+	                     "removed":0,
+                    }],
+                    "schema":["commits"],
+                    "color":'#ffffff',
+           		}
+			}
+			
+		    /**
+		     * generate checkboxes according to the data
+		     * @param data
+		     * @returns
+		     */
+		    function generateCheckboxes(data) {
+			    //append checkboxes
+			    var i = 0;
+			    choiceContainerTable.innerHTML = '';
+			    for(var pos in data) {
+			    	
+			    	data[pos].color = i;
+			        i++;
+			        if(data[pos].label != ''){
+				        choiceContainerTable.innerHTML += '<tr><td>'+
+				        '<input type="checkbox" name="' + data[pos].label +'" checked="checked" />'
+				        +data[pos].label+
+				        '</td></tr>';
+			        }
+			    }	
+		    }
+		    
+		    /**
+		     * ToolTip show
+		     */
+		    function showTooltip(x, y, contents) {
+		        var div=document.getElementById('tooltip');
+		        if(!div) {
+		            div = document.createElement('div');
+		            div.id="tooltip";
+		            div.style.position="absolute";
+		            div.style.border='1px solid #fdd';
+		            div.style.padding='2px';
+		            div.style.backgroundColor='#fee';
+		            document.body.appendChild(div);
+		        }
+		        YAHOO.util.Dom.setStyle(div, 'opacity', 0);
+		        div.innerHTML = contents;
+		        div.style.top=(y + 5) + "px";
+		        div.style.left=(x + 5) + "px";
+
+		        var anim = new YAHOO.util.Anim(div, {opacity: {to: 0.8}}, 0.2);
+		        anim.animate();
+		    }
+		    
+			/**
+			 * Detects whether the selected period contains any changesets for a given user;
+			 * if it does, that user's data is pushed for display.
+			 * Additionally, only users selected via the checkboxes are displayed.
+			*/
+		    function getDataAccordingToRanges(ranges) {
+		    	
+		        var data = [];
+		        var keys = [];
+				for(var key in dataset){
+					var push = false;
+					//method1 slow !!
+		            ///*
+		            for(var ds in dataset[key].data){
+			            commit_data = dataset[key].data[ds];
+			            //console.log(key);
+			            //console.log(new Date(commit_data.time*1000));
+			            //console.log(new Date(ranges.xaxis.from*1000));
+			            //console.log(new Date(ranges.xaxis.to*1000));
+			            if (commit_data.time >= ranges.xaxis.from && commit_data.time <= ranges.xaxis.to){
+			            	push = true;
+			            	break;
+					    }
 				    }
+				    //*/
+				    /*//method2 sorted commit data !!!
+				    var first_commit = dataset[key].data[0].time;
+				    var last_commit = dataset[key].data[dataset[key].data.length-1].time;
+				    
+				    console.log(first_commit);
+				    console.log(last_commit);
+				    
+				    if (first_commit >= ranges.xaxis.from && last_commit <= ranges.xaxis.to){
+						push = true;
+					}
+				    */
+				    if(push){			
+				    	data.push(dataset[key]);
+				    }
+				}
+				if(data.length >= 1){
+					return data;
+				} 
+				else{
+					//just return dummy data for graph to plot itself
+					return [getDummyData('')];	
+				}
+				
+		    }
+		    
+			/**
+			* redraw using new checkbox data
+			*/
+		    function plotchoiced(e,args){
+			    var cur_data = args[0];
+			    var cur_ranges = args[1];
+		    	
+				var new_data = [];
+		    	var inputs = choiceContainer.getElementsByTagName("input");
 
-			        var previousPoint = null;
-			        plot.subscribe("plothover", function (o) {
-				        var pos = o.pos;
-				        var item = o.item;
-				        
-				        //YAHOO.util.Dom.get("x").innerHTML = pos.x.toFixed(2);
-				        //YAHOO.util.Dom.get("y").innerHTML = pos.y.toFixed(2);
-		                if (item) {
-		                    if (previousPoint != item.datapoint) {
-		                        previousPoint = item.datapoint;
-		                        
-		                        var tooltip = YAHOO.util.Dom.get("tooltip");
-		                        if(tooltip) {
-		                        	  tooltip.parentNode.removeChild(tooltip);
-		                        }
-		                        var x = item.datapoint.x.toFixed(2);
-		                        var y = item.datapoint.y.toFixed(2);
-								
-		                        if (!item.series.label){
-		                            item.series.label = 'commits';
-			                    }
-		                        var d = new Date(x*1000);
-		                        var fd = d.getFullYear()+'-'+(d.getMonth()+1)+'-'+d.getDate();
-		                        var nr_commits = parseInt(y);
-		                        
-		                        var cur_data = datasets[item.series.label].data[item.dataIndex];
-				                var added = cur_data.added;
-				                var changed = cur_data.changed;
-				                var removed = cur_data.removed;
-				                
-		                        var nr_commits_suffix = " ${_('commits')} ";
-		                        var added_suffix = " ${_('files added')} ";
-			                    var changed_suffix = " ${_('files changed')} ";
-				                var removed_suffix = " ${_('files removed')} ";
+		    	//show only checked labels
+		        for(var i=0; i<inputs.length; i++) {
+		            var checkbox_key = inputs[i].name;
+		            
+	                if(inputs[i].checked){
+						for(var d in cur_data){
+							if(cur_data[d].label == checkbox_key){
+								new_data.push(cur_data[d]);
+							}
+						}			                
+	    	        }
+	                else{
+		                //push dummy data to not hide the label
+						new_data.push(getDummyData(checkbox_key));
+			        }
+		        }
+						        
+		    	var new_options = YAHOO.lang.merge(plot_options, {
+		            xaxis: { 
+		  	      		min: cur_ranges.xaxis.from, 
+		  	      		max: cur_ranges.xaxis.to,
+		  	      		mode:"time",
+		  	      		timeformat: "%d/%m",
+		        	}
+		    	});
+		    	if (!new_data){
+					new_data = [[0,1]];
+				}
+		    	// do the zooming
+		       plot = YAHOO.widget.Flot(plotContainer, new_data, new_options);
+		       
+		       plot.subscribe("plotselected", plotselected);
+	
+		       //resubscribe plothover
+		       plot.subscribe("plothover", plothover);
+		        
+		       // don't fire event on the overview to prevent an infinite loop
+		       overview.setSelection(cur_ranges, true);
+	
+		    }
+		    
+			/**
+		     * plot only selected items from overview
+		     * @param ranges
+		     * @returns
+		     */
+		    function plotselected(ranges,cur_data) {
+			    //updates the data for new plot
+	    		data = getDataAccordingToRanges(ranges);
+	    		generateCheckboxes(data);
+	    		
+		    	var new_options = YAHOO.lang.merge(plot_options, {
+		            xaxis: { 
+		  	      		min: ranges.xaxis.from, 
+		  	      		max: ranges.xaxis.to,
+		  	      		mode:"time",
+		  	      		timeformat: "%d/%m",
+		        	}
+		    	});
+		    	// do the zooming
+		        plot = YAHOO.widget.Flot(plotContainer, data, new_options);
+
+		        plot.subscribe("plotselected", plotselected);
+
+		        //resubscribe plothover
+		        plot.subscribe("plothover", plothover);
+		        
+		        // don't fire event on the overview to prevent an infinite loop
+		        overview.setSelection(ranges, true);
+
+		        //resubscribe choiced
+		        YAHOO.util.Event.on(choiceContainer.getElementsByTagName("input"), "click", plotchoiced, [data, ranges]);
+		    }
+		    
+		    var previousPoint = null;
 
-				                
-		                        if(nr_commits == 1){nr_commits_suffix = " ${_('commit')} ";}
-								if(added==1){added_suffix=" ${_('file added')} ";}
-								if(changed==1){changed_suffix=" ${_('file changed')} ";}
-								if(removed==1){removed_suffix=" ${_('file removed')} ";}
-												                
-		                        showTooltip(item.pageX, item.pageY, item.series.label + " on " + fd
-										 +'<br/>'+
-				                         nr_commits + nr_commits_suffix+'<br/>'+
-				                         added + added_suffix +'<br/>'+
-				                         changed + changed_suffix + '<br/>'+
-				                         removed + removed_suffix + '<br/>');
-		                    }
+			function plothover(o) {
+		        var pos = o.pos;
+		        var item = o.item;
+		        
+		        //YAHOO.util.Dom.get("x").innerHTML = pos.x.toFixed(2);
+		        //YAHOO.util.Dom.get("y").innerHTML = pos.y.toFixed(2);
+		        if (item) {
+		            if (previousPoint != item.datapoint) {
+		                previousPoint = item.datapoint;
+		                
+		                var tooltip = YAHOO.util.Dom.get("tooltip");
+		                if(tooltip) {
+		                	  tooltip.parentNode.removeChild(tooltip);
+		                }
+		                var x = item.datapoint.x.toFixed(2);
+		                var y = item.datapoint.y.toFixed(2);
+						
+		                if (!item.series.label){
+		                    item.series.label = 'commits';
 		                }
-		                else {
-		                	  var tooltip = YAHOO.util.Dom.get("tooltip");
-		                	  
-					          if(tooltip) {
-					                tooltip.parentNode.removeChild(tooltip);
-					          }
-		                    previousPoint = null;
-		                }
-			        });
+		                var d = new Date(x*1000);
+		                var fd = d.toDateString();
+		                var nr_commits = parseInt(y);
+		                
+		                var cur_data = dataset[item.series.label].data[item.dataIndex];
+		                var added = cur_data.added;
+		                var changed = cur_data.changed;
+		                var removed = cur_data.removed;
+		                
+		                var nr_commits_suffix = " ${_('commits')} ";
+		                var added_suffix = " ${_('files added')} ";
+		                var changed_suffix = " ${_('files changed')} ";
+		                var removed_suffix = " ${_('files removed')} ";
 
-			    }
+		                
+		                if(nr_commits == 1){nr_commits_suffix = " ${_('commit')} ";}
+						if(added==1){added_suffix=" ${_('file added')} ";}
+						if(changed==1){changed_suffix=" ${_('file changed')} ";}
+						if(removed==1){removed_suffix=" ${_('file removed')} ";}
+										                
+		                showTooltip(item.pageX, item.pageY, item.series.label + " on " + fd
+								 +'<br/>'+
+		                         nr_commits + nr_commits_suffix+'<br/>'+
+		                         added + added_suffix +'<br/>'+
+		                         changed + changed_suffix + '<br/>'+
+		                         removed + removed_suffix + '<br/>');
+		            }
+		        }
+		        else {
+		        	  var tooltip = YAHOO.util.Dom.get("tooltip");
+		        	  
+			          if(tooltip) {
+			                tooltip.parentNode.removeChild(tooltip);
+			          }
+		            previousPoint = null;
+		        }
 		    }
+			
+		    /**
+		     * MAIN EXECUTION
+		     */
+			
+			var data = getDataAccordingToRanges(initial_ranges);
+			generateCheckboxes(data);
+			
+		    //main plot
+		    var plot = YAHOO.widget.Flot(plotContainer,data,plot_options);
+		    
+			//overview
+			var overview = YAHOO.widget.Flot(overviewContainer, overview_dataset, overview_options);
+			
+			//show initial selection on overview
+			overview.setSelection(initial_ranges);    
+			
+		    plot.subscribe("plotselected", plotselected);
+		    
+		    overview.subscribe("plotselected", function (ranges) {
+		        plot.setSelection(ranges);
+		    });		
+				
+		    plot.subscribe("plothover", plothover);
 
-		    YAHOO.util.Event.on(choiceContainer.getElementsByTagName("input"), "click", plotAccordingToChoices);
-
-		    plotAccordingToChoices();
-		    })();
-         </script>
+		    YAHOO.util.Event.on(choiceContainer.getElementsByTagName("input"), "click", plotchoiced, [data, initial_ranges]);
+		}
+			SummaryPlot(${c.ts_min},${c.ts_max},${c.commit_data|n},${c.overview_data|n});		
+		</script>
 
     </div>
 </div>    
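Note: the rewritten script above is driven entirely by what the summary controller passes in via SummaryPlot(${c.ts_min},${c.ts_max},${c.commit_data|n},${c.overview_data|n}). Judging from getDummyData() and the dataset[item.series.label] lookup in plothover(), the expected shape is roughly the following sketch (made-up values, not data from this changeset):

    # c.commit_data: one series per author, keyed by the author label;
    # each point carries a unix timestamp plus per-day aggregates.
    commit_data = {
        "Marcin Kuzminski": {
            "label": "Marcin Kuzminski",
            "data": [
                {"time": 1285200000, "commits": 3,
                 "added": 5, "changed": 2, "removed": 1},
            ],
            "schema": ["commits"],
        },
    }

    # c.overview_data: a single [timestamp, commits] series for the overview pane.
    overview_data = [[1285200000, 3]]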
--- a/pylons_app/tests/__init__.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/__init__.py	Thu Sep 23 01:23:13 2010 +0200
@@ -16,12 +16,18 @@
 from webtest import TestApp
 import os
 from pylons_app.model import meta
+import logging
+
+
+log = logging.getLogger(__name__) 
+
 import pylons.test
 
 __all__ = ['environ', 'url', 'TestController']
 
 # Invoke websetup with the current config file
-SetupCommand('setup-app').run([pylons.test.pylonsapp.config['__file__']])
+#SetupCommand('setup-app').run([config_file])
+
 
 environ = {}
 
@@ -33,13 +39,13 @@
         self.app = TestApp(wsgiapp)
         url._push_object(URLGenerator(config['routes.map'], environ))
         self.sa = meta.Session
+
         TestCase.__init__(self, *args, **kwargs)
-
     
-    def log_user(self):
+    def log_user(self, username='test_admin', password='test'):
         response = self.app.post(url(controller='login', action='index'),
-                                 {'username':'test_admin',
-                                  'password':'test'})
+                                 {'username':username,
+                                  'password':password})
         assert response.status == '302 Found', 'Wrong response code from login got %s' % response.status
         assert response.session['hg_app_user'].username == 'test_admin', 'wrong logged in user'
-        return response.follow()        
\ No newline at end of file
+        return response.follow()
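Note: log_user() now accepts explicit credentials instead of always logging in as test_admin. A short usage sketch (the 'test_regular' account is an assumption, and the second assertion in log_user() above still expects 'test_admin', so it would need relaxing before a non-admin login passes):

    from pylons_app.tests import *

    class TestSummaryAsUser(TestController):

        def test_index_as_admin(self):
            # default credentials: test_admin / test
            self.log_user()
            response = self.app.get(url(controller='summary', action='index',
                                        repo_name='vcs_test'))

        def test_index_as_regular(self):
            # explicit credentials via the new keyword arguments
            self.log_user(username='test_regular', password='test')
            response = self.app.get(url(controller='summary', action='index',
                                        repo_name='vcs_test'))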
--- a/pylons_app/tests/functional/test_admin.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_admin.py	Thu Sep 23 01:23:13 2010 +0200
@@ -3,5 +3,7 @@
 class TestAdminController(TestController):
 
     def test_index(self):
+        self.log_user()
         response = self.app.get(url(controller='admin/admin', action='index'))
+        assert 'Admin dashboard - journal' in response.body,'No proper title in dashboard'
         # Test response...
--- a/pylons_app/tests/functional/test_admin_settings.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_admin_settings.py	Thu Sep 23 01:23:13 2010 +0200
@@ -1,4 +1,5 @@
 from pylons_app.tests import *
+from pylons_app.model.db import User
 
 class TestSettingsController(TestController):
 
@@ -41,3 +42,75 @@
 
     def test_edit_as_xml(self):
         response = self.app.get(url('formatted_admin_edit_setting', setting_id=1, format='xml'))
+
+    def test_my_account(self):
+        self.log_user()
+        response = self.app.get(url('admin_settings_my_account'))
+        print response
+        assert 'value="test_admin' in response.body
+        
+        
+            
+    def test_my_account_update(self):
+        self.log_user()
+        new_email = 'new@mail.pl'
+        response = self.app.post(url('admin_settings_my_account_update'), params=dict(
+                                                            _method='put',
+                                                            username='test_admin',
+                                                            new_password='test',
+                                                            password='',
+                                                            name='NewName',
+                                                            lastname='NewLastname',
+                                                            email=new_email,))
+        response.follow()
+        print response
+    
+        print 'x' * 100
+        print response.session
+        assert 'Your account was updated succesfully' in response.session['flash'][0][1], 'no flash message about success of change'
+        user = self.sa.query(User).filter(User.username == 'test_admin').one()
+        assert user.email == new_email , 'incorrect user email after update got %s vs %s' % (user.email, new_email)
+    
+    def test_my_account_update_own_email_ok(self):
+        self.log_user()
+                
+        new_email = 'new@mail.pl'
+        response = self.app.post(url('admin_settings_my_account_update'), params=dict(
+                                                            _method='put',
+                                                            username='test_admin',
+                                                            new_password='test',
+                                                            name='NewName',
+                                                            lastname='NewLastname',
+                                                            email=new_email,))
+        print response
+                
+    def test_my_account_update_err_email_exists(self):
+        self.log_user()
+                
+        new_email = 'test_regular@mail.com'  # already existing email
+        response = self.app.post(url('admin_settings_my_account_update'), params=dict(
+                                                            _method='put',
+                                                            username='test_admin',
+                                                            new_password='test',
+                                                            name='NewName',
+                                                            lastname='NewLastname',
+                                                            email=new_email,))
+        print response
+        
+        assert 'That e-mail address is already taken' in response.body, 'Missing error message about existing email'
+        
+        
+    def test_my_account_update_err(self):
+        self.log_user()
+                
+        new_email = 'newmail.pl'
+        response = self.app.post(url('admin_settings_my_account_update'), params=dict(
+                                                            _method='put',
+                                                            username='test_regular2',
+                                                            new_password='test',
+                                                            name='NewName',
+                                                            lastname='NewLastname',
+                                                            email=new_email,))
+        print response
+        assert 'An email address must contain a single @' in response.body, 'Missing error message about wrong email'
+        assert 'This username already exists' in response.body, 'Missing error message about existing user'
--- a/pylons_app/tests/functional/test_branches.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_branches.py	Thu Sep 23 01:23:13 2010 +0200
@@ -3,5 +3,6 @@
 class TestBranchesController(TestController):
 
     def test_index(self):
+        self.log_user()
         response = self.app.get(url(controller='branches', action='index',repo_name='vcs_test'))
         # Test response...
--- a/pylons_app/tests/functional/test_changelog.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_changelog.py	Thu Sep 23 01:23:13 2010 +0200
@@ -3,5 +3,6 @@
 class TestChangelogController(TestController):
 
     def test_index(self):
+        self.log_user()
         response = self.app.get(url(controller='changelog', action='index',repo_name='vcs_test'))
         # Test response...
--- a/pylons_app/tests/functional/test_feed.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_feed.py	Thu Sep 23 01:23:13 2010 +0200
@@ -3,11 +3,13 @@
 class TestFeedController(TestController):
 
     def test_rss(self):
+        self.log_user()
         response = self.app.get(url(controller='feed', action='rss',
                                     repo_name='vcs_test'))
         # Test response...
 
     def test_atom(self):
+        self.log_user()
         response = self.app.get(url(controller='feed', action='atom',
                                     repo_name='vcs_test'))
         # Test response...
\ No newline at end of file
--- a/pylons_app/tests/functional/test_files.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_files.py	Thu Sep 23 01:23:13 2010 +0200
@@ -3,6 +3,7 @@
 class TestFilesController(TestController):
 
     def test_index(self):
+        self.log_user()
         response = self.app.get(url(controller='files', action='index',
                                     repo_name='vcs_test',
                                     revision='tip',
--- a/pylons_app/tests/functional/test_login.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_login.py	Thu Sep 23 01:23:13 2010 +0200
@@ -82,9 +82,9 @@
         
         
     def test_register_ok(self):
-        username = 'test_regular2'
+        username = 'test_regular4'
         password = 'qweqwe'
-        email = 'goodmail@mail.com'
+        email = 'marcin@test.com'
         name = 'testname'
         lastname = 'testlastname'
         
@@ -94,18 +94,46 @@
                                              'email':email,
                                              'name':name,
                                              'lastname':lastname})
-        
+        print response.body
         assert response.status == '302 Found', 'Wrong response from register page got %s' % response.status        
+        assert 'You have successfully registered into hg-app' in response.session['flash'][0], 'No flash message about user registration'
         
-        ret = self.sa.query(User).filter(User.username == 'test_regular2').one()
+        ret = self.sa.query(User).filter(User.username == 'test_regular4').one()
         assert ret.username == username , 'field mismatch %s %s' % (ret.username, username)
-        assert check_password(password,ret.password) == True , 'password mismatch'
+        assert check_password(password, ret.password) == True , 'password mismatch'
         assert ret.email == email , 'field mismatch %s %s' % (ret.email, email)
         assert ret.name == name , 'field mismatch %s %s' % (ret.name, name)
         assert ret.lastname == lastname , 'field mismatch %s %s' % (ret.lastname, lastname)
     
         
+    def test_forgot_password_wrong_mail(self):    
+        response = self.app.post(url(controller='login', action='password_reset'),
+                                            {'email':'marcin@wrongmail.org', })
+        
+        assert "That e-mail address doesn't exist" in response.body, 'Missing error message about wrong email'
+                
+    def test_forgot_password(self):
+        response = self.app.get(url(controller='login', action='password_reset'))
+        assert response.status == '200 OK', 'Wrong response from login page got %s' % response.status
+
+        username = 'test_password_reset_1'
+        password = 'qweqwe'
+        email = 'marcin@python-works.com'
+        name = 'passwd'
+        lastname = 'reset'
+                
+        response = self.app.post(url(controller='login', action='register'),
+                                            {'username':username,
+                                             'password':password,
+                                             'email':email,
+                                             'name':name,
+                                             'lastname':lastname})        
+        #register new user for email test
+        response = self.app.post(url(controller='login', action='password_reset'),
+                                            {'email':email, })
+        print response.session['flash']
+        assert 'You have successfully registered into hg-app' in response.session['flash'][0], 'No flash message about user registration'
+        assert 'Your new password was sent' in response.session['flash'][1], 'No flash message about password reset'
         
         
         
-        
--- a/pylons_app/tests/functional/test_search.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_search.py	Thu Sep 23 01:23:13 2010 +0200
@@ -9,7 +9,7 @@
         self.log_user()
         response = self.app.get(url(controller='search', action='index'))
         print response.body
-        assert 'class="small" id="q" name="q" type="text"' in response.body,'Search box content error'
+        assert 'class="small" id="q" name="q" type="text"' in response.body, 'Search box content error'
         # Test response...
 
     def test_empty_search(self):
@@ -18,12 +18,21 @@
             raise SkipTest('skipped due to existing index')
         else:
             self.log_user()
-            response = self.app.get(url(controller='search', action='index'),{'q':'vcs_test'})
-            assert 'There is no index to search in. Please run whoosh indexer' in response.body,'No error message about empty index'
+            response = self.app.get(url(controller='search', action='index'), {'q':'vcs_test'})
+            assert 'There is no index to search in. Please run whoosh indexer' in response.body, 'No error message about empty index'
         
     def test_normal_search(self):
         self.log_user()
-        response = self.app.get(url(controller='search', action='index'),{'q':'def+repo'})
+        response = self.app.get(url(controller='search', action='index'), {'q':'def repo'})
         print response.body
-        assert '9 results' in response.body,'no message about proper search results'
+        assert '10 results' in response.body, 'no message about proper search results'
+        assert 'Permission denied' not in response.body, 'Wrong permissions settings for that repo and user'
         
+    
+    def test_repo_search(self):
+        self.log_user()
+        response = self.app.get(url(controller='search', action='index'), {'q':'repository:vcs_test def test'})
+        print response.body
+        assert '4 results' in response.body, 'no message about proper search results'
+        assert 'Permission denied' not in response.body, 'Wrong permissions settings for that repo and user'
+        
--- a/pylons_app/tests/functional/test_settings.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_settings.py	Thu Sep 23 01:23:13 2010 +0200
@@ -3,6 +3,7 @@
 class TestSettingsController(TestController):
 
     def test_index(self):
+        self.log_user()
         response = self.app.get(url(controller='settings', action='index',
                                     repo_name='vcs_test'))
         # Test response...
--- a/pylons_app/tests/functional/test_shortlog.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_shortlog.py	Thu Sep 23 01:23:13 2010 +0200
@@ -3,5 +3,6 @@
 class TestShortlogController(TestController):
 
     def test_index(self):
+        self.log_user()
         response = self.app.get(url(controller='shortlog', action='index',repo_name='vcs_test'))
         # Test response...
--- a/pylons_app/tests/functional/test_summary.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_summary.py	Thu Sep 23 01:23:13 2010 +0200
@@ -3,5 +3,6 @@
 class TestSummaryController(TestController):
 
     def test_index(self):
+        self.log_user()
         response = self.app.get(url(controller='summary', action='index',repo_name='vcs_test'))
         # Test response...
--- a/pylons_app/tests/functional/test_tags.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/tests/functional/test_tags.py	Thu Sep 23 01:23:13 2010 +0200
@@ -3,5 +3,6 @@
 class TestTagsController(TestController):
 
     def test_index(self):
+        self.log_user()
         response = self.app.get(url(controller='tags', action='index',repo_name='vcs_test'))
         # Test response...
--- a/pylons_app/websetup.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/pylons_app/websetup.py	Thu Sep 23 01:23:13 2010 +0200
@@ -1,40 +1,25 @@
 """Setup the pylons_app application"""
 
-from os.path import dirname as dn, join as jn
+from os.path import dirname as dn
 from pylons_app.config.environment import load_environment
 from pylons_app.lib.db_manage import DbManage
-import datetime
-from time import mktime
 import logging
 import os
 import sys
-import tarfile
 
 log = logging.getLogger(__name__)
 
 ROOT = dn(dn(os.path.realpath(__file__)))
 sys.path.append(ROOT)
 
+
 def setup_app(command, conf, vars):
     """Place any commands to setup pylons_app here"""
     log_sql = True
     tests = False
-    
-    dbname = os.path.split(conf['sqlalchemy.db1.url'])[-1]
-    filename = os.path.split(conf.filename)[-1]
+    REPO_TEST_PATH = None
     
-    if filename == 'tests.ini':
-        uniq_suffix = str(int(mktime(datetime.datetime.now().timetuple())))
-        REPO_TEST_PATH = '/tmp/hg_app_test_%s' % uniq_suffix
-        
-        if not os.path.isdir(REPO_TEST_PATH):
-            os.mkdir(REPO_TEST_PATH)
-            cur_dir = dn(os.path.abspath(__file__))
-            tar = tarfile.open(jn(cur_dir,'tests',"vcs_test.tar.gz"))
-            tar.extractall(REPO_TEST_PATH)
-            tar.close()
-            
-        tests = True    
+    dbname = os.path.split(conf['sqlalchemy.db1.url'])[-1] 
     
     dbmanage = DbManage(log_sql, dbname, tests)
     dbmanage.create_tables(override=True)
--- a/setup.cfg	Sat Sep 11 03:35:33 2010 +0200
+++ b/setup.cfg	Thu Sep 23 01:23:13 2010 +0200
@@ -8,7 +8,7 @@
 [nosetests]
 verbose=True
 verbosity=2
-with-pylons=tests.ini
+with-pylons=test.ini
 detailed-errors=1
 
 # Babel configuration
--- a/setup.py	Sat Sep 11 03:35:33 2010 +0200
+++ b/setup.py	Thu Sep 23 01:23:13 2010 +0200
@@ -7,7 +7,7 @@
     from setuptools import setup, find_packages
 
 setup(
-    name='HgApp-%s'%get_version(),
+    name='HgApp-%s' % get_version(),
     version=get_version(),
     description='Mercurial repository serving and browsing app',
     keywords='mercurial web hgwebdir replacement serving hgweb',
@@ -20,12 +20,13 @@
         "SQLAlchemy>=0.6",
         "babel",
         "Mako>=0.3.2",
-        "vcs>=0.1.4",
+        "vcs>=0.1.5",
         "pygments>=1.3.0",
         "mercurial>=1.6",
         "pysqlite",
-        "whoosh==1.0.0b10",
+        "whoosh==1.0.0b17",
         "py-bcrypt",
+        "celery",
     ],
     setup_requires=["PasteScript>=1.6.3"],
     packages=find_packages(exclude=['ez_setup']),
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test.ini	Thu Sep 23 01:23:13 2010 +0200
@@ -0,0 +1,160 @@
+################################################################################
+################################################################################
+# hg-app - Pylons environment configuration                                    #
+#                                                                              # 
+# The %(here)s variable will be replaced with the parent directory of this file#
+################################################################################
+
+[DEFAULT]
+debug = true
+################################################################################
+## Uncomment and replace with the address which should receive                ## 
+## any error reports after application crash								  ##
+## Additionally those settings will be used by hg-app mailing system          ##
+################################################################################
+#email_to = admin@localhost
+#error_email_from = paste_error@localhost
+#app_email_from = hg-app-noreply@localhost
+#error_message =
+
+#smtp_server = mail.server.com
+#smtp_username = 
+#smtp_password = 
+#smtp_port = 
+#smtp_use_tls = false
+
+[server:main]
+##nr of threads to spawn
+threadpool_workers = 5
+
+##max request before thread respawn
+threadpool_max_requests = 2
+
+##option to use threads instead of processes
+use_threadpool = true
+
+use = egg:Paste#http
+host = 127.0.0.1
+port = 5000
+
+[app:main]
+use = egg:pylons_app
+full_stack = true
+static_files = true
+lang=en
+cache_dir = %(here)s/data
+
+####################################
+###         BEAKER CACHE        ####
+####################################
+beaker.cache.data_dir=/%(here)s/data/cache/data
+beaker.cache.lock_dir=/%(here)s/data/cache/lock
+beaker.cache.regions=super_short_term,short_term,long_term
+beaker.cache.long_term.type=memory
+beaker.cache.long_term.expire=36000
+beaker.cache.short_term.type=memory
+beaker.cache.short_term.expire=60
+beaker.cache.super_short_term.type=memory
+beaker.cache.super_short_term.expire=10
+
+####################################
+###       BEAKER SESSION        ####
+####################################
+## Type of storage used for the session, current types are 
+## "dbm", "file", "memcached", "database", and "memory". 
+## The storage uses the Container API 
+##that is also used by the cache system.
+beaker.session.type = file
+
+beaker.session.key = hg-app
+beaker.session.secret = g654dcno0-9873jhgfreyu
+beaker.session.timeout = 36000
+
+##auto save the session so you don't need to call .save()
+beaker.session.auto = False
+
+##true: expire session cookie at browser close
+#beaker.session.cookie_expires = 3600
+
+    
+################################################################################
+## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT*  ##
+## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
+## execute malicious code after an exception is raised.                       ##
+################################################################################
+#set debug = false
+
+##################################
+###       LOGVIEW CONFIG       ###
+##################################
+logview.sqlalchemy = #faa
+logview.pylons.templating = #bfb
+logview.pylons.util = #eee
+
+#########################################################
+### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG    ###
+#########################################################
+sqlalchemy.db1.url = sqlite:///%(here)s/test.db
+#sqlalchemy.db1.echo = False
+#sqlalchemy.db1.pool_recycle = 3600
+sqlalchemy.convert_unicode = true
+
+################################
+### LOGGING CONFIGURATION   ####
+################################
+[loggers]
+keys = root, routes, pylons_app, sqlalchemy
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic,color_formatter
+
+#############
+## LOGGERS ##
+#############
+[logger_root]
+level = ERROR
+handlers = console
+
+[logger_routes]
+level = ERROR
+handlers = console
+qualname = routes.middleware
+# "level = DEBUG" logs the route matched and routing variables.
+
+[logger_pylons_app]
+level = ERROR
+handlers = console
+qualname = pylons_app
+propagate = 0
+
+[logger_sqlalchemy]
+level = ERROR
+handlers = console
+qualname = sqlalchemy.engine
+propagate = 0
+
+##############
+## HANDLERS ##
+##############
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = color_formatter
+
+################
+## FORMATTERS ##
+################
+
+[formatter_generic]
+format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %Y-%m-%d %H:%M:%S
+
+[formatter_color_formatter]
+class=pylons_app.lib.colored_formatter.ColorFormatter
+format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %Y-%m-%d %H:%M:%S
\ No newline at end of file
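Note: test.ini replaces the removed tests.ini below, and setup.cfg's with-pylons option now points at it. For reference, a minimal sketch of loading it programmatically the same way websetup.setup_app() consumes its settings, assuming PasteDeploy is installed:

    from paste.deploy import appconfig

    # hypothetical sketch: load the new test configuration and read the
    # same key that websetup.setup_app() uses to pick the database name
    conf = appconfig('config:test.ini', relative_to='.')
    print conf['sqlalchemy.db1.url']    # sqlite:///<here>/test.db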
--- a/tests.ini	Sat Sep 11 03:35:33 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,155 +0,0 @@
-################################################################################
-################################################################################
-# pylons_app - Pylons environment configuration                                #
-#                                                                              # 
-# The %(here)s variable will be replaced with the parent directory of this file#
-################################################################################
-
-[DEFAULT]
-debug = true
-############################################
-## Uncomment and replace with the address ##
-## which should receive any error reports ##
-############################################
-#email_to = admin@localhost
-#smtp_server = mail.server.com
-#error_email_from = paste_error@localhost
-#smtp_username = 
-#smtp_password = 
-#error_message = 'mercurial crash !'
-
-[server:main]
-##nr of threads to spawn
-threadpool_workers = 5
-
-##max request before
-threadpool_max_requests = 2
-
-##option to use threads of process
-use_threadpool = true
-
-use = egg:Paste#http
-host = 127.0.0.1
-port = 5000
-
-[app:main]
-use = egg:pylons_app
-full_stack = true
-static_files = true
-lang=en
-cache_dir = %(here)s/data
-
-####################################
-###         BEAKER CACHE        ####
-####################################
-beaker.cache.data_dir=/%(here)s/data/cache/data
-beaker.cache.lock_dir=/%(here)s/data/cache/lock
-beaker.cache.regions=super_short_term,short_term,long_term
-beaker.cache.long_term.type=memory
-beaker.cache.long_term.expire=36000
-beaker.cache.short_term.type=memory
-beaker.cache.short_term.expire=60
-beaker.cache.super_short_term.type=memory
-beaker.cache.super_short_term.expire=10
-
-####################################
-###       BEAKER SESSION        ####
-####################################
-## Type of storage used for the session, current types are 
-## “dbm”, “file”, “memcached”, “database”, and “memory”. 
-## The storage uses the Container API 
-##that is also used by the cache system.
-beaker.session.type = file
-
-beaker.session.key = hg-app
-beaker.session.secret = g654dcno0-9873jhgfreyu
-beaker.session.timeout = 36000
-
-##auto save the session to not to use .save()
-beaker.session.auto = False
-
-##true exire at browser close
-#beaker.session.cookie_expires = 3600
-
-    
-################################################################################
-## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT*  ##
-## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
-## execute malicious code after an exception is raised.                       ##
-################################################################################
-#set debug = false
-
-##################################
-###       LOGVIEW CONFIG       ###
-##################################
-logview.sqlalchemy = #faa
-logview.pylons.templating = #bfb
-logview.pylons.util = #eee
-
-#########################################################
-### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG    ###
-#########################################################
-sqlalchemy.db1.url = sqlite:///%(here)s/test.db
-#sqlalchemy.db1.echo = False
-#sqlalchemy.db1.pool_recycle = 3600
-sqlalchemy.convert_unicode = true
-
-################################
-### LOGGING CONFIGURATION   ####
-################################
-[loggers]
-keys = root, routes, pylons_app, sqlalchemy
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic,color_formatter
-
-#############
-## LOGGERS ##
-#############
-[logger_root]
-level = ERROR
-handlers = console
-
-[logger_routes]
-level = ERROR
-handlers = console
-qualname = routes.middleware
-# "level = DEBUG" logs the route matched and routing variables.
-
-[logger_pylons_app]
-level = ERROR
-handlers = console
-qualname = pylons_app
-propagate = 0
-
-[logger_sqlalchemy]
-level = ERROR
-handlers = console
-qualname = sqlalchemy.engine
-propagate = 0
-
-##############
-## HANDLERS ##
-##############
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = color_formatter
-
-################
-## FORMATTERS ##
-################
-
-[formatter_generic]
-format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %Y-%m-%d %H:%M:%S
-
-[formatter_color_formatter]
-class=pylons_app.lib.colored_formatter.ColorFormatter
-format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %Y-%m-%d %H:%M:%S
\ No newline at end of file