changeset 1076:db71228a4c23 beta

Moved the statistics parse_limit into the .ini files, decreased the celery max tasks per child to 1 for better memory usage, and made the stats task use the new slicing support on the repository object (see the sketches below).
author Marcin Kuzminski <marcin@python-works.com>
date Thu, 24 Feb 2011 23:44:21 +0100
parents f726a939d2d4
children ee3a5a5f66bf
files development.ini production.ini rhodecode/config/deployment.ini_tmpl rhodecode/lib/celerylib/tasks.py
diffstat 4 files changed, 16 insertions(+), 11 deletions(-)
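The new commit_parse_limit setting replaces the parse limit that was hard-coded to 250 inside the stats task; each .ini flavour now carries its own value (25 for development, 250 for production, 50 in the deployment template). A minimal sketch of reading it back out of the Pylons-style app_conf dict, as the task does below — the fallback default here is an illustrative assumption, not part of the changeset, which calls int() on the raw value directly:

# Minimal sketch, assuming the Pylons-style config object available in
# rhodecode/lib/celerylib/tasks.py; the fallback default is hypothetical.
def get_commit_parse_limit(config, default=250):
    """Read commit_parse_limit from the [app:main] section (app_conf)."""
    raw = config['app_conf'].get('commit_parse_limit')
    return int(raw) if raw is not None else default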
--- a/development.ini	Thu Feb 24 23:41:39 2011 +0100
+++ b/development.ini	Thu Feb 24 23:44:21 2011 +0100
@@ -45,8 +45,10 @@
 lang=en
 cache_dir = %(here)s/data
 index_dir = %(here)s/data/index
+app_instance_uuid = develop
 cut_off_limit = 256000
 force_https = false
+commit_parse_limit = 25
 
 ####################################
 ###        CELERY CONFIG        ####
@@ -70,7 +72,7 @@
 celeryd.concurrency = 2
 #celeryd.log.file = celeryd.log
 celeryd.log.level = debug
-celeryd.max.tasks.per.child = 3
+celeryd.max.tasks.per.child = 1
 
 #tasks will never be sent to the queue, but executed locally instead.
 celery.always.eager = false
@@ -78,8 +80,9 @@
 ####################################
 ###         BEAKER CACHE        ####
 ####################################
-beaker.cache.data_dir=/%(here)s/data/cache/data
-beaker.cache.lock_dir=/%(here)s/data/cache/lock
+beaker.cache.data_dir=%(here)s/data/cache/data
+beaker.cache.lock_dir=%(here)s/data/cache/lock
+
 beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
 
 beaker.cache.super_short_term.type=memory
--- a/production.ini	Thu Feb 24 23:41:39 2011 +0100
+++ b/production.ini	Thu Feb 24 23:44:21 2011 +0100
@@ -47,6 +47,7 @@
 index_dir = %(here)s/data/index
 cut_off_limit = 256000
 force_https = false
+commit_parse_limit = 250
 
 ####################################
 ###        CELERY CONFIG        ####
@@ -70,7 +71,7 @@
 celeryd.concurrency = 2
 #celeryd.log.file = celeryd.log
 celeryd.log.level = debug
-celeryd.max.tasks.per.child = 3
+celeryd.max.tasks.per.child = 1
 
 #tasks will never be sent to the queue, but executed locally instead.
 celery.always.eager = false
--- a/rhodecode/config/deployment.ini_tmpl	Thu Feb 24 23:41:39 2011 +0100
+++ b/rhodecode/config/deployment.ini_tmpl	Thu Feb 24 23:44:21 2011 +0100
@@ -48,6 +48,7 @@
 app_instance_uuid = ${app_instance_uuid}
 cut_off_limit = 256000
 force_https = false 
+commit_parse_limit = 50
 
 ####################################
 ###        CELERY CONFIG        ####
@@ -71,7 +72,7 @@
 celeryd.concurrency = 2
 #celeryd.log.file = celeryd.log
 celeryd.log.level = debug
-celeryd.max.tasks.per.child = 3
+celeryd.max.tasks.per.child = 1
 
 #tasks will never be sent to the queue, but executed locally instead.
 celery.always.eager = false
--- a/rhodecode/lib/celerylib/tasks.py	Thu Feb 24 23:41:39 2011 +0100
+++ b/rhodecode/lib/celerylib/tasks.py	Thu Feb 24 23:44:21 2011 +0100
@@ -104,7 +104,7 @@
     repo = get_repo(p)
 
     skip_date_limit = True
-    parse_limit = 250 #limit for single task changeset parsing optimal for
+    parse_limit = int(config['app_conf'].get('commit_parse_limit'))
     last_rev = 0
     last_cs = None
     timegetter = itemgetter('time')
@@ -135,8 +135,9 @@
     lmktime = mktime
 
     last_rev = last_rev + 1 if last_rev > 0 else last_rev
-    for rev in repo.revisions[last_rev:last_rev + parse_limit]:
-        last_cs = cs = repo.get_changeset(rev)
+
+    for cs in repo[last_rev:last_rev + parse_limit]:
+        last_cs = cs #remember last parsed changeset
         k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                       cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
 
@@ -209,10 +210,9 @@
         log.debug('getting code trending stats')
         stats.languages = json.dumps(__get_codes_stats(repo_name))
 
-    stats.repository = dbrepo
-    stats.stat_on_revision = last_cs.revision
-
     try:
+        stats.repository = dbrepo
+        stats.stat_on_revision = last_cs.revision if last_cs else 0
         sa.add(stats)
         sa.commit()
     except:
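The tasks.py change above also switches from slicing repo.revisions and resolving each revision by hand to slicing the repository object itself, which yields changeset objects directly. A minimal before/after sketch, assuming a vcs repository instance as used by the stats task; the names mirror the task's locals and are illustrative only:

# Before/after sketch of the iteration change; repo is assumed to be a vcs
# repository object as used in __get_commit_stats-style tasks.
def iter_window_old(repo, last_rev, parse_limit):
    # before: slice the revision list, then resolve each revision explicitly
    for rev in repo.revisions[last_rev:last_rev + parse_limit]:
        yield repo.get_changeset(rev)

def iter_window_new(repo, last_rev, parse_limit):
    # after: slicing the repository itself yields changeset objects directly
    for cs in repo[last_rev:last_rev + parse_limit]:
        yield cs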