Mercurial > kallithea
comparison rhodecode/lib/utils.py @ 1512:bf263968da47
merge beta in stable branch
author | Marcin Kuzminski <marcin@python-works.com> |
---|---|
date | Fri, 07 Oct 2011 01:08:50 +0200 |
parents | 9f6560667743 7d687ed11929 |
children | 0b8fba8ab90b |
comparison
equal
deleted
inserted
replaced
1329:e058df3ff2b4 | 1512:bf263968da47 |
---|---|
27 import logging | 27 import logging |
28 import datetime | 28 import datetime |
29 import traceback | 29 import traceback |
30 import paste | 30 import paste |
31 import beaker | 31 import beaker |
32 from os.path import dirname as dn, join as jn | |
32 | 33 |
33 from paste.script.command import Command, BadCommand | 34 from paste.script.command import Command, BadCommand |
34 | 35 |
35 from UserDict import DictMixin | 36 from mercurial import ui, config |
36 | |
37 from mercurial import ui, config, hg | |
38 from mercurial.error import RepoError | |
39 | 37 |
40 from webhelpers.text import collapse, remove_formatting, strip_tags | 38 from webhelpers.text import collapse, remove_formatting, strip_tags |
41 | 39 |
40 from vcs import get_backend | |
42 from vcs.backends.base import BaseChangeset | 41 from vcs.backends.base import BaseChangeset |
43 from vcs.utils.lazy import LazyProperty | 42 from vcs.utils.lazy import LazyProperty |
43 from vcs.utils.helpers import get_scm | |
44 from vcs.exceptions import VCSError | |
44 | 45 |
45 from rhodecode.model import meta | 46 from rhodecode.model import meta |
46 from rhodecode.model.caching_query import FromCache | 47 from rhodecode.model.caching_query import FromCache |
47 from rhodecode.model.db import Repository, User, RhodeCodeUi, UserLog | 48 from rhodecode.model.db import Repository, User, RhodeCodeUi, UserLog, Group, \ |
49 RhodeCodeSettings | |
48 from rhodecode.model.repo import RepoModel | 50 from rhodecode.model.repo import RepoModel |
49 from rhodecode.model.user import UserModel | |
50 | 51 |
51 log = logging.getLogger(__name__) | 52 log = logging.getLogger(__name__) |
52 | 53 |
53 | 54 |
54 def recursive_replace(str, replace=' '): | 55 def recursive_replace(str, replace=' '): |
106 | 107 |
107 if not sa: | 108 if not sa: |
108 sa = meta.Session() | 109 sa = meta.Session() |
109 | 110 |
110 try: | 111 try: |
111 um = UserModel() | |
112 if hasattr(user, 'user_id'): | 112 if hasattr(user, 'user_id'): |
113 user_obj = user | 113 user_obj = user |
114 elif isinstance(user, basestring): | 114 elif isinstance(user, basestring): |
115 user_obj = um.get_by_username(user, cache=False) | 115 user_obj = User.by_username(user, cache=False) |
116 else: | 116 else: |
117 raise Exception('You have to provide user object or username') | 117 raise Exception('You have to provide user object or username') |
118 | 118 |
119 rm = RepoModel() | 119 rm = RepoModel() |
120 if hasattr(repo, 'repo_id'): | 120 if hasattr(repo, 'repo_id'): |
142 except: | 142 except: |
143 log.error(traceback.format_exc()) | 143 log.error(traceback.format_exc()) |
144 sa.rollback() | 144 sa.rollback() |
145 | 145 |
146 | 146 |
147 def get_repos(path, recursive=False, initial=False): | 147 def get_repos(path, recursive=False): |
148 """ | 148 """ |
149 Scans given path for repos and return (name,(type,path)) tuple | 149 Scans given path for repos and return (name,(type,path)) tuple |
150 | 150 |
151 :param prefix: | 151 :param path: path to scan for repositories |
152 :param path: | 152 :param recursive: recursive search and return names with subdirs in front |
153 :param recursive: | |
154 :param initial: | |
155 """ | 153 """ |
156 from vcs.utils.helpers import get_scm | 154 from vcs.utils.helpers import get_scm |
157 from vcs.exceptions import VCSError | 155 from vcs.exceptions import VCSError |
158 | 156 |
159 try: | 157 if path.endswith(os.sep): |
160 scm = get_scm(path) | 158 #remove ending slash for better results |
161 except: | 159 path = path[:-1] |
162 pass | 160 |
163 else: | 161 def _get_repos(p): |
164 raise Exception('The given path %s should not be a repository got %s', | 162 if not os.access(p, os.W_OK): |
165 path, scm) | 163 return |
166 | 164 for dirpath in os.listdir(p): |
167 for dirpath in os.listdir(path): | 165 if os.path.isfile(os.path.join(p, dirpath)): |
168 try: | 166 continue |
169 yield dirpath, get_scm(os.path.join(path, dirpath)) | 167 cur_path = os.path.join(p, dirpath) |
170 except VCSError: | 168 try: |
171 pass | 169 scm_info = get_scm(cur_path) |
172 | 170 yield scm_info[1].split(path)[-1].lstrip(os.sep), scm_info |
173 def check_repo_fast(repo_name, base_path): | 171 except VCSError: |
174 """ | 172 if not recursive: |
175 Check given path for existence of directory | 173 continue |
174 #check if this dir contains other repos for recursive scan | |
175 rec_path = os.path.join(p, dirpath) | |
176 if os.path.isdir(rec_path): | |
177 for inner_scm in _get_repos(rec_path): | |
178 yield inner_scm | |
179 | |
180 return _get_repos(path) | |
181 | |
182 | |
183 def is_valid_repo(repo_name, base_path): | |
184 """ | |
185 Returns True if given path is a valid repository False otherwise | |
176 :param repo_name: | 186 :param repo_name: |
177 :param base_path: | 187 :param base_path: |
178 | 188 |
179 :return False: if this directory is present | 189 :return True: if given path is a valid repository |
180 """ | 190 """ |
181 if os.path.isdir(os.path.join(base_path, repo_name)): | 191 full_path = os.path.join(base_path, repo_name) |
192 | |
193 try: | |
194 get_scm(full_path) | |
195 return True | |
196 except VCSError: | |
182 return False | 197 return False |
183 return True | 198 |
184 | 199 def is_valid_repos_group(repos_group_name, base_path): |
185 | 200 """ |
186 def check_repo(repo_name, base_path, verify=True): | 201 Returns True if given path is a repos group False otherwise |
187 | 202 |
188 repo_path = os.path.join(base_path, repo_name) | 203 :param repo_name: |
189 | 204 :param base_path: |
190 try: | 205 """ |
191 if not check_repo_fast(repo_name, base_path): | 206 full_path = os.path.join(base_path, repos_group_name) |
192 return False | 207 |
193 r = hg.repository(ui.ui(), repo_path) | 208 # check if it's not a repo |
194 if verify: | 209 if is_valid_repo(repos_group_name, base_path): |
195 hg.verify(r) | |
196 #here we know that repo exists it was verified | |
197 log.info('%s repo is already created', repo_name) | |
198 return False | 210 return False |
199 except RepoError: | 211 |
200 #it means that there is no valid repo there... | 212 # check if it's a valid path |
201 log.info('%s repo is free for creation', repo_name) | 213 if os.path.isdir(full_path): |
202 return True | 214 return True |
203 | 215 |
204 | 216 return False |
217 | |
205 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'): | 218 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'): |
206 while True: | 219 while True: |
207 ok = raw_input(prompt) | 220 ok = raw_input(prompt) |
208 if ok in ('y', 'ye', 'yes'): | 221 if ok in ('y', 'ye', 'yes'): |
209 return True | 222 return True |
275 def set_rhodecode_config(config): | 288 def set_rhodecode_config(config): |
276 """Updates pylons config with new settings from database | 289 """Updates pylons config with new settings from database |
277 | 290 |
278 :param config: | 291 :param config: |
279 """ | 292 """ |
280 from rhodecode.model.settings import SettingsModel | 293 hgsettings = RhodeCodeSettings.get_app_settings() |
281 hgsettings = SettingsModel().get_app_settings() | |
282 | 294 |
283 for k, v in hgsettings.items(): | 295 for k, v in hgsettings.items(): |
284 config[k] = v | 296 config[k] = v |
285 | 297 |
286 | 298 |
300 """ | 312 """ |
301 A dummy empty changeset. It's possible to pass hash when creating | 313 A dummy empty changeset. It's possible to pass hash when creating |
302 an EmptyChangeset | 314 an EmptyChangeset |
303 """ | 315 """ |
304 | 316 |
305 def __init__(self, cs='0' * 40, repo=None): | 317 def __init__(self, cs='0' * 40, repo=None, requested_revision=None, alias=None): |
306 self._empty_cs = cs | 318 self._empty_cs = cs |
307 self.revision = -1 | 319 self.revision = -1 |
308 self.message = '' | 320 self.message = '' |
309 self.author = '' | 321 self.author = '' |
310 self.date = '' | 322 self.date = '' |
311 self.repository = repo | 323 self.repository = repo |
324 self.requested_revision = requested_revision | |
325 self.alias = alias | |
312 | 326 |
313 @LazyProperty | 327 @LazyProperty |
314 def raw_id(self): | 328 def raw_id(self): |
315 """Returns raw string identifying this changeset, useful for web | 329 """Returns raw string identifying this changeset, useful for web |
316 representation. | 330 representation. |
317 """ | 331 """ |
318 | 332 |
319 return self._empty_cs | 333 return self._empty_cs |
320 | 334 |
321 @LazyProperty | 335 @LazyProperty |
336 def branch(self): | |
337 return get_backend(self.alias).DEFAULT_BRANCH_NAME | |
338 | |
339 @LazyProperty | |
322 def short_id(self): | 340 def short_id(self): |
323 return self.raw_id[:12] | 341 return self.raw_id[:12] |
324 | 342 |
325 def get_file_changeset(self, path): | 343 def get_file_changeset(self, path): |
326 return self | 344 return self |
329 return u'' | 347 return u'' |
330 | 348 |
331 def get_file_size(self, path): | 349 def get_file_size(self, path): |
332 return 0 | 350 return 0 |
333 | 351 |
352 | |
353 def map_groups(groups): | |
354 """Checks for groups existence, and creates groups structures. | |
355 It returns last group in structure | |
356 | |
357 :param groups: list of groups structure | |
358 """ | |
359 sa = meta.Session() | |
360 | |
361 parent = None | |
362 group = None | |
363 for lvl, group_name in enumerate(groups[:-1]): | |
364 group = sa.query(Group).filter(Group.group_name == group_name).scalar() | |
365 | |
366 if group is None: | |
367 group = Group(group_name, parent) | |
368 sa.add(group) | |
369 sa.commit() | |
370 | |
371 parent = group | |
372 | |
373 return group | |
374 | |
375 | |
334 def repo2db_mapper(initial_repo_list, remove_obsolete=False): | 376 def repo2db_mapper(initial_repo_list, remove_obsolete=False): |
335 """maps all found repositories into db | 377 """maps all repos given in initial_repo_list, non existing repositories |
378 are created, if remove_obsolete is True it also check for db entries | |
379 that are not in initial_repo_list and removes them. | |
380 | |
381 :param initial_repo_list: list of repositories found by scanning methods | |
382 :param remove_obsolete: check for obsolete entries in database | |
336 """ | 383 """ |
337 | 384 |
338 sa = meta.Session() | 385 sa = meta.Session() |
339 rm = RepoModel() | 386 rm = RepoModel() |
340 user = sa.query(User).filter(User.admin == True).first() | 387 user = sa.query(User).filter(User.admin == True).first() |
341 | 388 added = [] |
342 for name, repo in initial_repo_list.items(): | 389 for name, repo in initial_repo_list.items(): |
390 group = map_groups(name.split(os.sep)) | |
343 if not rm.get_by_repo_name(name, cache=False): | 391 if not rm.get_by_repo_name(name, cache=False): |
344 log.info('repository %s not found creating default', name) | 392 log.info('repository %s not found creating default', name) |
345 | 393 added.append(name) |
346 form_data = { | 394 form_data = { |
347 'repo_name':name, | 395 'repo_name': name, |
348 'repo_type':repo.alias, | 396 'repo_name_full': name, |
349 'description':repo.description \ | 397 'repo_type': repo.alias, |
398 'description': repo.description \ | |
350 if repo.description != 'unknown' else \ | 399 if repo.description != 'unknown' else \ |
351 '%s repository' % name, | 400 '%s repository' % name, |
352 'private':False | 401 'private': False, |
402 'group_id': getattr(group, 'group_id', None) | |
353 } | 403 } |
354 rm.create(form_data, user, just_db=True) | 404 rm.create(form_data, user, just_db=True) |
355 | 405 |
406 removed = [] | |
356 if remove_obsolete: | 407 if remove_obsolete: |
357 #remove from database those repositories that are not in the filesystem | 408 #remove from database those repositories that are not in the filesystem |
358 for repo in sa.query(Repository).all(): | 409 for repo in sa.query(Repository).all(): |
359 if repo.repo_name not in initial_repo_list.keys(): | 410 if repo.repo_name not in initial_repo_list.keys(): |
411 removed.append(repo.repo_name) | |
360 sa.delete(repo) | 412 sa.delete(repo) |
361 sa.commit() | 413 sa.commit() |
362 | 414 |
363 | 415 return added, removed |
364 class OrderedDict(dict, DictMixin): | |
365 | |
366 def __init__(self, *args, **kwds): | |
367 if len(args) > 1: | |
368 raise TypeError('expected at most 1 arguments, got %d' % len(args)) | |
369 try: | |
370 self.__end | |
371 except AttributeError: | |
372 self.clear() | |
373 self.update(*args, **kwds) | |
374 | |
375 def clear(self): | |
376 self.__end = end = [] | |
377 end += [None, end, end] # sentinel node for doubly linked list | |
378 self.__map = {} # key --> [key, prev, next] | |
379 dict.clear(self) | |
380 | |
381 def __setitem__(self, key, value): | |
382 if key not in self: | |
383 end = self.__end | |
384 curr = end[1] | |
385 curr[2] = end[1] = self.__map[key] = [key, curr, end] | |
386 dict.__setitem__(self, key, value) | |
387 | |
388 def __delitem__(self, key): | |
389 dict.__delitem__(self, key) | |
390 key, prev, next = self.__map.pop(key) | |
391 prev[2] = next | |
392 next[1] = prev | |
393 | |
394 def __iter__(self): | |
395 end = self.__end | |
396 curr = end[2] | |
397 while curr is not end: | |
398 yield curr[0] | |
399 curr = curr[2] | |
400 | |
401 def __reversed__(self): | |
402 end = self.__end | |
403 curr = end[1] | |
404 while curr is not end: | |
405 yield curr[0] | |
406 curr = curr[1] | |
407 | |
408 def popitem(self, last=True): | |
409 if not self: | |
410 raise KeyError('dictionary is empty') | |
411 if last: | |
412 key = reversed(self).next() | |
413 else: | |
414 key = iter(self).next() | |
415 value = self.pop(key) | |
416 return key, value | |
417 | |
418 def __reduce__(self): | |
419 items = [[k, self[k]] for k in self] | |
420 tmp = self.__map, self.__end | |
421 del self.__map, self.__end | |
422 inst_dict = vars(self).copy() | |
423 self.__map, self.__end = tmp | |
424 if inst_dict: | |
425 return (self.__class__, (items,), inst_dict) | |
426 return self.__class__, (items,) | |
427 | |
428 def keys(self): | |
429 return list(self) | |
430 | |
431 setdefault = DictMixin.setdefault | |
432 update = DictMixin.update | |
433 pop = DictMixin.pop | |
434 values = DictMixin.values | |
435 items = DictMixin.items | |
436 iterkeys = DictMixin.iterkeys | |
437 itervalues = DictMixin.itervalues | |
438 iteritems = DictMixin.iteritems | |
439 | |
440 def __repr__(self): | |
441 if not self: | |
442 return '%s()' % (self.__class__.__name__,) | |
443 return '%s(%r)' % (self.__class__.__name__, self.items()) | |
444 | |
445 def copy(self): | |
446 return self.__class__(self) | |
447 | |
448 @classmethod | |
449 def fromkeys(cls, iterable, value=None): | |
450 d = cls() | |
451 for key in iterable: | |
452 d[key] = value | |
453 return d | |
454 | |
455 def __eq__(self, other): | |
456 if isinstance(other, OrderedDict): | |
457 return len(self) == len(other) and self.items() == other.items() | |
458 return dict.__eq__(self, other) | |
459 | |
460 def __ne__(self, other): | |
461 return not self == other | |
462 | |
463 | 416 |
464 #set cache regions for beaker so celery can utilise it | 417 #set cache regions for beaker so celery can utilise it |
465 def add_cache(settings): | 418 def add_cache(settings): |
466 cache_settings = {'regions': None} | 419 cache_settings = {'regions': None} |
467 for key in settings.keys(): | 420 for key in settings.keys(): |
510 | 463 |
511 | 464 |
512 #============================================================================== | 465 #============================================================================== |
513 # TEST FUNCTIONS AND CREATORS | 466 # TEST FUNCTIONS AND CREATORS |
514 #============================================================================== | 467 #============================================================================== |
515 def create_test_index(repo_location, full_index): | 468 def create_test_index(repo_location, config, full_index): |
516 """Makes default test index | 469 """ |
517 :param repo_location: | 470 Makes default test index |
471 | |
472 :param config: test config | |
518 :param full_index: | 473 :param full_index: |
519 """ | 474 """ |
475 | |
520 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon | 476 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon |
521 from rhodecode.lib.pidlock import DaemonLock, LockHeld | 477 from rhodecode.lib.pidlock import DaemonLock, LockHeld |
522 import shutil | 478 |
523 | 479 repo_location = repo_location |
524 index_location = os.path.join(repo_location, 'index') | 480 |
525 if os.path.exists(index_location): | 481 index_location = os.path.join(config['app_conf']['index_dir']) |
526 shutil.rmtree(index_location) | 482 if not os.path.exists(index_location): |
483 os.makedirs(index_location) | |
527 | 484 |
528 try: | 485 try: |
529 l = DaemonLock() | 486 l = DaemonLock(file=jn(dn(index_location), 'make_index.lock')) |
530 WhooshIndexingDaemon(index_location=index_location, | 487 WhooshIndexingDaemon(index_location=index_location, |
531 repo_location=repo_location)\ | 488 repo_location=repo_location)\ |
532 .run(full_index=full_index) | 489 .run(full_index=full_index) |
533 l.release() | 490 l.release() |
534 except LockHeld: | 491 except LockHeld: |
542 from rhodecode.lib.db_manage import DbManage | 499 from rhodecode.lib.db_manage import DbManage |
543 from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \ | 500 from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \ |
544 HG_FORK, GIT_FORK, TESTS_TMP_PATH | 501 HG_FORK, GIT_FORK, TESTS_TMP_PATH |
545 import tarfile | 502 import tarfile |
546 import shutil | 503 import shutil |
547 from os.path import dirname as dn, join as jn, abspath | 504 from os.path import abspath |
548 | 505 |
549 log = logging.getLogger('TestEnvCreator') | 506 # PART ONE create db |
550 # create logger | |
551 log.setLevel(logging.DEBUG) | |
552 log.propagate = True | |
553 # create console handler and set level to debug | |
554 ch = logging.StreamHandler() | |
555 ch.setLevel(logging.DEBUG) | |
556 | |
557 # create formatter | |
558 formatter = logging.Formatter("%(asctime)s - %(name)s -" | |
559 " %(levelname)s - %(message)s") | |
560 | |
561 # add formatter to ch | |
562 ch.setFormatter(formatter) | |
563 | |
564 # add ch to logger | |
565 log.addHandler(ch) | |
566 | |
567 #PART ONE create db | |
568 dbconf = config['sqlalchemy.db1.url'] | 507 dbconf = config['sqlalchemy.db1.url'] |
569 log.debug('making test db %s', dbconf) | 508 log.debug('making test db %s', dbconf) |
509 | |
510 # create test dir if it doesn't exist | |
511 if not os.path.isdir(repos_test_path): | |
512 log.debug('Creating testdir %s' % repos_test_path) | |
513 os.makedirs(repos_test_path) | |
570 | 514 |
571 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'], | 515 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'], |
572 tests=True) | 516 tests=True) |
573 dbmanage.create_tables(override=True) | 517 dbmanage.create_tables(override=True) |
574 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path)) | 518 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path)) |
575 dbmanage.create_default_user() | 519 dbmanage.create_default_user() |
576 dbmanage.admin_prompt() | 520 dbmanage.admin_prompt() |
577 dbmanage.create_permissions() | 521 dbmanage.create_permissions() |
578 dbmanage.populate_default_permissions() | 522 dbmanage.populate_default_permissions() |
579 | 523 |
580 #PART TWO make test repo | 524 # PART TWO make test repo |
581 log.debug('making test vcs repositories') | 525 log.debug('making test vcs repositories') |
582 | 526 |
583 #remove old one from previous tests | 527 idx_path = config['app_conf']['index_dir'] |
584 for r in [HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, HG_FORK, GIT_FORK]: | 528 data_path = config['app_conf']['cache_dir'] |
585 | 529 |
586 if os.path.isdir(jn(TESTS_TMP_PATH, r)): | 530 #clean index and data |
587 log.debug('removing %s', r) | 531 if idx_path and os.path.exists(idx_path): |
588 shutil.rmtree(jn(TESTS_TMP_PATH, r)) | 532 log.debug('remove %s' % idx_path) |
533 shutil.rmtree(idx_path) | |
534 | |
535 if data_path and os.path.exists(data_path): | |
536 log.debug('remove %s' % data_path) | |
537 shutil.rmtree(data_path) | |
589 | 538 |
590 #CREATE DEFAULT HG REPOSITORY | 539 #CREATE DEFAULT HG REPOSITORY |
591 cur_dir = dn(dn(abspath(__file__))) | 540 cur_dir = dn(dn(abspath(__file__))) |
592 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz")) | 541 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz")) |
593 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO)) | 542 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO)) |
657 from pylons import config as pylonsconfig | 606 from pylons import config as pylonsconfig |
658 | 607 |
659 path_to_ini_file = os.path.realpath(conf) | 608 path_to_ini_file = os.path.realpath(conf) |
660 conf = paste.deploy.appconfig('config:' + path_to_ini_file) | 609 conf = paste.deploy.appconfig('config:' + path_to_ini_file) |
661 pylonsconfig.init_app(conf.global_conf, conf.local_conf) | 610 pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
611 |