Commit 8440c81eed24bee897467bbf2286530eaaac351c

Authored by Sergio Oliveira
2 parents 721677c8 4cb38792

Merge pull request #58 from colab/refactor-data-import

Refactor data import
Showing 49 changed files with 1127 additions and 603 deletions   Show diff stats
@@ -64,7 +64,7 @@ To run Colab with development server you will have to: @@ -64,7 +64,7 @@ To run Colab with development server you will have to:
64 64
65 .. code-block:: 65 .. code-block::
66 66
67 - colab-init-config > /etc/colab/settings.yaml 67 + colab-admin initconfig > /etc/colab/settings.py
68 68
69 2- Edit the configuration file. Make sure you set everything you need including **database** credentials. 69 2- Edit the configuration file. Make sure you set everything you need including **database** credentials.
70 70
@@ -92,4 +92,4 @@ How to run the tests @@ -92,4 +92,4 @@ How to run the tests
92 Follow the steps below: 92 Follow the steps below:
93 93
94 * Go to vagrant/colab/ 94 * Go to vagrant/colab/
95 -* run: ./runtests.sh  
96 \ No newline at end of file 95 \ No newline at end of file
  96 +* run: ./runtests.sh
colab/management/__init__.py
@@ -1,20 +0,0 @@ @@ -1,20 +0,0 @@
1 -  
2 -import os  
3 -  
4 -from django.core.management import ManagementUtility  
5 -  
6 -from .initconfig import initconfig  
7 -  
8 -  
9 -def execute_from_command_line(argv=None):  
10 - """  
11 - A simple method that runs a ManagementUtility.  
12 - """  
13 - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "colab.settings")  
14 -  
15 - utility = ManagementUtility(argv)  
16 - utility.execute()  
17 -  
18 -  
19 -def run_colab_config(argv=None):  
20 - initconfig()  
colab/management/commands/__init__.py 0 → 100644
colab/management/commands/celery.py 0 → 100644
@@ -0,0 +1,26 @@ @@ -0,0 +1,26 @@
  1 +from __future__ import absolute_import, unicode_literals
  2 +
  3 +from celery.bin import celery
  4 +
  5 +from colab.celery import app
  6 +from colab.queue.command import CeleryCommand
  7 +
  8 +base = celery.CeleryCommand(app=app)
  9 +
  10 +
  11 +# this is a reimplementation of the djcelery 'celery' command
  12 +# taken from Sentry
  13 +class Command(CeleryCommand):
  14 + """The celery command."""
  15 + help = 'celery commands, see celery help'
  16 + options = (CeleryCommand.options
  17 + + base.get_options()
  18 + + base.preload_options)
  19 +
  20 + def run_from_argv(self, argv):
  21 + argv = self.handle_default_options(argv)
  22 + if self.requires_system_checks:
  23 + self.validate()
  24 + base.execute_from_commandline(
  25 + ['{0[0]} {0[1]}'.format(argv)] + argv[2:],
  26 + )
colab/management/commands/initconfig.py 0 → 100644
@@ -0,0 +1,124 @@ @@ -0,0 +1,124 @@
  1 +
  2 +from django.core.management.base import BaseCommand
  3 +from django.utils.crypto import get_random_string
  4 +
  5 +
  6 +CONFIG_TEMPLATE = """
  7 +## Set to false in production
  8 +DEBUG = True
  9 +TEMPLATE_DEBUG = True
  10 +
  11 +## System admins
  12 +ADMINS = [['John Foo', 'john@example.com'], ['Mary Bar', 'mary@example.com']]
  13 +
  14 +MANAGERS = ADMINS
  15 +
  16 +COLAB_FROM_ADDRESS = '"Colab" <noreply@example.com>'
  17 +SERVER_EMAIL = '"Colab" <noreply@example.com>'
  18 +
  19 +EMAIL_HOST = 'localhost'
  20 +EMAIL_PORT = 25
  21 +EMAIL_SUBJECT_PREFIX = '[colab]'
  22 +
  23 +SECRET_KEY = '{secret_key}'
  24 +
  25 +ALLOWED_HOSTS = [
  26 + 'localhost',
  27 +# 'example.com',
  28 +# 'example.org',
  29 +# 'example.net',
  30 +]
  31 +
  32 +### Uncomment to enable social networks fields profile
  33 +# SOCIAL_NETWORK_ENABLED = True
  34 +
  35 +## Database settings
  36 +##
  37 +## When DEBUG is True colab will create the DB on
  38 +## the repository root. In case of production settings
  39 +## (DEBUG False) the DB settings must be set.
  40 +##
  41 +# DATABASES = {{
  42 +# 'default': {{
  43 +# 'ENGINE': 'django.db.backends.sqlite3',
  44 +# 'NAME': '/path/to/colab.sqlite3',
  45 +# }}
  46 +# }}
  47 +
  48 +## Disable indexing
  49 +ROBOTS_NOINDEX = False
  50 +
  51 +LOGGING = {{
  52 + 'version': 1,
  53 +
  54 + 'handlers': {{
  55 + 'null': {{
  56 + 'level': 'DEBUG',
  57 + 'class': 'logging.NullHandler',
  58 + }},
  59 + }},
  60 +
  61 + 'loggers': {{
  62 + 'colab.mailman': {{
  63 + 'handlers': ['null'],
  64 + 'propagate': False,
  65 + }},
  66 + 'haystack': {{
  67 + 'handlers': ['null'],
  68 + 'propagate': False,
  69 + }},
  70 + 'pysolr': {{
  71 + 'handlers': ['null'],
  72 + 'propagate': False,
  73 + }},
  74 + }},
  75 +}}
  76 +
  77 +
  78 +## Gitlab plugin - Put this in plugins.d/gitlab.py to actiate ##
  79 +# from django.utils.translation import ugettext_lazy as _
  80 +# from colab.plugins.utils.menu import colab_url_factory
  81 +#
  82 +# name = 'colab.plugins.gitlab'
  83 +# verbose_name = 'Gitlab Plugin'
  84 +#
  85 +# upstream = 'localhost'
  86 +# #middlewares = []
  87 +#
  88 +# urls = {{
  89 +# 'include': 'colab.plugins.gitlab.urls',
  90 +# 'namespace': 'gitlab',
  91 +# 'prefix': 'gitlab',
  92 +# }}
  93 +#
  94 +# menu_title = _('Code')
  95 +#
  96 +# url = colab_url_factory('gitlab')
  97 +#
  98 +# menu_urls = (
  99 +# url(display=_('Public Projects'), viewname='gitlab',
  100 +# kwargs={{'path': '/public/projects'}}, auth=False),
  101 +# url(display=_('Profile'), viewname='gitlab',
  102 +# kwargs={{'path': '/profile'}}, auth=True),
  103 +# url(display=_('New Project'), viewname='gitlab',
  104 +# kwargs={{'path': '/projects/new'}}, auth=True),
  105 +# url(display=_('Projects'), viewname='gitlab',
  106 +# kwargs={{'path': '/dashboard/projects'}}, auth=True),
  107 +# url(display=_('Groups'), viewname='gitlab',
  108 +# kwargs={{'path': '/profile/groups'}}, auth=True),
  109 +# url(display=_('Issues'), viewname='gitlab',
  110 +# kwargs={{'path': '/dashboard/issues'}}, auth=True),
  111 +# url(display=_('Merge Requests'), viewname='gitlab',
  112 +# kwargs={{'path': '/merge_requests'}}, auth=True),
  113 +#
  114 +# )
  115 +"""
  116 +
  117 +
  118 +class Command(BaseCommand):
  119 + help = 'Returns an example config file for Colab'
  120 +
  121 + def handle(self, *args, **kwargs):
  122 + chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
  123 + secret_key = get_random_string(50, chars)
  124 + print(CONFIG_TEMPLATE.format(secret_key=secret_key))
colab/management/initconfig.py
@@ -1,120 +0,0 @@ @@ -1,120 +0,0 @@
1 -  
2 -from django.utils.crypto import get_random_string  
3 -  
4 -  
5 -CONFIG_TEMPLATE = """  
6 -## Set to false in production  
7 -DEBUG = True  
8 -TEMPLATE_DEBUG = True  
9 -  
10 -## System admins  
11 -ADMINS = [['John Foo', 'john@example.com'], ['Mary Bar', 'mary@example.com']]  
12 -  
13 -MANAGERS = ADMINS  
14 -  
15 -COLAB_FROM_ADDRESS = '"Colab" <noreply@example.com>'  
16 -SERVER_EMAIL = '"Colab" <noreply@example.com>'  
17 -  
18 -EMAIL_HOST = 'localhost'  
19 -EMAIL_PORT = 25  
20 -EMAIL_SUBJECT_PREFIX = '[colab]'  
21 -  
22 -SECRET_KEY = '{secret_key}'  
23 -  
24 -ALLOWED_HOSTS = [  
25 - 'localhost',  
26 -# 'example.com',  
27 -# 'example.org',  
28 -# 'example.net',  
29 -]  
30 -  
31 -### Uncomment to enable social networks fields profile  
32 -# SOCIAL_NETWORK_ENABLED = True  
33 -  
34 -## Database settings  
35 -##  
36 -## When DEBUG is True colab will create the DB on  
37 -## the repository root. In case of production settings  
38 -## (DEBUG False) the DB settings must be set.  
39 -##  
40 -# DATABASES = {{  
41 -# 'default': {{  
42 -# 'ENGINE': 'django.db.backends.sqlite3',  
43 -# 'NAME': '/path/to/colab.sqlite3',  
44 -# }}  
45 -# }}  
46 -  
47 -## Disable indexing  
48 -ROBOTS_NOINDEX = False  
49 -  
50 -LOGGING = {{  
51 - 'version': 1,  
52 -  
53 - 'handlers': {{  
54 - 'null': {{  
55 - 'level': 'DEBUG',  
56 - 'class': 'logging.NullHandler',  
57 - }},  
58 - }},  
59 -  
60 - 'loggers': {{  
61 - 'colab.mailman': {{  
62 - 'handlers': ['null'],  
63 - 'propagate': False,  
64 - }},  
65 - 'haystack': {{  
66 - 'handlers': ['null'],  
67 - 'propagate': False,  
68 - }},  
69 - 'pysolr': {{  
70 - 'handlers': ['null'],  
71 - 'propagate': False,  
72 - }},  
73 - }},  
74 -}}  
75 -  
76 -  
77 -## Gitlab plugin - Put this in plugins.d/gitlab.py to actiate ##  
78 -# from django.utils.translation import ugettext_lazy as _  
79 -# from colab.plugins.utils.menu import colab_url_factory  
80 -#  
81 -# name = 'colab.plugins.gitlab'  
82 -# verbose_name = 'Gitlab Proxy'  
83 -#  
84 -# upstream = 'localhost'  
85 -# #middlewares = []  
86 -#  
87 -# urls = {{  
88 -# 'include': 'colab.plugins.gitlab.urls',  
89 -# 'namespace': 'gitlab',  
90 -# 'prefix': 'gitlab',  
91 -# }}  
92 -#  
93 -# menu_title = _('Code')  
94 -#  
95 -# url = colab_url_factory('gitlab')  
96 -#  
97 -# menu_urls = (  
98 -# url(display=_('Public Projects'), viewname='gitlab',  
99 -# kwargs={{'path': '/public/projects'}}, auth=False),  
100 -# url(display=_('Profile'), viewname='gitlab',  
101 -# kwargs={{'path': '/profile'}}, auth=True),  
102 -# url(display=_('New Project'), viewname='gitlab',  
103 -# kwargs={{'path': '/projects/new'}}, auth=True),  
104 -# url(display=_('Projects'), viewname='gitlab',  
105 -# kwargs={{'path': '/dashboard/projects'}}, auth=True),  
106 -# url(display=_('Groups'), viewname='gitlab',  
107 -# kwargs={{'path': '/profile/groups'}}, auth=True),  
108 -# url(display=_('Issues'), viewname='gitlab',  
109 -# kwargs={{'path': '/dashboard/issues'}}, auth=True),  
110 -# url(display=_('Merge Requests'), viewname='gitlab',  
111 -# kwargs={{'path': '/merge_requests'}}, auth=True),  
112 -#  
113 -# )  
114 -"""  
115 -  
116 -  
117 -def initconfig():  
118 - chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'  
119 - secret_key = get_random_string(50, chars)  
120 - print(CONFIG_TEMPLATE.format(secret_key=secret_key))  
colab/plugins/apps.py
1 1
2 from django.apps import AppConfig 2 from django.apps import AppConfig
3 3
  4 +from .data import register_tasks
4 from .utils.signals import connect_signal, register_signal 5 from .utils.signals import connect_signal, register_signal
5 6
6 7
@@ -10,3 +11,5 @@ class PluginAppConfig(AppConfig): @@ -10,3 +11,5 @@ class PluginAppConfig(AppConfig):
10 def ready(self): 11 def ready(self):
11 register_signal() 12 register_signal()
12 connect_signal() 13 connect_signal()
  14 +
  15 + register_tasks()
colab/plugins/conf.py 0 → 100644
@@ -0,0 +1,6 @@ @@ -0,0 +1,6 @@
  1 +
  2 +from django.conf import settings
  3 +
  4 +
  5 +def get_plugin_config(app_label):
  6 + return settings.COLAB_APPS.get('gitlab', {})
colab/plugins/data/__init__.py 0 → 100644
@@ -0,0 +1,3 @@ @@ -0,0 +1,3 @@
  1 +
  2 +from .base_importer import PluginDataImporter # noqa
  3 +from .tasks import TASKS, data_import, register_tasks # noqa
colab/plugins/data/base_importer.py 0 → 100644
@@ -0,0 +1,19 @@ @@ -0,0 +1,19 @@
  1 +
  2 +import abc
  3 +
  4 +from django.conf import settings
  5 +
  6 +
  7 +class PluginDataImporter(object):
  8 +
  9 + def __init__(self):
  10 + self.config = settings.COLAB_APPS.get(self.app_label, {})
  11 +
  12 + @abc.abstractmethod
  13 + def fetch_data(self):
  14 + raise NotImplementedError
  15 + fetch_data.is_abstract = True
  16 +
  17 + @abc.abstractmethod
  18 + def app_label(self):
  19 + raise NotImplementedError
colab/plugins/data/tasks.py 0 → 100644
@@ -0,0 +1,61 @@ @@ -0,0 +1,61 @@
  1 +#!/usr/bin/env python
  2 +
  3 +import importlib
  4 +import logging
  5 +
  6 +import redis
  7 +
  8 +from django.conf import settings
  9 +
  10 +from colab.celery import app
  11 +
  12 +LOGGER = logging.getLogger('colab.plugins.data')
  13 +TASKS = set()
  14 +
  15 +
  16 +def lock(method, name):
  17 + def wrapped_method(self, *args, **kwargs):
  18 + lock_id = 'colab-data-importer-{}'.format(name)
  19 + lock = redis.Redis().lock(lock_id)
  20 +
  21 + if lock.acquire(blocking=False):
  22 + try:
  23 + return method(*args, **kwargs)
  24 + finally:
  25 + lock.release()
  26 +
  27 + return wrapped_method
  28 +
  29 +
  30 +def register_tasks():
  31 +
  32 + global TASKS
  33 +
  34 + for app_name in settings.INSTALLED_APPS:
  35 +
  36 + module_name = '{}.data_importer'.format(app_name)
  37 + try:
  38 + module = importlib.import_module(module_name)
  39 + except ImportError:
  40 + continue
  41 +
  42 + for item_name in dir(module):
  43 + item = getattr(module, item_name)
  44 +
  45 + if callable(getattr(item, 'fetch_data', None)):
  46 + if getattr(item.fetch_data, 'is_abstract', False):
  47 + continue
  48 + instance = item()
  49 + task_name = '{}.{}'.format(module.__name__, item_name)
  50 + thread_safe_method = lock(instance.fetch_data, task_name)
  51 + task = app.task(name=task_name, bind=True)(thread_safe_method)
  52 + TASKS.add(task)
  53 + LOGGER.debug('Registered task: %s', task_name)
  54 +
  55 + LOGGER.debug(TASKS)
  56 + return TASKS
  57 +
  58 +
  59 +def data_import(self):
  60 + for task in TASKS:
  61 + task.delay()
colab/plugins/gitlab/__init__.py
1 1
2 2
3 -default_app_config = 'colab.plugins.gitlab.apps.ProxyGitlabAppConfig' 3 +default_app_config = 'colab.plugins.gitlab.apps.GitlabPluginAppConfig'
colab/plugins/gitlab/apps.py
1 1
2 -from ..utils.apps import ColabProxiedAppConfig 2 +from ..utils.apps import ColabPluginAppConfig
3 from colab.plugins.gitlab.tasks import handling_method 3 from colab.plugins.gitlab.tasks import handling_method
4 from colab.signals.signals import register_signal, connect_signal 4 from colab.signals.signals import register_signal, connect_signal
5 5
6 6
7 -class ProxyGitlabAppConfig(ColabProxiedAppConfig): 7 +class GitlabPluginAppConfig(ColabPluginAppConfig):
8 name = 'colab.plugins.gitlab' 8 name = 'colab.plugins.gitlab'
9 verbose_name = 'Gitlab Plugin' 9 verbose_name = 'Gitlab Plugin'
10 short_name = 'gitlab' 10 short_name = 'gitlab'
colab/plugins/gitlab/data_api.py
@@ -1,205 +0,0 @@ @@ -1,205 +0,0 @@
1 -import json  
2 -import urllib  
3 -import urllib2  
4 -import logging  
5 -  
6 -from dateutil.parser import parse  
7 -  
8 -from django.conf import settings  
9 -from django.db.models.fields import DateTimeField  
10 -  
11 -from colab.plugins.gitlab.models import (GitlabProject, GitlabMergeRequest,  
12 - GitlabComment, GitlabIssue)  
13 -from colab.plugins.utils.proxy_data_api import ProxyDataAPI  
14 -  
15 -LOGGER = logging.getLogger('colab.plugin.gitlab')  
16 -  
17 -  
18 -class GitlabDataAPI(ProxyDataAPI):  
19 -  
20 - def get_request_url(self, path, **kwargs):  
21 - proxy_config = settings.PROXIED_APPS.get(self.app_label, {})  
22 -  
23 - upstream = proxy_config.get('upstream')  
24 - kwargs['private_token'] = proxy_config.get('private_token')  
25 - params = urllib.urlencode(kwargs)  
26 -  
27 - if upstream[-1] == '/':  
28 - upstream = upstream[:-1]  
29 -  
30 - return u'{}{}?{}'.format(upstream, path, params)  
31 -  
32 - def get_json_data(self, api_url, page, pages=1000):  
33 - url = self.get_request_url(api_url, per_page=pages,  
34 - page=page)  
35 -  
36 - try:  
37 - data = urllib2.urlopen(url, timeout=10)  
38 - json_data = json.load(data)  
39 - except urllib2.URLError:  
40 - LOGGER.exception("Connection timeout: " + url)  
41 - json_data = []  
42 -  
43 - return json_data  
44 -  
45 - def fill_object_data(self, element, _object):  
46 - for field in _object._meta.fields:  
47 - try:  
48 - if field.name == "user":  
49 - _object.update_user(  
50 - element["author"]["username"])  
51 - continue  
52 - if field.name == "project":  
53 - _object.project_id = element["project_id"]  
54 - continue  
55 -  
56 - if isinstance(field, DateTimeField):  
57 - value = parse(element[field.name])  
58 - else:  
59 - value = element[field.name]  
60 -  
61 - setattr(_object, field.name, value)  
62 - except KeyError:  
63 - continue  
64 -  
65 - return _object  
66 -  
67 - def fetch_projects(self):  
68 - page = 1  
69 - projects = []  
70 -  
71 - while True:  
72 - json_data = self.get_json_data('/api/v3/projects/all', page)  
73 - page = page + 1  
74 -  
75 - if not len(json_data):  
76 - break  
77 -  
78 - for element in json_data:  
79 - project = GitlabProject()  
80 - self.fill_object_data(element, project)  
81 - projects.append(project)  
82 -  
83 - return projects  
84 -  
85 - def fetch_merge_request(self, projects):  
86 - all_merge_request = []  
87 -  
88 - for project in projects:  
89 - page = 1  
90 - while True:  
91 - url = '/api/v3/projects/{}/merge_requests'.format(project.id)  
92 - json_data_mr = self.get_json_data(url, page)  
93 - page = page + 1  
94 -  
95 - if len(json_data_mr) == 0:  
96 - break  
97 -  
98 - for element in json_data_mr:  
99 - single_merge_request = GitlabMergeRequest()  
100 - self.fill_object_data(element, single_merge_request)  
101 - all_merge_request.append(single_merge_request)  
102 -  
103 - return all_merge_request  
104 -  
105 - def fetch_issue(self, projects):  
106 - all_issues = []  
107 -  
108 - for project in projects:  
109 - page = 1  
110 - while True:  
111 - url = '/api/v3/projects/{}/issues'.format(project.id)  
112 - json_data_issue = self.get_json_data(url, page)  
113 - page = page + 1  
114 -  
115 - if len(json_data_issue) == 0:  
116 - break  
117 -  
118 - for element in json_data_issue:  
119 - single_issue = GitlabIssue()  
120 - self.fill_object_data(element, single_issue)  
121 - all_issues.append(single_issue)  
122 -  
123 - return all_issues  
124 -  
125 - def fetch_comments(self):  
126 - all_comments = []  
127 - all_comments.extend(self.fetch_comments_MR())  
128 - all_comments.extend(self.fetch_comments_issues())  
129 -  
130 - return all_comments  
131 -  
132 - def fetch_comments_MR(self):  
133 - all_comments = []  
134 - all_merge_requests = GitlabMergeRequest.objects.all()  
135 -  
136 - for merge_request in all_merge_requests:  
137 - page = 1  
138 - while True:  
139 - url = '/api/v3/projects/{}/merge_requests/{}/notes'.format(  
140 - merge_request.project_id, merge_request.id)  
141 - json_data_mr = self.get_json_data(url, page)  
142 - page = page + 1  
143 -  
144 - if len(json_data_mr) == 0:  
145 - break  
146 -  
147 - for element in json_data_mr:  
148 - single_comment = GitlabComment()  
149 - self.fill_object_data(element, single_comment)  
150 - single_comment.project = merge_request.project  
151 - single_comment.issue_comment = False  
152 - single_comment.parent_id = merge_request.id  
153 - all_comments.append(single_comment)  
154 -  
155 - return all_comments  
156 -  
157 - def fetch_comments_issues(self):  
158 - all_comments = []  
159 - all_issues = GitlabIssue.objects.all()  
160 -  
161 - for issue in all_issues:  
162 - page = 1  
163 - while True:  
164 - url = '/api/v3/projects/{}/issues/{}/notes'.format(  
165 - issue.project_id, issue.id)  
166 - json_data_mr = self.get_json_data(url, page)  
167 - page = page + 1  
168 -  
169 - if len(json_data_mr) == 0:  
170 - break  
171 -  
172 - for element in json_data_mr:  
173 - single_comment = GitlabComment()  
174 - self.fill_object_data(element, single_comment)  
175 - single_comment.project = issue.project  
176 - single_comment.issue_comment = True  
177 - single_comment.parent_id = issue.id  
178 - all_comments.append(single_comment)  
179 -  
180 - return all_comments  
181 -  
182 - def fetch_data(self):  
183 - LOGGER.info("Importing Projects")  
184 - projects = self.fetch_projects()  
185 - for datum in projects:  
186 - datum.save()  
187 -  
188 - LOGGER.info("Importing Merge Requests")  
189 - merge_request_list = self.fetch_merge_request(projects)  
190 - for datum in merge_request_list:  
191 - datum.save()  
192 -  
193 - LOGGER.info("Importing Issues")  
194 - issue_list = self.fetch_issue(projects)  
195 - for datum in issue_list:  
196 - datum.save()  
197 -  
198 - LOGGER.info("Importing Comments")  
199 - comments_list = self.fetch_comments()  
200 - for datum in comments_list:  
201 - datum.save()  
202 -  
203 - @property  
204 - def app_label(self):  
205 - return 'gitlab'  
colab/plugins/gitlab/data_importer.py 0 → 100644
@@ -0,0 +1,217 @@ @@ -0,0 +1,217 @@
  1 +import json
  2 +import urllib
  3 +import urllib2
  4 +import logging
  5 +
  6 +from dateutil.parser import parse
  7 +
  8 +from django.db.models.fields import DateTimeField
  9 +from colab.plugins.data import PluginDataImporter
  10 +
  11 +from .models import (GitlabProject, GitlabMergeRequest,
  12 + GitlabComment, GitlabIssue)
  13 +
  14 +
  15 +LOGGER = logging.getLogger('colab.plugin.gitlab')
  16 +
  17 +
  18 +class GitlabDataImporter(PluginDataImporter):
  19 + app_label = 'gitlab'
  20 +
  21 + def get_request_url(self, path, **kwargs):
  22 + upstream = self.config.get('upstream')
  23 + kwargs['private_token'] = self.config.get('private_token')
  24 + params = urllib.urlencode(kwargs)
  25 +
  26 + if upstream[-1] == '/':
  27 + upstream = upstream[:-1]
  28 +
  29 + return u'{}{}?{}'.format(upstream, path, params)
  30 +
  31 + def get_json_data(self, api_url, page, pages=1000):
  32 + url = self.get_request_url(api_url, per_page=pages,
  33 + page=page)
  34 +
  35 + try:
  36 + data = urllib2.urlopen(url, timeout=10)
  37 + json_data = json.load(data)
  38 + except urllib2.URLError:
  39 + LOGGER.exception("Connection timeout: " + url)
  40 + json_data = []
  41 +
  42 + return json_data
  43 +
  44 + def fill_object_data(self, element, _object):
  45 + for field in _object._meta.fields:
  46 + try:
  47 + if field.name == "user":
  48 + _object.update_user(
  49 + element["author"]["username"])
  50 + continue
  51 + if field.name == "project":
  52 + _object.project_id = element["project_id"]
  53 + continue
  54 +
  55 + if isinstance(field, DateTimeField):
  56 + value = parse(element[field.name])
  57 + else:
  58 + value = element[field.name]
  59 +
  60 + setattr(_object, field.name, value)
  61 + except KeyError:
  62 + continue
  63 +
  64 + return _object
  65 +
  66 + def fetch_projects(self):
  67 + page = 1
  68 + projects = []
  69 +
  70 + while True:
  71 + json_data = self.get_json_data('/api/v3/projects/all', page)
  72 + page = page + 1
  73 +
  74 + if not len(json_data):
  75 + break
  76 +
  77 + for element in json_data:
  78 + project = GitlabProject()
  79 + self.fill_object_data(element, project)
  80 + projects.append(project)
  81 +
  82 + return projects
  83 +
  84 + def fetch_merge_request(self, projects):
  85 + all_merge_request = []
  86 +
  87 + for project in projects:
  88 + page = 1
  89 + while True:
  90 + url = '/api/v3/projects/{}/merge_requests'.format(project.id)
  91 + json_data_mr = self.get_json_data(url, page)
  92 + page = page + 1
  93 +
  94 + if len(json_data_mr) == 0:
  95 + break
  96 +
  97 + for element in json_data_mr:
  98 + single_merge_request = GitlabMergeRequest()
  99 + self.fill_object_data(element, single_merge_request)
  100 + all_merge_request.append(single_merge_request)
  101 +
  102 + return all_merge_request
  103 +
  104 + def fetch_issue(self, projects):
  105 + all_issues = []
  106 +
  107 + for project in projects:
  108 + page = 1
  109 + while True:
  110 + url = '/api/v3/projects/{}/issues'.format(project.id)
  111 + json_data_issue = self.get_json_data(url, page)
  112 + page = page + 1
  113 +
  114 + if len(json_data_issue) == 0:
  115 + break
  116 +
  117 + for element in json_data_issue:
  118 + single_issue = GitlabIssue()
  119 + self.fill_object_data(element, single_issue)
  120 + all_issues.append(single_issue)
  121 +
  122 + return all_issues
  123 +
  124 + def fetch_comments(self):
  125 + all_comments = []
  126 + all_comments.extend(self.fetch_comments_MR())
  127 + all_comments.extend(self.fetch_comments_issues())
  128 +
  129 + return all_comments
  130 +
  131 + def fetch_comments_MR(self):
  132 + all_comments = []
  133 + all_merge_requests = GitlabMergeRequest.objects.all()
  134 +
  135 + for merge_request in all_merge_requests:
  136 + page = 1
  137 + while True:
  138 + url = '/api/v3/projects/{}/merge_requests/{}/notes'.format(
  139 + merge_request.project_id, merge_request.id)
  140 + json_data_mr = self.get_json_data(url, page)
  141 + page = page + 1
  142 +
  143 + if len(json_data_mr) == 0:
  144 + break
  145 +
  146 + for element in json_data_mr:
  147 + single_comment = GitlabComment()
  148 + self.fill_object_data(element, single_comment)
  149 + single_comment.project = merge_request.project
  150 + single_comment.issue_comment = False
  151 + single_comment.parent_id = merge_request.id
  152 + all_comments.append(single_comment)
  153 +
  154 + return all_comments
  155 +
  156 + def fetch_comments_issues(self):
  157 + all_comments = []
  158 + all_issues = GitlabIssue.objects.all()
  159 +
  160 + for issue in all_issues:
  161 + page = 1
  162 + while True:
  163 + url = '/api/v3/projects/{}/issues/{}/notes'.format(
  164 + issue.project_id, issue.id)
  165 + json_data_mr = self.get_json_data(url, page)
  166 + page = page + 1
  167 +
  168 + if len(json_data_mr) == 0:
  169 + break
  170 +
  171 + for element in json_data_mr:
  172 + single_comment = GitlabComment()
  173 + self.fill_object_data(element, single_comment)
  174 + single_comment.project = issue.project
  175 + single_comment.issue_comment = True
  176 + single_comment.parent_id = issue.id
  177 + all_comments.append(single_comment)
  178 +
  179 + return all_comments
  180 +
  181 +
  182 +class GitlabProjectImporter(GitlabDataImporter):
  183 +
  184 + def fetch_data(self):
  185 + LOGGER.info("Importing Projects")
  186 + projects = self.fetch_projects()
  187 + for datum in projects:
  188 + datum.save()
  189 +
  190 +
  191 +class GitlabMergeRequestImporter(GitlabDataImporter):
  192 +
  193 + def fetch_data(self):
  194 + LOGGER.info("Importing Merge Requests")
  195 + projects = GitlabProject.objects.all()
  196 + merge_request_list = self.fetch_merge_request(projects)
  197 + for datum in merge_request_list:
  198 + datum.save()
  199 +
  200 +
  201 +class GitlabIssueImporter(GitlabDataImporter):
  202 +
  203 + def fetch_data(self):
  204 + LOGGER.info("Importing Issues")
  205 + projects = GitlabProject.objects.all()
  206 + issue_list = self.fetch_issue(projects)
  207 + for datum in issue_list:
  208 + datum.save()
  209 +
  210 +
  211 +class GitlabCommentImporter(GitlabDataImporter):
  212 +
  213 + def fetch_data(self):
  214 + LOGGER.info("Importing Comments")
  215 + comments_list = self.fetch_comments()
  216 + for datum in comments_list:
  217 + datum.save()
colab/plugins/gitlab/views.py
1 1
2 -from ..utils.views import ColabProxyView 2 +from colab.plugins.views import ColabProxyView
3 3
4 4
5 class GitlabProxyView(ColabProxyView): 5 class GitlabProxyView(ColabProxyView):
colab/plugins/management/__init__.py
colab/plugins/management/commands/__init__.py
colab/plugins/management/commands/import_proxy_data.py
@@ -1,31 +0,0 @@ @@ -1,31 +0,0 @@
1 -#!/usr/bin/env python  
2 -  
3 -import importlib  
4 -import inspect  
5 -  
6 -from django.core.management.base import BaseCommand  
7 -from django.conf import settings  
8 -  
9 -from colab.plugins.utils.proxy_data_api import ProxyDataAPI  
10 -  
11 -  
12 -class Command(BaseCommand):  
13 - help = "Import proxy data into colab database"  
14 -  
15 - def handle(self, *args, **kwargs):  
16 - print "Executing extraction command..."  
17 -  
18 - for module_name in settings.PROXIED_APPS.keys():  
19 - module_path = \  
20 - 'colab.plugins.{}.data_api'.format(module_name.split('.')[-1])  
21 - module = importlib.import_module(module_path)  
22 -  
23 - for module_item_name in dir(module):  
24 - module_item = getattr(module, module_item_name)  
25 - if not inspect.isclass(module_item):  
26 - continue  
27 - if issubclass(module_item, ProxyDataAPI):  
28 - if module_item != ProxyDataAPI:  
29 - api = module_item()  
30 - api.fetch_data()  
31 - break  
colab/plugins/mezuro/__init__.py
1 1
2 2
3 -default_app_config = 'colab.plugins.mezuro.apps.ProxyMezuroAppConfig' 3 +default_app_config = 'colab.plugins.mezuro.apps.MezuroPluginAppConfig'
colab/plugins/mezuro/apps.py
1 1
2 -from ..utils.apps import ColabProxiedAppConfig 2 +from ..utils.apps import ColabPluginAppConfig
3 3
4 4
5 -class ProxyMezuroAppConfig(ColabProxiedAppConfig): 5 +class MezuroPluginAppConfig(ColabPluginAppConfig):
6 name = 'colab.plugins.mezuro' 6 name = 'colab.plugins.mezuro'
7 - verbose_name = 'Mezuro Proxy' 7 + verbose_name = 'Mezuro Plugin'
colab/plugins/mezuro/views.py
1 -from ..utils.views import ColabProxyView 1 +
  2 +from colab.plugins.views import ColabProxyView
2 3
3 4
4 class MezuroProxyView(ColabProxyView): 5 class MezuroProxyView(ColabProxyView):
colab/plugins/noosfero/__init__.py
1 1
2 2
3 -default_app_config = 'colab.plugins.noosfero.apps.ProxyNoosferoAppConfig' 3 +default_app_config = 'colab.plugins.noosfero.apps.NoosferoPluginAppConfig'
colab/plugins/noosfero/apps.py
1 1
2 -from ..utils.apps import ColabProxiedAppConfig 2 +from ..utils.apps import ColabPluginAppConfig
3 3
4 4
5 -class ProxyNoosferoAppConfig(ColabProxiedAppConfig): 5 +class NoosferoPluginAppConfig(ColabPluginAppConfig):
6 name = 'colab.plugins.noosfero' 6 name = 'colab.plugins.noosfero'
7 - verbose_name = 'Noosfero Proxy' 7 + verbose_name = 'Noosfero Plugin'
colab/plugins/noosfero/data_api.py
@@ -1,109 +0,0 @@ @@ -1,109 +0,0 @@
1 -import json  
2 -import urllib  
3 -import urllib2  
4 -import logging  
5 -  
6 -from dateutil.parser import parse  
7 -  
8 -from django.conf import settings  
9 -from django.db.models.fields import DateTimeField  
10 -  
11 -from colab.plugins.noosfero.models import (NoosferoArticle, NoosferoCommunity,  
12 - NoosferoCategory)  
13 -from colab.plugins.utils.proxy_data_api import ProxyDataAPI  
14 -  
15 -LOGGER = logging.getLogger('colab.plugin.debug')  
16 -  
17 -  
18 -class NoosferoDataAPI(ProxyDataAPI):  
19 -  
20 - def get_request_url(self, path, **kwargs):  
21 - proxy_config = settings.PROXIED_APPS.get(self.app_label, {})  
22 -  
23 - upstream = proxy_config.get('upstream')  
24 - kwargs['private_token'] = proxy_config.get('private_token')  
25 - params = urllib.urlencode(kwargs)  
26 -  
27 - if upstream[-1] == '/':  
28 - upstream = upstream[:-1]  
29 -  
30 - return u'{}{}?{}'.format(upstream, path, params)  
31 -  
32 - def get_json_data(self, api_url, page, pages=1000):  
33 - url = self.get_request_url(api_url, per_page=pages,  
34 - page=page)  
35 - try:  
36 - data = urllib2.urlopen(url, timeout=10)  
37 - json_data = json.load(data)  
38 - except urllib2.URLError:  
39 - LOGGER.exception("Connection timeout: " + url)  
40 - json_data = []  
41 -  
42 - return json_data  
43 -  
44 - def fill_object_data(self, element, _object):  
45 - for field in _object._meta.fields:  
46 - try:  
47 - if field.name == "user":  
48 - _object.update_user(  
49 - element["author"]["name"])  
50 - continue  
51 -  
52 - if field.name == "profile_identifier":  
53 - _object.profile_identifier = \  
54 - element["profile"]["identifier"]  
55 - continue  
56 -  
57 - if isinstance(field, DateTimeField):  
58 - value = parse(element[field.name])  
59 - else:  
60 - value = element[field.name]  
61 -  
62 - setattr(_object, field.name, value)  
63 - except KeyError:  
64 - continue  
65 - except TypeError:  
66 - continue  
67 -  
68 - return _object  
69 -  
70 - def fetch_communities(self):  
71 - json_data = self.get_json_data('/api/v1/communities', 1)  
72 -  
73 - json_data = json_data['communities']  
74 - for element in json_data:  
75 - community = NoosferoCommunity()  
76 - self.fill_object_data(element, community)  
77 - community.save()  
78 -  
79 - if 'categories' in element:  
80 - for category_json in element["categories"]:  
81 - category = NoosferoCategory.objects.get_or_create(  
82 - id=category_json["id"], name=category_json["name"])[0]  
83 - community.categories.add(category.id)  
84 -  
85 - def fetch_articles(self):  
86 - json_data = self.get_json_data('/api/v1/articles', 1)  
87 -  
88 - json_data = json_data['articles']  
89 -  
90 - for element in json_data:  
91 - article = NoosferoArticle()  
92 - self.fill_object_data(element, article)  
93 - article.save()  
94 -  
95 - for category_json in element["categories"]:  
96 - category = NoosferoCategory.objects.get_or_create(  
97 - id=category_json["id"], name=category_json["name"])[0]  
98 - article.categories.add(category.id)  
99 -  
100 - def fetch_data(self):  
101 - LOGGER.info("Importing Communities")  
102 - self.fetch_communities()  
103 -  
104 - LOGGER.info("Importing Articles")  
105 - self.fetch_articles()  
106 -  
107 - @property  
108 - def app_label(self):  
109 - return 'noosfero'  
colab/plugins/noosfero/data_importer.py 0 → 100644
@@ -0,0 +1,106 @@ @@ -0,0 +1,106 @@
  1 +import json
  2 +import urllib
  3 +import urllib2
  4 +import logging
  5 +
  6 +from dateutil.parser import parse
  7 +
  8 +from django.db.models.fields import DateTimeField
  9 +
  10 +from colab.plugins.data import PluginDataImporter
  11 +
  12 +from .models import NoosferoArticle, NoosferoCommunity, NoosferoCategory
  13 +
  14 +LOGGER = logging.getLogger('colab.plugin.debug')
  15 +
  16 +
class NoosferoDataImporter(PluginDataImporter):
    """Imports communities and articles from a Noosfero instance's REST API
    into the local Noosfero plugin models."""

    def get_request_url(self, path, **kwargs):
        """Build the full API URL for ``path``.

        Adds the configured ``private_token`` plus any extra query
        parameters, and normalizes the upstream host so joining it with
        ``path`` never produces a double slash.
        """
        upstream = self.config.get('upstream')
        kwargs['private_token'] = self.config.get('private_token')
        params = urllib.urlencode(kwargs)

        if upstream[-1] == '/':
            upstream = upstream[:-1]

        return u'{}{}?{}'.format(upstream, path, params)

    def get_json_data(self, api_url, page, pages=1000):
        """Fetch and decode the JSON document at ``api_url``.

        Returns an empty *dict* when the request fails, so callers can
        safely use ``.get()`` on the result.  (Previously this returned a
        list, which made the callers' ``json_data['communities']`` lookup
        raise TypeError on every failed fetch.)
        """
        url = self.get_request_url(api_url, per_page=pages, page=page)
        try:
            data = urllib2.urlopen(url, timeout=10)
            json_data = json.load(data)
        except urllib2.URLError:
            LOGGER.exception("Connection timeout: " + url)
            json_data = {}

        return json_data

    def fill_object_data(self, element, _object):
        """Populate ``_object``'s model fields from the JSON dict ``element``.

        Special-cases the ``user`` and ``profile_identifier`` fields, parses
        datetimes, and silently skips fields missing from ``element``
        (KeyError) or with unusable values (TypeError).
        Returns the populated object.
        """
        for field in _object._meta.fields:
            try:
                if field.name == "user":
                    _object.update_user(
                        element["author"]["name"])
                    continue

                if field.name == "profile_identifier":
                    _object.profile_identifier = \
                        element["profile"]["identifier"]
                    continue

                # Datetimes arrive as strings; everything else is used as-is.
                if isinstance(field, DateTimeField):
                    value = parse(element[field.name])
                else:
                    value = element[field.name]

                setattr(_object, field.name, value)
            except KeyError:
                continue
            except TypeError:
                continue

        return _object

    def fetch_communities(self):
        """Import all communities (and their categories) from the API."""
        json_data = self.get_json_data('/api/v1/communities', 1)

        # .get() keeps us safe when the fetch failed or the key is absent.
        for element in json_data.get('communities', []):
            community = NoosferoCommunity()
            self.fill_object_data(element, community)
            community.save()

            for category_json in element.get('categories', []):
                category = NoosferoCategory.objects.get_or_create(
                    id=category_json["id"], name=category_json["name"])[0]
                community.categories.add(category.id)

    def fetch_articles(self):
        """Import all articles (and their categories) from the API."""
        json_data = self.get_json_data('/api/v1/articles', 1)

        for element in json_data.get('articles', []):
            article = NoosferoArticle()
            self.fill_object_data(element, article)
            article.save()

            # Guard like fetch_communities does: articles without a
            # ``categories`` key no longer raise KeyError.
            for category_json in element.get('categories', []):
                category = NoosferoCategory.objects.get_or_create(
                    id=category_json["id"], name=category_json["name"])[0]
                article.categories.add(category.id)

    def fetch_data(self):
        """Entry point called by the importer framework."""
        LOGGER.info("Importing Communities")
        self.fetch_communities()

        LOGGER.info("Importing Articles")
        self.fetch_articles()

    @property
    def app_label(self):
        # Ties this importer to the 'noosfero' plugin configuration.
        return 'noosfero'
colab/plugins/noosfero/views.py
1 1
2 from django.conf import settings 2 from django.conf import settings
3 3
4 -from ..utils.views import ColabProxyView 4 +from colab.plugins.views import ColabProxyView
5 5
6 6
7 class NoosferoProxyView(ColabProxyView): 7 class NoosferoProxyView(ColabProxyView):
colab/plugins/tasks.py 0 → 100644
@@ -0,0 +1,11 @@ @@ -0,0 +1,11 @@
  1 +
  2 +from datetime import timedelta
  3 +from celery.decorators import periodic_task
  4 +
  5 +from .data import TASKS
  6 +
  7 +
@periodic_task(run_every=timedelta(seconds=60))
def import_plugin_data():
    """Periodically dispatch every registered plugin data-import task.

    Runs once a minute under celery beat; each task is queued
    asynchronously via ``.delay()`` rather than executed inline.
    """
    for importer_task in TASKS:
        importer_task.delay()
colab/plugins/templatetags/plugins.py
@@ -11,10 +11,11 @@ register = template.Library() @@ -11,10 +11,11 @@ register = template.Library()
11 @register.simple_tag(takes_context=True) 11 @register.simple_tag(takes_context=True)
12 def plugins_menu(context): 12 def plugins_menu(context):
13 13
  14 + # TODO: Cache has to take language into account
14 if context['user'].is_authenticated(): 15 if context['user'].is_authenticated():
15 - cache_key = 'colab-proxy-menu-authenticated' 16 + cache_key = 'colab-plugin-menu-authenticated'
16 else: 17 else:
17 - cache_key = 'colab-proxy-menu-anonymous' 18 + cache_key = 'colab-plugin-menu-anonymous'
18 19
19 lang = get_language() 20 lang = get_language()
20 cache_key += '-{}'.format(lang) 21 cache_key += '-{}'.format(lang)
colab/plugins/utils/apps.py
@@ -2,7 +2,7 @@ @@ -2,7 +2,7 @@
2 from django.apps import AppConfig 2 from django.apps import AppConfig
3 3
4 4
5 -class ColabProxiedAppConfig(AppConfig): 5 +class ColabPluginAppConfig(AppConfig):
6 colab_proxied_app = True 6 colab_proxied_app = True
7 7
8 def register_signals(self): 8 def register_signals(self):
colab/plugins/utils/proxy_data_api.py
@@ -1,6 +0,0 @@ @@ -1,6 +0,0 @@
1 -  
2 -  
3 -class ProxyDataAPI(object):  
4 -  
5 - def fetch_data(self):  
6 - raise NotImplementedError('fetchData not yet implemented')  
colab/plugins/utils/views.py
@@ -1,37 +0,0 @@ @@ -1,37 +0,0 @@
1 -  
2 -import json  
3 -  
4 -from django.conf import settings  
5 -  
6 -from revproxy.views import DiazoProxyView  
7 -  
8 -  
9 -class ColabProxyView(DiazoProxyView):  
10 - add_remote_user = settings.REVPROXY_ADD_REMOTE_USER  
11 - diazo_theme_template = 'base.html'  
12 - html5 = True  
13 -  
14 - @property  
15 - def upstream(self):  
16 - proxy_config = settings.PROXIED_APPS.get(self.app_label, {})  
17 - return proxy_config.get('upstream')  
18 -  
19 - @property  
20 - def app_label(self):  
21 - raise NotImplementedError('app_label attribute must be set')  
22 -  
23 - def dispatch(self, request, *args, **kwargs):  
24 -  
25 - if request.user.is_authenticated():  
26 -  
27 - remote_user_data = {}  
28 -  
29 - remote_user_data['email'] = request.user.email  
30 - remote_user_data['name'] = request.user.get_full_name()  
31 -  
32 - request.META['HTTP_REMOTE_USER_DATA'] = json.dumps(  
33 - remote_user_data,  
34 - sort_keys=True,  
35 - )  
36 -  
37 - return super(ColabProxyView, self).dispatch(request, *args, **kwargs)  
colab/plugins/views.py 0 → 100644
@@ -0,0 +1,39 @@ @@ -0,0 +1,39 @@
  1 +
  2 +import json
  3 +
  4 +from django.conf import settings
  5 +
  6 +from revproxy.views import DiazoProxyView
  7 +
  8 +from .conf import get_plugin_config
  9 +
  10 +
class ColabProxyView(DiazoProxyView):
    """Reverse-proxy view that serves a plugin's upstream application
    through Diazo theming.

    Subclasses must provide ``app_label`` matching the plugin's entry in
    the colab configuration so the upstream URL can be resolved.
    """

    add_remote_user = settings.REVPROXY_ADD_REMOTE_USER
    diazo_theme_template = 'base.html'
    html5 = True

    @property
    def upstream(self):
        # The upstream host is read from the plugin's config block.
        config = get_plugin_config(self.app_label)
        return config.get('upstream')

    @property
    def app_label(self):
        raise NotImplementedError('app_label attribute must be set')

    def dispatch(self, request, *args, **kwargs):
        # For authenticated users, forward their identity to the proxied
        # app in a JSON-encoded header.  sort_keys keeps the header value
        # deterministic for identical data.

        if request.user.is_authenticated():

            remote_user_data = {}

            remote_user_data['email'] = request.user.email
            remote_user_data['name'] = request.user.get_full_name()

            request.META['HTTP_REMOTE_USER_DATA'] = json.dumps(
                remote_user_data,
                sort_keys=True,
            )

        return super(ColabProxyView, self).dispatch(request, *args, **kwargs)
colab/queue/__init__.py 0 → 100644
colab/queue/command.py 0 → 100644
@@ -0,0 +1,103 @@ @@ -0,0 +1,103 @@
  1 +from __future__ import absolute_import
  2 +
  3 +import celery
  4 +import os
  5 +import sys
  6 +
  7 +from django.core.management.base import BaseCommand
  8 +
  9 +DB_SHARED_THREAD = """\
  10 +DatabaseWrapper objects created in a thread can only \
  11 +be used in that same thread. The object with alias '%s' \
  12 +was created in thread id %s and this is thread id %s.\
  13 +"""
  14 +
  15 +
def patch_thread_ident():
    """Monkey-patch Django's DB-connection thread-ownership check.

    Django records the thread that created each DatabaseWrapper; under
    gevent/eventlet the recorded ident is the greenlet's, so the sharing
    check misfires.  This forces both the recording and the validation to
    use the *real* OS thread ident.  Patch taken from gunicorn.
    """
    # Idempotent: apply the patch at most once per process.
    if getattr(patch_thread_ident, 'called', False):
        return
    try:
        from django.db.backends import BaseDatabaseWrapper, DatabaseError

        # Only patch Django versions that actually define the check.
        if 'validate_thread_sharing' in BaseDatabaseWrapper.__dict__:
            import thread
            _get_ident = thread.get_ident

            __old__init__ = BaseDatabaseWrapper.__init__

            def _init(self, *args, **kwargs):
                # Re-record the owning thread with the real thread ident.
                __old__init__(self, *args, **kwargs)
                self._thread_ident = _get_ident()

            def _validate_thread_sharing(self):
                if (not self.allow_thread_sharing
                        and self._thread_ident != _get_ident()):
                    raise DatabaseError(
                        DB_SHARED_THREAD % (
                            self.alias, self._thread_ident, _get_ident()),
                    )

            BaseDatabaseWrapper.__init__ = _init
            BaseDatabaseWrapper.validate_thread_sharing = \
                _validate_thread_sharing

        patch_thread_ident.called = True
    except ImportError:
        # Django not importable yet; skip patching silently.
        pass
patch_thread_ident()
  52 +
  53 +
class CeleryCommand(BaseCommand):
    """Base management command that forwards its argv to the Celery CLI.

    Pre-processes Django's default options (--settings, --pythonpath) and
    the broker option, while hiding the Celery options that colab fixes
    itself (--app, --loader, --config).
    """

    options = BaseCommand.option_list
    # Celery options controlled by colab and hidden from the user.
    skip_opts = ['--app', '--loader', '--config']
    keep_base_opts = False

    def get_version(self):
        """Report the Celery version instead of Django's."""
        return 'celery %s' % (celery.__version__)

    def execute(self, *args, **options):
        # Honour a broker passed as a parsed option before delegating.
        broker = options.get('broker')
        if broker:
            self.set_broker(broker)
        super(CeleryCommand, self).execute(*args, **options)

    def set_broker(self, broker):
        # Celery picks the broker URL up from the environment.
        os.environ['CELERY_BROKER_URL'] = broker

    def run_from_argv(self, argv):
        # argv[0] is the program name and argv[1] the subcommand; only the
        # remaining arguments carry default options to pre-process.
        self.handle_default_options(argv[2:])
        return super(CeleryCommand, self).run_from_argv(argv)

    def handle_default_options(self, argv):
        """Consume --settings/--pythonpath/--broker/-b from ``argv``.

        Returns the argument list with the handled options stripped,
        unless ``keep_base_opts`` is set (then argv is returned intact).
        """
        acc = []
        broker = None
        for i, arg in enumerate(argv):
            if '--settings=' in arg:
                _, settings_module = arg.split('=')
                os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
            elif '--pythonpath=' in arg:
                _, pythonpath = arg.split('=')
                sys.path.insert(0, pythonpath)
            elif '--broker=' in arg:
                _, broker = arg.split('=')
            elif arg == '-b':
                # NOTE(review): the value following '-b' is read here but is
                # still appended to ``acc`` on the next iteration — looks
                # unintended; confirm against celery's own CLI handling.
                broker = argv[i + 1]
            else:
                acc.append(arg)
        if broker:
            self.set_broker(broker)
        return argv if self.keep_base_opts else acc

    def die(self, msg):
        """Write ``msg`` to stderr and terminate the process."""
        sys.stderr.write(msg)
        sys.stderr.write('\n')
        sys.exit()

    @property
    def option_list(self):
        # Base options minus the Celery options colab fixes itself.
        return [x for x in self.options
                if x._long_opts[0] not in self.skip_opts]
colab/search/templates/search/includes/search_filters.html
@@ -112,28 +112,6 @@ @@ -112,28 +112,6 @@
112 112
113 <ul class="unstyled-list"> 113 <ul class="unstyled-list">
114 114
115 - {% if is_trac %}  
116 - <li>  
117 - <span class="glyphicon glyphicon-book"></span>  
118 - <a href="{% append_to_get type='wiki' %}">{% trans "Wiki" %}</a>  
119 - </li>  
120 - <li>  
121 - <span class="glyphicon glyphicon-tag"></span>  
122 - <a href="{% append_to_get type='ticket' %}">{% trans "Ticket" %}</a>  
123 - </li>  
124 - <li>  
125 - <span class="glyphicon glyphicon-align-right"></span>  
126 - <a href="{% append_to_get type='changeset' %}">{% trans "Changeset" %}</a>  
127 - </li>  
128 - <li>  
129 - <span class="glyphicon glyphicon-user"></span>  
130 - <a href="{% append_to_get type='user' %}">{% trans "User" %}</a>  
131 - </li>  
132 - <li>  
133 - <span class="glyphicon glyphicon-file"></span>  
134 - <a href="{% append_to_get type='attachment' %}">{% trans "Attachment" %}</a>  
135 - </li>  
136 - {% endif %}  
137 <li> 115 <li>
138 <span class="glyphicon glyphicon-envelope"></span> 116 <span class="glyphicon glyphicon-envelope"></span>
139 <a href="{% append_to_get type='thread' %}">{% trans "Discussion" %}</a> 117 <a href="{% append_to_get type='thread' %}">{% trans "Discussion" %}</a>
colab/search/utils.py
@@ -57,11 +57,10 @@ def get_collaboration_data(logged_user, filter_by_user=None): @@ -57,11 +57,10 @@ def get_collaboration_data(logged_user, filter_by_user=None):
57 57
58 latest_results.extend(messages) 58 latest_results.extend(messages)
59 59
60 - app_names = settings.PROXIED_APPS.keys() 60 + app_names = settings.COLAB_APPS.keys()
61 61
62 for app_name in app_names: 62 for app_name in app_names:
63 - module = importlib \  
64 - .import_module('colab.plugins.{}.models'.format(app_name)) 63 + module = importlib.import_module('{}.models'.format(app_name))
65 64
66 for module_item_name in dir(module): 65 for module_item_name in dir(module):
67 module_item = getattr(module, module_item_name) 66 module_item = getattr(module, module_item_name)
colab/settings.py
@@ -8,7 +8,8 @@ For the full list of settings and their values, see @@ -8,7 +8,8 @@ For the full list of settings and their values, see
8 https://docs.djangoproject.com/en/1.7/ref/settings/ 8 https://docs.djangoproject.com/en/1.7/ref/settings/
9 """ 9 """
10 10
11 -BROKER_URL = 'amqp://guest:guest@localhost:5672/' 11 +BROKER_URL = 'redis://localhost:6379/0'
  12 +CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
12 13
13 # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 14 # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
14 import os 15 import os
@@ -49,6 +50,7 @@ INSTALLED_APPS = ( @@ -49,6 +50,7 @@ INSTALLED_APPS = (
49 'taggit', 50 'taggit',
50 51
51 # Own apps 52 # Own apps
  53 + 'colab',
52 'colab.home', 54 'colab.home',
53 'colab.plugins', 55 'colab.plugins',
54 'colab.super_archives', 56 'colab.super_archives',
@@ -257,7 +259,6 @@ locals().update(conf.load_py_settings()) @@ -257,7 +259,6 @@ locals().update(conf.load_py_settings())
257 locals().update(conf.load_colab_apps()) 259 locals().update(conf.load_colab_apps())
258 260
259 COLAB_APPS = locals().get('COLAB_APPS') or {} 261 COLAB_APPS = locals().get('COLAB_APPS') or {}
260 -PROXIED_APPS = {}  
261 262
262 for app_name, app in COLAB_APPS.items(): 263 for app_name, app in COLAB_APPS.items():
263 if 'dependencies' in app: 264 if 'dependencies' in app:
@@ -268,9 +269,6 @@ for app_name, app in COLAB_APPS.items(): @@ -268,9 +269,6 @@ for app_name, app in COLAB_APPS.items():
268 if app_name not in INSTALLED_APPS: 269 if app_name not in INSTALLED_APPS:
269 INSTALLED_APPS += (app_name,) 270 INSTALLED_APPS += (app_name,)
270 271
271 - if app.get('upstream'):  
272 - PROXIED_APPS[app_name.split('.')[-1]] = app  
273 -  
274 if 'middlewares' in app: 272 if 'middlewares' in app:
275 for middleware in app.get('middlewares'): 273 for middleware in app.get('middlewares'):
276 if middleware not in MIDDLEWARE_CLASSES: 274 if middleware not in MIDDLEWARE_CLASSES:
colab/super_archives/templates/superarchives/thread-dashboard.html
@@ -12,9 +12,6 @@ @@ -12,9 +12,6 @@
12 <h3><b>{{ listname|title|lower }} {% if description %} ({{ description }}){% endif %}</b></h3> 12 <h3><b>{{ listname|title|lower }} {% if description %} ({{ description }}){% endif %}</b></h3>
13 <div class="btn-group btn-group-sm"> 13 <div class="btn-group btn-group-sm">
14 <a href="#" class="btn btn-default" disabled="disabled">{% blocktrans %}{{ number_of_users }} members{% endblocktrans %}</a> 14 <a href="#" class="btn btn-default" disabled="disabled">{% blocktrans %}{{ number_of_users }} members{% endblocktrans %}</a>
15 - {% if proxy.trac %}  
16 - <a href="/wiki/grupos/{{ listname }}" class="btn btn-default">Wiki</a>  
17 - {% endif %}  
18 </div> 15 </div>
19 <hr/> 16 <hr/>
20 17
colab/utils/conf.py
@@ -3,7 +3,6 @@ import os @@ -3,7 +3,6 @@ import os
3 import sys 3 import sys
4 import logging 4 import logging
5 import importlib 5 import importlib
6 -import warnings  
7 6
8 from django.core.exceptions import ImproperlyConfigured 7 from django.core.exceptions import ImproperlyConfigured
9 8
@@ -96,17 +95,21 @@ def load_colab_apps(): @@ -96,17 +95,21 @@ def load_colab_apps():
96 return {'COLAB_APPS': COLAB_APPS} 95 return {'COLAB_APPS': COLAB_APPS}
97 96
98 for file_name in os.listdir(plugins_dir): 97 for file_name in os.listdir(plugins_dir):
99 - if not file_name.endswith('.py'):  
100 - continue  
101 -  
102 file_module = file_name.split('.')[0] 98 file_module = file_name.split('.')[0]
  99 +
  100 + logger.info('Loaded plugin settings: %s%s', plugins_dir, file_name)
103 py_settings_d = _load_py_file(file_module, plugins_dir) 101 py_settings_d = _load_py_file(file_module, plugins_dir)
104 - logger.info('Loaded plugin settings: %s/%s', plugins_dir, file_name)  
105 102
106 - app_name = py_settings_d.get('name') 103 + if os.path.isdir(os.path.join(plugins_dir, file_name)):
  104 + app_name = file_name
  105 +
  106 + elif file_name.endswith('.py'):
  107 + app_name = py_settings_d.get('name')
  108 +
107 if not app_name: 109 if not app_name:
108 - warnings.warn("Plugin missing name variable") 110 + logger.warning("Plugin missing name variable (%s)", file_name)
109 continue 111 continue
  112 +
110 try: 113 try:
111 importlib.import_module(app_name) 114 importlib.import_module(app_name)
112 except ImportError: 115 except ImportError:
colab/utils/runner.py 0 → 100644
@@ -0,0 +1,14 @@ @@ -0,0 +1,14 @@
  1 +
  2 +import os
  3 +
  4 +from django.core.management import ManagementUtility
  5 +
  6 +
def execute_from_command_line(argv=None):
    """Entry point for the ``colab-admin`` console script.

    Points Django at colab's settings module (unless already configured)
    and hands ``argv`` over to Django's ManagementUtility.
    """
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "colab.settings")
    ManagementUtility(argv).execute()
docs/source/plugindev.rst
@@ -43,16 +43,16 @@ signals structure, some steps are required: @@ -43,16 +43,16 @@ signals structure, some steps are required:
43 * With signals registered and handling method defined you must connect them. 43 * With signals registered and handling method defined you must connect them.
44 To do it you must call connect_signal passing signal name, sender and handling 44 To do it you must call connect_signal passing signal name, sender and handling
45 method as arguments. These should be implemented on plugin's apps.py. It must 45 method as arguments. These should be implemented on plugin's apps.py. It must
46 - be said that the plugin app class must extend ColabProxiedAppConfig. An 46 + be said that the plugin app class must extend ColabPluginAppConfig. An
47 example of this configuration can be seen below: 47 example of this configuration can be seen below:
48 48
49 49
50 .. code-block:: python 50 .. code-block:: python
51 - from colab.plugins.utils.apps import ColabProxiedAppConfig 51 + from colab.plugins.utils.apps import ColabPluginAppConfig
52 from colab.signals.signals import register_signal, connect_signal 52 from colab.signals.signals import register_signal, connect_signal
53 from colab.plugins.PLUGIN.tasks import HANDLING_METHOD 53 from colab.plugins.PLUGIN.tasks import HANDLING_METHOD
54 54
55 - class PluginApps(ColabProxiedAppConfig): 55 + class PluginApps(ColabPluginAppConfig):
56 short_name = PLUGIN_NAME 56 short_name = PLUGIN_NAME
57 signals_list = [SIGNAL1, SIGNAL2] 57 signals_list = [SIGNAL1, SIGNAL2]
58 58
@@ -16,7 +16,7 @@ REQUIREMENTS = [ @@ -16,7 +16,7 @@ REQUIREMENTS = [
16 'diazo>=1.0.5', 16 'diazo>=1.0.5',
17 17
18 # Async Signals 18 # Async Signals
19 - 'celery>=3.1', 19 + 'celery[redis]>=3.1',
20 20
21 ### Move out of colab (as plugins): 21 ### Move out of colab (as plugins):
22 22
@@ -55,8 +55,7 @@ setup( @@ -55,8 +55,7 @@ setup(
55 packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES), 55 packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES),
56 include_package_data=True, 56 include_package_data=True,
57 entry_points={'console_scripts': [ 57 entry_points={'console_scripts': [
58 - 'colab-admin = colab.management:execute_from_command_line',  
59 - 'colab-init-config = colab.management:initconfig', 58 + 'colab-admin = colab.utils.runner:execute_from_command_line',
60 ]}, 59 ]},
61 zip_safe=False, 60 zip_safe=False,
62 long_description=open('README.rst').read(), 61 long_description=open('README.rst').read(),
vagrant/centos.sh
@@ -22,9 +22,9 @@ yum install -y epel-release @@ -22,9 +22,9 @@ yum install -y epel-release
22 22
23 yum -y groupinstall "Development tools" 23 yum -y groupinstall "Development tools"
24 24
25 -yum install -y git unzip gettext libxml2-devel libxslt-devel openssl-devel libffi-devel python-devel python-pip python-virtualenvwrapper rabbitmq-server 25 +yum install -y git unzip gettext libxml2-devel libxslt-devel openssl-devel libffi-devel python-devel python-pip python-virtualenvwrapper redis
26 26
27 27
28 -### Init Rabbitmq  
29 -chkconfig rabbitmq-server on  
30 -systemctl start rabbitmq-server 28 +### Init Redis
  29 +systemctl enable redis
  30 +systemctl start redis
vagrant/misc/etc/default/celerybeat 0 → 100644
@@ -0,0 +1,22 @@ @@ -0,0 +1,22 @@
  1 +# Absolute or relative path to the 'celery' command:
  2 +CELERY_BIN="/home/vagrant/.virtualenvs/colab/bin/celery"
  3 +
  4 +# App instance to use
  5 +# comment out this line if you don't use an app
  6 +CELERY_APP="colab.celery:app"
  7 +
  8 +# Where to chdir at start.
  9 +CELERYBEAT_CHDIR="/vagrant/"
  10 +
  11 +# Extra arguments to celerybeat
  12 +CELERYBEAT_OPTS="--schedule=/var/run/celery/celerybeat-schedule"
  13 +
  14 +CELERYBEAT_LOG_FILE="/var/log/celery/beat.log"
  15 +CELERYBEAT_PID_FILE="/var/run/celery/beat.pid"
  16 +
  17 +CELERYBEAT_USER="vagrant"
  18 +CELERYBEAT_GROUP="vagrant"
  19 +
  20 +# If enabled pid and log directories will be created if missing,
  21 +# and owned by the userid/group configured.
  22 +CELERY_CREATE_DIRS=1
vagrant/misc/etc/default/celeryd
@@ -5,13 +5,13 @@ CELERYD_NODES=&quot;worker1&quot; @@ -5,13 +5,13 @@ CELERYD_NODES=&quot;worker1&quot;
5 CELERY_BIN="/home/vagrant/.virtualenvs/colab/bin/celery" 5 CELERY_BIN="/home/vagrant/.virtualenvs/colab/bin/celery"
6 6
7 # comment out this line if you don't use an app 7 # comment out this line if you don't use an app
8 -CELERY_APP="colab" 8 +CELERY_APP="colab.celery:app"
9 9
10 # Where to chdir at start. 10 # Where to chdir at start.
11 CELERYD_CHDIR="/vagrant/" 11 CELERYD_CHDIR="/vagrant/"
12 12
13 # Extra command-line arguments to the worker 13 # Extra command-line arguments to the worker
14 -CELERYD_OPTS="--time-limit=300 --concurrency=8" 14 +CELERYD_OPTS="--time-limit=300 --concurrency=2"
15 15
16 # %N will be replaced with the first part of the nodename. 16 # %N will be replaced with the first part of the nodename.
17 CELERYD_LOG_FILE="/var/log/celery/%N.log" 17 CELERYD_LOG_FILE="/var/log/celery/%N.log"
vagrant/misc/etc/init.d/celerybeat 0 → 100755
@@ -0,0 +1,318 @@ @@ -0,0 +1,318 @@
  1 +#!/bin/sh -e
  2 +# =========================================================
  3 +# celerybeat - Starts the Celery periodic task scheduler.
  4 +# =========================================================
  5 +#
  6 +# :Usage: /etc/init.d/celerybeat {start|stop|force-reload|restart|try-restart|status}
  7 +# :Configuration file: /etc/default/celerybeat or /etc/default/celeryd
  8 +#
  9 +# See http://docs.celeryproject.org/en/latest/tutorials/daemonizing.html#generic-init-scripts
  10 +
  11 +### BEGIN INIT INFO
  12 +# Provides: celerybeat
  13 +# Required-Start: $network $local_fs $remote_fs
  14 +# Required-Stop: $network $local_fs $remote_fs
  15 +# Default-Start: 2 3 4 5
  16 +# Default-Stop: 0 1 6
  17 +# Short-Description: celery periodic task scheduler
  18 +### END INIT INFO
  19 +
  20 +# Cannot use set -e/bash -e since the kill -0 command will abort
  21 +# abnormally in the absence of a valid process ID.
  22 +#set -e
  23 +VERSION=10.1
  24 +echo "celery init v${VERSION}."
  25 +
  26 +if [ $(id -u) -ne 0 ]; then
  27 + echo "Error: This program can only be used by the root user."
  28 + echo "       Unprivileged users must use 'celery beat --detach'"
  29 + exit 1
  30 +fi
  31 +
  32 +
  33 +# May be a runlevel symlink (e.g. S02celeryd)
  34 +if [ -L "$0" ]; then
  35 + SCRIPT_FILE=$(readlink "$0")
  36 +else
  37 + SCRIPT_FILE="$0"
  38 +fi
  39 +SCRIPT_NAME="$(basename "$SCRIPT_FILE")"
  40 +
  41 +# /etc/init.d/celerybeat: start and stop the celery periodic task scheduler daemon.
  42 +
  43 +# Make sure executable configuration script is owned by root
  44 +_config_sanity() {
  45 + local path="$1"
  46 + local owner=$(ls -ld "$path" | awk '{print $3}')
  47 + local iwgrp=$(ls -ld "$path" | cut -b 6)
  48 + local iwoth=$(ls -ld "$path" | cut -b 9)
  49 +
  50 + if [ "$(id -u $owner)" != "0" ]; then
  51 + echo "Error: Config script '$path' must be owned by root!"
  52 + echo
  53 + echo "Resolution:"
  54 + echo "Review the file carefully and make sure it has not been "
  55 + echo "modified with malicious intent. When sure the "
  56 + echo "script is safe to execute with superuser privileges "
  57 + echo "you can change ownership of the script:"
  58 + echo " $ sudo chown root '$path'"
  59 + exit 1
  60 + fi
  61 +
  62 + if [ "$iwoth" != "-" ]; then # S_IWOTH
  63 + echo "Error: Config script '$path' cannot be writable by others!"
  64 + echo
  65 + echo "Resolution:"
  66 + echo "Review the file carefully and make sure it has not been "
  67 + echo "modified with malicious intent. When sure the "
  68 + echo "script is safe to execute with superuser privileges "
  69 + echo "you can change the scripts permissions:"
  70 + echo " $ sudo chmod 640 '$path'"
  71 + exit 1
  72 + fi
  73 + if [ "$iwgrp" != "-" ]; then # S_IWGRP
  74 + echo "Error: Config script '$path' cannot be writable by group!"
  75 + echo
  76 + echo "Resolution:"
  77 + echo "Review the file carefully and make sure it has not been "
  78 + echo "modified with malicious intent. When sure the "
  79 + echo "script is safe to execute with superuser privileges "
  80 + echo "you can change the scripts permissions:"
  81 + echo " $ sudo chmod 640 '$path'"
  82 + exit 1
  83 + fi
  84 +}
  85 +
  86 +scripts=""
  87 +
  88 +if test -f /etc/default/celeryd; then
  89 + scripts="/etc/default/celeryd"
  90 + _config_sanity /etc/default/celeryd
  91 + . /etc/default/celeryd
  92 +fi
  93 +
  94 +EXTRA_CONFIG="/etc/default/${SCRIPT_NAME}"
  95 +if test -f "$EXTRA_CONFIG"; then
  96 + scripts="$scripts, $EXTRA_CONFIG"
  97 + _config_sanity "$EXTRA_CONFIG"
  98 + . "$EXTRA_CONFIG"
  99 +fi
  100 +
  101 +echo "Using configuration: $scripts"
  102 +
  103 +CELERY_BIN=${CELERY_BIN:-"celery"}
  104 +DEFAULT_USER="celery"
  105 +DEFAULT_PID_FILE="/var/run/celery/beat.pid"
  106 +DEFAULT_LOG_FILE="/var/log/celery/beat.log"
  107 +DEFAULT_LOG_LEVEL="INFO"
  108 +DEFAULT_CELERYBEAT="$CELERY_BIN beat"
  109 +
  110 +CELERYBEAT=${CELERYBEAT:-$DEFAULT_CELERYBEAT}
  111 +CELERYBEAT_LOG_LEVEL=${CELERYBEAT_LOG_LEVEL:-${CELERYBEAT_LOGLEVEL:-$DEFAULT_LOG_LEVEL}}
  112 +
  113 +# Sets --app argument for CELERY_BIN
  114 +CELERY_APP_ARG=""
  115 +if [ ! -z "$CELERY_APP" ]; then
  116 + CELERY_APP_ARG="--app=$CELERY_APP"
  117 +fi
  118 +
  119 +CELERYBEAT_USER=${CELERYBEAT_USER:-${CELERYD_USER:-$DEFAULT_USER}}
  120 +
  121 +# Set CELERY_CREATE_DIRS to always create log/pid dirs.
  122 +CELERY_CREATE_DIRS=${CELERY_CREATE_DIRS:-0}
  123 +CELERY_CREATE_RUNDIR=$CELERY_CREATE_DIRS
  124 +CELERY_CREATE_LOGDIR=$CELERY_CREATE_DIRS
  125 +if [ -z "$CELERYBEAT_PID_FILE" ]; then
  126 + CELERYBEAT_PID_FILE="$DEFAULT_PID_FILE"
  127 + CELERY_CREATE_RUNDIR=1
  128 +fi
  129 +if [ -z "$CELERYBEAT_LOG_FILE" ]; then
  130 + CELERYBEAT_LOG_FILE="$DEFAULT_LOG_FILE"
  131 + CELERY_CREATE_LOGDIR=1
  132 +fi
  133 +
  134 +export CELERY_LOADER
  135 +
  136 +CELERYBEAT_OPTS="$CELERYBEAT_OPTS -f $CELERYBEAT_LOG_FILE -l $CELERYBEAT_LOG_LEVEL"
  137 +
  138 +if [ -n "$2" ]; then
  139 + CELERYBEAT_OPTS="$CELERYBEAT_OPTS $2"
  140 +fi
  141 +
  142 +CELERYBEAT_LOG_DIR=`dirname $CELERYBEAT_LOG_FILE`
  143 +CELERYBEAT_PID_DIR=`dirname $CELERYBEAT_PID_FILE`
  144 +
  145 +# Extra start-stop-daemon options, like user/group.
  146 +
  147 +CELERYBEAT_CHDIR=${CELERYBEAT_CHDIR:-$CELERYD_CHDIR}
  148 +if [ -n "$CELERYBEAT_CHDIR" ]; then
  149 + DAEMON_OPTS="$DAEMON_OPTS --workdir=$CELERYBEAT_CHDIR"
  150 +fi
  151 +
  152 +
  153 +export PATH="${PATH:+$PATH:}/usr/sbin:/sbin"
  154 +
  155 +check_dev_null() {
  156 + if [ ! -c /dev/null ]; then
  157 + echo "/dev/null is not a character device!"
  158 + exit 75 # EX_TEMPFAIL
  159 + fi
  160 +}
  161 +
  162 +maybe_die() {
  163 + if [ $? -ne 0 ]; then
  164 + echo "Exiting: $*"
  165 + exit 77 # EX_NOPERM
  166 + fi
  167 +}
  168 +
  169 +create_default_dir() {
  170 + if [ ! -d "$1" ]; then
  171 + echo "- Creating default directory: '$1'"
  172 + mkdir -p "$1"
  173 + maybe_die "Couldn't create directory $1"
  174 + echo "- Changing permissions of '$1' to 02755"
  175 + chmod 02755 "$1"
  176 + maybe_die "Couldn't change permissions for $1"
  177 + if [ -n "$CELERYBEAT_USER" ]; then
  178 + echo "- Changing owner of '$1' to '$CELERYBEAT_USER'"
  179 + chown "$CELERYBEAT_USER" "$1"
  180 + maybe_die "Couldn't change owner of $1"
  181 + fi
  182 + if [ -n "$CELERYBEAT_GROUP" ]; then
  183 + echo "- Changing group of '$1' to '$CELERYBEAT_GROUP'"
  184 + chgrp "$CELERYBEAT_GROUP" "$1"
  185 + maybe_die "Couldn't change group of $1"
  186 + fi
  187 + fi
  188 +}
  189 +
  190 +check_paths() {
  191 + if [ $CELERY_CREATE_LOGDIR -eq 1 ]; then
  192 + create_default_dir "$CELERYBEAT_LOG_DIR"
  193 + fi
  194 + if [ $CELERY_CREATE_RUNDIR -eq 1 ]; then
  195 + create_default_dir "$CELERYBEAT_PID_DIR"
  196 + fi
  197 +}
  198 +
  199 +
  200 +create_paths () {
  201 + create_default_dir "$CELERYBEAT_LOG_DIR"
  202 + create_default_dir "$CELERYBEAT_PID_DIR"
  203 +}
  204 +
  205 +
  206 +wait_pid () {
  207 + pid=$1
  208 + forever=1
  209 + i=0
  210 + while [ $forever -gt 0 ]; do
  211 + kill -0 $pid 1>/dev/null 2>&1
  212 + if [ $? -eq 1 ]; then
  213 + echo "OK"
  214 + forever=0
  215 + else
  216 + kill -TERM "$pid"
  217 + i=$((i + 1))
  218 + if [ $i -gt 60 ]; then
  219 + echo "ERROR"
  220 + echo "Timed out while stopping (30s)"
  221 + forever=0
  222 + else
  223 + sleep 0.5
  224 + fi
  225 + fi
  226 + done
  227 +}
  228 +
  229 +
  230 +stop_beat () {
  231 + echo -n "Stopping ${SCRIPT_NAME}... "
  232 + if [ -f "$CELERYBEAT_PID_FILE" ]; then
  233 + wait_pid $(cat "$CELERYBEAT_PID_FILE")
  234 + else
  235 + echo "NOT RUNNING"
  236 + fi
  237 +}
  238 +
  239 +_chuid () {
  240 + su "$CELERYBEAT_USER" -c "$CELERYBEAT $*"
  241 +}
  242 +
  243 +start_beat () {
  244 + echo "Starting ${SCRIPT_NAME}..."
  245 + _chuid $CELERY_APP_ARG $CELERYBEAT_OPTS $DAEMON_OPTS --detach \
  246 + --pidfile="$CELERYBEAT_PID_FILE"
  247 +}
  248 +
  249 +
  250 +check_status () {
  251 + local failed=
  252 + local pid_file=$CELERYBEAT_PID_FILE
  253 + if [ ! -e $pid_file ]; then
  254 + echo "${SCRIPT_NAME} is up: no pid file found"
  255 + failed=true
  256 + elif [ ! -r $pid_file ]; then
  257 + echo "${SCRIPT_NAME} is in unknown state, user cannot read pid file."
  258 + failed=true
  259 + else
  260 + local pid=`cat "$pid_file"`
  261 + local cleaned_pid=`echo "$pid" | sed -e 's/[^0-9]//g'`
  262 + if [ -z "$pid" ] || [ "$cleaned_pid" != "$pid" ]; then
  263 + echo "${SCRIPT_NAME}: bad pid file ($pid_file)"
  264 + failed=true
  265 + else
  266 + local failed=
  267 + kill -0 $pid 2> /dev/null || failed=true
  268 + if [ "$failed" ]; then
  269 + echo "${SCRIPT_NAME} (pid $pid) is down, but pid file exists!"
  270 + failed=true
  271 + else
  272 + echo "${SCRIPT_NAME} (pid $pid) is up..."
  273 + fi
  274 + fi
  275 + fi
  276 +
  277 + [ "$failed" ] && exit 1 || exit 0
  278 +}
  279 +
  280 +
  281 +case "$1" in
  282 + start)
  283 + check_dev_null
  284 + check_paths
  285 + start_beat
  286 + ;;
  287 + stop)
  288 + check_paths
  289 + stop_beat
  290 + ;;
  291 + reload|force-reload)
  292 + echo "Use start+stop"
  293 + ;;
  294 + status)
  295 + check_status
  296 + ;;
  297 + restart)
  298 + echo "Restarting celery periodic task scheduler"
  299 + check_paths
  300 + stop_beat
  301 + check_dev_null
  302 + start_beat
  303 + ;;
  304 + create-paths)
  305 + check_dev_null
  306 + create_paths
  307 + ;;
  308 + check-paths)
  309 + check_dev_null
  310 + check_paths
  311 + ;;
  312 + *)
  313 + echo "Usage: /etc/init.d/${SCRIPT_NAME} {start|stop|restart|create-paths|status}"
  314 + exit 64 # EX_USAGE
  315 + ;;
  316 +esac
  317 +
  318 +exit 0
vagrant/provision.sh
@@ -36,15 +36,19 @@ sudo mkdir -p /etc/colab @@ -36,15 +36,19 @@ sudo mkdir -p /etc/colab
36 sudo chown vagrant:vagrant /etc/colab 36 sudo chown vagrant:vagrant /etc/colab
37 37
38 if [ ! -s /etc/colab/settings.py ]; then 38 if [ ! -s /etc/colab/settings.py ]; then
39 - colab-init-config > /etc/colab/settings.py 39 + colab-admin initconfig > /etc/colab/settings.py
40 fi 40 fi
41 41
42 colab-admin migrate 42 colab-admin migrate
43 colab-admin loaddata /vagrant/tests/test_data.json 43 colab-admin loaddata /vagrant/tests/test_data.json
44 44
45 # Init.d Celery files 45 # Init.d Celery files
46 -sudo cp $basedir/vagrant/misc/etc/init.d/celeryd /etc/init.d/  
47 -sudo cp $basedir/vagrant/misc/etc/default/celeryd /etc/default/ 46 +sudo cp $basedir/vagrant/misc/etc/init.d/celery* /etc/init.d/
  47 +sudo cp $basedir/vagrant/misc/etc/default/celery* /etc/default/
  48 +sudo service celeryd stop || echo
  49 +sudo service celerybeat stop || echo
  50 +sleep 2
48 sudo service celeryd start 51 sudo service celeryd start
  52 +sudo service celerybeat start
49 53
50 colab-admin rebuild_index --noinput 54 colab-admin rebuild_index --noinput
vagrant/ubuntu.sh
@@ -5,4 +5,4 @@ set -ex @@ -5,4 +5,4 @@ set -ex
5 ### Install dependencies 5 ### Install dependencies
6 apt-get update 6 apt-get update
7 7
8 -apt-get install curl git unzip build-essential gettext libxml2-dev libxslt1-dev libssl-dev libffi-dev python-dev virtualenvwrapper python-pip rabbitmq-server -y 8 +apt-get install curl git unzip build-essential gettext libxml2-dev libxslt1-dev libssl-dev libffi-dev python-dev virtualenvwrapper python-pip redis-server -y