diff --git a/colab/management/commands/initconfig.py b/colab/management/commands/initconfig.py index 714a394..02b2ca8 100644 --- a/colab/management/commands/initconfig.py +++ b/colab/management/commands/initconfig.py @@ -80,7 +80,7 @@ LOGGING = {{ # from colab.plugins.utils.menu import colab_url_factory # # name = 'colab.plugins.gitlab' -# verbose_name = 'Gitlab Proxy' +# verbose_name = 'Gitlab Plugin' # # upstream = 'localhost' # #middlewares = [] diff --git a/colab/plugins/apps.py b/colab/plugins/apps.py index 2c11d6b..8c5c13b 100644 --- a/colab/plugins/apps.py +++ b/colab/plugins/apps.py @@ -1,7 +1,7 @@ from django.apps import AppConfig -from .utils.data import register_tasks +from .data import register_tasks from .utils.signals import connect_signal, register_signal diff --git a/colab/plugins/conf.py b/colab/plugins/conf.py new file mode 100644 index 0000000..7c8f958 --- /dev/null +++ b/colab/plugins/conf.py @@ -0,0 +1,6 @@ + +from django.conf import settings + + +def get_plugin_config(app_label): + return settings.COLAB_APPS.get(app_label, {}) diff --git a/colab/plugins/data/__init__.py b/colab/plugins/data/__init__.py new file mode 100644 index 0000000..109779e --- /dev/null +++ b/colab/plugins/data/__init__.py @@ -0,0 +1,3 @@ + +from .base_importer import PluginDataImporter # noqa +from .tasks import TASKS, data_import, register_tasks # noqa diff --git a/colab/plugins/data/base_importer.py b/colab/plugins/data/base_importer.py new file mode 100644 index 0000000..443e400 --- /dev/null +++ b/colab/plugins/data/base_importer.py @@ -0,0 +1,14 @@ + +import abc + +from django.conf import settings + + +class PluginDataImporter(object): + + def __init__(self): + self.config = settings.COLAB_APPS.get(self.app_label, {}) + + @abc.abstractmethod + def fetch_data(self): + raise NotImplementedError('fetchData not yet implemented') diff --git a/colab/plugins/data/tasks.py b/colab/plugins/data/tasks.py new file mode 100644 index 0000000..cf2ef8a --- /dev/null +++ 
b/colab/plugins/data/tasks.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +import importlib +import logging + +from django.conf import settings + +from colab.celery import app + +from . import PluginDataImporter + +LOGGER = logging.getLogger('colab.plugins.data') +TASKS = set() + + +def register_tasks(): + + global TASKS + + for app_name in settings.INSTALLED_APPS: + + module_name = '{}.data_importer'.format(app_name) + try: + module = importlib.import_module(module_name) + except ImportError: + continue + + for item_name in dir(module): + item = getattr(module, item_name) + if item is PluginDataImporter: + continue + + if callable(getattr(item, 'fetch_data', None)): + instance = item() + task_name = '{}.{}'.format(module.__name__, item_name) + task = app.task(name=task_name, bind=True)(instance.fetch_data) + TASKS.add(task) + LOGGER.debug('Registered task: %s', task_name) + + LOGGER.debug(TASKS) + return TASKS + + +def data_import(self): + for task in TASKS: + task.delay() diff --git a/colab/plugins/gitlab/__init__.py b/colab/plugins/gitlab/__init__.py index b6dd530..a3f8019 100644 --- a/colab/plugins/gitlab/__init__.py +++ b/colab/plugins/gitlab/__init__.py @@ -1,3 +1,3 @@ -default_app_config = 'colab.plugins.gitlab.apps.ProxyGitlabAppConfig' +default_app_config = 'colab.plugins.gitlab.apps.GitlabPluginAppConfig' diff --git a/colab/plugins/gitlab/apps.py b/colab/plugins/gitlab/apps.py index 7ef8e90..33bdb8b 100644 --- a/colab/plugins/gitlab/apps.py +++ b/colab/plugins/gitlab/apps.py @@ -4,7 +4,7 @@ from colab.plugins.gitlab.tasks import handling_method from colab.signals.signals import register_signal, connect_signal -class ProxyGitlabAppConfig(ColabPluginAppConfig): +class GitlabPluginAppConfig(ColabPluginAppConfig): name = 'colab.plugins.gitlab' verbose_name = 'Gitlab Plugin' short_name = 'gitlab' diff --git a/colab/plugins/gitlab/data_api.py b/colab/plugins/gitlab/data_api.py deleted file mode 100644 index afc2bc3..0000000 --- a/colab/plugins/gitlab/data_api.py 
+++ /dev/null @@ -1,215 +0,0 @@ -import json -import urllib -import urllib2 -import logging - -from dateutil.parser import parse - -from django.db.models.fields import DateTimeField - -from colab.plugins.gitlab.models import (GitlabProject, GitlabMergeRequest, - GitlabComment, GitlabIssue) -from colab.plugins.utils.proxy_data_api import ProxyDataAPI - - -LOGGER = logging.getLogger('colab.plugin.gitlab') - - -class GitlabDataImporter(ProxyDataAPI): - app_label = 'gitlab' - - def get_request_url(self, path, **kwargs): - upstream = self.config.get('upstream') - kwargs['private_token'] = self.config.get('private_token') - params = urllib.urlencode(kwargs) - - if upstream[-1] == '/': - upstream = upstream[:-1] - - return u'{}{}?{}'.format(upstream, path, params) - - def get_json_data(self, api_url, page, pages=1000): - url = self.get_request_url(api_url, per_page=pages, - page=page) - - try: - data = urllib2.urlopen(url, timeout=10) - json_data = json.load(data) - except urllib2.URLError: - LOGGER.exception("Connection timeout: " + url) - json_data = [] - - return json_data - - def fill_object_data(self, element, _object): - for field in _object._meta.fields: - try: - if field.name == "user": - _object.update_user( - element["author"]["username"]) - continue - if field.name == "project": - _object.project_id = element["project_id"] - continue - - if isinstance(field, DateTimeField): - value = parse(element[field.name]) - else: - value = element[field.name] - - setattr(_object, field.name, value) - except KeyError: - continue - - return _object - - def fetch_projects(self): - page = 1 - projects = [] - - while True: - json_data = self.get_json_data('/api/v3/projects/all', page) - page = page + 1 - - if not len(json_data): - break - - for element in json_data: - project = GitlabProject() - self.fill_object_data(element, project) - projects.append(project) - - return projects - - def fetch_merge_request(self, projects): - all_merge_request = [] - - for project in projects: 
- page = 1 - while True: - url = '/api/v3/projects/{}/merge_requests'.format(project.id) - json_data_mr = self.get_json_data(url, page) - page = page + 1 - - if len(json_data_mr) == 0: - break - - for element in json_data_mr: - single_merge_request = GitlabMergeRequest() - self.fill_object_data(element, single_merge_request) - all_merge_request.append(single_merge_request) - - return all_merge_request - - def fetch_issue(self, projects): - all_issues = [] - - for project in projects: - page = 1 - while True: - url = '/api/v3/projects/{}/issues'.format(project.id) - json_data_issue = self.get_json_data(url, page) - page = page + 1 - - if len(json_data_issue) == 0: - break - - for element in json_data_issue: - single_issue = GitlabIssue() - self.fill_object_data(element, single_issue) - all_issues.append(single_issue) - - return all_issues - - def fetch_comments(self): - all_comments = [] - all_comments.extend(self.fetch_comments_MR()) - all_comments.extend(self.fetch_comments_issues()) - - return all_comments - - def fetch_comments_MR(self): - all_comments = [] - all_merge_requests = GitlabMergeRequest.objects.all() - - for merge_request in all_merge_requests: - page = 1 - while True: - url = '/api/v3/projects/{}/merge_requests/{}/notes'.format( - merge_request.project_id, merge_request.id) - json_data_mr = self.get_json_data(url, page) - page = page + 1 - - if len(json_data_mr) == 0: - break - - for element in json_data_mr: - single_comment = GitlabComment() - self.fill_object_data(element, single_comment) - single_comment.project = merge_request.project - single_comment.issue_comment = False - single_comment.parent_id = merge_request.id - all_comments.append(single_comment) - - return all_comments - - def fetch_comments_issues(self): - all_comments = [] - all_issues = GitlabIssue.objects.all() - - for issue in all_issues: - page = 1 - while True: - url = '/api/v3/projects/{}/issues/{}/notes'.format( - issue.project_id, issue.id) - json_data_mr = 
self.get_json_data(url, page) - page = page + 1 - - if len(json_data_mr) == 0: - break - - for element in json_data_mr: - single_comment = GitlabComment() - self.fill_object_data(element, single_comment) - single_comment.project = issue.project - single_comment.issue_comment = True - single_comment.parent_id = issue.id - all_comments.append(single_comment) - - return all_comments - - -class GitlabProjectImporter(GitlabDataImporter): - - def fetch_data(self): - LOGGER.info("Importing Projects") - projects = self.fetch_projects() - for datum in projects: - datum.save() - - -class GitlabMergeRequestImporter(GitlabDataImporter): - - def fetch_data(self): - LOGGER.info("Importing Merge Requests") - merge_request_list = self.fetch_merge_request(projects) - for datum in merge_request_list: - datum.save() - - -class GitlabIssueImporter(GitlabDataImporter): - - def fetch_data(self): - LOGGER.info("Importing Issues") - issue_list = self.fetch_issue(projects) - for datum in issue_list: - datum.save() - - -class GitlabCommentImporter(GitlabDataImporter): - - def fetch_data(self): - LOGGER.info("Importing Comments") - comments_list = self.fetch_comments() - for datum in comments_list: - datum.save() diff --git a/colab/plugins/gitlab/data_importer.py b/colab/plugins/gitlab/data_importer.py new file mode 100644 index 0000000..3473b8f --- /dev/null +++ b/colab/plugins/gitlab/data_importer.py @@ -0,0 +1,217 @@ +import json +import urllib +import urllib2 +import logging + +from dateutil.parser import parse + +from django.db.models.fields import DateTimeField +from colab.plugins.data import PluginDataImporter + +from .models import (GitlabProject, GitlabMergeRequest, + GitlabComment, GitlabIssue) + + +LOGGER = logging.getLogger('colab.plugin.gitlab') + + +class GitlabDataImporter(PluginDataImporter): + app_label = 'gitlab' + + def get_request_url(self, path, **kwargs): + upstream = self.config.get('upstream') + kwargs['private_token'] = self.config.get('private_token') + params = 
urllib.urlencode(kwargs) + + if upstream[-1] == '/': + upstream = upstream[:-1] + + return u'{}{}?{}'.format(upstream, path, params) + + def get_json_data(self, api_url, page, pages=1000): + url = self.get_request_url(api_url, per_page=pages, + page=page) + + try: + data = urllib2.urlopen(url, timeout=10) + json_data = json.load(data) + except urllib2.URLError: + LOGGER.exception("Connection timeout: " + url) + json_data = [] + + return json_data + + def fill_object_data(self, element, _object): + for field in _object._meta.fields: + try: + if field.name == "user": + _object.update_user( + element["author"]["username"]) + continue + if field.name == "project": + _object.project_id = element["project_id"] + continue + + if isinstance(field, DateTimeField): + value = parse(element[field.name]) + else: + value = element[field.name] + + setattr(_object, field.name, value) + except KeyError: + continue + + return _object + + def fetch_projects(self): + page = 1 + projects = [] + + while True: + json_data = self.get_json_data('/api/v3/projects/all', page) + page = page + 1 + + if not len(json_data): + break + + for element in json_data: + project = GitlabProject() + self.fill_object_data(element, project) + projects.append(project) + + return projects + + def fetch_merge_request(self, projects): + all_merge_request = [] + + for project in projects: + page = 1 + while True: + url = '/api/v3/projects/{}/merge_requests'.format(project.id) + json_data_mr = self.get_json_data(url, page) + page = page + 1 + + if len(json_data_mr) == 0: + break + + for element in json_data_mr: + single_merge_request = GitlabMergeRequest() + self.fill_object_data(element, single_merge_request) + all_merge_request.append(single_merge_request) + + return all_merge_request + + def fetch_issue(self, projects): + all_issues = [] + + for project in projects: + page = 1 + while True: + url = '/api/v3/projects/{}/issues'.format(project.id) + json_data_issue = self.get_json_data(url, page) + page = page 
+ 1 + + if len(json_data_issue) == 0: + break + + for element in json_data_issue: + single_issue = GitlabIssue() + self.fill_object_data(element, single_issue) + all_issues.append(single_issue) + + return all_issues + + def fetch_comments(self): + all_comments = [] + all_comments.extend(self.fetch_comments_MR()) + all_comments.extend(self.fetch_comments_issues()) + + return all_comments + + def fetch_comments_MR(self): + all_comments = [] + all_merge_requests = GitlabMergeRequest.objects.all() + + for merge_request in all_merge_requests: + page = 1 + while True: + url = '/api/v3/projects/{}/merge_requests/{}/notes'.format( + merge_request.project_id, merge_request.id) + json_data_mr = self.get_json_data(url, page) + page = page + 1 + + if len(json_data_mr) == 0: + break + + for element in json_data_mr: + single_comment = GitlabComment() + self.fill_object_data(element, single_comment) + single_comment.project = merge_request.project + single_comment.issue_comment = False + single_comment.parent_id = merge_request.id + all_comments.append(single_comment) + + return all_comments + + def fetch_comments_issues(self): + all_comments = [] + all_issues = GitlabIssue.objects.all() + + for issue in all_issues: + page = 1 + while True: + url = '/api/v3/projects/{}/issues/{}/notes'.format( + issue.project_id, issue.id) + json_data_mr = self.get_json_data(url, page) + page = page + 1 + + if len(json_data_mr) == 0: + break + + for element in json_data_mr: + single_comment = GitlabComment() + self.fill_object_data(element, single_comment) + single_comment.project = issue.project + single_comment.issue_comment = True + single_comment.parent_id = issue.id + all_comments.append(single_comment) + + return all_comments + + +class GitlabProjectImporter(GitlabDataImporter): + + def fetch_data(self): + LOGGER.info("Importing Projects") + projects = self.fetch_projects() + for datum in projects: + datum.save() + + +class GitlabMergeRequestImporter(GitlabDataImporter): + + def 
fetch_data(self): + LOGGER.info("Importing Merge Requests") + projects = GitlabProject.objects.all() + merge_request_list = self.fetch_merge_request(projects) + for datum in merge_request_list: + datum.save() + + +class GitlabIssueImporter(GitlabDataImporter): + + def fetch_data(self): + LOGGER.info("Importing Issues") + projects = GitlabProject.objects.all() + issue_list = self.fetch_issue(projects) + for datum in issue_list: + datum.save() + + +class GitlabCommentImporter(GitlabDataImporter): + + def fetch_data(self): + LOGGER.info("Importing Comments") + comments_list = self.fetch_comments() + for datum in comments_list: + datum.save() diff --git a/colab/plugins/gitlab/views.py b/colab/plugins/gitlab/views.py index 4ce0edf..0839a7f 100644 --- a/colab/plugins/gitlab/views.py +++ b/colab/plugins/gitlab/views.py @@ -1,5 +1,5 @@ -from ..utils.views import ColabProxyView +from colab.plugins.views import ColabProxyView class GitlabProxyView(ColabProxyView): diff --git a/colab/plugins/mezuro/__init__.py b/colab/plugins/mezuro/__init__.py index 58fef2c..68d3d02 100644 --- a/colab/plugins/mezuro/__init__.py +++ b/colab/plugins/mezuro/__init__.py @@ -1,3 +1,3 @@ -default_app_config = 'colab.plugins.mezuro.apps.ProxyMezuroAppConfig' +default_app_config = 'colab.plugins.mezuro.apps.MezuroPluginAppConfig' diff --git a/colab/plugins/mezuro/apps.py b/colab/plugins/mezuro/apps.py index 60d3d1d..6a406fd 100644 --- a/colab/plugins/mezuro/apps.py +++ b/colab/plugins/mezuro/apps.py @@ -2,6 +2,6 @@ from ..utils.apps import ColabPluginAppConfig -class ProxyMezuroAppConfig(ColabPluginAppConfig): +class MezuroPluginAppConfig(ColabPluginAppConfig): name = 'colab.plugins.mezuro' - verbose_name = 'Mezuro Proxy' + verbose_name = 'Mezuro Plugin' diff --git a/colab/plugins/mezuro/views.py b/colab/plugins/mezuro/views.py index 355cb98..eb6bcd1 100644 --- a/colab/plugins/mezuro/views.py +++ b/colab/plugins/mezuro/views.py @@ -1,4 +1,5 @@ -from ..utils.views import ColabProxyView + +from 
colab.plugins.views import ColabProxyView class MezuroProxyView(ColabProxyView): diff --git a/colab/plugins/noosfero/__init__.py b/colab/plugins/noosfero/__init__.py index ace4454..244910d 100644 --- a/colab/plugins/noosfero/__init__.py +++ b/colab/plugins/noosfero/__init__.py @@ -1,3 +1,3 @@ -default_app_config = 'colab.plugins.noosfero.apps.ProxyNoosferoAppConfig' +default_app_config = 'colab.plugins.noosfero.apps.NoosferoPluginAppConfig' diff --git a/colab/plugins/noosfero/apps.py b/colab/plugins/noosfero/apps.py index ca2f6a6..10bd5e6 100644 --- a/colab/plugins/noosfero/apps.py +++ b/colab/plugins/noosfero/apps.py @@ -2,6 +2,6 @@ from ..utils.apps import ColabPluginAppConfig -class ProxyNoosferoAppConfig(ColabPluginAppConfig): +class NoosferoPluginAppConfig(ColabPluginAppConfig): name = 'colab.plugins.noosfero' - verbose_name = 'Noosfero Proxy' + verbose_name = 'Noosfero Plugin' diff --git a/colab/plugins/noosfero/data_api.py b/colab/plugins/noosfero/data_api.py deleted file mode 100644 index 4916b13..0000000 --- a/colab/plugins/noosfero/data_api.py +++ /dev/null @@ -1,109 +0,0 @@ -import json -import urllib -import urllib2 -import logging - -from dateutil.parser import parse - -from django.conf import settings -from django.db.models.fields import DateTimeField - -from colab.plugins.noosfero.models import (NoosferoArticle, NoosferoCommunity, - NoosferoCategory) -from colab.plugins.utils.proxy_data_api import ProxyDataAPI - -LOGGER = logging.getLogger('colab.plugin.debug') - - -class NoosferoDataAPI(ProxyDataAPI): - - def get_request_url(self, path, **kwargs): - proxy_config = settings.COLAB_APPS.get(self.app_label, {}) - - upstream = proxy_config.get('upstream') - kwargs['private_token'] = proxy_config.get('private_token') - params = urllib.urlencode(kwargs) - - if upstream[-1] == '/': - upstream = upstream[:-1] - - return u'{}{}?{}'.format(upstream, path, params) - - def get_json_data(self, api_url, page, pages=1000): - url = self.get_request_url(api_url, 
per_page=pages, - page=page) - try: - data = urllib2.urlopen(url, timeout=10) - json_data = json.load(data) - except urllib2.URLError: - LOGGER.exception("Connection timeout: " + url) - json_data = [] - - return json_data - - def fill_object_data(self, element, _object): - for field in _object._meta.fields: - try: - if field.name == "user": - _object.update_user( - element["author"]["name"]) - continue - - if field.name == "profile_identifier": - _object.profile_identifier = \ - element["profile"]["identifier"] - continue - - if isinstance(field, DateTimeField): - value = parse(element[field.name]) - else: - value = element[field.name] - - setattr(_object, field.name, value) - except KeyError: - continue - except TypeError: - continue - - return _object - - def fetch_communities(self): - json_data = self.get_json_data('/api/v1/communities', 1) - - json_data = json_data['communities'] - for element in json_data: - community = NoosferoCommunity() - self.fill_object_data(element, community) - community.save() - - if 'categories' in element: - for category_json in element["categories"]: - category = NoosferoCategory.objects.get_or_create( - id=category_json["id"], name=category_json["name"])[0] - community.categories.add(category.id) - - def fetch_articles(self): - json_data = self.get_json_data('/api/v1/articles', 1) - - json_data = json_data['articles'] - - for element in json_data: - article = NoosferoArticle() - self.fill_object_data(element, article) - article.save() - - for category_json in element["categories"]: - category = NoosferoCategory.objects.get_or_create( - id=category_json["id"], name=category_json["name"])[0] - article.categories.add(category.id) - - def fetch_data(self): - LOGGER.info("Importing Communities") - self.fetch_communities() - - LOGGER.info("Importing Articles") - self.fetch_articles() - - @property - def app_label(self): - return 'noosfero' diff --git a/colab/plugins/noosfero/data_importer.py b/colab/plugins/noosfero/data_importer.py new 
file mode 100644 index 0000000..7e6eb2c --- /dev/null +++ b/colab/plugins/noosfero/data_importer.py @@ -0,0 +1,106 @@ +import json +import urllib +import urllib2 +import logging + +from dateutil.parser import parse + +from django.db.models.fields import DateTimeField + +from colab.plugins.data import PluginDataImporter + +from .models import NoosferoArticle, NoosferoCommunity, NoosferoCategory + +LOGGER = logging.getLogger('colab.plugin.debug') + + +class NoosferoDataImporter(PluginDataImporter): + + def get_request_url(self, path, **kwargs): + upstream = self.config.get('upstream') + kwargs['private_token'] = self.config.get('private_token') + params = urllib.urlencode(kwargs) + + if upstream[-1] == '/': + upstream = upstream[:-1] + + return u'{}{}?{}'.format(upstream, path, params) + + def get_json_data(self, api_url, page, pages=1000): + url = self.get_request_url(api_url, per_page=pages, + page=page) + try: + data = urllib2.urlopen(url, timeout=10) + json_data = json.load(data) + except urllib2.URLError: + LOGGER.exception("Connection timeout: " + url) + json_data = [] + + return json_data + + def fill_object_data(self, element, _object): + for field in _object._meta.fields: + try: + if field.name == "user": + _object.update_user( + element["author"]["name"]) + continue + + if field.name == "profile_identifier": + _object.profile_identifier = \ + element["profile"]["identifier"] + continue + + if isinstance(field, DateTimeField): + value = parse(element[field.name]) + else: + value = element[field.name] + + setattr(_object, field.name, value) + except KeyError: + continue + except TypeError: + continue + + return _object + + def fetch_communities(self): + json_data = self.get_json_data('/api/v1/communities', 1) + + json_data = json_data['communities'] + for element in json_data: + community = NoosferoCommunity() + self.fill_object_data(element, community) + community.save() + + if 'categories' in element: + for category_json in element["categories"]: + category 
= NoosferoCategory.objects.get_or_create( + id=category_json["id"], name=category_json["name"])[0] + community.categories.add(category.id) + + def fetch_articles(self): + json_data = self.get_json_data('/api/v1/articles', 1) + + json_data = json_data['articles'] + + for element in json_data: + article = NoosferoArticle() + self.fill_object_data(element, article) + article.save() + + for category_json in element["categories"]: + category = NoosferoCategory.objects.get_or_create( + id=category_json["id"], name=category_json["name"])[0] + article.categories.add(category.id) + + def fetch_data(self): + LOGGER.info("Importing Communities") + self.fetch_communities() + + LOGGER.info("Importing Articles") + self.fetch_articles() + + @property + def app_label(self): + return 'noosfero' diff --git a/colab/plugins/noosfero/views.py b/colab/plugins/noosfero/views.py index 2df515c..fc0a789 100644 --- a/colab/plugins/noosfero/views.py +++ b/colab/plugins/noosfero/views.py @@ -1,7 +1,7 @@ from django.conf import settings -from ..utils.views import ColabProxyView +from colab.plugins.views import ColabProxyView class NoosferoProxyView(ColabProxyView): diff --git a/colab/plugins/templatetags/plugins.py b/colab/plugins/templatetags/plugins.py index c10d255..7c94399 100644 --- a/colab/plugins/templatetags/plugins.py +++ b/colab/plugins/templatetags/plugins.py @@ -11,10 +11,11 @@ register = template.Library() @register.simple_tag(takes_context=True) def plugins_menu(context): + # TODO: Cache has to take language into account if context['user'].is_authenticated(): - cache_key = 'colab-proxy-menu-authenticated' + cache_key = 'colab-plugin-menu-authenticated' else: - cache_key = 'colab-proxy-menu-anonymous' + cache_key = 'colab-plugin-menu-anonymous' lang = get_language() cache_key += '-{}'.format(lang) diff --git a/colab/plugins/utils/data.py b/colab/plugins/utils/data.py deleted file mode 100644 index 6692ed5..0000000 --- a/colab/plugins/utils/data.py +++ /dev/null @@ -1,41 +0,0 @@ 
-#!/usr/bin/env python - -import importlib - -from django.conf import settings - -from colab.celery import app -from proxy_data_api import ProxyDataAPI - - -TASKS = set() - - -def register_tasks(): - - global TASKS - - for app_name in settings.INSTALLED_APPS: - - try: - module = importlib.import_module('{}.data_api'.format(app_name)) - except ImportError: - continue - - for item_name in dir(module): - item = getattr(module, item_name) - if item is ProxyDataAPI: - continue - - if callable(getattr(item, 'fetch_data', None)): - instance = item() - task_name = '{}.{}'.format(module.__name__, item_name) - task = app.task(name=task_name, bind=True)(instance.fetch_data) - TASKS.add(task) - - return TASKS - - -def data_import(self): - for task in TASKS: - task.delay() diff --git a/colab/plugins/utils/proxy_data_api.py b/colab/plugins/utils/proxy_data_api.py deleted file mode 100644 index a030a29..0000000 --- a/colab/plugins/utils/proxy_data_api.py +++ /dev/null @@ -1,14 +0,0 @@ - -import abc - -from django.conf import settings - - -class ProxyDataAPI(object): - - def __init__(self): - self.config = settings.COLAB_APPS.get(self.app_label, {}) - - @abc.abstractmethod - def fetch_data(self): - raise NotImplementedError('fetchData not yet implemented') diff --git a/colab/plugins/utils/views.py b/colab/plugins/utils/views.py deleted file mode 100644 index e8e128a..0000000 --- a/colab/plugins/utils/views.py +++ /dev/null @@ -1,37 +0,0 @@ - -import json - -from django.conf import settings - -from revproxy.views import DiazoProxyView - - -class ColabProxyView(DiazoProxyView): - add_remote_user = settings.REVPROXY_ADD_REMOTE_USER - diazo_theme_template = 'base.html' - html5 = True - - @property - def upstream(self): - proxy_config = settings.COLAB_APPS.get(self.app_label, {}) - return proxy_config.get('upstream') - - @property - def app_label(self): - raise NotImplementedError('app_label attribute must be set') - - def dispatch(self, request, *args, **kwargs): - - if 
request.user.is_authenticated(): - - remote_user_data = {} - - remote_user_data['email'] = request.user.email - remote_user_data['name'] = request.user.get_full_name() - - request.META['HTTP_REMOTE_USER_DATA'] = json.dumps( - remote_user_data, - sort_keys=True, - ) - - return super(ColabProxyView, self).dispatch(request, *args, **kwargs) diff --git a/colab/plugins/views.py b/colab/plugins/views.py new file mode 100644 index 0000000..ac01682 --- /dev/null +++ b/colab/plugins/views.py @@ -0,0 +1,39 @@ + +import json + +from django.conf import settings + +from revproxy.views import DiazoProxyView + +from .conf import get_plugin_config + + +class ColabProxyView(DiazoProxyView): + add_remote_user = settings.REVPROXY_ADD_REMOTE_USER + diazo_theme_template = 'base.html' + html5 = True + + @property + def upstream(self): + config = get_plugin_config(self.app_label) + return config.get('upstream') + + @property + def app_label(self): + raise NotImplementedError('app_label attribute must be set') + + def dispatch(self, request, *args, **kwargs): + + if request.user.is_authenticated(): + + remote_user_data = {} + + remote_user_data['email'] = request.user.email + remote_user_data['name'] = request.user.get_full_name() + + request.META['HTTP_REMOTE_USER_DATA'] = json.dumps( + remote_user_data, + sort_keys=True, + ) + + return super(ColabProxyView, self).dispatch(request, *args, **kwargs) diff --git a/colab/search/templates/search/includes/search_filters.html b/colab/search/templates/search/includes/search_filters.html index d9068d7..250ede2 100644 --- a/colab/search/templates/search/includes/search_filters.html +++ b/colab/search/templates/search/includes/search_filters.html @@ -112,28 +112,6 @@