Commit 540555adfbe3b316d0fe930185d5d4bdb42de6a1
1 parent
1efa7b87
Exists in
plugin_extra_configs
Refactored plugins to remove reference to proxy
Signed-off-by: Sergio Oliveira <sergio@tracy.com.br>
Showing
26 changed files
with
448 additions
and
456 deletions
Show diff stats
colab/management/commands/initconfig.py
@@ -80,7 +80,7 @@ LOGGING = {{ | @@ -80,7 +80,7 @@ LOGGING = {{ | ||
80 | # from colab.plugins.utils.menu import colab_url_factory | 80 | # from colab.plugins.utils.menu import colab_url_factory |
81 | # | 81 | # |
82 | # name = 'colab.plugins.gitlab' | 82 | # name = 'colab.plugins.gitlab' |
83 | -# verbose_name = 'Gitlab Proxy' | 83 | +# verbose_name = 'Gitlab Plugin' |
84 | # | 84 | # |
85 | # upstream = 'localhost' | 85 | # upstream = 'localhost' |
86 | # #middlewares = [] | 86 | # #middlewares = [] |
colab/plugins/apps.py
@@ -0,0 +1,14 @@ | @@ -0,0 +1,14 @@ | ||
1 | + | ||
2 | +import abc | ||
3 | + | ||
4 | +from django.conf import settings | ||
5 | + | ||
6 | + | ||
class PluginDataImporter(object):
    """Base class for plugin data importers.

    Subclasses set ``app_label`` and override :meth:`fetch_data`; the
    plugin's configuration dict is looked up from ``settings.COLAB_APPS``
    at instantiation time and exposed as ``self.config``.
    """

    def __init__(self):
        # A plugin without an entry in COLAB_APPS degrades to an empty
        # config dict instead of raising KeyError.
        all_apps = settings.COLAB_APPS
        self.config = all_apps.get(self.app_label, {})

    @abc.abstractmethod
    def fetch_data(self):
        raise NotImplementedError('fetchData not yet implemented')
@@ -0,0 +1,46 @@ | @@ -0,0 +1,46 @@ | ||
1 | +#!/usr/bin/env python | ||
2 | + | ||
3 | +import importlib | ||
4 | +import logging | ||
5 | + | ||
6 | +from django.conf import settings | ||
7 | + | ||
8 | +from colab.celery import app | ||
9 | + | ||
10 | +from . import PluginDataImporter | ||
11 | + | ||
12 | +LOGGER = logging.getLogger('colab.plugins.data') | ||
13 | +TASKS = set() | ||
14 | + | ||
15 | + | ||
def register_tasks():
    """Scan INSTALLED_APPS for ``<app>.data_importer`` modules and wrap
    each importer's ``fetch_data`` as a named Celery task.

    Every registered task is accumulated in the module-level ``TASKS``
    set, which is also returned for convenience.
    """
    global TASKS

    for installed_app in settings.INSTALLED_APPS:
        dotted_name = '{}.data_importer'.format(installed_app)
        try:
            importer_module = importlib.import_module(dotted_name)
        except ImportError:
            # Apps without a data_importer module simply register nothing.
            continue

        for attr_name in dir(importer_module):
            candidate = getattr(importer_module, attr_name)
            # The abstract base class itself must not become a task.
            if candidate is PluginDataImporter:
                continue
            if not callable(getattr(candidate, 'fetch_data', None)):
                continue

            task_name = '{}.{}'.format(importer_module.__name__, attr_name)
            importer = candidate()
            celery_task = app.task(name=task_name, bind=True)(importer.fetch_data)
            TASKS.add(celery_task)
            LOGGER.debug('Registered task: %s', task_name)

    LOGGER.debug(TASKS)
    return TASKS
42 | + | ||
43 | + | ||
def data_import(self=None):
    """Kick off every registered importer task asynchronously.

    ``self`` is accepted (and ignored) so this function can be registered
    as a bound Celery task (``bind=True`` passes the task instance as the
    first argument).  It defaults to ``None`` so the function can also be
    called directly with no arguments.
    """
    for task in TASKS:
        task.delay()
colab/plugins/gitlab/__init__.py
colab/plugins/gitlab/apps.py
@@ -4,7 +4,7 @@ from colab.plugins.gitlab.tasks import handling_method | @@ -4,7 +4,7 @@ from colab.plugins.gitlab.tasks import handling_method | ||
4 | from colab.signals.signals import register_signal, connect_signal | 4 | from colab.signals.signals import register_signal, connect_signal |
5 | 5 | ||
6 | 6 | ||
7 | -class ProxyGitlabAppConfig(ColabPluginAppConfig): | 7 | +class GitlabPluginAppConfig(ColabPluginAppConfig): |
8 | name = 'colab.plugins.gitlab' | 8 | name = 'colab.plugins.gitlab' |
9 | verbose_name = 'Gitlab Plugin' | 9 | verbose_name = 'Gitlab Plugin' |
10 | short_name = 'gitlab' | 10 | short_name = 'gitlab' |
colab/plugins/gitlab/data_api.py
@@ -1,215 +0,0 @@ | @@ -1,215 +0,0 @@ | ||
1 | -import json | ||
2 | -import urllib | ||
3 | -import urllib2 | ||
4 | -import logging | ||
5 | - | ||
6 | -from dateutil.parser import parse | ||
7 | - | ||
8 | -from django.db.models.fields import DateTimeField | ||
9 | - | ||
10 | -from colab.plugins.gitlab.models import (GitlabProject, GitlabMergeRequest, | ||
11 | - GitlabComment, GitlabIssue) | ||
12 | -from colab.plugins.utils.proxy_data_api import ProxyDataAPI | ||
13 | - | ||
14 | - | ||
15 | -LOGGER = logging.getLogger('colab.plugin.gitlab') | ||
16 | - | ||
17 | - | ||
18 | -class GitlabDataImporter(ProxyDataAPI): | ||
19 | - app_label = 'gitlab' | ||
20 | - | ||
21 | - def get_request_url(self, path, **kwargs): | ||
22 | - upstream = self.config.get('upstream') | ||
23 | - kwargs['private_token'] = self.config.get('private_token') | ||
24 | - params = urllib.urlencode(kwargs) | ||
25 | - | ||
26 | - if upstream[-1] == '/': | ||
27 | - upstream = upstream[:-1] | ||
28 | - | ||
29 | - return u'{}{}?{}'.format(upstream, path, params) | ||
30 | - | ||
31 | - def get_json_data(self, api_url, page, pages=1000): | ||
32 | - url = self.get_request_url(api_url, per_page=pages, | ||
33 | - page=page) | ||
34 | - | ||
35 | - try: | ||
36 | - data = urllib2.urlopen(url, timeout=10) | ||
37 | - json_data = json.load(data) | ||
38 | - except urllib2.URLError: | ||
39 | - LOGGER.exception("Connection timeout: " + url) | ||
40 | - json_data = [] | ||
41 | - | ||
42 | - return json_data | ||
43 | - | ||
44 | - def fill_object_data(self, element, _object): | ||
45 | - for field in _object._meta.fields: | ||
46 | - try: | ||
47 | - if field.name == "user": | ||
48 | - _object.update_user( | ||
49 | - element["author"]["username"]) | ||
50 | - continue | ||
51 | - if field.name == "project": | ||
52 | - _object.project_id = element["project_id"] | ||
53 | - continue | ||
54 | - | ||
55 | - if isinstance(field, DateTimeField): | ||
56 | - value = parse(element[field.name]) | ||
57 | - else: | ||
58 | - value = element[field.name] | ||
59 | - | ||
60 | - setattr(_object, field.name, value) | ||
61 | - except KeyError: | ||
62 | - continue | ||
63 | - | ||
64 | - return _object | ||
65 | - | ||
66 | - def fetch_projects(self): | ||
67 | - page = 1 | ||
68 | - projects = [] | ||
69 | - | ||
70 | - while True: | ||
71 | - json_data = self.get_json_data('/api/v3/projects/all', page) | ||
72 | - page = page + 1 | ||
73 | - | ||
74 | - if not len(json_data): | ||
75 | - break | ||
76 | - | ||
77 | - for element in json_data: | ||
78 | - project = GitlabProject() | ||
79 | - self.fill_object_data(element, project) | ||
80 | - projects.append(project) | ||
81 | - | ||
82 | - return projects | ||
83 | - | ||
84 | - def fetch_merge_request(self, projects): | ||
85 | - all_merge_request = [] | ||
86 | - | ||
87 | - for project in projects: | ||
88 | - page = 1 | ||
89 | - while True: | ||
90 | - url = '/api/v3/projects/{}/merge_requests'.format(project.id) | ||
91 | - json_data_mr = self.get_json_data(url, page) | ||
92 | - page = page + 1 | ||
93 | - | ||
94 | - if len(json_data_mr) == 0: | ||
95 | - break | ||
96 | - | ||
97 | - for element in json_data_mr: | ||
98 | - single_merge_request = GitlabMergeRequest() | ||
99 | - self.fill_object_data(element, single_merge_request) | ||
100 | - all_merge_request.append(single_merge_request) | ||
101 | - | ||
102 | - return all_merge_request | ||
103 | - | ||
104 | - def fetch_issue(self, projects): | ||
105 | - all_issues = [] | ||
106 | - | ||
107 | - for project in projects: | ||
108 | - page = 1 | ||
109 | - while True: | ||
110 | - url = '/api/v3/projects/{}/issues'.format(project.id) | ||
111 | - json_data_issue = self.get_json_data(url, page) | ||
112 | - page = page + 1 | ||
113 | - | ||
114 | - if len(json_data_issue) == 0: | ||
115 | - break | ||
116 | - | ||
117 | - for element in json_data_issue: | ||
118 | - single_issue = GitlabIssue() | ||
119 | - self.fill_object_data(element, single_issue) | ||
120 | - all_issues.append(single_issue) | ||
121 | - | ||
122 | - return all_issues | ||
123 | - | ||
124 | - def fetch_comments(self): | ||
125 | - all_comments = [] | ||
126 | - all_comments.extend(self.fetch_comments_MR()) | ||
127 | - all_comments.extend(self.fetch_comments_issues()) | ||
128 | - | ||
129 | - return all_comments | ||
130 | - | ||
131 | - def fetch_comments_MR(self): | ||
132 | - all_comments = [] | ||
133 | - all_merge_requests = GitlabMergeRequest.objects.all() | ||
134 | - | ||
135 | - for merge_request in all_merge_requests: | ||
136 | - page = 1 | ||
137 | - while True: | ||
138 | - url = '/api/v3/projects/{}/merge_requests/{}/notes'.format( | ||
139 | - merge_request.project_id, merge_request.id) | ||
140 | - json_data_mr = self.get_json_data(url, page) | ||
141 | - page = page + 1 | ||
142 | - | ||
143 | - if len(json_data_mr) == 0: | ||
144 | - break | ||
145 | - | ||
146 | - for element in json_data_mr: | ||
147 | - single_comment = GitlabComment() | ||
148 | - self.fill_object_data(element, single_comment) | ||
149 | - single_comment.project = merge_request.project | ||
150 | - single_comment.issue_comment = False | ||
151 | - single_comment.parent_id = merge_request.id | ||
152 | - all_comments.append(single_comment) | ||
153 | - | ||
154 | - return all_comments | ||
155 | - | ||
156 | - def fetch_comments_issues(self): | ||
157 | - all_comments = [] | ||
158 | - all_issues = GitlabIssue.objects.all() | ||
159 | - | ||
160 | - for issue in all_issues: | ||
161 | - page = 1 | ||
162 | - while True: | ||
163 | - url = '/api/v3/projects/{}/issues/{}/notes'.format( | ||
164 | - issue.project_id, issue.id) | ||
165 | - json_data_mr = self.get_json_data(url, page) | ||
166 | - page = page + 1 | ||
167 | - | ||
168 | - if len(json_data_mr) == 0: | ||
169 | - break | ||
170 | - | ||
171 | - for element in json_data_mr: | ||
172 | - single_comment = GitlabComment() | ||
173 | - self.fill_object_data(element, single_comment) | ||
174 | - single_comment.project = issue.project | ||
175 | - single_comment.issue_comment = True | ||
176 | - single_comment.parent_id = issue.id | ||
177 | - all_comments.append(single_comment) | ||
178 | - | ||
179 | - return all_comments | ||
180 | - | ||
181 | - | ||
182 | -class GitlabProjectImporter(GitlabDataImporter): | ||
183 | - | ||
184 | - def fetch_data(self): | ||
185 | - LOGGER.info("Importing Projects") | ||
186 | - projects = self.fetch_projects() | ||
187 | - for datum in projects: | ||
188 | - datum.save() | ||
189 | - | ||
190 | - | ||
191 | -class GitlabMergeRequestImporter(GitlabDataImporter): | ||
192 | - | ||
193 | - def fetch_data(self): | ||
194 | - LOGGER.info("Importing Merge Requests") | ||
195 | - merge_request_list = self.fetch_merge_request(projects) | ||
196 | - for datum in merge_request_list: | ||
197 | - datum.save() | ||
198 | - | ||
199 | - | ||
200 | -class GitlabIssueImporter(GitlabDataImporter): | ||
201 | - | ||
202 | - def fetch_data(self): | ||
203 | - LOGGER.info("Importing Issues") | ||
204 | - issue_list = self.fetch_issue(projects) | ||
205 | - for datum in issue_list: | ||
206 | - datum.save() | ||
207 | - | ||
208 | - | ||
209 | -class GitlabCommentImporter(GitlabDataImporter): | ||
210 | - | ||
211 | - def fetch_data(self): | ||
212 | - LOGGER.info("Importing Comments") | ||
213 | - comments_list = self.fetch_comments() | ||
214 | - for datum in comments_list: | ||
215 | - datum.save() |
@@ -0,0 +1,217 @@ | @@ -0,0 +1,217 @@ | ||
1 | +import json | ||
2 | +import urllib | ||
3 | +import urllib2 | ||
4 | +import logging | ||
5 | + | ||
6 | +from dateutil.parser import parse | ||
7 | + | ||
8 | +from django.db.models.fields import DateTimeField | ||
9 | +from colab.plugins.data import PluginDataImporter | ||
10 | + | ||
11 | +from .models import (GitlabProject, GitlabMergeRequest, | ||
12 | + GitlabComment, GitlabIssue) | ||
13 | + | ||
14 | + | ||
15 | +LOGGER = logging.getLogger('colab.plugin.gitlab') | ||
16 | + | ||
17 | + | ||
class GitlabDataImporter(PluginDataImporter):
    """Fetch projects, merge requests, issues and comments from a Gitlab
    instance through its REST API (v3) and map them onto the local
    ``Gitlab*`` models.

    ``self.config`` (populated by ``PluginDataImporter.__init__`` from
    ``settings.COLAB_APPS``) supplies the ``upstream`` base URL and the
    ``private_token`` sent with every request.
    """
    app_label = 'gitlab'

    def get_request_url(self, path, **kwargs):
        # Build '<upstream><path>?<querystring>'.  Every extra kwarg
        # becomes a query parameter; the API private token is always added.
        upstream = self.config.get('upstream')
        kwargs['private_token'] = self.config.get('private_token')
        params = urllib.urlencode(kwargs)

        # Strip a trailing slash so the joined URL has no '//'.
        # NOTE(review): assumes 'upstream' is a non-empty string — verify
        # against the plugin configuration.
        if upstream[-1] == '/':
            upstream = upstream[:-1]

        return u'{}{}?{}'.format(upstream, path, params)

    def get_json_data(self, api_url, page, pages=1000):
        # 'pages' is passed as the API's per_page parameter (page size),
        # despite the name.
        url = self.get_request_url(api_url, per_page=pages,
                                   page=page)

        try:
            data = urllib2.urlopen(url, timeout=10)
            json_data = json.load(data)
        except urllib2.URLError:
            # Network failures are logged and treated as an empty page,
            # which makes the pagination loops below terminate.
            LOGGER.exception("Connection timeout: " + url)
            json_data = []

        return json_data

    def fill_object_data(self, element, _object):
        # Copy matching keys of the JSON 'element' onto the model
        # instance, field by field.  Missing keys are skipped (KeyError).
        for field in _object._meta.fields:
            try:
                if field.name == "user":
                    # User is resolved through the model's own helper.
                    _object.update_user(
                        element["author"]["username"])
                    continue
                if field.name == "project":
                    # Link by id rather than fetching the related object.
                    _object.project_id = element["project_id"]
                    continue

                if isinstance(field, DateTimeField):
                    # Timestamps arrive as strings; parse them.
                    value = parse(element[field.name])
                else:
                    value = element[field.name]

                setattr(_object, field.name, value)
            except KeyError:
                continue

        return _object

    def fetch_projects(self):
        """Return unsaved GitlabProject instances for every project."""
        page = 1
        projects = []

        # Page through /projects/all until an empty page is returned.
        while True:
            json_data = self.get_json_data('/api/v3/projects/all', page)
            page = page + 1

            if not len(json_data):
                break

            for element in json_data:
                project = GitlabProject()
                self.fill_object_data(element, project)
                projects.append(project)

        return projects

    def fetch_merge_request(self, projects):
        """Return unsaved GitlabMergeRequest instances for 'projects'."""
        all_merge_request = []

        for project in projects:
            page = 1
            while True:
                url = '/api/v3/projects/{}/merge_requests'.format(project.id)
                json_data_mr = self.get_json_data(url, page)
                page = page + 1

                if len(json_data_mr) == 0:
                    break

                for element in json_data_mr:
                    single_merge_request = GitlabMergeRequest()
                    self.fill_object_data(element, single_merge_request)
                    all_merge_request.append(single_merge_request)

        return all_merge_request

    def fetch_issue(self, projects):
        """Return unsaved GitlabIssue instances for 'projects'."""
        all_issues = []

        for project in projects:
            page = 1
            while True:
                url = '/api/v3/projects/{}/issues'.format(project.id)
                json_data_issue = self.get_json_data(url, page)
                page = page + 1

                if len(json_data_issue) == 0:
                    break

                for element in json_data_issue:
                    single_issue = GitlabIssue()
                    self.fill_object_data(element, single_issue)
                    all_issues.append(single_issue)

        return all_issues

    def fetch_comments(self):
        """Return comments (notes) for both MRs and issues already in
        the local database."""
        all_comments = []
        all_comments.extend(self.fetch_comments_MR())
        all_comments.extend(self.fetch_comments_issues())

        return all_comments

    def fetch_comments_MR(self):
        # Notes attached to merge requests that were previously imported.
        all_comments = []
        all_merge_requests = GitlabMergeRequest.objects.all()

        for merge_request in all_merge_requests:
            page = 1
            while True:
                url = '/api/v3/projects/{}/merge_requests/{}/notes'.format(
                    merge_request.project_id, merge_request.id)
                json_data_mr = self.get_json_data(url, page)
                page = page + 1

                if len(json_data_mr) == 0:
                    break

                for element in json_data_mr:
                    single_comment = GitlabComment()
                    self.fill_object_data(element, single_comment)
                    single_comment.project = merge_request.project
                    # Distinguish MR comments from issue comments.
                    single_comment.issue_comment = False
                    single_comment.parent_id = merge_request.id
                    all_comments.append(single_comment)

        return all_comments

    def fetch_comments_issues(self):
        # Notes attached to issues that were previously imported.
        all_comments = []
        all_issues = GitlabIssue.objects.all()

        for issue in all_issues:
            page = 1
            while True:
                url = '/api/v3/projects/{}/issues/{}/notes'.format(
                    issue.project_id, issue.id)
                json_data_mr = self.get_json_data(url, page)
                page = page + 1

                if len(json_data_mr) == 0:
                    break

                for element in json_data_mr:
                    single_comment = GitlabComment()
                    self.fill_object_data(element, single_comment)
                    single_comment.project = issue.project
                    single_comment.issue_comment = True
                    single_comment.parent_id = issue.id
                    all_comments.append(single_comment)

        return all_comments
180 | + | ||
181 | + | ||
class GitlabProjectImporter(GitlabDataImporter):
    """Task entry point: fetch and persist every Gitlab project."""

    def fetch_data(self):
        LOGGER.info("Importing Projects")
        for project in self.fetch_projects():
            project.save()
189 | + | ||
190 | + | ||
class GitlabMergeRequestImporter(GitlabDataImporter):
    """Task entry point: fetch and persist merge requests for every
    Gitlab project already in the local database."""

    def fetch_data(self):
        LOGGER.info("Importing Merge Requests")
        known_projects = GitlabProject.objects.all()
        for merge_request in self.fetch_merge_request(known_projects):
            merge_request.save()
199 | + | ||
200 | + | ||
class GitlabIssueImporter(GitlabDataImporter):
    """Task entry point: fetch and persist issues for every Gitlab
    project already in the local database."""

    def fetch_data(self):
        LOGGER.info("Importing Issues")
        known_projects = GitlabProject.objects.all()
        for issue in self.fetch_issue(known_projects):
            issue.save()
209 | + | ||
210 | + | ||
class GitlabCommentImporter(GitlabDataImporter):
    """Task entry point: fetch and persist comments (notes) of both
    merge requests and issues."""

    def fetch_data(self):
        LOGGER.info("Importing Comments")
        for comment in self.fetch_comments():
            comment.save()
colab/plugins/gitlab/views.py
colab/plugins/mezuro/__init__.py
colab/plugins/mezuro/apps.py
@@ -2,6 +2,6 @@ | @@ -2,6 +2,6 @@ | ||
2 | from ..utils.apps import ColabPluginAppConfig | 2 | from ..utils.apps import ColabPluginAppConfig |
3 | 3 | ||
4 | 4 | ||
5 | -class ProxyMezuroAppConfig(ColabPluginAppConfig): | 5 | +class MezuroPluginAppConfig(ColabPluginAppConfig): |
6 | name = 'colab.plugins.mezuro' | 6 | name = 'colab.plugins.mezuro' |
7 | - verbose_name = 'Mezuro Proxy' | 7 | + verbose_name = 'Mezuro Plugin' |
colab/plugins/mezuro/views.py
colab/plugins/noosfero/__init__.py
colab/plugins/noosfero/apps.py
@@ -2,6 +2,6 @@ | @@ -2,6 +2,6 @@ | ||
2 | from ..utils.apps import ColabPluginAppConfig | 2 | from ..utils.apps import ColabPluginAppConfig |
3 | 3 | ||
4 | 4 | ||
5 | -class ProxyNoosferoAppConfig(ColabPluginAppConfig): | 5 | +class NoosferoPluginAppConfig(ColabPluginAppConfig): |
6 | name = 'colab.plugins.noosfero' | 6 | name = 'colab.plugins.noosfero' |
7 | - verbose_name = 'Noosfero Proxy' | 7 | + verbose_name = 'Noosfero Plugin' |
colab/plugins/noosfero/data_api.py
@@ -1,109 +0,0 @@ | @@ -1,109 +0,0 @@ | ||
1 | -import json | ||
2 | -import urllib | ||
3 | -import urllib2 | ||
4 | -import logging | ||
5 | - | ||
6 | -from dateutil.parser import parse | ||
7 | - | ||
8 | -from django.conf import settings | ||
9 | -from django.db.models.fields import DateTimeField | ||
10 | - | ||
11 | -from colab.plugins.noosfero.models import (NoosferoArticle, NoosferoCommunity, | ||
12 | - NoosferoCategory) | ||
13 | -from colab.plugins.utils.proxy_data_api import ProxyDataAPI | ||
14 | - | ||
15 | -LOGGER = logging.getLogger('colab.plugin.debug') | ||
16 | - | ||
17 | - | ||
18 | -class NoosferoDataAPI(ProxyDataAPI): | ||
19 | - | ||
20 | - def get_request_url(self, path, **kwargs): | ||
21 | - proxy_config = settings.COLAB_APPS.get(self.app_label, {}) | ||
22 | - | ||
23 | - upstream = proxy_config.get('upstream') | ||
24 | - kwargs['private_token'] = proxy_config.get('private_token') | ||
25 | - params = urllib.urlencode(kwargs) | ||
26 | - | ||
27 | - if upstream[-1] == '/': | ||
28 | - upstream = upstream[:-1] | ||
29 | - | ||
30 | - return u'{}{}?{}'.format(upstream, path, params) | ||
31 | - | ||
32 | - def get_json_data(self, api_url, page, pages=1000): | ||
33 | - url = self.get_request_url(api_url, per_page=pages, | ||
34 | - page=page) | ||
35 | - try: | ||
36 | - data = urllib2.urlopen(url, timeout=10) | ||
37 | - json_data = json.load(data) | ||
38 | - except urllib2.URLError: | ||
39 | - LOGGER.exception("Connection timeout: " + url) | ||
40 | - json_data = [] | ||
41 | - | ||
42 | - return json_data | ||
43 | - | ||
44 | - def fill_object_data(self, element, _object): | ||
45 | - for field in _object._meta.fields: | ||
46 | - try: | ||
47 | - if field.name == "user": | ||
48 | - _object.update_user( | ||
49 | - element["author"]["name"]) | ||
50 | - continue | ||
51 | - | ||
52 | - if field.name == "profile_identifier": | ||
53 | - _object.profile_identifier = \ | ||
54 | - element["profile"]["identifier"] | ||
55 | - continue | ||
56 | - | ||
57 | - if isinstance(field, DateTimeField): | ||
58 | - value = parse(element[field.name]) | ||
59 | - else: | ||
60 | - value = element[field.name] | ||
61 | - | ||
62 | - setattr(_object, field.name, value) | ||
63 | - except KeyError: | ||
64 | - continue | ||
65 | - except TypeError: | ||
66 | - continue | ||
67 | - | ||
68 | - return _object | ||
69 | - | ||
70 | - def fetch_communities(self): | ||
71 | - json_data = self.get_json_data('/api/v1/communities', 1) | ||
72 | - | ||
73 | - json_data = json_data['communities'] | ||
74 | - for element in json_data: | ||
75 | - community = NoosferoCommunity() | ||
76 | - self.fill_object_data(element, community) | ||
77 | - community.save() | ||
78 | - | ||
79 | - if 'categories' in element: | ||
80 | - for category_json in element["categories"]: | ||
81 | - category = NoosferoCategory.objects.get_or_create( | ||
82 | - id=category_json["id"], name=category_json["name"])[0] | ||
83 | - community.categories.add(category.id) | ||
84 | - | ||
85 | - def fetch_articles(self): | ||
86 | - json_data = self.get_json_data('/api/v1/articles', 1) | ||
87 | - | ||
88 | - json_data = json_data['articles'] | ||
89 | - | ||
90 | - for element in json_data: | ||
91 | - article = NoosferoArticle() | ||
92 | - self.fill_object_data(element, article) | ||
93 | - article.save() | ||
94 | - | ||
95 | - for category_json in element["categories"]: | ||
96 | - category = NoosferoCategory.objects.get_or_create( | ||
97 | - id=category_json["id"], name=category_json["name"])[0] | ||
98 | - article.categories.add(category.id) | ||
99 | - | ||
100 | - def fetch_data(self): | ||
101 | - LOGGER.info("Importing Communities") | ||
102 | - self.fetch_communities() | ||
103 | - | ||
104 | - LOGGER.info("Importing Articles") | ||
105 | - self.fetch_articles() | ||
106 | - | ||
107 | - @property | ||
108 | - def app_label(self): | ||
109 | - return 'noosfero' |
@@ -0,0 +1,106 @@ | @@ -0,0 +1,106 @@ | ||
1 | +import json | ||
2 | +import urllib | ||
3 | +import urllib2 | ||
4 | +import logging | ||
5 | + | ||
6 | +from dateutil.parser import parse | ||
7 | + | ||
8 | +from django.db.models.fields import DateTimeField | ||
9 | + | ||
10 | +from colab.plugins.data import PluginDataImporter | ||
11 | + | ||
12 | +from .models import NoosferoArticle, NoosferoCommunity, NoosferoCategory | ||
13 | + | ||
14 | +LOGGER = logging.getLogger('colab.plugin.debug') | ||
15 | + | ||
16 | + | ||
class NoosferoDataImporter(PluginDataImporter):
    """Import communities and articles from a Noosfero instance through
    its REST API (v1) into the local ``Noosfero*`` models.

    ``self.config`` (populated by ``PluginDataImporter.__init__`` from
    ``settings.COLAB_APPS``) supplies the ``upstream`` base URL and the
    ``private_token`` sent with every request.
    """

    # Plain class attribute instead of the previous trailing @property,
    # for consistency with GitlabDataImporter; the base __init__ reads it
    # before selecting the plugin's entry in settings.COLAB_APPS, and a
    # class attribute serves that lookup identically.
    app_label = 'noosfero'

    def get_request_url(self, path, **kwargs):
        # Build '<upstream><path>?<querystring>'; every kwarg becomes a
        # query parameter and the API private token is always appended.
        upstream = self.config.get('upstream')
        kwargs['private_token'] = self.config.get('private_token')
        params = urllib.urlencode(kwargs)

        # Strip a trailing slash so the joined URL has no '//'.
        # NOTE(review): assumes 'upstream' is a non-empty string — verify
        # against the plugin configuration.
        if upstream[-1] == '/':
            upstream = upstream[:-1]

        return u'{}{}?{}'.format(upstream, path, params)

    def get_json_data(self, api_url, page, pages=1000):
        # 'pages' is passed as the API's per_page parameter (page size),
        # despite the name.
        url = self.get_request_url(api_url, per_page=pages,
                                   page=page)
        try:
            data = urllib2.urlopen(url, timeout=10)
            json_data = json.load(data)
        except urllib2.URLError:
            # Network failures are logged and treated as an empty result.
            LOGGER.exception("Connection timeout: " + url)
            json_data = []

        return json_data

    def fill_object_data(self, element, _object):
        # Copy matching keys of the JSON 'element' onto the model
        # instance, field by field; missing keys (KeyError) and
        # unparsable values (TypeError) are skipped.
        for field in _object._meta.fields:
            try:
                if field.name == "user":
                    # User is resolved through the model's own helper.
                    _object.update_user(
                        element["author"]["name"])
                    continue

                if field.name == "profile_identifier":
                    _object.profile_identifier = \
                        element["profile"]["identifier"]
                    continue

                if isinstance(field, DateTimeField):
                    # Timestamps arrive as strings; parse them.
                    value = parse(element[field.name])
                else:
                    value = element[field.name]

                setattr(_object, field.name, value)
            except KeyError:
                continue
            except TypeError:
                continue

        return _object

    def fetch_communities(self):
        """Fetch and save every community, linking its categories."""
        json_data = self.get_json_data('/api/v1/communities', 1)

        json_data = json_data['communities']
        for element in json_data:
            community = NoosferoCommunity()
            self.fill_object_data(element, community)
            community.save()

            if 'categories' in element:
                for category_json in element["categories"]:
                    category = NoosferoCategory.objects.get_or_create(
                        id=category_json["id"], name=category_json["name"])[0]
                    community.categories.add(category.id)

    def fetch_articles(self):
        """Fetch and save every article, linking its categories."""
        json_data = self.get_json_data('/api/v1/articles', 1)

        json_data = json_data['articles']

        for element in json_data:
            article = NoosferoArticle()
            self.fill_object_data(element, article)
            article.save()

            # NOTE(review): unlike fetch_communities, no 'categories in
            # element' guard here — presumably articles always carry the
            # key; confirm against the Noosfero API.
            for category_json in element["categories"]:
                category = NoosferoCategory.objects.get_or_create(
                    id=category_json["id"], name=category_json["name"])[0]
                article.categories.add(category.id)

    def fetch_data(self):
        """Import communities first, then articles."""
        LOGGER.info("Importing Communities")
        self.fetch_communities()

        LOGGER.info("Importing Articles")
        self.fetch_articles()
colab/plugins/noosfero/views.py
colab/plugins/templatetags/plugins.py
@@ -11,10 +11,11 @@ register = template.Library() | @@ -11,10 +11,11 @@ register = template.Library() | ||
11 | @register.simple_tag(takes_context=True) | 11 | @register.simple_tag(takes_context=True) |
12 | def plugins_menu(context): | 12 | def plugins_menu(context): |
13 | 13 | ||
14 | + # TODO: Cache has to take language into account | ||
14 | if context['user'].is_authenticated(): | 15 | if context['user'].is_authenticated(): |
15 | - cache_key = 'colab-proxy-menu-authenticated' | 16 | + cache_key = 'colab-plugin-menu-authenticated' |
16 | else: | 17 | else: |
17 | - cache_key = 'colab-proxy-menu-anonymous' | 18 | + cache_key = 'colab-plugin-menu-anonymous' |
18 | 19 | ||
19 | lang = get_language() | 20 | lang = get_language() |
20 | cache_key += '-{}'.format(lang) | 21 | cache_key += '-{}'.format(lang) |
colab/plugins/utils/data.py
@@ -1,41 +0,0 @@ | @@ -1,41 +0,0 @@ | ||
1 | -#!/usr/bin/env python | ||
2 | - | ||
3 | -import importlib | ||
4 | - | ||
5 | -from django.conf import settings | ||
6 | - | ||
7 | -from colab.celery import app | ||
8 | -from proxy_data_api import ProxyDataAPI | ||
9 | - | ||
10 | - | ||
11 | -TASKS = set() | ||
12 | - | ||
13 | - | ||
14 | -def register_tasks(): | ||
15 | - | ||
16 | - global TASKS | ||
17 | - | ||
18 | - for app_name in settings.INSTALLED_APPS: | ||
19 | - | ||
20 | - try: | ||
21 | - module = importlib.import_module('{}.data_api'.format(app_name)) | ||
22 | - except ImportError: | ||
23 | - continue | ||
24 | - | ||
25 | - for item_name in dir(module): | ||
26 | - item = getattr(module, item_name) | ||
27 | - if item is ProxyDataAPI: | ||
28 | - continue | ||
29 | - | ||
30 | - if callable(getattr(item, 'fetch_data', None)): | ||
31 | - instance = item() | ||
32 | - task_name = '{}.{}'.format(module.__name__, item_name) | ||
33 | - task = app.task(name=task_name, bind=True)(instance.fetch_data) | ||
34 | - TASKS.add(task) | ||
35 | - | ||
36 | - return TASKS | ||
37 | - | ||
38 | - | ||
39 | -def data_import(self): | ||
40 | - for task in TASKS: | ||
41 | - task.delay() |
colab/plugins/utils/proxy_data_api.py
@@ -1,14 +0,0 @@ | @@ -1,14 +0,0 @@ | ||
1 | - | ||
2 | -import abc | ||
3 | - | ||
4 | -from django.conf import settings | ||
5 | - | ||
6 | - | ||
7 | -class ProxyDataAPI(object): | ||
8 | - | ||
9 | - def __init__(self): | ||
10 | - self.config = settings.COLAB_APPS.get(self.app_label, {}) | ||
11 | - | ||
12 | - @abc.abstractmethod | ||
13 | - def fetch_data(self): | ||
14 | - raise NotImplementedError('fetchData not yet implemented') |
colab/plugins/utils/views.py
@@ -1,37 +0,0 @@ | @@ -1,37 +0,0 @@ | ||
1 | - | ||
2 | -import json | ||
3 | - | ||
4 | -from django.conf import settings | ||
5 | - | ||
6 | -from revproxy.views import DiazoProxyView | ||
7 | - | ||
8 | - | ||
9 | -class ColabProxyView(DiazoProxyView): | ||
10 | - add_remote_user = settings.REVPROXY_ADD_REMOTE_USER | ||
11 | - diazo_theme_template = 'base.html' | ||
12 | - html5 = True | ||
13 | - | ||
14 | - @property | ||
15 | - def upstream(self): | ||
16 | - proxy_config = settings.COLAB_APPS.get(self.app_label, {}) | ||
17 | - return proxy_config.get('upstream') | ||
18 | - | ||
19 | - @property | ||
20 | - def app_label(self): | ||
21 | - raise NotImplementedError('app_label attribute must be set') | ||
22 | - | ||
23 | - def dispatch(self, request, *args, **kwargs): | ||
24 | - | ||
25 | - if request.user.is_authenticated(): | ||
26 | - | ||
27 | - remote_user_data = {} | ||
28 | - | ||
29 | - remote_user_data['email'] = request.user.email | ||
30 | - remote_user_data['name'] = request.user.get_full_name() | ||
31 | - | ||
32 | - request.META['HTTP_REMOTE_USER_DATA'] = json.dumps( | ||
33 | - remote_user_data, | ||
34 | - sort_keys=True, | ||
35 | - ) | ||
36 | - | ||
37 | - return super(ColabProxyView, self).dispatch(request, *args, **kwargs) |
@@ -0,0 +1,39 @@ | @@ -0,0 +1,39 @@ | ||
1 | + | ||
2 | +import json | ||
3 | + | ||
4 | +from django.conf import settings | ||
5 | + | ||
6 | +from revproxy.views import DiazoProxyView | ||
7 | + | ||
8 | +from .conf import get_plugin_config | ||
9 | + | ||
10 | + | ||
class ColabProxyView(DiazoProxyView):
    """Reverse-proxy view that serves an upstream application through
    the Diazo 'base.html' theme, forwarding the logged-in user's
    identity as a request header."""

    add_remote_user = settings.REVPROXY_ADD_REMOTE_USER
    diazo_theme_template = 'base.html'
    html5 = True

    @property
    def upstream(self):
        # The upstream base URL lives in the plugin's configuration.
        return get_plugin_config(self.app_label).get('upstream')

    @property
    def app_label(self):
        raise NotImplementedError('app_label attribute must be set')

    def dispatch(self, request, *args, **kwargs):
        # Authenticated requests carry the user's identity upstream as a
        # JSON header; sorted keys keep the header value deterministic.
        if request.user.is_authenticated():
            identity = {
                'email': request.user.email,
                'name': request.user.get_full_name(),
            }
            request.META['HTTP_REMOTE_USER_DATA'] = json.dumps(
                identity,
                sort_keys=True,
            )

        return super(ColabProxyView, self).dispatch(request, *args, **kwargs)
colab/search/templates/search/includes/search_filters.html
@@ -112,28 +112,6 @@ | @@ -112,28 +112,6 @@ | ||
112 | 112 | ||
113 | <ul class="unstyled-list"> | 113 | <ul class="unstyled-list"> |
114 | 114 | ||
115 | - {% if is_trac %} | ||
116 | - <li> | ||
117 | - <span class="glyphicon glyphicon-book"></span> | ||
118 | - <a href="{% append_to_get type='wiki' %}">{% trans "Wiki" %}</a> | ||
119 | - </li> | ||
120 | - <li> | ||
121 | - <span class="glyphicon glyphicon-tag"></span> | ||
122 | - <a href="{% append_to_get type='ticket' %}">{% trans "Ticket" %}</a> | ||
123 | - </li> | ||
124 | - <li> | ||
125 | - <span class="glyphicon glyphicon-align-right"></span> | ||
126 | - <a href="{% append_to_get type='changeset' %}">{% trans "Changeset" %}</a> | ||
127 | - </li> | ||
128 | - <li> | ||
129 | - <span class="glyphicon glyphicon-user"></span> | ||
130 | - <a href="{% append_to_get type='user' %}">{% trans "User" %}</a> | ||
131 | - </li> | ||
132 | - <li> | ||
133 | - <span class="glyphicon glyphicon-file"></span> | ||
134 | - <a href="{% append_to_get type='attachment' %}">{% trans "Attachment" %}</a> | ||
135 | - </li> | ||
136 | - {% endif %} | ||
137 | <li> | 115 | <li> |
138 | <span class="glyphicon glyphicon-envelope"></span> | 116 | <span class="glyphicon glyphicon-envelope"></span> |
139 | <a href="{% append_to_get type='thread' %}">{% trans "Discussion" %}</a> | 117 | <a href="{% append_to_get type='thread' %}">{% trans "Discussion" %}</a> |
colab/super_archives/templates/superarchives/thread-dashboard.html
@@ -12,9 +12,6 @@ | @@ -12,9 +12,6 @@ | ||
12 | <h3><b>{{ listname|title|lower }} {% if description %} ({{ description }}){% endif %}</b></h3> | 12 | <h3><b>{{ listname|title|lower }} {% if description %} ({{ description }}){% endif %}</b></h3> |
13 | <div class="btn-group btn-group-sm"> | 13 | <div class="btn-group btn-group-sm"> |
14 | <a href="#" class="btn btn-default" disabled="disabled">{% blocktrans %}{{ number_of_users }} members{% endblocktrans %}</a> | 14 | <a href="#" class="btn btn-default" disabled="disabled">{% blocktrans %}{{ number_of_users }} members{% endblocktrans %}</a> |
15 | - {% if proxy.trac %} | ||
16 | - <a href="/wiki/grupos/{{ listname }}" class="btn btn-default">Wiki</a> | ||
17 | - {% endif %} | ||
18 | </div> | 15 | </div> |
19 | <hr/> | 16 | <hr/> |
20 | 17 |