Commit da3d7ce27c9c7196d7070fd349cf8d45bd6df577
1 parent
e36a5a7b
Exists in
master
Adiciona arquivos iniciais
Showing
9 changed files
with
484 additions
and
1 deletions
Show diff stats
| ... | ... | @@ -0,0 +1,53 @@ |
| 1 | +TASKMGR_DIR = $(CURDIR) | |
| 2 | +TASKMGR_INIT_SCRIPT = wikilibras-taskmgr.init | |
| 3 | +TASKMGR_INIT = ${TASKMGR_DIR}/${TASKMGR_INIT_SCRIPT} | |
| 4 | +TASKMGR_INIT_ETC = /etc/init.d/${TASKMGR_INIT_SCRIPT} | |
| 5 | +TASKMGR_LOG = ${TASKMGR_DIR}/events.log | |
| 6 | +TASKMGR_REQUIREMENTS = ${TASKMGR_DIR}/requirements.txt | |
| 7 | +TASKMGR_ENV = ${TASKMGR_DIR}/env | |
| 8 | +TASKMGR_ENV_BIN = ${TASKMGR_ENV}/bin | |
| 9 | +TASKMGR_ENV_ACTIVATE = ${TASKMGR_ENV_BIN}/activate | |
| 10 | +TASKMGR_ENV_PYTHON = ${TASKMGR_ENV_BIN}/python | |
| 11 | +TASKMGR_ENV_PIP = ${TASKMGR_ENV_BIN}/pip | |
| 12 | +TASKMGR_SETTINGS = ${TASKMGR_DIR}/settings_local.json | |
| 13 | +TASKMGR_SETTINGS_T = ${TASKMGR_SETTINGS}.template | |
| 14 | +TASKMGR_DATABASE = ${TASKMGR_DIR}/database.json | |
| 15 | +TASKMGR_DATABASE_B = ${TASKMGR_PROJECT}.bak | |
| 16 | + | |
| 17 | +install: build enable-startup | |
| 18 | + | |
| 19 | +build: uninstall | |
| 20 | + @rm -f ${TASKMGR_LOG} | |
| 21 | + @( \ | |
| 22 | + cd ${TASKMGR_DIR}; \ | |
| 23 | + virtualenv ${TASKMGR_ENV}; \ | |
| 24 | + . ${TASKMGR_ENV_ACTIVATE}; \ | |
| 25 | + ${TASKMGR_ENV_PIP} install -U pip; \ | |
| 26 | + ${TASKMGR_ENV_PIP} install -r ${TASKMGR_REQUIREMENTS}; \ | |
| 27 | + ) | |
| 28 | + | |
| 29 | +clean: | |
| 30 | + @find ${TASKMGR_DIR} -regextype posix-awk -regex "(.*.log|.*.pyc)" -type f -delete | |
| 31 | + | |
| 32 | +disable-startup: | |
| 33 | + @sudo update-rc.d -f ${TASKMGR_INIT_SCRIPT} remove | |
| 34 | + @sudo rm -f ${TASKMGR_INIT_ETC} | |
| 35 | + | |
| 36 | +enable-startup: | |
| 37 | + @sed "s#<path-to-project>#${TASKMGR_DIR}#" ${TASKMGR_INIT} | sudo tee ${TASKMGR_INIT_ETC} | |
| 38 | + @sudo chmod 755 ${TASKMGR_INIT_ETC} | |
| 39 | + @sudo chown "root:root" ${TASKMGR_INIT_ETC} | |
| 40 | + @sudo update-rc.d -f ${TASKMGR_INIT_SCRIPT} defaults | |
| 41 | + | |
| 42 | +run: | |
| 43 | + @( \ | |
| 44 | + cd ${TASKMGR_DIR}; \ | |
| 45 | + . ${TASKMGR_ENV_ACTIVATE}; \ | |
| 46 | + ${TASKMGR_ENV_PYTHON} ${TASKMGR_DIR}/main.py; \ | |
| 47 | + ) | |
| 48 | + | |
| 49 | +uninstall: clean disable-startup | |
| 50 | + @rm -rf ${TASKMGR_ENV} | |
| 51 | + | |
| 52 | +reset: | |
| 53 | + @if [ -e "${TASKMGR_DATABASE}" ]; then cat ${TASKMGR_DATABASE}; echo ""; sudo rm -f ${TASKMGR_DATABASE} ${TASKMGR_LOG}; fi | ... | ... |
| ... | ... | @@ -0,0 +1,46 @@ |
| 1 | +## Copie o arquivo modelo de configuração | |
| 2 | + | |
| 3 | +```sh | |
| 4 | +$ cp settings_local.json.template settings_local.json | |
| 5 | +``` | |
| 6 | + | |
| 7 | +## Edite o arquivo de configuração | |
| 8 | + | |
| 9 | +```sh | |
| 10 | +$ gedit settings_local.json | |
| 11 | +``` | |
| 12 | + | |
| 13 | +- Substitua o valor da chave "db_host": "localhost" pelo endereço IP da API do banco de dados de sinais. | |
| 14 | + Exemplo: "db_host": "150.250.350.45". | |
| 15 | + | |
| 16 | +- Substitua "<path-to-corretor>" pelo caminho do diretório completo do projeto "corretor_sinais". | |
| 17 | + | |
| 18 | +- Substitua "<path-to-validador>" pelo caminho do diretório completo do projeto "validador_sinais". | |
| 19 | + | |
| 20 | +- Substitua "<path-to-wikilibras>" pelo caminho do diretório completo do projeto "wikilibrasV2". | |
| 21 | + | |
| 22 | +- Substitua my-api-key por sua API key do Pybossa. | |
| 23 | + | |
| 24 | +## Instale dependências | |
| 25 | + | |
| 26 | +```sh | |
| 27 | +$ make install | |
| 28 | +``` | |
| 29 | + | |
| 30 | +## Execute teste (Opcional) | |
| 31 | + | |
| 32 | +```sh | |
| 33 | +$ make run | |
| 34 | +``` | |
| 35 | + | |
| 36 | +## Habilitar a inicialização automática do serviço | |
| 37 | + | |
| 38 | +```sh | |
| 39 | +$ make enable-startup | |
| 40 | +``` | |
| 41 | + | |
| 42 | +## Desabilitar a inicialização automática do serviço | |
| 43 | + | |
| 44 | +```sh | |
| 45 | +$ make disable-startup | |
| 46 | +``` | ... | ... |
inicial.txt
| ... | ... | @@ -1 +0,0 @@ |
| 1 | -Repositorio taskmgr Criado |
| ... | ... | @@ -0,0 +1,32 @@ |
| 1 | +# -*- coding: UTF-8 -*- | |
| 2 | + | |
| 3 | +import os | |
| 4 | +import pyutil | |
| 5 | +import sys | |
| 6 | +import task_manager | |
| 7 | +import time | |
| 8 | + | |
| 9 | +def main(): | |
| 10 | + seconds = 10 | |
| 11 | + json_data = None | |
| 12 | + task = None | |
| 13 | + while (task == None): | |
| 14 | + try: | |
| 15 | + json_data = task_manager.load_json(os.path.join(os.path.dirname(os.path.abspath('__file__')), "settings_local.json")) | |
| 16 | + task = task_manager.task_config(json_data) | |
| 17 | + except: | |
| 18 | + task = None | |
| 19 | + print("Waiting for Network to Get Projects ID's") | |
| 20 | + time.sleep(10) | |
| 21 | + | |
| 22 | + if (json_data != None and task != None): | |
| 23 | + pyutil.log("wikilibras task manager started") | |
| 24 | + while (True): | |
| 25 | + task.run(task.data["corretor"]) | |
| 26 | + task.run(task.data["validador"]) | |
| 27 | + task.run(task.data["wikilibras"]) | |
| 28 | + print("Waiting %d seconds to check new tasks" % (seconds)) | |
| 29 | + time.sleep(seconds) | |
| 30 | + | |
| 31 | +if __name__ == '__main__': | |
| 32 | + main() | ... | ... |
| ... | ... | @@ -0,0 +1,74 @@ |
| 1 | +# -*- coding: UTF-8 -*- | |
| 2 | + | |
| 3 | +import datetime | |
| 4 | +import logging | |
| 5 | +import os | |
| 6 | +import shutil | |
| 7 | +import sys | |
| 8 | + | |
| 9 | +# @def funcao para obter data e hora atual do sistema | |
| 10 | +# @param string formato de data e hora | |
| 11 | +# @return string retorna data e hora do sistema no momento da chamada | |
| 12 | +def getTimeStamp(date_fmt="%Y-%m-%d %H:%M:%S.%f"): | |
| 13 | + if ("%f" in date_fmt): | |
| 14 | + # [:-3] remove 3 casas decimais dos milisegundos (ms) | |
| 15 | + return datetime.datetime.now().strftime(date_fmt)[:-3] | |
| 16 | + else: | |
| 17 | + return datetime.datetime.now().strftime(date_fmt) | |
| 18 | + | |
| 19 | +# @def funcao para gravar log dos eventos em arquivo | |
| 20 | +# @param string mensagem a ser salva | |
| 21 | +# @param int indice do tipo de log 0: apenas print, 1: debug, 2: info, 3: warn, 4: error, 5: critical | |
| 22 | +# @param string caminho completo do arquivo de logs | |
| 23 | +# @param string formato de tempo utilizado | |
| 24 | +# @return none | |
| 25 | +def log(msg="", log_level=2, log_file = os.path.join(os.path.dirname(os.path.abspath('__file__')), "events.log")): | |
| 26 | + dict_level = { | |
| 27 | + 0: ["Print", None, None], | |
| 28 | + 1: ["DEBUG", logging.DEBUG, logging.debug], | |
| 29 | + 2: ["INFO", logging.INFO, logging.info], | |
| 30 | + 3: ["WARNING", logging.WARN, logging.warn], | |
| 31 | + 4: ["ERROR", logging.ERROR, logging.error], | |
| 32 | + 5: ["CRITICAL", logging.CRITICAL, logging.critical] | |
| 33 | + } | |
| 34 | + # log_format = "[%(asctime)s.%(msecs).03d] %(levelname)s: <User: %(name)s> <Module: %(module)s> <Function: %(funcName)s>: %(message)s" | |
| 35 | + log_format = "[%(asctime)s.%(msecs).03d] %(levelname)s: %(message)s" | |
| 36 | + date_fmt = "%Y-%m-%d %H:%M:%S" | |
| 37 | + logging.basicConfig(filename=log_file, datefmt=date_fmt, format=log_format, level=dict_level[log_level][1]) | |
| 38 | + logging.Formatter(fmt="%(asctime)s", datefmt=date_fmt) | |
| 39 | + log_level %= len(dict_level) | |
| 40 | + write_mode = dict_level[log_level][2] | |
| 41 | + print("[%s] %s: %s" % (getTimeStamp(), dict_level[log_level][0], msg)) | |
| 42 | + if (write_mode != None): | |
| 43 | + write_mode(msg) | |
| 44 | + return | |
| 45 | + | |
| 46 | +def file_exists(file_path): | |
| 47 | + if ((os.path.isfile(file_path) == 1) and (os.path.exists(file_path) == 1)): | |
| 48 | + return True | |
| 49 | + else: | |
| 50 | + return False | |
| 51 | + | |
| 52 | +# @def funcao para exibir excecao | |
| 53 | +# @param string deve ser passado: "__file__" para identificar em qual modulo ocorreu a excecao | |
| 54 | +# @return int retorna 1 | |
| 55 | +def print_stack_trace(): | |
| 56 | + error = "\n File name: %s\n Function name: %s\n Line code: %s\n Type exception: %s\n Message: %s" % ( | |
| 57 | + os.path.basename(sys.exc_info()[2].tb_frame.f_code.co_filename), | |
| 58 | + sys.exc_info()[2].tb_frame.f_code.co_name, | |
| 59 | + sys.exc_info()[2].tb_lineno, | |
| 60 | + sys.exc_info()[0].__name__, | |
| 61 | + sys.exc_info()[1] | |
| 62 | + ) | |
| 63 | + log(error, 4) | |
| 64 | + return 1 | |
| 65 | + | |
| 66 | +def get_date_now(): | |
| 67 | + return datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S') | |
| 68 | + | |
| 69 | +def is_int(string): | |
| 70 | + try: | |
| 71 | + int(string) | |
| 72 | + return True | |
| 73 | + except ValueError: | |
| 74 | + return False | ... | ... |
| ... | ... | @@ -0,0 +1,19 @@ |
| 1 | +{ | |
| 2 | + "db_host": "localhost", | |
| 3 | + "db_timeout": 15, | |
| 4 | + "db_port": 200, | |
| 5 | + "pb_api_key": "my-api-key", | |
| 6 | + "pb_endpoint": "http://localhost/pybossa/", | |
| 7 | + "corretor": { | |
| 8 | + "short_name": "corretor_sinais", | |
| 9 | + "video_path": "<path-to-corretor>/view/videos" | |
| 10 | + }, | |
| 11 | + "validador": { | |
| 12 | + "short_name": "validador_sinais", | |
| 13 | + "video_path": "<path-to-validador>/view/videos" | |
| 14 | + }, | |
| 15 | + "wikilibras": { | |
| 16 | + "short_name": "wikilibras", | |
| 17 | + "video_path": "<path-to-wikilibras>/view/videos" | |
| 18 | + } | |
| 19 | +} | ... | ... |
| ... | ... | @@ -0,0 +1,224 @@ |
| 1 | +# -*- coding: UTF-8 -*- | |
| 2 | + | |
| 3 | +import json | |
| 4 | +import os | |
| 5 | +import pbclient | |
| 6 | +import pyutil | |
| 7 | +import requests | |
| 8 | +import shutil | |
| 9 | +import logging | |
| 10 | + | |
| 11 | +def load_json(filename): | |
| 12 | + r = {} | |
| 13 | + try: | |
| 14 | + fp = open(filename, 'r') | |
| 15 | + if (fp): | |
| 16 | + r = json.load(fp) | |
| 17 | + fp.close() | |
| 18 | + except: | |
| 19 | + pass | |
| 20 | + return r | |
| 21 | + | |
| 22 | +def save_json(data, filename, idt = 4): | |
| 23 | + try: | |
| 24 | + fp = open(filename, 'w') | |
| 25 | + if (fp): | |
| 26 | + json.dump(data, fp, ensure_ascii = False, indent = idt, sort_keys = True) | |
| 27 | + fp.close() | |
| 28 | + except: | |
| 29 | + pyutil.print_stack_trace() | |
| 30 | + return False | |
| 31 | + return True | |
| 32 | + | |
| 33 | +class task_config(): | |
| 34 | + def __init__(self, json_data): | |
| 35 | + self.data = {} | |
| 36 | + self.data["db_host"] = json_data.get("db_host", "localhost") | |
| 37 | + self.data["db_timeout"] = json_data.get("db_timeout", 30) | |
| 38 | + self.data["db_port"] = json_data.get("db_port", 200) | |
| 39 | + self.data["pb_api_key"] = json_data.get("pb_api_key", "") | |
| 40 | + self.data["pb_endpoint"] = json_data.get("pb_endpoint", "http://localhost/pybossa/") | |
| 41 | + self.data["corretor"] = dict(json_data.get("corretor", {})) | |
| 42 | + self.data["validador"] = dict(json_data.get("validador", {})) | |
| 43 | + self.data["wikilibras"] = dict(json_data.get("wikilibras", {})) | |
| 44 | + self.set_pb_config(self.data["pb_api_key"], self.data["pb_endpoint"]) | |
| 45 | + self.data["corretor"]["project_id"] = self.get_pb_project_id(self.data["corretor"]["short_name"]) | |
| 46 | + self.data["validador"]["project_id"] = self.get_pb_project_id(self.data["validador"]["short_name"]) | |
| 47 | + self.data["wikilibras"]["project_id"] = self.get_pb_project_id(self.data["wikilibras"]["short_name"]) | |
| 48 | + logging.getLogger("requests").setLevel(logging.ERROR) | |
| 49 | + | |
| 50 | + def set_pb_config(self, pb_api_key, pb_endpoint): | |
| 51 | + pbclient.set('api_key', self.data["pb_api_key"]) | |
| 52 | + pbclient.set('endpoint', self.data["pb_endpoint"]) | |
| 53 | + | |
| 54 | + def get_pb_project_id(self, pb_short_name): | |
| 55 | + projects = pbclient.find_project(short_name = pb_short_name) | |
| 56 | + if (len(projects) > 0): | |
| 57 | + return projects[0].id | |
| 58 | + else: | |
| 59 | + return None | |
| 60 | + | |
| 61 | + def to_string(self): | |
| 62 | + return json.dumps(self.data, sort_keys = True, ensure_ascii = False, indent = 4) | |
| 63 | + | |
| 64 | + def __get__(self, endpoint): | |
| 65 | + try: | |
| 66 | + r = requests.get(('http://%s:%s/%s' % (self.data["db_host"], self.data["db_port"], endpoint)), timeout = self.data["db_timeout"]) | |
| 67 | + return r | |
| 68 | + except: | |
| 69 | + return {} | |
| 70 | + | |
| 71 | + def get_data_by_version(self, version): | |
| 72 | + r = self.__get__("sinais?version=%d" % (version)) | |
| 73 | + if (r != {}): | |
| 74 | + # pyutil.log(json.dumps(r.json(), ensure_ascii = False, indent = 4)) | |
| 75 | + return r.json() | |
| 76 | + else: | |
| 77 | + return [] | |
| 78 | + | |
| 79 | + def get_data_by_version_selo(self, version, selo): | |
| 80 | + r = self.__get__("sinais?version=%d&selo=%s" % (version, selo)) | |
| 81 | + if (r != {}): | |
| 82 | + # pyutil.log(json.dumps(r.json(), ensure_ascii = False, indent = 4)) | |
| 83 | + return r.json() | |
| 84 | + else: | |
| 85 | + return [] | |
| 86 | + | |
| 87 | + def get_data_by_selo(self, selo): | |
| 88 | + r = self.__get__("sinais?&selo=%s" % (selo)) | |
| 89 | + if (r != {}): | |
| 90 | + # pyutil.log(json.dumps(r.json(), ensure_ascii = False, indent = 4)) | |
| 91 | + return r.json() | |
| 92 | + else: | |
| 93 | + return [] | |
| 94 | + | |
| 95 | + def get_listall(self): | |
| 96 | + r = self.__get__("listall") | |
| 97 | + if (r != {}): | |
| 98 | + # pyutil.log(json.dumps(r.json(), ensure_ascii = False, indent = 4)) | |
| 99 | + return r.json() | |
| 100 | + else: | |
| 101 | + return [] | |
| 102 | + | |
| 103 | + def get_version(self): | |
| 104 | + r = self.__get__("version") | |
| 105 | + if (r != {}): | |
| 106 | + version = r.json()["version"] | |
| 107 | + if (isinstance(version, int)): | |
| 108 | + return version | |
| 109 | + return 0 | |
| 110 | + | |
| 111 | + def get_file(self, url, filename): | |
| 112 | + try: | |
| 113 | + r = requests.get(url, stream = True) | |
| 114 | + if (r.status_code == 200): | |
| 115 | + with open(filename, 'wb') as f: | |
| 116 | + for chunk in r.iter_content(chunk_size = 1024): | |
| 117 | + if chunk: | |
| 118 | + f.write(chunk) | |
| 119 | + return True | |
| 120 | + except: | |
| 121 | + pass | |
| 122 | + return False | |
| 123 | + | |
| 124 | + def run(self, project): | |
| 125 | + try: | |
| 126 | + video_path = project.get("video_path", "view/videos") | |
| 127 | + proj_name = project.get("short_name", "") | |
| 128 | + proj_id = project.get("project_id", "") | |
| 129 | + database = os.path.join(os.path.dirname(os.path.abspath('__file__')), "database.json") | |
| 130 | + database_bak = database + ".bak" | |
| 131 | + control = load_json(database) | |
| 132 | + changed = False | |
| 133 | + # server_version = self.get_version() | |
| 134 | + # for i in range(1, server_version + 1): | |
| 135 | + dbquery = None | |
| 136 | + | |
| 137 | + # selos | |
| 138 | + # 1 - wikilibras | |
| 139 | + # 2 - especialista | |
| 140 | + # 3 - invalido_wikilibras | |
| 141 | + # 4 - invalido_especialista | |
| 142 | + # 5 - animadores | |
| 143 | + # 6 - invalido_animadores | |
| 144 | + # 7 - null | |
| 145 | + | |
| 146 | + if (proj_name == "corretor_sinais"): | |
| 147 | + dbquery = self.get_data_by_selo("invalido_especialista") | |
| 148 | + | |
| 149 | + if (proj_name == "validador_sinais"): | |
| 150 | + dbquery = self.get_data_by_selo("wikilibras") + self.get_data_by_selo("animadores") | |
| 151 | + | |
| 152 | + if (proj_name == "wikilibras"): | |
| 153 | + dbquery = self.get_data_by_selo("null") | |
| 154 | + | |
| 155 | + for j in dbquery: | |
| 156 | + add_task = False | |
| 157 | + current_sinal_name = str(j["nome"]).upper() | |
| 158 | + current_version = int(j["version"]) | |
| 159 | + | |
| 160 | + if (current_sinal_name in control): | |
| 161 | + if (control[current_sinal_name] < current_version): | |
| 162 | + control[current_sinal_name] = current_version | |
| 163 | + changed = True | |
| 164 | + add_task = True | |
| 165 | + else: | |
| 166 | + control[current_sinal_name] = current_version | |
| 167 | + changed = True | |
| 168 | + add_task = True | |
| 169 | + | |
| 170 | + if(add_task): | |
| 171 | + avatar_url = "http://%s:%s/blender/%s" % (str(self.data["db_host"]), str(self.data["db_port"]), str(j[u"nome"] + ".blend")) | |
| 172 | + video_ref_url = "http://%s:%s/%s" % (str(self.data["db_host"]), str(self.data["db_port"]), str(j[u"file"])) | |
| 173 | + video_ava_url = "http://%s:%s/avatar/%s" % (str(self.data["db_host"]), str(self.data["db_port"]), str(j[u"nome"] + ".webm")) | |
| 174 | + | |
| 175 | + avatar_out = os.path.join(video_path, str(j[u"nome"] + "_AVATAR.blend")) | |
| 176 | + video_ref_out = os.path.join(video_path, str(j[u"nome"] + "_REF.webm")) | |
| 177 | + video_ava_out = os.path.join(video_path, str(j[u"nome"] + "_AVATAR.webm")) | |
| 178 | + | |
| 179 | + # cria diretorio de destino caso nao exista | |
| 180 | + if not os.path.exists(video_path): | |
| 181 | + os.makedirs(video_path) | |
| 182 | + | |
| 183 | + # remove arquivos existentes relativos a pasta "avatar_out" | |
| 184 | + if pyutil.file_exists(avatar_out): | |
| 185 | + os.unlink(avatar_out) | |
| 186 | + | |
| 187 | + # remove arquivos existentes relativos a pasta "video_ref_out" | |
| 188 | + if pyutil.file_exists(video_ref_out): | |
| 189 | + os.unlink(video_ref_out) | |
| 190 | + | |
| 191 | + # remove arquivos existentes relativos a pasta "video_ava_out" | |
| 192 | + if pyutil.file_exists(video_ava_out): | |
| 193 | + os.unlink(video_ava_out) | |
| 194 | + | |
| 195 | + # faz download do arquivo blend | |
| 196 | + self.get_file(avatar_url, avatar_out) | |
| 197 | + | |
| 198 | + # faz download do video renderizado | |
| 199 | + self.get_file(video_ava_url, video_ava_out) | |
| 200 | + | |
| 201 | + # faz download do video de referencia | |
| 202 | + if (self.get_file(video_ref_url, video_ref_out)): | |
| 203 | + pyutil.log("%s: creating task: %s version: %s stamp: %s" % (proj_name, j[u"nome"], j[u"version"], j[u"nomeSelo"])) | |
| 204 | + task = dict(sign_name = j[u"nome"], submission_date = pyutil.get_date_now()) | |
| 205 | + # pbclient.create_task(proj_id, task) | |
| 206 | + else: | |
| 207 | + # nao foi possivel fazer o download do video de referencia (tarefa nao adicionada) | |
| 208 | + pyutil.log("%s: file not found: %s" % (proj_name, video_ref_url)) | |
| 209 | + | |
| 210 | + if (changed): | |
| 211 | + # verifica se arquivo database.json existe para realizar backup | |
| 212 | + if ((os.path.isfile(database) == 1) and (os.path.exists(database) == 1)): | |
| 213 | + | |
| 214 | + # verifica se arquivo database.json.bak existe para remover | |
| 215 | + if ((os.path.isfile(database_bak) == 1) and (os.path.exists(database_bak) == 1)): | |
| 216 | + os.remove(database_bak) | |
| 217 | + | |
| 218 | + # faz backup antes de salvar nova base de dados | |
| 219 | + shutil.copy(database, database_bak) | |
| 220 | + | |
| 221 | + # salva nova base de dados | |
| 222 | + save_json(control, database, 2) | |
| 223 | + except: | |
| 224 | + pyutil.print_stack_trace() | ... | ... |
| ... | ... | @@ -0,0 +1,34 @@ |
| 1 | +#!/bin/sh | |
| 2 | +### BEGIN INIT INFO | |
| 3 | +# Provides: wikilibras-taskmgr.init | |
| 4 | +# Required-Start: $all | |
| 5 | +# Required-Stop: $all | |
| 6 | +# Default-Start: 2 3 4 5 | |
| 7 | +# Default-Stop: 0 1 6 | |
| 8 | +# Short-Description: This service update tasks for Wikilibras | |
| 9 | +# Description: Enable service provided by Wikilibras API at boot | |
| 10 | +### END INIT INFO | |
| 11 | + | |
| 12 | +set -e | |
| 13 | + | |
| 14 | +export PATH=$PATH:/usr/local/bin | |
| 15 | + | |
| 16 | +CWD="<path-to-project>" | |
| 17 | + | |
| 18 | +case "$1" in | |
| 19 | + start) | |
| 20 | + cd "$CWD" | |
| 21 | + . env/bin/activate | |
| 22 | + python main.py & | |
| 23 | + ;; | |
| 24 | + stop) | |
| 25 | + cd "$CWD" | |
| 26 | + make stop | |
| 27 | + ;; | |
| 28 | + *) | |
| 29 | + echo "Usage: /etc/init.d/wikilibras-taskmgr.init {start|stop}" | |
| 30 | + exit 1 | |
| 31 | + ;; | |
| 32 | +esac | |
| 33 | + | |
| 34 | +exit 0 | ... | ... |