diff --git a/crawlab/config/__init__.py b/crawlab/config/__init__.py
new file mode 100644
index 00000000..609b69de
--- /dev/null
+++ b/crawlab/config/__init__.py
@@ -0,0 +1,10 @@
+# encoding: utf-8
+
+import os
+
+run_env = os.environ.get("RUNENV", "local")
+
+if run_env == "local":  # load the local config
+    from config.config_local import *
+else:
+    from config.config import *
diff --git a/crawlab/config.py b/crawlab/config/config.py
similarity index 100%
rename from crawlab/config.py
rename to crawlab/config/config.py
diff --git a/crawlab/config/config_local.py b/crawlab/config/config_local.py
new file mode 100644
index 00000000..69d30277
--- /dev/null
+++ b/crawlab/config/config_local.py
@@ -0,0 +1,38 @@
+# encoding: utf-8
+
+import os
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+PROJECT_SOURCE_FILE_FOLDER = os.path.join(BASE_DIR, "spiders")
+
+# path to the Python virtual environment interpreter
+PYTHON_ENV_PATH = '/Users/chennan/Desktop/2019/env/bin/python'
+
+# spider deployment path
+PROJECT_DEPLOY_FILE_FOLDER = os.path.join(BASE_DIR, 'deployfile')
+
+PROJECT_LOGS_FOLDER = os.path.join(BASE_DIR, 'deployfile/logs')
+PROJECT_TMP_FOLDER = '/tmp'
+
+# celery variables
+BROKER_URL = 'redis://127.0.0.1:56379/0'
+CELERY_RESULT_BACKEND = 'mongodb://127.0.0.1:57017/'
+CELERY_MONGODB_BACKEND_SETTINGS = {
+    'database': 'crawlab_test',
+    'taskmeta_collection': 'tasks_celery',
+}
+CELERY_TIMEZONE = 'Asia/Shanghai'
+CELERY_ENABLE_UTC = True
+
+# flower variables
+FLOWER_API_ENDPOINT = 'http://localhost:5555/api'
+
+# database variables
+MONGO_HOST = '127.0.0.1'
+MONGO_PORT = 57017
+MONGO_DB = 'crawlab_test'
+
+# flask variables
+DEBUG = True
+FLASK_HOST = '127.0.0.1'
+FLASK_PORT = 8000
diff --git a/crawlab/utils/node.py b/crawlab/utils/node.py
index 3a0b7b92..6e40bc2b 100644
--- a/crawlab/utils/node.py
+++ b/crawlab/utils/node.py
@@ -24,7 +24,11 @@ def update_nodes_status(refresh=False):
     url = '%s/workers?status=1' % FLOWER_API_ENDPOINT
     if refresh:
         url += '&refresh=1'
+    res = requests.get(url)
+    if res.status_code != 200:
+        return online_node_ids
+
     for k, v in json.loads(res.content.decode('utf-8')).items():
         node_name = k
         node_status = NodeStatus.ONLINE if v else NodeStatus.OFFLINE
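
Usage note (not part of the patch): with the new config/__init__.py, importing config selects the settings module from the RUNENV environment variable, defaulting to the local settings. A minimal sketch, assuming the process is started from the crawlab package directory so that config resolves to this package:

    import os

    # unset or "local" loads config/config_local.py;
    # any other value falls back to config/config.py
    os.environ["RUNENV"] = "local"

    from config import FLOWER_API_ENDPOINT, MONGO_HOST  # names defined in config_local.py

RUNENV has to be set before the first import of config, since the star-import runs at module import time.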