From eef33af8fcdb8bdad131d0e2ef4af2cd2943ae86 Mon Sep 17 00:00:00 2001
From: Marvin Zhang <tikazyq@163.com>
Date: Sat, 1 Jun 2019 17:05:46 +0800
Subject: [PATCH] Add manage.py CLI tool

---
 README-zh.md                   | 15 +------------
 README.md                      | 16 +-------------
 crawlab/manage.py => manage.py | 20 +++++++++++------
 setup.cfg                      |  2 ++
 setup.py                       | 39 ++++++++++++++++++++++++++++++++++
 5 files changed, 57 insertions(+), 35 deletions(-)
 rename crawlab/manage.py => manage.py (74%)
 create mode 100644 setup.cfg
 create mode 100644 setup.py

diff --git a/README-zh.md b/README-zh.md
index 94530fef..2f407f00 100644
--- a/README-zh.md
+++ b/README-zh.md
@@ -37,20 +37,7 @@ npm install
 ## 快速开始
 
 ```bash
-# 启动后端API
-python app.py
-
-# 启动Flower服务
-python ./bin/run_flower.py
-
-# 启动worker
-python ./bin/run_worker.py
-```
-
-```bash
-# 运行前端
-cd frontend
-npm run serve
+python manage.py serve
 ```
 
 ## 截图
diff --git a/README.md b/README.md
index 29433857..4eb0e9ca 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,6 @@ Celery-based web crawler admin platform for managing distributed web spiders reg
 
 [Demo](http://114.67.75.98:8080) | [Documentation](https://tikazyq.github.io/crawlab)
 
-
 ## Pre-requisite
 - Python 3.6+
 - Node.js 8.12+
@@ -38,20 +37,7 @@ Please edit configuration file `config.py` to configure api and database connect
 ## Quick Start
 
 ```bash
-# Start backend API
-python app.py
-
-# Start Flower service
-python ./bin/run_flower.py
-
-# Start worker
-python ./bin/run_worker.py
-```
-
-```bash
-# run frontend client
-cd frontend
-npm run serve
+python manage.py serve
 ```
 
 ## Screenshot
diff --git a/crawlab/manage.py b/manage.py
similarity index 74%
rename from crawlab/manage.py
rename to manage.py
index 4e2eb613..e9806a79 100644
--- a/crawlab/manage.py
+++ b/manage.py
@@ -8,6 +8,15 @@ BASE_DIR = os.path.dirname(__file__)
 
 APP_DESC = """
 Crawlab CLI tool.
+
+usage: python manage.py [action]
+
+action:
+    serve: start all necessary services to run Crawlab. This is for quick start; please check out the Deployment guide for production environments.
+    app: start app + flower services, normally run on the master node.
+    worker: start app + worker services, normally run on worker nodes.
+    flower: start the flower service only.
+    frontend: start the frontend/client service only.
""" ACTION_LIST = [ 'serve', @@ -17,32 +26,31 @@ ACTION_LIST = [ 'frontend', ] if len(sys.argv) == 1: + print(APP_DESC) sys.argv.append('--help') parser = argparse.ArgumentParser() parser.add_argument('action', type=str) -# parser.add_argument('-q', '--quality', type=int, default=0, -# help="download video quality : 1 for the standard-definition; 3 for the super-definition") args = parser.parse_args() def run_app(): - p = subprocess.Popen([sys.executable, os.path.join(BASE_DIR, 'app.py')]) + p = subprocess.Popen([sys.executable, os.path.join(BASE_DIR, 'crawlab', 'app.py')]) p.communicate() def run_flower(): - p = subprocess.Popen([sys.executable, os.path.join(BASE_DIR, 'flower.py')]) + p = subprocess.Popen([sys.executable, os.path.join(BASE_DIR, 'crawlab', 'flower.py')]) p.communicate() def run_worker(): - p = subprocess.Popen([sys.executable, os.path.join(BASE_DIR, 'worker.py')]) + p = subprocess.Popen([sys.executable, os.path.join(BASE_DIR, 'crawlab', 'worker.py')]) p.communicate() def run_frontend(): p = subprocess.Popen(['npm', 'run', 'serve'], - cwd=os.path.abspath(os.path.join(BASE_DIR, '..', 'frontend'))) + cwd=os.path.abspath(os.path.join(BASE_DIR, 'frontend'))) p.communicate() diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..224a7795 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,2 @@ +[metadata] +description-file = README.md \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..ce239009 --- /dev/null +++ b/setup.py @@ -0,0 +1,39 @@ +#-*- encoding: UTF-8 -*- +from setuptools import setup, find_packages + +VERSION = '0.2.3' + +with open('README.md') as fp: + readme = fp.read() + +setup(name='crawlab-server', + version=VERSION, + description="Celery-based web crawler admin platform for managing distributed web spiders regardless of languages and frameworks.", + long_description=readme, + classifiers=['Python', 'Javascript', 'Scrapy'], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers + keywords='python crawlab celery crawler spider platform scrapy', + author='tikazyq', + author_email='tikazyq@163.com', + url='https://github.com/tikazyq/crawlab', + license='BSD', + packages=find_packages(), + include_package_data=True, + zip_safe=True, + install_requires=[ + 'celery', + 'flower', + 'requests', + 'pymongo', + 'flask', + 'flask_cors', + 'flask_restful', + 'lxml', + 'gevent', + 'scrapy', + ], + entry_points={ + 'console_scripts':[ + 'crawlab = crawlab.manage:main' + ] + }, +) \ No newline at end of file