From 768781f6d5562e389abd452b3c1baf15de40e97e Mon Sep 17 00:00:00 2001
From: Marvin Zhang
Date: Sun, 3 Mar 2019 10:57:19 +0800
Subject: [PATCH] updated setup.py

---
 Dockerfile-task | 26 --------------------------
 setup.py        | 12 +++++++++---
 2 files changed, 9 insertions(+), 29 deletions(-)
 delete mode 100644 Dockerfile-task

diff --git a/Dockerfile-task b/Dockerfile-task
deleted file mode 100644
index 6202db5c..00000000
--- a/Dockerfile-task
+++ /dev/null
@@ -1,26 +0,0 @@
-# images
-#FROM python:latest
-FROM ubuntu:latest
-
-# source files
-ADD . /opt/crawlab
-
-# add dns
-RUN cat /etc/resolv.conf
-
-# install python
-RUN apt-get update
-RUN apt-get install -y python3 python3-pip net-tools iputils-ping vim
-
-# soft link
-RUN ln -s /usr/bin/pip3 /usr/local/bin/pip
-RUN ln -s /usr/bin/python3 /usr/local/bin/python
-
-# install required libraries
-RUN pip install -U setuptools
-RUN pip install -r /opt/crawlab/requirements.txt
-
-# execute apps
-WORKDIR /opt/crawlab
-CMD python ./bin/run_worker.py
-CMD python app.py
diff --git a/setup.py b/setup.py
index 00ce7000..cf26d28c 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
-from setuptools import setup
+from setuptools import setup, find_packages
 
 with open("README.md", "r") as fh:
     long_description = fh.read()
@@ -9,10 +9,16 @@ with open('requirements.txt') as f:
 setup(
     name='crawlab',
     version='0.0.1',
-    packages=['db', 'test', 'model', 'tasks', 'utils', 'routes', 'constants'],
     url='https://github.com/tikazyq/crawlab',
+    install_requires=requirements,
     license='BSD',
     author='Marvin Zhang',
     author_email='tikazyq@163.com',
-    description='Celery-based web crawler admin platform for managing distributed web spiders regardless of languages and frameworks.'
+    description='Celery-based web crawler admin platform for managing distributed web spiders regardless of languages and frameworks.',
+    long_description=long_description,
+    long_description_content_type="text/markdown",
+    download_url="https://github.com/tikazyq/crawlab/archive/master.zip",
+    packages=find_packages(),
+    keywords=['celery', 'python', 'webcrawler', 'crawl', 'scrapy', 'admin'],
+    zip_safe=True,
 )
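
Note on the setup.py changes: `find_packages()` walks the project root and
returns every directory that contains an `__init__.py`, so the previously
hardcoded package list no longer needs to be kept in sync by hand, and
`install_requires=requirements` pins runtime dependencies to whatever
requirements.txt lists at build time. A minimal sketch of what the discovery
does (the script name is hypothetical; the directory names come from the old
hardcoded list):

    # check_packages.py (hypothetical helper) -- run from the crawlab
    # project root to preview what setup.py will now package.
    from setuptools import find_packages

    if __name__ == "__main__":
        # With db/, test/, model/, tasks/, utils/, routes/ and constants/
        # each containing an __init__.py, this prints roughly the list
        # that setup.py used to hardcode.
        print(find_packages())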