updated setup.py

This commit is contained in:
Marvin Zhang
2019-03-03 10:57:19 +08:00
parent b8417bb7f8
commit 768781f6d5
2 changed files with 9 additions and 29 deletions

View File

@@ -1,26 +0,0 @@
# Base image (python:latest was considered; plain Ubuntu is used instead).
FROM ubuntu:latest

# Copy the application source into the image.
ADD . /opt/crawlab

# Debug aid: show the DNS configuration in effect during the build.
RUN cat /etc/resolv.conf

# Install Python 3 plus basic networking/debugging tools in a single layer,
# then drop the apt cache so it does not persist in the image.
RUN apt-get update \
    && apt-get install -y python3 python3-pip net-tools iputils-ping vim \
    && rm -rf /var/lib/apt/lists/*

# Provide unversioned `python` / `pip` commands.
RUN ln -s /usr/bin/pip3 /usr/local/bin/pip \
    && ln -s /usr/bin/python3 /usr/local/bin/python

# Install required Python libraries.
RUN pip install -U setuptools \
    && pip install -r /opt/crawlab/requirements.txt

# Execute the app.
WORKDIR /opt/crawlab
# NOTE(review): only the LAST CMD in a Dockerfile takes effect, so the
# original `CMD python ./bin/run_worker.py` line was dead code and has been
# removed. If the worker must also run, start it in a separate container
# (or via a process supervisor) rather than a second CMD.
CMD python app.py

View File

@@ -1,4 +1,4 @@
# find_packages() auto-discovers every package, so the package list no longer
# has to be maintained by hand (the bare `setup`-only import it replaced in
# this diff is dropped — keeping both lines would leave dead residue).
from setuptools import setup, find_packages

# Use the README as the long description shown on PyPI.
with open("README.md", "r") as fh:
    long_description = fh.read()
@@ -9,10 +9,16 @@ with open('requirements.txt') as f:
# Package metadata for crawlab. This is the post-commit form of the call:
# the diff's removed lines (the hard-coded `packages=[...]` list and the
# comma-less `description=` line) are dropped — keeping them alongside the
# added lines would be a duplicate-keyword syntax error.
setup(
    name='crawlab',
    version='0.0.1',
    url='https://github.com/tikazyq/crawlab',
    download_url="https://github.com/tikazyq/crawlab/archive/master.zip",
    # NOTE(review): `requirements` is read from requirements.txt earlier in
    # this file (outside the visible hunk) — confirm it is a list of
    # requirement strings.
    install_requires=requirements,
    license='BSD',
    author='Marvin Zhang',
    author_email='tikazyq@163.com',
    description='Celery-based web crawler admin platform for managing distributed web spiders regardless of languages and frameworks.',
    long_description=long_description,
    long_description_content_type="text/markdown",
    # Auto-discover packages instead of the old hand-maintained list.
    packages=find_packages(),
    keywords=['celery', 'python', 'webcrawler', 'crawl', 'scrapy', 'admin'],
    zip_safe=True,
)