Mirror of https://github.com/crawlab-team/crawlab.git, synced 2026-01-22 17:31:03 +01:00.
Merge pull request #491 from crawlab-team/develop: added Splash support.
This commit is contained in:
@@ -35,7 +35,7 @@ RUN apt-get update \
     && ln -s /usr/bin/python3 /usr/local/bin/python
 
 # install backend
-RUN pip install scrapy pymongo bs4 requests crawlab-sdk
+RUN pip install scrapy pymongo bs4 requests crawlab-sdk scrapy-splash
 
 # add files
 ADD . /app
 
@@ -33,7 +33,7 @@ RUN chmod 777 /tmp \
     && ln -s /usr/bin/python3 /usr/local/bin/python
 
 # install backend
-RUN pip install scrapy pymongo bs4 requests crawlab-sdk -i https://pypi.tuna.tsinghua.edu.cn/simple
+RUN pip install scrapy pymongo bs4 requests crawlab-sdk scrapy-splash -i https://pypi.tuna.tsinghua.edu.cn/simple
 
 # add files
 ADD . /app
 
@@ -39,3 +39,8 @@ services:
     restart: always
     volumes:
       - "/opt/crawlab/redis/data:/data"
+  splash: # use Splash to run spiders on dynamic pages
+    image: scrapinghub/splash
+    container_name: splash
+    # ports:
+    #   - "8050:8050"
Reference in New Issue
Block a user