mirror of
https://github.com/crawlab-team/crawlab.git
synced 2026-01-21 17:21:09 +01:00
* 增加Docker开发环境
* 更新Dockerfile构建文件,升级NodeJS依赖版本。 * 遵循ESLint重新格式化代码,修复部分警告 * 登录Token失效增加登出提示 * 网络请求问题增加错误错误提示 * 升级UI依赖库
This commit is contained in:
@@ -5,5 +5,22 @@ WORKDIR /tools
|
||||
# Install air (live-reload runner for Go development).
# NOTE: `go get` stopped building/installing binaries in module mode (Go 1.17
# deprecation, removed in 1.18); `go install pkg@version` is the supported way
# and works on the unpinned golang base image.
RUN go install github.com/cosmtrek/air@latest

# Switch to the backend source tree for the dev workflow.
WORKDIR /backend

# Drop the temporary tools checkout; the air binary lives in $GOPATH/bin.
RUN rm -rf /tools
|
||||
# Non-interactive apt for the installs below. NOTE(review): baking this into
# ENV leaks it into the runtime container as well — if that matters, prefer an
# inline `DEBIAN_FRONTEND=noninteractive apt-get …` or an ARG.
# key=value form: the space-separated `ENV key value` syntax is deprecated.
ENV DEBIAN_FRONTEND=noninteractive

# Tell crawlab at runtime that it is running inside a Docker container.
ENV CRAWLAB_IS_DOCKER=Y
|
||||
# Install OS packages. Uses the Tsinghua Debian mirror for speed in CN,
# keeps the layer small (--no-install-recommends plus apt list cleanup in the
# SAME layer — a later `rm` would not shrink the image), and exposes
# python3/pip under their conventional unversioned names.
# chmod 1777 (not 777): /tmp must keep the sticky bit, otherwise any user can
# delete other users' temp files.
RUN chmod 1777 /tmp \
    && sed -i 's#http://deb.debian.org#https://mirrors.tuna.tsinghua.edu.cn#g' /etc/apt/sources.list \
    && apt-get update \
    && apt-get install -y --no-install-recommends \
        curl \
        dumb-init \
        iputils-ping \
        net-tools \
        ntp \
        ntpdate \
        python3 \
        python3-pip \
    && rm -rf /var/lib/apt/lists/* \
    && ln -s /usr/bin/pip3 /usr/local/bin/pip \
    && ln -s /usr/bin/python3 /usr/local/bin/python
|
||||
|
||||
# Point pip at the Tsinghua PyPI mirror.
RUN pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple

# Install the crawler runtime packages in one layer, without keeping pip's
# download cache in the image (--no-cache-dir, hadolint DL3042).
# NOTE(review): versions are unpinned, so rebuilds are not reproducible —
# consider pinning (scrapy==x.y.z, …) or moving to a requirements.txt.
RUN pip install --no-cache-dir scrapy pymongo bs4 requests crawlab-sdk scrapy-splash
|
||||
# Working directory for spider code, world-readable.
RUN mkdir /spiders && chmod -R 0755 /spiders

# Backend sources are mounted in at run time (dev setup).
VOLUME /backend

# API port — documentation only; publish with `-p` at run time.
# (The original declared EXPOSE 8080 twice; once is enough.)
EXPOSE 8080
|
||||
|
||||
1
workspace/dockerfiles/node/.dockerignore
Normal file
1
workspace/dockerfiles/node/.dockerignore
Normal file
@@ -0,0 +1 @@
|
||||
**/node_modules/
|
||||
@@ -1,5 +1,4 @@
|
||||
# NOTE(review): node:latest is unpinned (hadolint DL3007) — builds are not
# reproducible. Pin a major version (e.g. node:16) once the required Node
# release for this frontend is confirmed.
FROM node:latest

# Absolute path: a relative WORKDIR resolves against the previous WORKDIR,
# which is fragile (hadolint DL3000).
WORKDIR /frontend

# npm registry mirror, overridable at build time:
#   docker build --build-arg NPM_REGISTRY=https://registry.npmjs.org .
# The original set the registry twice, so the second (taobao) line silently
# overrode the ARG and made NPM_REGISTRY useless; registry.npm.taobao.org is
# also defunct — registry.npmmirror.com is its official successor.
ARG NPM_REGISTRY="https://registry.npmmirror.com"
RUN npm config set registry ${NPM_REGISTRY}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user