* 增加Docker开发环境

* 更新Dockerfile构建文件,升级NodeJS依赖版本。
 * 遵循ESLint重新格式化代码,修复部分警告
 * 登录Token失效增加登出提示
 * 网络请求问题增加错误提示
 * 升级UI依赖库
This commit is contained in:
yaziming
2020-06-19 16:57:00 +08:00
parent 04a94d2546
commit c671d113c9
129 changed files with 18222 additions and 14180 deletions

View File

@@ -1,19 +1,20 @@
version: "3.3"
services:
master:
build:
context: dockerfiles/golang
command: "air -c .air.conf"
volumes:
- ../backend:/backend
- ./log/master.log:/tmp/air.log
- ../backend:/backend
- /backend/tmp
depends_on:
- mongo
- redis
ports:
- 8000:8000
environment:
CRAWLAB_SPIDER_PATH: "/spiders"
CRAWLAB_SETTING_ENABLEDEMOSPIDERS: "Y"
CRAWLAB_MONGO_HOST: "mongo"
CRAWLAB_REDIS_ADDRESS: "redis"
CRAWLAB_SERVER_MASTER: "Y"
@@ -27,13 +28,14 @@ services:
depends_on:
- mongo
- redis
ports:
- 8001:8000
volumes:
- ../backend:/backend
- ./log/worker-1.log:/tmp/air.log
- /backend/tmp
environment:
CRAWLAB_SPIDER_PATH: "/spiders"
CRAWLAB_MONGO_HOST: "mongo"
CRAWLAB_REDIS_ADDRESS: "redis"
CRAWLAB_SERVER_MASTER: "N"
@@ -44,9 +46,6 @@ services:
build:
context: dockerfiles/golang
command: "air -c .air.conf"
# volumes:
# - ../backend:/backend
# - ./log/master.log:/tmp/air.log
depends_on:
- mongo
- redis
@@ -54,32 +53,31 @@ services:
- 8002:8000
volumes:
- ../backend:/backend
- ./log/worker-2.log:/tmp/air.log
environment:
CRAWLAB_SPIDER_PATH: "/spiders"
CRAWLAB_MONGO_HOST: "mongo"
CRAWLAB_REDIS_ADDRESS: "redis"
CRAWLAB_SERVER_MASTER: "N"
CRAWLAB_SERVER_PORT: 8002
CRAWLAB_SERVER_REGISTER_TYPE: "customName"
CRAWLAB_SERVER_REGISTER_CUSTOMNODENAME: "worker_2"
frontend:
build:
context: ./dockerfiles/node
args:
- NPM_REGISTRY="http://registry.npm.taobao.org/"
container_name: crawlab_frontend
ports:
- 8080:8080
volumes:
- ../frontend:/frontend
command:
- /bin/sh
- -c
- |
yarn install
yarn run serve
depends_on:
- master
ui:
build:
context: ./dockerfiles/node
container_name: crawlab_frontend
ports:
- 8080:8080
volumes:
- ../frontend:/frontend
- /frontend/node_modules
command:
- /bin/sh
- -c
- |
yarn install
yarn run serve
depends_on:
- master
mongo:
image: mongo:latest
restart: always
@@ -94,4 +92,4 @@ services:
# volumes:
# - "/opt/crawlab/redis/data:/data" # make data persistent 持久化
ports:
- "6379:6379" # expose port to host machine 暴露接口到宿主机
- "6379:6379" # expose port to host machine 暴露接口到宿主机

View File

@@ -5,5 +5,22 @@ WORKDIR /tools
RUN go get github.com/cosmtrek/air
WORKDIR /backend
RUN rm -rf /tools
# set as non-interactive
ENV DEBIAN_FRONTEND noninteractive
# set CRAWLAB_IS_DOCKER
ENV CRAWLAB_IS_DOCKER Y
# install packages
RUN chmod 777 /tmp \
&& sed -i 's#http://deb.debian.org#https://mirrors.tuna.tsinghua.edu.cn#g' /etc/apt/sources.list \
&& apt-get update \
&& apt-get install -y curl net-tools iputils-ping ntp ntpdate python3 python3-pip dumb-init \
&& ln -s /usr/bin/pip3 /usr/local/bin/pip \
&& ln -s /usr/bin/python3 /usr/local/bin/python
RUN pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple
# install backend
RUN pip install scrapy pymongo bs4 requests crawlab-sdk scrapy-splash
RUN mkdir /spiders && chmod -R 0755 /spiders
VOLUME /backend
EXPOSE 8080
EXPOSE 8080

View File

@@ -0,0 +1 @@
**/node_modules/

View File

@@ -1,5 +1,4 @@
FROM node:latest
WORKDIR frontend
ARG NPM_REGISTRY="http://www.npmjs.org"
RUN npm config set registry ${NPM_REGISTRY}
RUN npm config set registry "http://registry.npm.taobao.org"