Use Nginx forwarding to set CRAWLAB_API_ADDRESS to a default IP

commit de52b0dc10
parent e45ac0a358
Author: marvzhang
Date: 2020-01-12 19:28:34 +08:00

8 changed files with 43 additions and 18 deletions

View File

@@ -22,8 +22,6 @@ RUN npm run build:prod
 # images
 FROM ubuntu:latest
-ADD . /app
 # set as non-interactive
 ENV DEBIAN_FRONTEND noninteractive
@@ -32,19 +30,19 @@ ENV CRAWLAB_IS_DOCKER Y
 # install packages
 RUN apt-get update \
-    && apt-get install -y curl git net-tools iputils-ping ntp ntpdate python3 python3-pip \
+    && apt-get install -y curl git net-tools iputils-ping ntp ntpdate python3 python3-pip nginx \
     && ln -s /usr/bin/pip3 /usr/local/bin/pip \
     && ln -s /usr/bin/python3 /usr/local/bin/python
 # install backend
 RUN pip install scrapy pymongo bs4 requests
+# add files
+ADD . /app
 # copy backend files
 COPY --from=backend-build /go/bin/crawlab /usr/local/bin
-# install nginx
-RUN apt-get -y install nginx
 # copy frontend files
 COPY --from=frontend-build /app/dist /app/dist
 COPY --from=frontend-build /app/conf/crawlab.conf /etc/nginx/conf.d
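Note: nginx is now installed in the same apt-get layer as the other system packages instead of via a separate RUN step, and ADD . /app has moved below the dependency installation so that source changes no longer invalidate the cached package layers. A quick way to verify the change locally (image tag is illustrative):

$ docker build -t crawlab:dev .
$ docker run --rm crawlab:dev nginx -v    # prints the nginx version if the install succeeded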

View File

@@ -22,27 +22,25 @@ RUN npm run build:prod
 # images
 FROM ubuntu:latest
-ADD . /app
 # set as non-interactive
 ENV DEBIAN_FRONTEND noninteractive
 # install packages
 RUN chmod 777 /tmp \
     && apt-get update \
-    && apt-get install -y curl git net-tools iputils-ping ntp ntpdate python3 python3-pip \
+    && apt-get install -y curl git net-tools iputils-ping ntp ntpdate python3 python3-pip nginx \
     && ln -s /usr/bin/pip3 /usr/local/bin/pip \
     && ln -s /usr/bin/python3 /usr/local/bin/python
 # install backend
 RUN pip install scrapy pymongo bs4 requests -i https://pypi.tuna.tsinghua.edu.cn/simple
+# add files
+ADD . /app
 # copy backend files
 COPY --from=backend-build /go/bin/crawlab /usr/local/bin
-# install nginx
-RUN apt-get -y install nginx
 # copy frontend files
 COPY --from=frontend-build /app/dist /app/dist
 COPY --from=frontend-build /app/conf/crawlab.conf /etc/nginx/conf.d

View File

@@ -72,7 +72,6 @@ services:
     image: tikazyq/crawlab:latest
     container_name: master
     environment:
-      CRAWLAB_API_ADDRESS: "http://localhost:8000"
       CRAWLAB_SERVER_MASTER: "Y"
       CRAWLAB_MONGO_HOST: "mongo"
       CRAWLAB_REDIS_ADDRESS: "redis"

View File

@@ -4,14 +4,28 @@ services:
     image: tikazyq/crawlab:latest
     container_name: master
     environment:
-      CRAWLAB_API_ADDRESS: "http://localhost:8000" # backend API address, set to http://<host IP>:<port>, where the port is the one mapped out
+      # CRAWLAB_API_ADDRESS: "https://crawlab.cn/api" # backend API address; for https or source-code deployments
       CRAWLAB_SERVER_MASTER: "Y" # whether this is the master node: Y for master, N for worker
       CRAWLAB_MONGO_HOST: "mongo" # MongoDB host address; inside the docker compose network, reference the service name directly
+      # CRAWLAB_MONGO_PORT: "27017" # MongoDB port
+      # CRAWLAB_MONGO_DB: "crawlab_test" # MongoDB database
+      # CRAWLAB_MONGO_USERNAME: "username" # MongoDB username
+      # CRAWLAB_MONGO_PASSWORD: "password" # MongoDB password
+      # CRAWLAB_MONGO_AUTHSOURCE: "admin" # MongoDB auth source
       CRAWLAB_REDIS_ADDRESS: "redis" # Redis host address; inside the docker compose network, reference the service name directly
-      # CRAWLAB_SERVER_LANG_NODE: "Y" # pre-install the Node.js language environment
+      # CRAWLAB_REDIS_PORT: "6379" # Redis port
+      # CRAWLAB_REDIS_DATABASE: "1" # Redis database
+      # CRAWLAB_REDIS_PASSWORD: "password" # Redis password
+      # CRAWLAB_LOG_LEVEL: "info" # log level; defaults to info
+      # CRAWLAB_LOG_ISDELETEPERIODICALLY: "N" # whether to periodically delete log files; off by default
+      # CRAWLAB_LOG_DELETEFREQUENCY: "@hourly" # how often to delete log files; defaults to hourly
+      # CRAWLAB_SERVER_REGISTER_TYPE: "mac" # node register type; defaults to mac address, can be set to ip to avoid mac address conflicts
+      # CRAWLAB_SERVER_REGISTER_IP: "127.0.0.1" # node register IP, the node's unique identifier; only takes effect when CRAWLAB_SERVER_REGISTER_TYPE is "ip"
+      # CRAWLAB_TASK_WORKERS: 4 # number of task executors (tasks run in parallel)
+      # CRAWLAB_SERVER_LANG_NODE: "Y" # whether to pre-install the Node.js language environment
+      # CRAWLAB_SETTING_ALLOWREGISTER: "N" # whether to allow user registration
     ports:
       - "8080:8080" # frontend port mapping
-      - "8000:8000" # backend port mapping
     depends_on:
       - mongo
       - redis
@@ -35,6 +49,7 @@ services:
   redis:
     image: redis:latest
     restart: always
+    # command: redis --requirepass "password" # set the Redis password
     # volumes:
     #   - "/opt/crawlab/redis/data:/data" # make data persistent
     # ports:
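With CRAWLAB_API_ADDRESS commented out and port 8000 no longer published, both the UI and the API are reached through the single mapped frontend port. A minimal smoke test, assuming the compose file above (the API endpoint path is illustrative):

$ docker-compose up -d
$ curl -I http://localhost:8080               # frontend, served by nginx
$ curl http://localhost:8080/api/version      # backend, proxied by nginx; port 8000 stays internal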

View File

@@ -6,7 +6,7 @@ then
     :
 else
     jspath=`ls /app/dist/js/app.*.js`
-    sed -i "s?http://localhost:8000?${CRAWLAB_API_ADDRESS}?g" ${jspath}
+    sed -i "s?###CRAWLAB_API_ADDRESS###?${CRAWLAB_API_ADDRESS}?g" ${jspath}
 fi
 # replace base url
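The startup script now substitutes a dedicated ###CRAWLAB_API_ADDRESS### placeholder instead of the literal http://localhost:8000, so the default URL in the built bundle can no longer be rewritten by accident. A standalone sketch of the substitution (file name and address are illustrative):

$ echo 'baseUrl = "###CRAWLAB_API_ADDRESS###"' > /tmp/app.demo.js
$ export CRAWLAB_API_ADDRESS="http://192.168.1.10:8000"
$ sed -i "s?###CRAWLAB_API_ADDRESS###?${CRAWLAB_API_ADDRESS}?g" /tmp/app.demo.js
$ cat /tmp/app.demo.js
baseUrl = "http://192.168.1.10:8000"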

View File

@@ -1,3 +1,3 @@
 NODE_ENV='production'
-VUE_APP_BASE_URL='http://localhost:8000'
+VUE_APP_BASE_URL=/api
 VUE_APP_CRAWLAB_BASE_URL=http://api.crawlab.cn

View File

@@ -10,4 +10,9 @@ server {
     listen 8080;
     root /app/dist;
     index index.html;
+    location /api/ {
+        rewrite /api/(.*) /$1 break;
+        proxy_pass http://localhost:8000/;
+    }
 }
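The new location block lets nginx answer /api/* requests on the frontend port and forward them to the backend with the prefix stripped by the rewrite. For example (the endpoint name is illustrative):

$ curl http://localhost:8080/api/nodes
# nginx rewrites /api/nodes to /nodes and proxies it to http://localhost:8000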

View File

@@ -2,7 +2,17 @@ import axios from 'axios'
 import router from '../router'
 import { Message } from 'element-ui'
+// derive baseUrl from VUE_APP_BASE_URL
 let baseUrl = process.env.VUE_APP_BASE_URL ? process.env.VUE_APP_BASE_URL : 'http://localhost:8000'
+if (!baseUrl.match(/^https?/i)) {
+  baseUrl = `http://${window.location.host}${process.env.VUE_APP_BASE_URL}`
+}
+// if the CRAWLAB_API_ADDRESS environment variable is set in Docker, it overrides baseUrl
+const CRAWLAB_API_ADDRESS = '###CRAWLAB_API_ADDRESS###'
+if (!CRAWLAB_API_ADDRESS.match('CRAWLAB_API_ADDRESS')) {
+  baseUrl = CRAWLAB_API_ADDRESS
+}
 const request = (method, path, params, data, others = {}) => {
   const url = baseUrl + path
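baseUrl is now resolved in three steps: start from VUE_APP_BASE_URL, expand a scheme-less value such as /api against window.location.host, then let the ###CRAWLAB_API_ADDRESS### placeholder override everything once docker_init.sh has replaced it. Setting the variable at container start therefore still wins, e.g. (address is illustrative; port 8000 must then be published again):

$ docker run -d -p 8080:8080 -p 8000:8000 \
    -e CRAWLAB_API_ADDRESS="http://192.168.1.10:8000" tikazyq/crawlab:latest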