# ---------------------------------------------------------------------------
# docker-compose.yml
# ---------------------------------------------------------------------------
version: "3.2"

services:
  # Master node: serves the API + frontend and schedules spider tasks.
  master:
    image: tikazyq/crawlab:0.5.1
    container_name: master
    environment:
      CRAWLAB_SERVER_MASTER: "Y"            # whether this node is the master ("Y") or a worker ("N")
      CRAWLAB_MONGO_HOST: "mongo"           # MongoDB host; inside the compose network, the service name resolves directly
      CRAWLAB_MONGO_PORT: "27017"           # MongoDB port
      CRAWLAB_MONGO_DB: "crawlab_test"      # MongoDB database name
      CRAWLAB_MONGO_USERNAME: ""            # MongoDB username (empty = no auth)
      CRAWLAB_MONGO_PASSWORD: ""            # MongoDB password (empty = no auth)
      CRAWLAB_MONGO_AUTHSOURCE: ""          # MongoDB auth source database
      CRAWLAB_REDIS_ADDRESS: "redis"        # Redis host; compose service name resolves directly
      CRAWLAB_REDIS_PORT: "6379"            # Redis port
      CRAWLAB_SERVER_REGISTER_TYPE: "ip"    # node registration key: default is "mac"; "ip" avoids MAC-address collisions
      CRAWLAB_SERVER_REGISTER_IP: "192.168.80.1"  # unique node id; only used when REGISTER_TYPE is "ip"
    ports:
      - "9997:9997"                         # frontend port mapping (quoted to avoid YAML sexagesimal parsing)
    depends_on:
      - mongo
      - redis
    volumes:
      - /root/baoqiang/shannon/nginx:/etc/nginx/conf.d
      - /root/baoqiang/shannon/frontend/dist:/app/dist
      - /root/baoqiang/shannon/backend/conf:/app/backend/conf
      - /root/baoqiang/shannon/backend/crawlab:/usr/local/bin/crawlab-server
      - /root/baoqiang/shannon/spiders/master:/app/spiders

  # Worker node: executes spider tasks dispatched by the master.
  worker:
    image: tikazyq/crawlab:0.5.1
    container_name: worker
    environment:
      CRAWLAB_SERVER_MASTER: "N"
      CRAWLAB_MONGO_HOST: "mongo"
      CRAWLAB_MONGO_PORT: "27017"           # MongoDB port
      CRAWLAB_MONGO_DB: "crawlab_test"      # MongoDB database name
      CRAWLAB_MONGO_USERNAME: ""            # MongoDB username (empty = no auth)
      CRAWLAB_MONGO_PASSWORD: ""            # MongoDB password (empty = no auth)
      CRAWLAB_MONGO_AUTHSOURCE: ""          # MongoDB auth source database
      CRAWLAB_REDIS_ADDRESS: "redis"        # Redis host; compose service name resolves directly
      CRAWLAB_REDIS_PORT: "6379"            # Redis port
      CRAWLAB_SERVER_REGISTER_TYPE: "ip"
      CRAWLAB_SERVER_REGISTER_IP: "192.168.80.2"
    depends_on:
      - mongo
      - redis

  mongo:
    image: mongo:latest
    restart: always
    ports:
      - "27017:27017"

  redis:
    image: redis:latest
    restart: always
    ports:
      # NOTE(review): original value was truncated to `"6379` (unterminated
      # quote); restored to the intended host:container mapping.
      - "6379:6379"
# ---------------------------------------------------------------------------
# nginx.conf
# ---------------------------------------------------------------------------
# Serves the Crawlab frontend SPA and proxies /api/ calls to the backend.
server {
    listen 9997;

    # Static frontend bundle (mounted at /app/dist by docker-compose).
    root  /app/dist;
    index index.html;

    # Strip the /api/ prefix, then forward to the backend on port 8000.
    # `break` stops rewrite processing so the rewritten URI is proxied as-is.
    # NOTE(review): `localhost` assumes the backend runs in the same
    # container/network namespace as nginx — confirm against the deployment.
    location /api/ {
        rewrite /api/(.*) /$1 break;
        proxy_pass http://localhost:8000/;
    }
}
# ---------------------------------------------------------------------------
# config.yml
# ---------------------------------------------------------------------------
# Crawlab backend configuration.
api:
  address: "localhost:8000"

mongo:
  host: localhost
  port: 27017
  db: crawlab_test
  username: ""
  password: ""
  authSource: ""

redis:
  address: localhost
  password: ""
  database: 1
  port: 6379

log:
  level: info
  path: "/var/logs/crawlab"
  isDeletePeriodically: "N"
  deleteFrequency: "@hourly"

server:
  host: 0.0.0.0
  port: 8000
  master: "Y"
  secret: "crawlab"
  register:
    # type is one of mac/ip/customName. With "ip" the IP must be set manually
    # below; with "customName", fill in customNodeName.
    type: "mac"
    customNodeName: ""  # custom node name; only effective when type = customName
    ip: ""
  lang:
    # Language runtimes to install: "Y" = install, "N" = skip.
    python: "Y"
    node: "N"
    java: "N"
    dotnet: "N"
    php: "N"
  # scripts: "/home/wurui/app/backend/scripts"
  scripts: "/app/backend/scripts"

spider:
  # path: "/home/wurui/app/spiders"
  path: "/app/spiders"

task:
  workers: 16

other:
  tmppath: "/tmp"

# Quoted so the version is unambiguously a string, never retyped by tooling.
version: "0.5.1"

setting:
  crawlabLogToES: "N"  # send crawlab runtime logs to ES; if "Y", remember to set esClient
  crawlabLogIndex: "crawlab-log"
  allowRegister: "N"
  enableTutorial: "N"
  runOnMaster: "Y"
  demoSpiders: "N"
  checkScrapy: "Y"
  autoInstall: "Y"
  esClient: ""  # ES endpoint, e.g. http://192.168.1.1:9200 or http://your-domain.com; leave empty if ES is unused
  spiderLogIndex: "spider-log"  # index pattern for Kibana; needs to be configured on the Kibana side

notification:
  mail:
    server: ""
    port: ""
    senderEmail: ""
    senderIdentity: ""
    smtp:
      user: ""
      password: ""

repo:
  apiUrl: "https://center.crawlab.cn/api"
  # apiUrl: "http://localhost:8002"
  ossUrl: "https://repo.crawlab.cn"

oidc:
  tokenUrl: "http://uibe.ketanyun.cn/sso/oauth2/token"
  clientId: "xALlmUuvwWAHyHMACL0I"
  # SECURITY(review): plaintext OAuth client secret committed to config.
  # Move this to an environment variable or secret store and rotate the
  # credential; left in place here so existing deployments keep working.
  clientSecret: "A146E95BE3945D1946252C432F929EA8C2CD98168FE6B5D3"