在 gRPC 的包管理中,如果多个 proto 包之间需要相互引用,应当如何关联?下面举个例子。
项目名称为 crawler(Go 项目的文件夹名也使用同一名称):
D:\test2\crawler\proto\client\client.proto
syntax = "proto3";
package crawler.grpc.client;
option go_package = "crawler/grpc/client";
option java_multiple_files = true;
option java_package = "com.upa.bigdata.crawler.grpc.client";
import "global/entity.proto";
// Client-facing transport service: a crawler client waits for commands
// from the scheduling center and pushes results and logs back.
service Trans {
// Wait for commands; the server streams Order messages to the client.
// NOTE(review): request type "WaiteOrderRequest" is likely a typo for
// "WaitOrderRequest" (wire-safe to rename, but breaks generated code).
rpc WaitOrder (WaiteOrderRequest) returns (stream Order) {
}
// Send crawled content to the scheduling center, which stores and manages
// it (e.g. decides whether to push a notification); a separate push
// system is planned later (original comment had typo "月" for "另").
rpc SendContent (SendContentRequest) returns (SendContentResponse) {
}
// Upload one crawler execution log entry.
rpc SendLog (SendLogRequest) returns (SendLogResponse) {
}
}
// Request for Trans.SendLog: one log entry for an action.
message SendLogRequest {
// Name of the action (crawler task) this log entry belongs to.
string action = 1;
// Crawler status at the time of logging.
crawler.grpc.entity.SpiderStatus status = 2;
// Free-form log message.
string info = 3;
}
// Response for Trans.SendLog. Intentionally empty; fields can be added
// later without breaking the RPC signature.
message SendLogResponse {
}
// Request for Trans.WaitOrder.
// NOTE(review): name is likely a typo for "WaitOrderRequest"; renaming is
// wire-safe but breaks generated code, so coordinate with all callers.
message WaiteOrderRequest {
// Actions this client is able to execute.
// NOTE(review): numbering starts at 2; if field 1 was deleted, add
// "reserved 1;" to prevent accidental reuse.
repeated Action actions = 2;
}
// An executable crawler task as registered by a client.
// NOTE(review): field numbering starts at 2; if field 1 was removed,
// declare "reserved 1;" to block reuse.
message Action {
// Task name / identifier.
string action = 2;
// Target storage table name.
string table = 3;
// Free-form description.
string info = 4;
// Channel the task belongs to.
string channel = 5;
// Cron expression used for scheduling.
string cron = 6;
}
// A command streamed from the scheduling center to a client.
message Order {
// Desired crawler status for this command.
crawler.grpc.entity.SpiderStatus status = 1;
// Name of the command/action to execute.
string action = 2; // action to run
// Target table for the action.
string table = 3;
}
// Request for Trans.SendContent: one piece of crawled content.
// NOTE(review): field numbers 1 and 3-5 are unused; if they belonged to
// deleted fields, declare "reserved 1, 3 to 5;" to prevent reuse.
message SendContentRequest {
// Name of the action that produced this content.
string action = 2;
// Title of the crawled item.
string title = 6;
// Name of the source website.
string fromSite = 7;
// Source URL.
string fromUrl = 8;
// Publication time on the source website (epoch; unit not specified —
// TODO confirm seconds vs. milliseconds).
int64 siteReleaseTime = 9;
// Popularity/heat score on the source website, if available.
int32 siteHot = 10;
// The crawled content body.
string content = 11;
}
// Response for Trans.SendContent. Intentionally empty; fields can be
// added later without breaking the RPC signature.
message SendContentResponse {
}
D:\test2\crawler\proto\global\entity.proto
syntax = "proto3";
package crawler.grpc.entity;
option go_package = "crawler/grpc/entity";
option java_multiple_files = true;
option java_package = "com.upa.bigdata.crawler.grpc.entity";
// Registration and scheduling state of one crawler action.
message SpiderAction {
// Client (crawler machine) id.
string clientID = 1;
// Crawler/action id.
string action = 2;
// Current status of the crawler.
SpiderStatus spiderStatus = 3;
// Target storage table name.
string table = 4;
// Status of the most recent crawl.
SpiderStatus lastStatus = 5;
// Time of the most recent crawl (epoch; unit not specified — TODO confirm).
int64 lastTime = 6;
// Start time of the most recent run.
int64 lastStartTime = 7;
// End time of the most recent run.
int64 lastEndTime = 8;
// Description of the most recent crawl.
string lastInfo = 9;
// Configured cron expression.
string cron = 10;
// Channel the action belongs to.
string channel = 11;
// Whether this crawler is disabled.
bool disable = 12;
}
// One crawler execution log entry.
message ActionLog {
// Client (crawler machine) id.
string clientID = 1;
// Crawler/action id.
string action = 2;
// Crawler status at log time.
SpiderStatus status = 3;
// Time the log entry was added (original comment said "end time" —
// TODO confirm which it is).
int64 addTime = 4;
// Log message. NOTE(review): the original comment here said "start
// time", which does not match a string info field; also field 5 is
// unused — if it was deleted, declare "reserved 5;".
string info = 6;
}
// Crawler lifecycle status.
// NOTE(review): values are unprefixed and mixed-case; proto3 enum values
// share the enclosing scope, so unprefixed names can collide with other
// enums in this package. Renaming breaks generated code — plan carefully.
enum SpiderStatus {
IDEAL = 0; // Idle (default). NOTE(review): likely a typo for "IDLE"; the original comment said "start", contradicting the name — the comments on IDEAL and START appear swapped; confirm intended semantics.
RUNNING = 1; // Currently executing.
END = 2; // Finished.
START = 3; // Starting/started. NOTE(review): original comment said "idle" — see note on IDEAL.
Waiting = 4; // Queued, waiting to run.
Error = 5; // An error occurred.
UNKNOW = -1; // Unknown (typo for "UNKNOWN"). NOTE(review): negative enum values encode as 10-byte varints; avoid in new fields.
Connect=60;// Client connected (original comment "链接" is a typo for "连接").
Disconnect=61;// Client disconnected.
}
// A crawled content record as stored by the scheduling center.
message SpiderContent {
// Client (crawler machine) id that produced this record.
string clientID = 1;
// Crawler/action id.
string action = 2;
// Processing status of the content.
ContentStatus status = 3;
// Time the record was added (epoch; unit not specified — TODO confirm).
int64 addTime = 4;
// Time the record was released/published.
int64 releaseTime = 5;
// Title of the item.
string title = 6;
// Name of the source website.
string fromSite = 7;
// Source URL.
string fromUrl = 8;
// Publication time on the source website.
int64 siteReleaseTime = 9;
// Popularity/heat score on the source website, if available.
int32 siteHot = 10;
// The content body.
string content=11;
// Record id (database primary key, presumably — verify against storage).
int64 id=12;
}
// Processing status of crawled content.
// NOTE(review): the zero value UNUSED carries business meaning; best
// practice is a <PREFIX>_UNSPECIFIED zero value, but changing it now
// would silently reinterpret existing data — do not renumber.
enum ContentStatus {
UNUSED = 0; // Not yet processed/used (default).
DISABLE = 1; // Disabled/hidden — presumably excluded from publishing; confirm.
RELEASE = 2; // Released/published.
CUNKNOW=-1; // Unknown ("C" prefix avoids a collision with SpiderStatus.UNKNOW in the same scope). Negative values encode as 10-byte varints.
}
D:\test2\crawler\proto\server\server.proto
syntax = "proto3";
package crawler.grpc.server;
option go_package = "crawler/grpc/server";
option java_multiple_files = true;
option java_package = "com.upa.bigdata.crawler.grpc.server";
import "global/entity.proto";
// Management-side control interface (original "管理段" is likely a typo
// for "管理端", i.e. the admin side).
service CrawlerService {
// Start the given action on its client.
rpc StartAction (Action) returns (NilResponse) {
}
// Stop the given action on its client.
rpc StopAction (Action) returns (NilResponse) {
}
// Query registered actions (paged).
rpc QueryAction (QueryActionRequest) returns (QueryActionResponse) {
}
// Assign an action to a channel.
rpc SetActionChannel (SetActionChannelRequest) returns (NilResponse) {
}
// Enable/disable automatic channel assignment for an action (presumably;
// confirm against the implementation).
rpc AutoToChannel (AutoToChannelRequest) returns (NilResponse) {
}
// Set the cron schedule of an action.
rpc SetActionCron (SetActionCronRequest) returns (NilResponse) {
}
// Query crawled content (paged).
rpc QueryContent (QueryContentRequest) returns (QueryContentResponse) {
}
// Query execution logs (paged).
rpc GetLog (GetLogRequest) returns (GetLogResponse) {
}
// Get the live status of the given actions.
rpc GetActionStatus (GetActionStatusRequest) returns (GetActionStatusResponse) {
}
// Query the summary (aggregated) table.
// NOTE(review): reuses QueryContent's request/response types; if the two
// RPCs ever need different fields, forking the messages later is a
// source-breaking change — consider dedicated messages up front.
rpc QueryContent2 (QueryContentRequest) returns (QueryContentResponse) {
}
// Mark a content row as released/published.
rpc ReleaseContent(ReleaseContentRequest)returns(ReleaseContentResponse){}
}
// Request for CrawlerService.ReleaseContent.
message ReleaseContentRequest{
// Id of the content record to release (matches SpiderContent.id).
int64 id=1;
}
// Response for CrawlerService.ReleaseContent. Intentionally empty.
message ReleaseContentResponse{
}
// Request for CrawlerService.GetActionStatus.
message GetActionStatusRequest {
// Actions whose live status should be returned.
repeated Action actions = 1;
}
// Response for CrawlerService.GetActionStatus.
message GetActionStatusResponse {
// Live status of one action on one client.
message State {
string action = 1;
string clientID = 2;
crawler.grpc.entity.SpiderStatus status = 3;
}
// One entry per requested action.
// NOTE(review): numbered 4 although it is the only field; if fields 1-3
// were deleted, declare "reserved 1 to 3;".
repeated State status = 4;
}
// Request for CrawlerService.GetLog: filter + paging for log queries.
message GetLogRequest {
// Filter: client id (empty presumably means "any" — confirm).
string clientID = 1;
// Filter: action id.
string action = 2;
// Filter: crawler status.
crawler.grpc.entity.SpiderStatus status = 3;
// Filter: addTime range start (epoch; unit not specified — TODO confirm).
int64 addTime1 = 4;
// Filter: addTime range end.
int64 addTime2 = 5;
// Page number.
int32 page = 6;
// Page size.
int32 pageSize = 7;
// Filter: log message text.
string info = 8;
}
// Response for CrawlerService.GetLog.
message GetLogResponse {
// Page number echoed back.
int32 page = 1;
// Page size echoed back.
int32 pageSize = 2;
// Total number of matching entries.
int32 total = 3;
// Log entries for this page.
repeated crawler.grpc.entity.ActionLog list = 4;
}
// Request for CrawlerService.QueryContent / QueryContent2: filter + paging.
message QueryContentRequest {
// Filter: client (crawler machine) id.
string clientID = 1;
// Filter: crawler/action id.
string action = 2;
// Filter: add-time range start.
int64 addTime1 = 3;
// Filter: add-time range end.
int64 addTime2 = 4;
// Filter: release-time range start.
int64 releaseTime1 = 5;
// Filter: release-time range end.
int64 releaseTime2 = 6;
// Filter: content processing status.
// NOTE(review): field name "Status" is capitalized, unlike every other
// field; renaming to "status" is wire-safe but changes JSON/codegen.
crawler.grpc.entity.ContentStatus Status = 7;
// Page number.
int32 page = 8;
// Page size.
int32 pageSize = 9;
// Filter: title text.
string title=10;
}
// Response for CrawlerService.QueryContent / QueryContent2.
message QueryContentResponse {
// Operation status.
NilResponse status = 1;
// Page number echoed back.
int32 page = 2;
// Page size echoed back.
int32 size = 3;
// Total number of matching records.
// NOTE(review): numbered 5 while "content" below is 4 — legal, but the
// out-of-order numbering is easy to misread; field 4 is not skipped.
int32 total = 5;
// Content records for this page.
repeated crawler.grpc.entity.SpiderContent content = 4;
}
// Request for CrawlerService.SetActionCron.
message SetActionCronRequest {
// Action to reschedule.
Action action = 1;
// New cron expression.
string cron = 2;
}
// Request for CrawlerService.AutoToChannel.
message AutoToChannelRequest {
// Action to configure.
Action action = 1;
// Whether automatic channel assignment is enabled (presumably — confirm).
bool auto = 2;
}
// Request for CrawlerService.SetActionChannel.
message SetActionChannelRequest {
// Action to configure.
Action action = 1;
// Channel to assign.
string channel = 2;
}
// Request for CrawlerService.QueryAction: filter + paging.
message QueryActionRequest {
// Filter: client id.
string clientID = 1;
// Filter: action id.
string action = 2;
// Filter: crawler status.
crawler.grpc.entity.SpiderStatus status = 3;
// Page number.
int32 page = 4;
// Page size.
int32 pageSize = 5;
}
// Response for CrawlerService.QueryAction.
message QueryActionResponse {
// Operation status.
// NOTE(review): field name "QueryStatus" is PascalCase, unlike the rest
// of the file; renaming is wire-safe but changes JSON/codegen.
NilResponse QueryStatus = 1;
// Matching actions for this page.
repeated crawler.grpc.entity.SpiderAction actions = 2;
// Page number echoed back.
int32 page = 3;
// Page size echoed back.
int32 size = 4;
// Total number of matching actions.
int32 total = 5;
}
// Identifies one action on one client (server-side view).
// NOTE(review): a second, different "Action" message exists in
// crawler.grpc.client — legal (different packages) but easy to confuse.
message Action {
// Client (crawler machine) id.
string clientID = 1;
// Action id.
string action = 2;
}
// Generic operation result used by several RPCs.
message NilResponse {
// Whether the operation succeeded.
bool ok = 1;
// Human-readable detail (e.g. an error description).
string message = 2;
}
D:\test2\crawler\proto\gen.bat
REM Generate Go gRPC code for all three proto files.
REM Must be run from D:\test2\crawler\proto so that the shared
REM import "global/entity.proto" resolves against protoc's default
REM include path (the current directory).
REM NOTE(review): --go_out=plugins=grpc is the legacy protoc-gen-go
REM (< v1.20 of google.golang.org/protobuf) syntax; newer toolchains
REM use separate --go_out and --go-grpc_out flags instead.
protoc --go_out=plugins=grpc,import_path=./:../../ ./client/client.proto
protoc --go_out=plugins=grpc,import_path=./:../../ ./global/entity.proto
protoc --go_out=plugins=grpc,import_path=./:../../ ./server/server.proto
例如上面:server.proto 和 client.proto 都通过 import "global/entity.proto" 共同引用了 entity.proto。
关键点在于:三个文件各自声明独立的 package 与 go_package,而 import 路径相对于 protoc 的包含目录(这里即 proto 目录)书写;只要在该目录下执行 gen.bat 中的 protoc 命令,共享的定义(如 SpiderStatus、SpiderAction)就能被 client 和 server 两个包正确解析并复用。