在 grpc 的包管理中,如果碰到多个包,它们之间如何关联?举个例子:
    项目名称为 crawler,按照 golang 的约定,项目文件夹的名称也就是 crawler
    D:\test2\crawler\proto\client\client.proto

    1. syntax = "proto3";
    2. package crawler.grpc.client;
    3. option go_package = "crawler/grpc/client";
    4. option java_multiple_files = true;
    5. option java_package = "com.upa.bigdata.crawler.grpc.client";
    6. import "global/entity.proto"; // shared types (SpiderStatus) live in the crawler.grpc.entity package
    7. service Trans {
    8. // Wait for commands to be issued; the server streams Orders back to the client.
    9. rpc WaitOrder (WaiteOrderRequest) returns (stream Order) { // NOTE(review): "WaiteOrderRequest" looks like a typo for "WaitOrderRequest"; renaming would break generated code, so it is left as-is
    10. }
    11. // Send crawled content to the dispatch center,
    12. // which stores and manages it (e.g. decides whether to push notifications; a separate push system is still needed later).
    13. rpc SendContent (SendContentRequest) returns (SendContentResponse) {
    14. }
    15. rpc SendLog (SendLogRequest) returns (SendLogResponse) {
    16. }
    17. }
    18. message SendLogRequest {
    19. string action = 1;
    20. crawler.grpc.entity.SpiderStatus status = 2; // cross-package reference into entity.proto
    21. string info = 3;
    22. }
    23. message SendLogResponse {
    24. }
    25. message WaiteOrderRequest { // NOTE(review): field numbering starts at 2; confirm field 1 is intentionally unused/reserved
    26. repeated Action actions = 2;
    27. }
    28. message Action { // NOTE(review): field numbering starts at 2 here as well — confirm
    29. string action = 2;
    30. string table = 3;
    31. string info = 4;
    32. string channel = 5;
    33. string cron = 6;
    34. }
    35. message Order {
    36. crawler.grpc.entity.SpiderStatus status = 1;
    37. // Name of the command to execute.
    38. string action = 2; // the action to execute
    39. string table = 3;
    40. }
    41. message SendContentRequest { // NOTE(review): field numbers appear to mirror SpiderContent in entity.proto (title=6 ... content=11) — presumably for easy mapping; confirm
    42. string action = 2;
    43. // Title
    44. string title = 6;
    45. // Name of the source website
    46. string fromSite = 7;
    47. // Source URL
    48. string fromUrl = 8;
    49. // Publish time on the source website
    50. int64 siteReleaseTime = 9;
    51. // Site popularity, if available
    52. int32 siteHot = 10;
    53. string content = 11;
    54. }
    55. message SendContentResponse {
    56. }

    D:\test2\crawler\proto\global\entity.proto

    1. syntax = "proto3";
    2. package crawler.grpc.entity;
    3. option go_package = "crawler/grpc/entity";
    4. option java_multiple_files = true;
    5. option java_package = "com.upa.bigdata.crawler.grpc.entity";
    6. // Spider (crawler) information.
    7. message SpiderAction {
    8. // Client ID
    9. string clientID = 1;
    10. // Spider ID
    11. string action = 2;
    12. // Current status of the spider
    13. SpiderStatus spiderStatus = 3;
    14. // Name of the corresponding table
    15. string table = 4;
    16. // Status of the last crawl
    17. SpiderStatus lastStatus = 5;
    18. // Time of the last crawl
    19. int64 lastTime = 6;
    20. // Start time of the last crawl
    21. int64 lastStartTime = 7;
    22. // End time of the last crawl
    23. int64 lastEndTime = 8;
    24. // Description of the last crawl
    25. string lastInfo = 9;
    26. // Configured cron expression
    27. string cron = 10;
    28. // Corresponding channel
    29. string channel = 11;
    30. // Disable this spider
    31. bool disable = 12;
    32. }
    33. // Execution log of a spider.
    34. message ActionLog {
    35. // Client ID
    36. string clientID = 1;
    37. // Spider ID
    38. string action = 2;
    39. // Spider status
    40. SpiderStatus status = 3;
    41. // Time the log entry was added (original comment said "end time", which does not match the field name)
    42. int64 addTime = 4; // NOTE(review): field number 5 is skipped — confirm it is reserved
    43. // Log message (the original "start time" comment here belonged to no field)
    44. string info = 6;
    45. }
    46. enum SpiderStatus {
    47. IDEAL = 0; // idle (the original comments on IDEAL and START were swapped; "IDEAL" is likely a typo for "IDLE")
    48. RUNNING = 1; // running
    49. END = 2; // finished
    50. START = 3; // started
    51. Waiting = 4; // waiting in queue
    52. Error = 5; // an error occurred
    53. UNKNOW = -1; // NOTE(review): negative enum values are legal in proto3 but encode inefficiently (10 bytes) — consider avoiding
    54. Connect=60;// client connected
    55. Disconnect=61;// client disconnected
    56. }
    57. message SpiderContent {
    58. // Spider client ID
    59. string clientID = 1;
    60. // Spider ID
    61. string action = 2;
    62. // Processing status of the content
    63. ContentStatus status = 3;
    64. // Time added
    65. int64 addTime = 4;
    66. // Publish time
    67. int64 releaseTime = 5;
    68. // Title
    69. string title = 6;
    70. // Name of the source website
    71. string fromSite = 7;
    72. // Source URL
    73. string fromUrl = 8;
    74. // Publish time on the source website
    75. int64 siteReleaseTime = 9;
    76. // Site popularity, if available
    77. int32 siteHot = 10;
    78. string content=11;
    79. int64 id=12;
    80. }
    81. // Status of crawled content.
    82. enum ContentStatus {
    83. UNUSED = 0;
    84. DISABLE = 1;
    85. RELEASE = 2;
    86. CUNKNOW=-1; // NOTE(review): likely means "unknown"; the C prefix presumably avoids clashing with SpiderStatus.UNKNOW, since file-level enum values share one scope — confirm
    87. }

    D:\test2\crawler\proto\server\server.proto

    1. syntax = "proto3";
    2. package crawler.grpc.server;
    3. option go_package = "crawler/grpc/server";
    4. option java_multiple_files = true;
    5. option java_package = "com.upa.bigdata.crawler.grpc.server";
    6. import "global/entity.proto"; // shared types (SpiderStatus, ContentStatus, ActionLog, ...) from crawler.grpc.entity
    7. // Control interface for the management side.
    8. service CrawlerService {
    9. rpc StartAction (Action) returns (NilResponse) {
    10. }
    11. rpc StopAction (Action) returns (NilResponse) {
    12. }
    13. rpc QueryAction (QueryActionRequest) returns (QueryActionResponse) {
    14. }
    15. rpc SetActionChannel (SetActionChannelRequest) returns (NilResponse) {
    16. }
    17. rpc AutoToChannel (AutoToChannelRequest) returns (NilResponse) {
    18. }
    19. rpc SetActionCron (SetActionCronRequest) returns (NilResponse) {
    20. }
    21. rpc QueryContent (QueryContentRequest) returns (QueryContentResponse) {
    22. }
    23. rpc GetLog (GetLogRequest) returns (GetLogResponse) {
    24. }
    25. rpc GetActionStatus (GetActionStatusRequest) returns (GetActionStatusResponse) {
    26. }
    27. // Query the summary table.
    28. rpc QueryContent2 (QueryContentRequest) returns (QueryContentResponse) {
    29. }
    30. rpc ReleaseContent(ReleaseContentRequest)returns(ReleaseContentResponse){}
    31. }
    32. message ReleaseContentRequest{
    33. int64 id=1;
    34. }
    35. message ReleaseContentResponse{
    36. }
    37. message GetActionStatusRequest {
    38. repeated Action actions = 1;
    39. }
    40. message GetActionStatusResponse {
    41. message State { // nested message: status of one action on one client
    42. string action = 1;
    43. string clientID = 2;
    44. crawler.grpc.entity.SpiderStatus status = 3;
    45. }
    46. repeated State status = 4; // NOTE(review): field numbers 1-3 are skipped — confirm they are reserved
    47. }
    48. message GetLogRequest {
    49. string clientID = 1;
    50. string action = 2;
    51. crawler.grpc.entity.SpiderStatus status = 3;
    52. int64 addTime1 = 4; // filter: addTime range start
    53. int64 addTime2 = 5; // filter: addTime range end
    54. int32 page = 6;
    55. int32 pageSize = 7;
    56. string info = 8;
    57. }
    58. message GetLogResponse {
    59. int32 page = 1;
    60. int32 pageSize = 2;
    61. int32 total = 3;
    62. repeated crawler.grpc.entity.ActionLog list = 4;
    63. }
    64. message QueryContentRequest {
    65. // Spider client ID
    66. string clientID = 1;
    67. // Spider ID
    68. string action = 2;
    69. // Added-time range: start
    70. int64 addTime1 = 3;
    71. // Added-time range: end
    72. int64 addTime2 = 4;
    73. // Publish-time range: start
    74. int64 releaseTime1 = 5;
    75. // Publish-time range: end
    76. int64 releaseTime2 = 6;
    77. // Status
    78. crawler.grpc.entity.ContentStatus Status = 7; // NOTE(review): capitalized "Status" is inconsistent with the lowercase field names used elsewhere; renaming would change generated accessors
    79. int32 page = 8;
    80. int32 pageSize = 9;
    81. string title=10;
    82. }
    83. message QueryContentResponse {
    84. NilResponse status = 1;
    85. int32 page = 2;
    86. int32 size = 3;
    87. int32 total = 5; // NOTE(review): declared out of order (5 before 4) — legal, but confusing to readers
    88. repeated crawler.grpc.entity.SpiderContent content = 4;
    89. }
    90. message SetActionCronRequest {
    91. Action action = 1;
    92. string cron = 2;
    93. }
    94. message AutoToChannelRequest {
    95. Action action = 1;
    96. bool auto = 2;
    97. }
    98. message SetActionChannelRequest {
    99. Action action = 1;
    100. string channel = 2;
    101. }
    102. message QueryActionRequest {
    103. string clientID = 1;
    104. string action = 2;
    105. crawler.grpc.entity.SpiderStatus status = 3;
    106. int32 page = 4;
    107. int32 pageSize = 5;
    108. }
    109. message QueryActionResponse {
    110. NilResponse QueryStatus = 1;
    111. repeated crawler.grpc.entity.SpiderAction actions = 2;
    112. int32 page = 3;
    113. int32 size = 4;
    114. int32 total = 5;
    115. }
    116. message Action { // identifies one spider (action) on one client
    117. string clientID = 1;
    118. string action = 2;
    119. }
    120. message NilResponse { // generic ok/error result used by the mutating RPCs
    121. bool ok = 1;
    122. string message = 2;
    123. }

    D:\test2\crawler\proto\gen.bat

    REM Run from D:\test2\crawler\proto so that import "global/entity.proto" resolves against the default proto_path (the current directory).
    REM --go_out takes the form "<options>:<output dir>": here the options are "plugins=grpc,import_path=./" and the output directory is "../../".
    REM NOTE(review): each .proto already declares "option go_package", which presumably takes precedence over import_path — confirm import_path is still needed.
    1. protoc --go_out=plugins=grpc,import_path=./:../../ ./client/client.proto
    2. protoc --go_out=plugins=grpc,import_path=./:../../ ./global/entity.proto
    3. protoc --go_out=plugins=grpc,import_path=./:../../ ./server/server.proto

    例如上面:server.proto 和 client.proto 都共同引用了 entity.proto——只要两个文件使用相同的 import 路径("global/entity.proto")和相同的 package 声明(crawler.grpc.entity),protoc 就能把它们关联到同一份生成代码上,其余细节可以自己体会。