forked from milvus-io/milvus
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathworker.proto
225 lines (200 loc) · 5.48 KB
/
worker.proto
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
syntax = "proto3";
package milvus.proto.index;
option go_package = "github.com/milvus-io/milvus/pkg/proto/workerpb";
import "common.proto";
import "internal.proto";
import "milvus.proto";
import "schema.proto";
import "data_coord.proto";
import "index_coord.proto";
// Worker-side service that accepts and tracks background jobs.
// The V1 RPCs (CreateJob/QueryJobs/DropJobs) deal only with index builds,
// keyed by buildID; the V2 RPCs carry an explicit index.JobType and also
// cover analyze and stats jobs, keyed by taskID.
service IndexNode {
// Standard component state/liveness probe.
rpc GetComponentStates(milvus.GetComponentStatesRequest)
returns (milvus.ComponentStates) {
}
// Returns the statistics channel name as a plain string response.
rpc GetStatisticsChannel(internal.GetStatisticsChannelRequest)
returns (milvus.StringResponse) {
}
// Submits one index-build job (V1); parameters in CreateJobRequest.
rpc CreateJob(CreateJobRequest) returns (common.Status) {
}
// Queries the progress of index-build jobs by buildID (V1).
rpc QueryJobs(QueryJobsRequest) returns (QueryJobsResponse) {
}
// Drops index-build jobs by buildID (V1).
rpc DropJobs(DropJobsRequest) returns (common.Status) {
}
// Reports aggregate job counts and slot availability for this worker.
rpc GetJobStats(GetJobStatsRequest) returns (GetJobStatsResponse) {
}
// Exposes the node's current runtime configuration values.
rpc ShowConfigurations(internal.ShowConfigurationsRequest)
returns (internal.ShowConfigurationsResponse) {
}
// https://wiki.lfaidata.foundation/display/MIL/MEP+8+--+Add+metrics+for+proxy
rpc GetMetrics(milvus.GetMetricsRequest)
returns (milvus.GetMetricsResponse) {
}
// Submits a typed job (V2); the oneof in CreateJobV2Request selects an
// index, analyze, or stats payload matching job_type.
rpc CreateJobV2(CreateJobV2Request) returns (common.Status) {
}
// Queries jobs of one job_type by taskID (V2); response is a typed oneof.
rpc QueryJobsV2(QueryJobsV2Request) returns (QueryJobsV2Response) {
}
// Drops jobs of one job_type by taskID (V2).
rpc DropJobsV2(DropJobsV2Request) returns (common.Status) {
}
}
// Full parameter set for one index-build job. Used directly by the V1
// CreateJob RPC and embedded as index_request in CreateJobV2Request.
message CreateJobRequest {
// Cluster the job belongs to; echoed in QueryJobsResponse.clusterID.
string clusterID = 1;
// Path prefix under which produced index files are written.
string index_file_prefix = 2;
// Unique identifier of this index build; used to query/drop the job.
int64 buildID = 3;
// Input data file paths to build the index from.
repeated string data_paths = 4;
// Version counter of the index definition.
int64 index_version = 5;
int64 indexID = 6;
string index_name = 7;
// Object-storage access settings for reading inputs / writing outputs.
index.StorageConfig storage_config = 8;
// Index-specific build parameters (e.g. index type settings) as KV pairs.
repeated common.KeyValuePair index_params = 9;
// Field/type-level parameters as KV pairs.
repeated common.KeyValuePair type_params = 10;
int64 num_rows = 11;
// NOTE(review): engine/index-format version fields below — exact
// versioning semantics are defined outside this file.
int32 current_index_version = 12;
// Identity of the data being indexed: collection/partition/segment/field.
int64 collectionID = 13;
int64 partitionID = 14;
int64 segmentID = 15;
int64 fieldID = 16;
string field_name = 17;
schema.DataType field_type = 18;
// Intermediate/source store location and version — presumably where the
// segment data is staged; confirm against the worker implementation.
string store_path = 19;
int64 store_version = 20;
// Destination path for the built index.
string index_store_path = 21;
// Vector dimension (meaningful for vector fields).
int64 dim = 22;
repeated int64 data_ids = 23;
// Extra scalar fields that may assist the build (optional inputs).
repeated index.OptionalFieldInfo optional_scalar_fields = 24;
// Full schema of the indexed field (richer than fieldID/field_name above).
schema.FieldSchema field = 25;
bool partition_key_isolation = 26;
int32 current_scalar_index_version = 27;
}
// Request for the V1 QueryJobs RPC: look up index builds by buildID.
message QueryJobsRequest {
// Cluster whose jobs are being queried.
string clusterID = 1;
// Index-build identifiers to query (matches CreateJobRequest.buildID).
repeated int64 buildIDs = 2;
}
// Response for the V1 QueryJobs RPC.
message QueryJobsResponse {
// RPC-level status (errors reported here, not via transport failure).
common.Status status = 1;
// Echo of the requesting cluster's identifier.
string clusterID = 2;
// Per-build progress entries; see IndexTaskInfo.
repeated IndexTaskInfo index_infos = 3;
}
// Request for the V1 DropJobs RPC: remove index builds by buildID.
message DropJobsRequest {
// Cluster whose jobs are being dropped.
string clusterID = 1;
// Index-build identifiers to drop.
repeated int64 buildIDs = 2;
}
// Empty request for GetJobStats; kept as a dedicated message so fields
// can be added later without breaking the RPC signature.
message GetJobStatsRequest {
}
// Aggregate job/slot statistics reported by a worker.
message GetJobStatsResponse {
common.Status status = 1;
// Job counts by state: total, currently running, and still queued.
int64 total_job_num = 2;
int64 in_progress_job_num = 3;
int64 enqueue_job_num = 4;
// Remaining task slots on this worker (capacity indicator).
int64 task_slots = 5;
// Per-job details; see index.JobInfo.
repeated index.JobInfo job_infos = 6;
// Whether disk-based indexing is enabled on this worker.
bool enable_disk = 7;
}
// Parameters for an analyze job (clustering analysis of a field's data);
// submitted via CreateJobV2Request.analyze_request.
message AnalyzeRequest {
string clusterID = 1;
// Task identifier used by the V2 query/drop RPCs.
int64 taskID = 2;
// Identity of the analyzed data: collection/partition/field.
int64 collectionID = 3;
int64 partitionID = 4;
int64 fieldID = 5;
string fieldName = 6;
schema.DataType field_type = 7;
// Per-segment statistics, keyed by segment ID.
map<int64, index.SegmentStats> segment_stats = 8;
int64 version = 9;
// Object-storage access settings.
index.StorageConfig storage_config = 10;
// Vector dimension of the analyzed field.
int64 dim = 11;
// Clustering tuning knobs. Ratios are fractions; exact semantics
// (e.g. ratio of what) are defined by the analyze implementation —
// NOTE(review): confirm against the worker's clustering code.
double max_train_size_ratio = 12;
int64 num_clusters = 13;
// Full schema of the analyzed field.
schema.FieldSchema field = 14;
double min_cluster_size_ratio = 15;
double max_cluster_size_ratio = 16;
int64 max_cluster_size = 17;
}
// Parameters for a stats job on one segment; submitted via
// CreateJobV2Request.stats_request.
message CreateStatsRequest {
string clusterID = 1;
// Task identifier used by the V2 query/drop RPCs.
int64 taskID = 2;
// Identity of the source data: collection/partition/channel/segment.
int64 collectionID = 3;
int64 partitionID = 4;
string insert_channel = 5;
int64 segmentID = 6;
// Input binlogs: inserted rows and the deletions to apply to them.
repeated data.FieldBinlog insert_logs = 7;
repeated data.FieldBinlog delta_logs = 8;
// Object-storage access settings.
index.StorageConfig storage_config = 9;
// Schema of the whole collection (all fields, not just one).
schema.CollectionSchema schema = 10;
// Which stats sub-job to run; see index.StatsSubJob.
index.StatsSubJob subJobType = 11;
// Segment that receives the job's output (may differ from segmentID).
int64 targetSegmentID = 12;
// Log ID range allocated for output binlogs.
int64 startLogID = 13;
int64 endLogID = 14;
int64 num_rows = 15;
// Collection TTL — units not specified in this file; confirm upstream.
int64 collection_ttl = 16;
// Current timestamp (Milvus hybrid-timestamp style uint64; confirm).
uint64 current_ts = 17;
int64 task_version = 18;
// Upper bound on produced binlog size, in bytes presumably — confirm.
uint64 binlogMaxSize = 19;
}
// Typed job submission for the V2 CreateJobV2 RPC. Exactly one payload in
// the oneof is set; it should correspond to job_type.
message CreateJobV2Request {
string clusterID = 1;
// Task identifier shared by all V2 job kinds.
int64 taskID = 2;
// Discriminator for the payload below.
index.JobType job_type = 3;
oneof request {
AnalyzeRequest analyze_request = 4;
CreateJobRequest index_request = 5;
CreateStatsRequest stats_request = 6;
}
}
// Request for the V2 QueryJobsV2 RPC: look up tasks of one job type.
message QueryJobsV2Request {
string clusterID = 1;
// Task identifiers to query (matches CreateJobV2Request.taskID).
repeated int64 taskIDs = 2;
// All queried tasks must be of this job type.
index.JobType job_type = 3;
}
// Progress/result record for one index build, returned by QueryJobs (V1)
// and inside IndexJobResults (V2).
message IndexTaskInfo {
// Build this record describes.
int64 buildID = 1;
// Current build state; see common.IndexState.
common.IndexState state = 2;
// Keys of the produced index files (set on success).
repeated string index_file_keys = 3;
// Serialized size of the built index — presumably bytes; confirm.
uint64 serialized_size = 4;
// Human-readable failure description (set on failure).
string fail_reason = 5;
// Version stamps of the index/engine that produced this build.
int32 current_index_version = 6;
int64 index_store_version = 7;
// In-memory size of the index — presumably bytes; confirm.
uint64 mem_size = 8;
int32 current_scalar_index_version = 9;
}
// Batch of index-build results; one arm of QueryJobsV2Response.result.
message IndexJobResults {
repeated IndexTaskInfo results = 1;
}
// Progress/result record for one analyze task.
message AnalyzeResult {
int64 taskID = 1;
// Current task state; see index.JobState.
index.JobState state = 2;
// Human-readable failure description (set on failure).
string fail_reason = 3;
// Path/key of the produced centroids file (set on success).
string centroids_file = 4;
}
// Batch of analyze results; one arm of QueryJobsV2Response.result.
message AnalyzeResults {
repeated AnalyzeResult results = 1;
}
// Progress/result record for one stats task.
message StatsResult {
int64 taskID = 1;
// Current task state; see index.JobState.
index.JobState state = 2;
// Human-readable failure description (set on failure).
string fail_reason = 3;
// Identity of the segment the outputs belong to.
int64 collectionID = 4;
int64 partitionID = 5;
int64 segmentID = 6;
string channel = 7;
// Output binlogs produced by the task.
repeated data.FieldBinlog insert_logs = 8;
repeated data.FieldBinlog stats_logs = 9;
// Text-index stats keyed by field ID.
map<int64, data.TextIndexStats> text_stats_logs = 10;
int64 num_rows = 11;
// BM25 statistics binlogs (full-text search support).
repeated data.FieldBinlog bm25_logs = 12;
}
// Batch of stats results; one arm of QueryJobsV2Response.result.
message StatsResults {
repeated StatsResult results = 1;
}
// Response for the V2 QueryJobsV2 RPC. The populated oneof arm matches
// the job_type from the request.
message QueryJobsV2Response {
common.Status status = 1;
// Echo of the requesting cluster's identifier.
string clusterID = 2;
oneof result {
IndexJobResults index_job_results = 3;
AnalyzeResults analyze_job_results = 4;
StatsResults stats_job_results = 5;
}
}
// Request for the V2 DropJobsV2 RPC: remove tasks of one job type.
message DropJobsV2Request {
string clusterID = 1;
// Task identifiers to drop (matches CreateJobV2Request.taskID).
repeated int64 taskIDs = 2;
// All dropped tasks must be of this job type.
index.JobType job_type = 3;
}