diff --git a/acls/proto/acl.pb.go b/acls/proto/acl.pb.go index b4c9a2af61c..c2f0bd7ab76 100644 --- a/acls/proto/acl.pb.go +++ b/acls/proto/acl.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: acl.proto package proto diff --git a/actions/proto/transport.pb.go b/actions/proto/transport.pb.go index cd1f6d0b222..6922a9ee205 100644 --- a/actions/proto/transport.pb.go +++ b/actions/proto/transport.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: transport.proto package proto @@ -218,6 +218,9 @@ type FileBuffer struct { // Set when the file is sparse. Index *Index `protobuf:"bytes,6,opt,name=index,proto3" json:"index,omitempty"` Mtime int64 `protobuf:"varint,10,opt,name=mtime,proto3" json:"mtime,omitempty"` + Atime int64 `protobuf:"varint,11,opt,name=atime,proto3" json:"atime,omitempty"` + Ctime int64 `protobuf:"varint,12,opt,name=ctime,proto3" json:"ctime,omitempty"` + Btime int64 `protobuf:"varint,13,opt,name=btime,proto3" json:"btime,omitempty"` } func (x *FileBuffer) Reset() { @@ -322,6 +325,27 @@ func (x *FileBuffer) GetMtime() int64 { return 0 } +func (x *FileBuffer) GetAtime() int64 { + if x != nil { + return x.Atime + } + return 0 +} + +func (x *FileBuffer) GetCtime() int64 { + if x != nil { + return x.Ctime + } + return 0 +} + +func (x *FileBuffer) GetBtime() int64 { + if x != nil { + return x.Btime + } + return 0 +} + type ForemanCheckin struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -408,7 +432,7 @@ var file_transport_proto_rawDesc = []byte{ 0xfc, 0xe3, 0xc4, 0x01, 0x29, 0x12, 0x27, 0x54, 0x68, 0x65, 0x20, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x6f, 0x20, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x76, 0x65, 0x20, 0x74, 0x68, 0x65, 0x20, 0x66, 0x69, 0x6c, 0x65, 0x2e, 
0x52, 0x08, - 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x22, 0xc7, 0x02, 0x0a, 0x0a, 0x46, 0x69, 0x6c, + 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x22, 0x89, 0x03, 0x0a, 0x0a, 0x46, 0x69, 0x6c, 0x65, 0x42, 0x75, 0x66, 0x66, 0x65, 0x72, 0x12, 0x2b, 0x0a, 0x08, 0x70, 0x61, 0x74, 0x68, 0x73, 0x70, 0x65, 0x63, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x53, 0x70, 0x65, 0x63, 0x52, 0x08, 0x70, 0x61, 0x74, 0x68, @@ -429,18 +453,22 @@ var file_transport_proto_rawDesc = []byte{ 0x78, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x14, 0x0a, 0x05, 0x6d, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6d, 0x74, 0x69, - 0x6d, 0x65, 0x22, 0x79, 0x0a, 0x0e, 0x46, 0x6f, 0x72, 0x65, 0x6d, 0x61, 0x6e, 0x43, 0x68, 0x65, - 0x63, 0x6b, 0x69, 0x6e, 0x12, 0x2e, 0x0a, 0x13, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x68, 0x75, 0x6e, - 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x04, 0x52, 0x11, 0x6c, 0x61, 0x73, 0x74, 0x48, 0x75, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x12, 0x37, 0x0a, 0x18, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x15, 0x6c, 0x61, 0x73, 0x74, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x35, 0x5a, - 0x33, 0x77, 0x77, 0x77, 0x2e, 0x76, 0x65, 0x6c, 0x6f, 0x63, 0x69, 0x64, 0x65, 0x78, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2f, 0x76, 0x65, 0x6c, 0x6f, 0x63, 0x69, - 0x72, 0x61, 0x70, 0x74, 0x6f, 0x72, 0x2f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 
0x33, + 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x05, 0x61, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x74, 0x69, 0x6d, + 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x63, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x14, + 0x0a, 0x05, 0x62, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x62, + 0x74, 0x69, 0x6d, 0x65, 0x22, 0x79, 0x0a, 0x0e, 0x46, 0x6f, 0x72, 0x65, 0x6d, 0x61, 0x6e, 0x43, + 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x12, 0x2e, 0x0a, 0x13, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x68, + 0x75, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x04, 0x52, 0x11, 0x6c, 0x61, 0x73, 0x74, 0x48, 0x75, 0x6e, 0x74, 0x54, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x37, 0x0a, 0x18, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x65, + 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x15, 0x6c, 0x61, 0x73, 0x74, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x42, + 0x35, 0x5a, 0x33, 0x77, 0x77, 0x77, 0x2e, 0x76, 0x65, 0x6c, 0x6f, 0x63, 0x69, 0x64, 0x65, 0x78, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2f, 0x76, 0x65, 0x6c, 0x6f, + 0x63, 0x69, 0x72, 0x61, 0x70, 0x74, 0x6f, 0x72, 0x2f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/actions/proto/transport.proto b/actions/proto/transport.proto index 1cd59eb03b8..c53a1485fc1 100644 --- a/actions/proto/transport.proto +++ b/actions/proto/transport.proto @@ -55,6 +55,9 @@ message FileBuffer { Index index = 6; int64 mtime = 10; + int64 atime = 11; + int64 ctime = 12; + int64 btime = 13; } message ForemanCheckin { diff --git a/actions/proto/vql.pb.go b/actions/proto/vql.pb.go index 
9333621e235..ccf63beced3 100644 --- a/actions/proto/vql.pb.go +++ b/actions/proto/vql.pb.go @@ -3,7 +3,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: vql.proto package proto diff --git a/api/proto/api.pb.go b/api/proto/api.pb.go index 606eb35e987..f88ae387bff 100644 --- a/api/proto/api.pb.go +++ b/api/proto/api.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: api.proto package proto diff --git a/api/proto/api.pb.gw.go b/api/proto/api.pb.gw.go index a82389bccb9..68eab04f5f0 100644 --- a/api/proto/api.pb.gw.go +++ b/api/proto/api.pb.gw.go @@ -22,7 +22,7 @@ import ( "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" "google.golang.org/protobuf/proto" - proto_6 "www.velocidex.com/golang/velociraptor/artifacts/proto" + proto_4 "www.velocidex.com/golang/velociraptor/artifacts/proto" proto_0 "www.velocidex.com/golang/velociraptor/flows/proto" ) @@ -1319,7 +1319,7 @@ var ( ) func request_API_GetToolInfo_0(ctx context.Context, marshaler runtime.Marshaler, client APIClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq proto_6.Tool + var protoReq proto_4.Tool var metadata runtime.ServerMetadata if err := req.ParseForm(); err != nil { @@ -1335,7 +1335,7 @@ func request_API_GetToolInfo_0(ctx context.Context, marshaler runtime.Marshaler, } func local_request_API_GetToolInfo_0(ctx context.Context, marshaler runtime.Marshaler, server APIServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq proto_6.Tool + var protoReq proto_4.Tool var metadata runtime.ServerMetadata if err := req.ParseForm(); err != nil { @@ -1351,7 +1351,7 @@ func local_request_API_GetToolInfo_0(ctx context.Context, marshaler runtime.Mars } func 
request_API_SetToolInfo_0(ctx context.Context, marshaler runtime.Marshaler, client APIClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq proto_6.Tool + var protoReq proto_4.Tool var metadata runtime.ServerMetadata newReader, berr := utilities.IOReaderFactory(req.Body) @@ -1368,7 +1368,7 @@ func request_API_SetToolInfo_0(ctx context.Context, marshaler runtime.Marshaler, } func local_request_API_SetToolInfo_0(ctx context.Context, marshaler runtime.Marshaler, server APIServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq proto_6.Tool + var protoReq proto_4.Tool var metadata runtime.ServerMetadata newReader, berr := utilities.IOReaderFactory(req.Body) diff --git a/api/proto/artifacts.pb.go b/api/proto/artifacts.pb.go index 9c78e717fae..17ca2a1b73f 100644 --- a/api/proto/artifacts.pb.go +++ b/api/proto/artifacts.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: artifacts.proto package proto diff --git a/api/proto/clients.pb.go b/api/proto/clients.pb.go index b79608d3443..4243da757a3 100644 --- a/api/proto/clients.pb.go +++ b/api/proto/clients.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: clients.proto package proto diff --git a/api/proto/completions.pb.go b/api/proto/completions.pb.go index 88ac29d1592..f7a4426184e 100644 --- a/api/proto/completions.pb.go +++ b/api/proto/completions.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: completions.proto package proto diff --git a/api/proto/csv.pb.go b/api/proto/csv.pb.go index 7652344655e..9991ec7c2aa 100644 --- a/api/proto/csv.pb.go +++ b/api/proto/csv.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: csv.proto package proto diff --git a/api/proto/download.pb.go b/api/proto/download.pb.go index 507d03ec626..8721f7fa5f2 100644 --- a/api/proto/download.pb.go +++ b/api/proto/download.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: download.proto package proto diff --git a/api/proto/flows.pb.go b/api/proto/flows.pb.go index 21078a3a48b..40f1f7752c2 100644 --- a/api/proto/flows.pb.go +++ b/api/proto/flows.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: flows.proto package proto diff --git a/api/proto/hunts.pb.go b/api/proto/hunts.pb.go index 3c749bdbd58..ecc16d49fca 100644 --- a/api/proto/hunts.pb.go +++ b/api/proto/hunts.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: hunts.proto package proto @@ -769,6 +769,61 @@ func (x *GetHuntResultsRequest) GetArtifact() string { return "" } +type FlowAssignment struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` + FlowId string `protobuf:"bytes,2,opt,name=flow_id,json=flowId,proto3" json:"flow_id,omitempty"` +} + +func (x *FlowAssignment) Reset() { + *x = FlowAssignment{} + if protoimpl.UnsafeEnabled { + mi := &file_hunts_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FlowAssignment) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FlowAssignment) ProtoMessage() {} + +func (x *FlowAssignment) ProtoReflect() protoreflect.Message { + mi := &file_hunts_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FlowAssignment.ProtoReflect.Descriptor instead. 
+func (*FlowAssignment) Descriptor() ([]byte, []int) { + return file_hunts_proto_rawDescGZIP(), []int{9} +} + +func (x *FlowAssignment) GetClientId() string { + if x != nil { + return x.ClientId + } + return "" +} + +func (x *FlowAssignment) GetFlowId() string { + if x != nil { + return x.FlowId + } + return "" +} + type HuntMutation struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -779,12 +834,16 @@ type HuntMutation struct { Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` State Hunt_State `protobuf:"varint,4,opt,name=state,proto3,enum=proto.Hunt_State" json:"state,omitempty"` StartTime uint64 `protobuf:"varint,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // A mutation can directly assign an existing flow to the + // hunt. This allows a flow to be rerun and added to the hunt + // later. + Assignment *FlowAssignment `protobuf:"bytes,6,opt,name=assignment,proto3" json:"assignment,omitempty"` } func (x *HuntMutation) Reset() { *x = HuntMutation{} if protoimpl.UnsafeEnabled { - mi := &file_hunts_proto_msgTypes[9] + mi := &file_hunts_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -797,7 +856,7 @@ func (x *HuntMutation) String() string { func (*HuntMutation) ProtoMessage() {} func (x *HuntMutation) ProtoReflect() protoreflect.Message { - mi := &file_hunts_proto_msgTypes[9] + mi := &file_hunts_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -810,7 +869,7 @@ func (x *HuntMutation) ProtoReflect() protoreflect.Message { // Deprecated: Use HuntMutation.ProtoReflect.Descriptor instead. 
func (*HuntMutation) Descriptor() ([]byte, []int) { - return file_hunts_proto_rawDescGZIP(), []int{9} + return file_hunts_proto_rawDescGZIP(), []int{10} } func (x *HuntMutation) GetHuntId() string { @@ -848,6 +907,13 @@ func (x *HuntMutation) GetStartTime() uint64 { return 0 } +func (x *HuntMutation) GetAssignment() *FlowAssignment { + if x != nil { + return x.Assignment + } + return nil +} + var File_hunts_proto protoreflect.FileDescriptor var file_hunts_proto_rawDesc = []byte{ @@ -1042,22 +1108,30 @@ var file_hunts_proto_rawDesc = []byte{ 0x07, 0x68, 0x75, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x68, 0x75, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, - 0x63, 0x74, 0x22, 0xb9, 0x01, 0x0a, 0x0c, 0x48, 0x75, 0x6e, 0x74, 0x4d, 0x75, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x17, 0x0a, 0x07, 0x68, 0x75, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x68, 0x75, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x26, 0x0a, 0x05, - 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x2e, 0x48, 0x75, 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x05, 0x73, - 0x74, 0x61, 0x74, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x48, 0x75, - 0x6e, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, - 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 
0x42, 0x31, - 0x5a, 0x2f, 0x77, 0x77, 0x77, 0x2e, 0x76, 0x65, 0x6c, 0x6f, 0x63, 0x69, 0x64, 0x65, 0x78, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2f, 0x76, 0x65, 0x6c, 0x6f, 0x63, - 0x69, 0x72, 0x61, 0x70, 0x74, 0x6f, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x63, 0x74, 0x22, 0x46, 0x0a, 0x0e, 0x46, 0x6c, 0x6f, 0x77, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, + 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x49, + 0x64, 0x12, 0x17, 0x0a, 0x07, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x06, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x22, 0xf0, 0x01, 0x0a, 0x0c, 0x48, + 0x75, 0x6e, 0x74, 0x4d, 0x75, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x17, 0x0a, 0x07, 0x68, + 0x75, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x68, 0x75, + 0x6e, 0x74, 0x49, 0x64, 0x12, 0x26, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x48, 0x75, 0x6e, 0x74, + 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x73, 0x12, 0x20, 0x0a, 0x0b, + 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, + 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x48, 0x75, 0x6e, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, + 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, + 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x0a, 0x61, 
0x73, 0x73, 0x69, 0x67, 0x6e, + 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2e, 0x46, 0x6c, 0x6f, 0x77, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, + 0x74, 0x52, 0x0a, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x31, 0x5a, + 0x2f, 0x77, 0x77, 0x77, 0x2e, 0x76, 0x65, 0x6c, 0x6f, 0x63, 0x69, 0x64, 0x65, 0x78, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2f, 0x76, 0x65, 0x6c, 0x6f, 0x63, 0x69, + 0x72, 0x61, 0x70, 0x74, 0x6f, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1073,7 +1147,7 @@ func file_hunts_proto_rawDescGZIP() []byte { } var file_hunts_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_hunts_proto_msgTypes = make([]protoimpl.MessageInfo, 10) +var file_hunts_proto_msgTypes = make([]protoimpl.MessageInfo, 11) var file_hunts_proto_goTypes = []interface{}{ (HuntOsCondition_OS)(0), // 0: proto.HuntOsCondition.OS (Hunt_State)(0), // 1: proto.Hunt.State @@ -1086,28 +1160,30 @@ var file_hunts_proto_goTypes = []interface{}{ (*ListHuntsResponse)(nil), // 8: proto.ListHuntsResponse (*GetHuntRequest)(nil), // 9: proto.GetHuntRequest (*GetHuntResultsRequest)(nil), // 10: proto.GetHuntResultsRequest - (*HuntMutation)(nil), // 11: proto.HuntMutation - (*AvailableDownloads)(nil), // 12: proto.AvailableDownloads - (*proto.ArtifactCollectorArgs)(nil), // 13: proto.ArtifactCollectorArgs + (*FlowAssignment)(nil), // 11: proto.FlowAssignment + (*HuntMutation)(nil), // 12: proto.HuntMutation + (*AvailableDownloads)(nil), // 13: proto.AvailableDownloads + (*proto.ArtifactCollectorArgs)(nil), // 14: proto.ArtifactCollectorArgs } var file_hunts_proto_depIdxs = []int32{ 0, // 0: proto.HuntOsCondition.os:type_name -> proto.HuntOsCondition.OS 2, // 1: proto.HuntCondition.excluded_labels:type_name -> proto.HuntLabelCondition 2, // 2: proto.HuntCondition.labels:type_name -> 
proto.HuntLabelCondition 3, // 3: proto.HuntCondition.os:type_name -> proto.HuntOsCondition - 12, // 4: proto.HuntStats.available_downloads:type_name -> proto.AvailableDownloads - 13, // 5: proto.Hunt.start_request:type_name -> proto.ArtifactCollectorArgs + 13, // 4: proto.HuntStats.available_downloads:type_name -> proto.AvailableDownloads + 14, // 5: proto.Hunt.start_request:type_name -> proto.ArtifactCollectorArgs 4, // 6: proto.Hunt.condition:type_name -> proto.HuntCondition 5, // 7: proto.Hunt.stats:type_name -> proto.HuntStats 1, // 8: proto.Hunt.state:type_name -> proto.Hunt.State 6, // 9: proto.ListHuntsResponse.items:type_name -> proto.Hunt 5, // 10: proto.HuntMutation.stats:type_name -> proto.HuntStats 1, // 11: proto.HuntMutation.state:type_name -> proto.Hunt.State - 12, // [12:12] is the sub-list for method output_type - 12, // [12:12] is the sub-list for method input_type - 12, // [12:12] is the sub-list for extension type_name - 12, // [12:12] is the sub-list for extension extendee - 0, // [0:12] is the sub-list for field type_name + 11, // 12: proto.HuntMutation.assignment:type_name -> proto.FlowAssignment + 13, // [13:13] is the sub-list for method output_type + 13, // [13:13] is the sub-list for method input_type + 13, // [13:13] is the sub-list for extension type_name + 13, // [13:13] is the sub-list for extension extendee + 0, // [0:13] is the sub-list for field type_name } func init() { file_hunts_proto_init() } @@ -1226,6 +1302,18 @@ func file_hunts_proto_init() { } } file_hunts_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FlowAssignment); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_hunts_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*HuntMutation); i { case 0: return &v.state @@ -1248,7 +1336,7 @@ func file_hunts_proto_init() { GoPackagePath: 
reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_hunts_proto_rawDesc, NumEnums: 2, - NumMessages: 10, + NumMessages: 11, NumExtensions: 0, NumServices: 0, }, diff --git a/api/proto/hunts.proto b/api/proto/hunts.proto index 05dfdcb028f..56a01f42553 100644 --- a/api/proto/hunts.proto +++ b/api/proto/hunts.proto @@ -162,10 +162,20 @@ message GetHuntResultsRequest { string artifact = 4; } +message FlowAssignment { + string client_id = 1; + string flow_id = 2; +} + message HuntMutation { string hunt_id = 1; HuntStats stats = 2; string description = 3; Hunt.State state = 4; uint64 start_time = 5; + + // A mutation can directly assign an existing flow to the + // hunt. This allows a flow to be rerun and added to the hunt + // later. + FlowAssignment assignment = 6; } \ No newline at end of file diff --git a/api/proto/notebooks.pb.go b/api/proto/notebooks.pb.go index 06d25fa9265..8a1035db846 100644 --- a/api/proto/notebooks.pb.go +++ b/api/proto/notebooks.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: notebooks.proto package proto diff --git a/api/proto/objects.pb.go b/api/proto/objects.pb.go index 511f3ab2395..d7e57117155 100644 --- a/api/proto/objects.pb.go +++ b/api/proto/objects.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: objects.proto package proto diff --git a/api/proto/users.pb.go b/api/proto/users.pb.go index 0dcbc66538f..d8b41bc1b1c 100644 --- a/api/proto/users.pb.go +++ b/api/proto/users.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: users.proto package proto diff --git a/api/proto/vfs_api.pb.go b/api/proto/vfs_api.pb.go index bf0d59fe01f..208d0ff9008 100644 --- a/api/proto/vfs_api.pb.go +++ b/api/proto/vfs_api.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: vfs_api.proto package proto diff --git a/artifacts/definitions/Server/Hunts/AddFlow.yaml b/artifacts/definitions/Server/Hunts/AddFlow.yaml new file mode 100644 index 00000000000..749f14373e2 --- /dev/null +++ b/artifacts/definitions/Server/Hunts/AddFlow.yaml @@ -0,0 +1,28 @@ +name: Server.Hunts.AddFlow +description: | + This artifact adds an existing flow to a running hunt. + + This helps in the case where the original flow in the hunt timed + out. The user then can re-run the hunt manually possibly increasing + timeout. Then they can simply click the add flow to hunt button in + the UI to add the flow to an existing hunt. 
+ +type: SERVER + +parameters: + - name: HuntId + - name: ClientId + - name: FlowId + +sources: + - query: | + SELECT * FROM if(condition=HuntId AND ClientId AND FlowId, + then={ + SELECT hunt_add(hunt_id=HuntId, + client_id=ClientId, + flow_id=FlowId) + FROM scope() + }, else={ + SELECT * FROM scope() WHERE + log(message="ERROR: You must set HuntId, ClientId and FlowId.") AND FALSE + }) diff --git a/artifacts/definitions/System/VFS/DownloadFile.yaml b/artifacts/definitions/System/VFS/DownloadFile.yaml index 1ab50f8b5d4..a34aaa50049 100644 --- a/artifacts/definitions/System/VFS/DownloadFile.yaml +++ b/artifacts/definitions/System/VFS/DownloadFile.yaml @@ -22,21 +22,19 @@ parameters: sources: - query: | - LET download_one_file = SELECT Path, Accessor, - Size, StoredSize, Error, Sha256, Md5 - FROM upload(files=Path, accessor=Accessor) + LET download_one_file = SELECT FullPath AS Path, Accessor, + Size, upload(file=FullPath, accessor=Accessor) AS Upload + FROM stat(filename=Path, accessor=Accessor) - LET download_recursive = SELECT FullPath, Accessor, + LET download_recursive = SELECT FullPath AS Path, Accessor, Size, upload(file=FullPath, accessor=Accessor) AS Upload FROM glob(globs="**", root=Path, accessor=Accessor) WHERE Mode.IsRegular - SELECT * FROM if(condition= Recursively, - then={ - SELECT FullPath AS Path, Accessor, - Upload.Size AS Size, - Upload.StoredSize AS StoredSize, - Upload.Sha256 AS Sha256, - Upload.Md5 AS Md5 - FROM download_recursive - }, else=download_one_file) + SELECT Path, Accessor, + Upload.Size AS Size, + Upload.StoredSize AS StoredSize, + Upload.Sha256 AS Sha256, + Upload.Md5 AS Md5 + FROM if(condition=Recursively, then=download_recursive, + else=download_one_file) diff --git a/artifacts/definitions/Windows/KapeFiles/Extract.yaml b/artifacts/definitions/Windows/KapeFiles/Extract.yaml new file mode 100644 index 00000000000..17dd2c681cd --- /dev/null +++ b/artifacts/definitions/Windows/KapeFiles/Extract.yaml @@ -0,0 +1,69 @@ +name: 
Windows.KapeFiles.Extract +description: | + The Windows.KapeFiles.Targets artifact collects files into a Zip + file. Zip files can not generally preserve timestamps since they + only have a single timestamp concept. Velociraptor will only record + the modified time in the zip file header itself but all the times + are present in the metadata file: + + "Windows.KapeFiles.Targets/All File Metadata.json" + + Sometimes, users wish to extract the contents of a collection to a + directory, and run an external tool over the data. Some such + external tools assume the file timestamps (e.g. prefetch files) are + meaningful. In this case we need to preserve the timestamps. + + You can use this artifact to extract the content of a collection + while preserving the timestamps. The artifact will read the metadata + file, unpack the contents of the container and set the timestamps on + the resulting file. + + NOTE: Windows allows 3 timestamps to be set (MAC time except for + Btime), while Linux only allows 2 timestamps (Modified and + Accessed). + + ## Example - command line invocation + + ``` + velociraptor-v0.6.1-rc1-linux-amd64 artifacts collect Windows.KapeFiles.Extract --args ContainerPath=Collection-DESKTOP-2OR51GL-2021-07-16_06_56_50_-0700_PDT.zip --args OutputDirectory=/tmp/MyOutput/ + ``` + +parameters: + - name: MetadataFile + default: "Windows.KapeFiles.Targets/All File Metadata.json" + description: Name of the KapeFile.Targets metadata file. + - name: OutputDirectory + description: Directory to write on (must be set). + - name: ContainerPath + description: Path to container (zip file) to unpack. 
+ + +sources: + - query: | + LET FileStats = SELECT *, + regex_replace( + source=regex_replace( + source=SourceFile, + re='[:?]', replace=""), + re='\\\\', replace='/') AS ZipPath + FROM parse_jsonl(filename=url(scheme='file', + path=ContainerPath, + fragment=MetadataFile), + accessor='zip') + + LET doit = SELECT ZipPath, Created, LastAccessed, Modified, + upload_directory( + output=OutputDirectory, name=ZipPath, + mtime=Modified, atime=LastAccessed, ctime=Created, + accessor='zip', + file=url( + scheme='file', + path=ContainerPath, + fragment=ZipPath)) AS CreatedFile + FROM FileStats + + SELECT * FROM if(condition= OutputDirectory AND ContainerPath, then=doit, + else={ + SELECT * FROM scope() WHERE + log(message="ERROR: Both OutputDirectory and ContainerPath must be specified.") AND FALSE + }) diff --git a/artifacts/proto/artifact.pb.go b/artifacts/proto/artifact.pb.go index 9380d4d0086..67275bcd5e2 100644 --- a/artifacts/proto/artifact.pb.go +++ b/artifacts/proto/artifact.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: artifact.proto package proto diff --git a/bin/unzip.go b/bin/unzip.go index 465c597015f..1c928ae64a2 100644 --- a/bin/unzip.go +++ b/bin/unzip.go @@ -15,19 +15,21 @@ import ( ) var ( - unzip_cmd = app.Command("unzip", "Convert a CSV file to another format") + unzip_cmd = app.Command("unzip", "Unzip a container file") unzip_cmd_filter = unzip_cmd.Flag("where", "A WHERE condition for the query").String() unzip_path = unzip_cmd.Flag("dump_dir", "Directory to dump output files."). Default(".").String() unzip_format = unzip_cmd.Flag("format", "Output format for csv output"). 
- Default("json").Enum("text", "json", "jsonl") + Default("json").Enum("text", "json", "csv", "jsonl") + unzip_cmd_list = unzip_cmd.Flag("list", "List files in the zip").Short('l').Bool() - unzip_cmd_csv = unzip_cmd.Flag("csv", "Parse CSV files and emit rows in default format"). - Short('C').Bool() - unzip_cmd_file = unzip_cmd.Arg("file", "Zip file to parse").Required().String() + unzip_cmd_print = unzip_cmd.Flag("print", "Dump out the files in the zip").Short('p').Bool() + + unzip_cmd_file = unzip_cmd.Arg("file", "Zip file to parse").Required().String() + unzip_cmd_member = unzip_cmd.Arg("members", "Members glob to extract").Default("/**").String() ) @@ -53,30 +55,21 @@ func doUnzip() { Logger: log.New(&LogWriter{config_obj}, "Velociraptor: ", 0), Env: ordereddict.NewDict(). Set("ZipPath", filename). + Set("DumpDir", *unzip_path). Set("MemberGlob", *unzip_cmd_member), } - var query string - - if *unzip_cmd_csv { - query = ` - SELECT * FROM foreach( - row={ - SELECT FullPath - FROM glob(globs=url(scheme='file', - path=ZipPath, - fragment=MemberGlob).String, - accessor='zip') - WHERE NOT IsDir AND Name =~ "\\.csv$" - }, query={ - SELECT * FROM parse_csv(filename=FullPath, accessor='zip') - })` - if *unzip_cmd_filter != "" { - query += " WHERE " + *unzip_cmd_filter - } + if *unzip_cmd_list { + runUnzipList(builder) + } else if *unzip_cmd_print { + runUnzipPrint(builder) + } else { + runUnzipFiles(builder) + } +} - } else if *unzip_cmd_list { - query = ` +func runUnzipList(builder services.ScopeBuilder) { + query := ` SELECT url(parse=FullPath).Fragment AS Filename, Size FROM glob(globs=url(scheme='file', @@ -85,16 +78,19 @@ func doUnzip() { accessor='zip') WHERE NOT IsDir` - if *unzip_cmd_filter != "" { - query += " AND " + *unzip_cmd_filter - } + if *unzip_cmd_filter != "" { + query += " AND " + *unzip_cmd_filter + } - } else { - builder.Uploader = &uploads.FileBasedUploader{ - UploadDir: *unzip_path, - } + runQueryWithEnv(query, builder) +} + +func 
runUnzipFiles(builder services.ScopeBuilder) { + builder.Uploader = &uploads.FileBasedUploader{ + UploadDir: *unzip_path, + } - query = ` + query := ` SELECT upload( file=FullPath, accessor='zip', name=url(parse=FullPath).Fragment) AS Extracted @@ -104,11 +100,32 @@ func doUnzip() { accessor='zip') WHERE NOT IsDir` - if *unzip_cmd_filter != "" { - query += " AND " + *unzip_cmd_filter - } + if *unzip_cmd_filter != "" { + query += " AND " + *unzip_cmd_filter } + runQueryWithEnv(query, builder) +} + +func runUnzipPrint(builder services.ScopeBuilder) { + query := ` + SELECT * FROM foreach( + row={ + SELECT FullPath + FROM glob(globs=url(scheme='file', + path=ZipPath, + fragment=MemberGlob).String, + accessor='zip') + WHERE NOT IsDir AND FullPath =~ '.json$' + }, query={ + SELECT * + FROM parse_jsonl(filename=FullPath, accessor='zip') + }) + ` + runQueryWithEnv(query, builder) +} + +func getAllStats(query string, builder services.ScopeBuilder) []*ordereddict.Dict { manager, err := services.GetRepositoryManager() kingpin.FatalIfError(err, "GetRepositoryManager") @@ -120,18 +137,45 @@ func doUnzip() { ctx := InstallSignalHandler(scope) - scope.Log("Running query %v", query) + result := []*ordereddict.Dict{} + for row := range vql.Eval(ctx, scope) { + d, ok := row.(*ordereddict.Dict) + if ok { + result = append(result, d) + } + } + return result +} + +func runQueryWithEnv(query string, builder services.ScopeBuilder) { + manager, err := services.GetRepositoryManager() + kingpin.FatalIfError(err, "GetRepositoryManager") + + scope := manager.BuildScope(builder) + defer scope.Close() + + vqls, err := vfilter.MultiParse(query) + kingpin.FatalIfError(err, "Unable to parse VQL Query") + + ctx := InstallSignalHandler(scope) + + for _, vql := range vqls { + scope.Log("Running query %v", query) - switch *unzip_format { - case "text": - table := reporting.EvalQueryToTable(ctx, scope, vql, os.Stdout) - table.Render() + switch *unzip_format { + case "text": + table := 
reporting.EvalQueryToTable(ctx, scope, vql, os.Stdout) + table.Render() - case "jsonl": - outputJSONL(ctx, scope, vql, os.Stdout) + case "jsonl": + outputJSONL(ctx, scope, vql, os.Stdout) - case "json": - outputJSON(ctx, scope, vql, os.Stdout) + case "json": + outputJSON(ctx, scope, vql, os.Stdout) + + case "csv": + outputCSV(ctx, scope, vql, os.Stdout) + } } } diff --git a/config/proto/config.pb.go b/config/proto/config.pb.go index cd4d627a4fb..a0ccd4dca39 100644 --- a/config/proto/config.pb.go +++ b/config/proto/config.pb.go @@ -3,7 +3,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: config.proto package proto diff --git a/crypto/proto/jobs.pb.go b/crypto/proto/jobs.pb.go index b1f3c314fdb..f9ab88400e5 100644 --- a/crypto/proto/jobs.pb.go +++ b/crypto/proto/jobs.pb.go @@ -4,7 +4,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: jobs.proto package proto diff --git a/file_store/api/uploader.go b/file_store/api/uploader.go index ad63ba3d4a2..d86228cbe7f 100644 --- a/file_store/api/uploader.go +++ b/file_store/api/uploader.go @@ -33,6 +33,9 @@ type Uploader interface { store_as_name string, expected_size int64, mtime time.Time, + atime time.Time, + ctime time.Time, + btime time.Time, reader io.Reader) (*UploadResponse, error) } @@ -50,6 +53,9 @@ func (self *FileStoreUploader) Upload( store_as_name string, expected_size int64, mtime time.Time, + atime time.Time, + ctime time.Time, + btime time.Time, reader io.Reader) ( *UploadResponse, error) { diff --git a/file_store/utils.go b/file_store/utils.go deleted file mode 100644 index ceae30552d3..00000000000 --- a/file_store/utils.go +++ /dev/null @@ -1,29 +0,0 @@ -package file_store - -import ( - "github.com/Velocidex/ordereddict" - config_proto "www.velocidex.com/golang/velociraptor/config/proto" - 
"www.velocidex.com/golang/velociraptor/file_store/api" - "www.velocidex.com/golang/velociraptor/result_sets" -) - -func PushRows(config_obj *config_proto.Config, - path api.FSPathSpec, - rows []*ordereddict.Dict) error { - - file_store_factory := GetFileStore(config_obj) - - rs_writer, err := result_sets.NewResultSetWriter(file_store_factory, - path, nil, false /* truncate */) - if err != nil { - return err - } - - for _, row := range rows { - rs_writer.Write(row) - } - - rs_writer.Close() - - return nil -} diff --git a/flows/artifacts_test.go b/flows/artifacts_test.go index 0ea35636cdd..7aebe4e60e2 100644 --- a/flows/artifacts_test.go +++ b/flows/artifacts_test.go @@ -402,7 +402,8 @@ func (self *TestSuite) TestClientUploaderStoreFile() { scope := vql_subsystem.MakeScope() uploader.Upload(context.Background(), scope, - "foo", "ntfs", "", 1000, nilTime, reader) + "foo", "ntfs", "", 1000, + nilTime, nilTime, nilTime, nilTime, reader) // Get a new collection context. collection_context := &flows_proto.ArtifactCollectorContext{ @@ -503,7 +504,8 @@ func (self *TestSuite) TestClientUploaderStoreSparseFile() { scope := vql_subsystem.MakeScope() uploader.Upload(context.Background(), scope, - "sparse", "ntfs", "", 1000, nilTime, reader) + "sparse", "ntfs", "", 1000, + nilTime, nilTime, nilTime, nilTime, reader) // Get a new collection context. collection_context := &flows_proto.ArtifactCollectorContext{ @@ -626,7 +628,8 @@ func (self *TestSuite) TestClientUploaderStoreSparseFileNTFS() { // Upload the file to the responder. uploader.Upload(context.Background(), scope, - "sparse", "ntfs", "", 1000, nilTime, fd) + "sparse", "ntfs", "", 1000, + nilTime, nilTime, nilTime, nilTime, fd) // Get a new collection context. 
collection_context := &flows_proto.ArtifactCollectorContext{ diff --git a/flows/proto/artifact_collector.pb.go b/flows/proto/artifact_collector.pb.go index c4b7cb18d20..80171862b06 100644 --- a/flows/proto/artifact_collector.pb.go +++ b/flows/proto/artifact_collector.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: artifact_collector.proto package proto diff --git a/flows/proto/vfs.pb.go b/flows/proto/vfs.pb.go index b9d3bd7ef46..07ba15325a3 100644 --- a/flows/proto/vfs.pb.go +++ b/flows/proto/vfs.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0-devel -// protoc v3.12.3 +// protoc v3.12.4 // source: vfs.proto package proto diff --git a/gui/velociraptor/src/App.css b/gui/velociraptor/src/App.css index afe3aaecd78..26e29b823e6 100644 --- a/gui/velociraptor/src/App.css +++ b/gui/velociraptor/src/App.css @@ -32,6 +32,12 @@ h4 { } +.max-height .modal-body { + max-height: calc(100vh - 200px); + overflow-y: auto; +} + + .main-navbar { border-top: 0px; border-style: solid; diff --git a/gui/velociraptor/src/components/events/event-table.js b/gui/velociraptor/src/components/events/event-table.js index b37b84788fb..563560b4cc6 100644 --- a/gui/velociraptor/src/components/events/event-table.js +++ b/gui/velociraptor/src/components/events/event-table.js @@ -405,6 +405,9 @@ export class ServerEventTableWizard extends React.Component { fetchEventTable = () => { api.get("v1/GetServerMonitoringState").then(resp => { + let empty_table = {All: {artifacts: [], specs: {}}}; + this.setState({tables: empty_table, current_table: empty_table.All}); + // Pretend this is the same as the client event // table. Since the server events do not do labels, just // make them all go under the "All" label. 
diff --git a/gui/velociraptor/src/components/events/utils.js b/gui/velociraptor/src/components/events/utils.js index 07266f7664e..0455b1699fd 100644 --- a/gui/velociraptor/src/components/events/utils.js +++ b/gui/velociraptor/src/components/events/utils.js @@ -12,7 +12,7 @@ function _ArtifactParameters2dict(parameters) { function _ArtifactCollectorArgs_to_label_table(event_table) { let result = { // Will be replaced by full definition later. - artifacts: event_table.artifacts, + artifacts: event_table.artifacts || [], specs: {}, }; @@ -54,7 +54,8 @@ function _ArtifactCollectorArgs_to_label_table(event_table) { */ function proto2tables(table, cb) { let definitions = {}; - let result = {All: _ArtifactCollectorArgs_to_label_table(table.artifacts)}; + let result = {All: _ArtifactCollectorArgs_to_label_table( + table.artifacts || [])}; let all_artifacts = [...result.All.artifacts]; _.each(table.label_events, x=>{ diff --git a/gui/velociraptor/src/components/flows/flow-overview.js b/gui/velociraptor/src/components/flows/flow-overview.js index f7e4d73b37a..cf5fd696f76 100644 --- a/gui/velociraptor/src/components/flows/flow-overview.js +++ b/gui/velociraptor/src/components/flows/flow-overview.js @@ -50,7 +50,7 @@ export default class FlowOverview extends React.Component { flow_id: this.props.flow.session_id, client_id: this.props.flow.client_id, download_type: download_type || "", - }); + }, this.source.token); }; getDetailedFlow = () => { @@ -64,7 +64,7 @@ export default class FlowOverview extends React.Component { api.get("v1/GetFlowDetails", { flow_id: flow_id, client_id: client_id, - }).then((response) => { + }, this.source.token).then((response) => { let available_downloads = response.data.available_downloads && response.data.available_downloads.files; this.setState({available_downloads: available_downloads || []}); diff --git a/gui/velociraptor/src/components/flows/flows-add-to-hunt.js b/gui/velociraptor/src/components/flows/flows-add-to-hunt.js new file mode 100644 
index 00000000000..2174e494d15 --- /dev/null +++ b/gui/velociraptor/src/components/flows/flows-add-to-hunt.js @@ -0,0 +1,133 @@ +import _ from 'lodash'; +import React from 'react'; +import PropTypes from 'prop-types'; + +import Modal from 'react-bootstrap/Modal'; +import Button from 'react-bootstrap/Button'; +import Spinner from '../utils/spinner.js'; +import BootstrapTable from 'react-bootstrap-table-next'; +import { formatColumns } from "../core/table.js"; + +import axios from 'axios'; +import api from '../core/api-service.js'; +import { runArtifact } from "./utils.js"; + +export default class AddFlowToHuntDialog extends React.Component { + static propTypes = { + client: PropTypes.object, + flow: PropTypes.object, + onClose: PropTypes.func.isRequired, + }; + + state = { + loading: true, + hunts: [], + selected_hunt_id: null, + } + + componentDidMount = () => { + this.source = axios.CancelToken.source(); + this.fetchCompatibleHunts(); + } + + componentWillUnmount() { + this.source.cancel(); + } + + fetchCompatibleHunts = () => { + let artifacts = this.props.flow && this.props.flow.request && + this.props.flow.request.artifacts; + + api.get("v1/ListHunts", { + count: 2000, + offset: 0, + }, this.source.token).then((response) => { + if (response.cancel) return; + + let hunts = response.data.items || []; + let filtered_hunts = _.filter(hunts, x=>{ + return !_.isEmpty(_.intersection(artifacts, x.artifacts)); + }); + this.setState({ + hunts: filtered_hunts, + loading: false, + }); + }); + } + + addFlowToHunt = ()=>{ + let client_id = this.props.client && this.props.client.client_id; + let flow_id = this.props.flow && this.props.flow.session_id; + + if (flow_id && client_id) { + this.setState({loading: true}); + runArtifact("server", + "Server.Hunts.AddFlow", + { + FlowId: flow_id, + ClientId: client_id, + HuntId: this.state.selected_hunt_id, + }, ()=>{ + this.props.onClose(); + this.setState({loading: false}); + }, this.source.token); + } + } + + render() { + const 
selectRow = { + mode: "radio", + clickToSelect: true, + hideSelectColumn: true, + classes: "row-selected", + onSelect: row=>this.setState({selected_hunt_id: row.hunt_id}), + selected: [], + }; + + if (!_.isEmpty(this.state.selected_hunt_id)) { + selectRow.selected.push(this.state.selected_hunt_id); + } + + let columns = formatColumns([ + {dataField: "hunt_id", text: "HuntId"}, + {dataField: "hunt_description", text: "Description"}, + {dataField: "create_time", text: "Created", + type: "timestamp", sort: true}, + ]); + + return ( + + + Manually add collection to hunt + + + { !_.isEmpty(this.state.hunts) && + this.node = n } + keyField="hunt_id" + bootstrap4 + headerClasses="alert alert-secondary" + bodyClasses="fixed-table-body" + data={this.state.hunts} + selectRow={ selectRow } + columns={columns} + /> + } + + + + + + + ); + } +}; diff --git a/gui/velociraptor/src/components/flows/flows-list.js b/gui/velociraptor/src/components/flows/flows-list.js index c0b9fb772e8..926cd79a386 100644 --- a/gui/velociraptor/src/components/flows/flows-list.js +++ b/gui/velociraptor/src/components/flows/flows-list.js @@ -30,6 +30,7 @@ import { runArtifact } from "./utils.js"; import Modal from 'react-bootstrap/Modal'; import UserConfig from '../core/user.js'; import VeloForm from '../forms/form.js'; +import AddFlowToHuntDialog from './flows-add-to-hunt.js'; import axios from 'axios'; @@ -196,6 +197,7 @@ class FlowsList extends React.Component { state = { showWizard: false, + showAddToHunt: false, showCopyWizard: false, showOfflineWizard: false, showDeleteWizard: false, @@ -243,19 +245,6 @@ class FlowsList extends React.Component { }); } - archiveButtonClicked = () => { - let client_id = this.props.selected_flow && this.props.selected_flow.client_id; - let flow_id = this.props.selected_flow && this.props.selected_flow.session_id; - - if (client_id && flow_id) { - api.post("v1/ArchiveFlow", { - client_id: client_id, flow_id: flow_id - }).then((response) => { - this.props.fetchFlows(); 
- }); - } - } - cancelButtonClicked = () => { let client_id = this.props.selected_flow && this.props.selected_flow.client_id; let flow_id = this.props.selected_flow && this.props.selected_flow.session_id; @@ -398,6 +387,14 @@ class FlowsList extends React.Component { onResolve={this.setCollectionRequest} /> } + { this.state.showAddToHunt && + this.setState({showAddToHunt: false})} + /> + } + { this.state.showCopyWizard && -