diff --git a/Makefile b/Makefile index e97cd1c..3ee8aa4 100644 --- a/Makefile +++ b/Makefile @@ -90,8 +90,15 @@ mock: -source=internal/pkg/repository/repository.go \ -destination=internal/pkg/repository/mock/repository.go PATH="$(LOCAL_BIN):$(PATH)" mockgen \ - -source=internal/pkg/repository_ch/repository.go \ - -destination=internal/pkg/repository_ch/mock/repository.go + -destination=internal/pkg/repository_ch/mock/repository_ch.go \ + -package=mock_repositorych \ + github.com/ozontech/seq-ui/internal/pkg/repository_ch \ + Repository + PATH="$(LOCAL_BIN):$(PATH)" mockgen \ + -destination=internal/pkg/repository_ch/mock/ch_driver.go \ + -package=mock_repositorych \ + github.com/ClickHouse/clickhouse-go/v2/lib/driver \ + Conn,Rows,Row PATH="$(LOCAL_BIN):$(PATH)" mockgen \ -source=internal/pkg/client/seqdb/client.go \ -destination=internal/pkg/client/seqdb/mock/client.go @@ -102,11 +109,9 @@ mock: -source=internal/pkg/cache/cache.go \ -destination=internal/pkg/cache/mock/cache.go PATH="$(LOCAL_BIN):$(PATH)" mockgen \ - -source=internal/app/auth/oidc.go \ - -destination=internal/app/auth/mock/oidc.go - PATH="$(LOCAL_BIN):$(PATH)" mockgen \ - -source=internal/app/auth/jwt.go \ - -destination=internal/app/auth/mock/jwt.go + -destination=internal/app/auth/mock/auth.go \ + github.com/ozontech/seq-ui/internal/app/auth \ + OIDCProvider,JWTProvider .PHONY: protoc protoc: diff --git a/api/errorgroups/v1/errorgroups.proto b/api/errorgroups/v1/errorgroups.proto index 495115f..97e740f 100644 --- a/api/errorgroups/v1/errorgroups.proto +++ b/api/errorgroups/v1/errorgroups.proto @@ -13,104 +13,139 @@ service ErrorGroupsService { rpc GetDetails(GetDetailsRequest) returns (GetDetailsResponse) {} rpc GetReleases(GetReleasesRequest) returns (GetReleasesResponse) {} rpc GetServices(GetServicesRequest) returns (GetServicesResponse) {} + rpc DiffByReleases(DiffByReleasesRequest) returns (DiffByReleasesResponse) {} } enum Order { - ORDER_FREQUENT = 0; - ORDER_LATEST = 1; - ORDER_OLDEST = 
2; + ORDER_FREQUENT = 0; + ORDER_LATEST = 1; + ORDER_OLDEST = 2; } message GetGroupsRequest { - string service = 1; - optional string env = 2; - optional string release = 3; - google.protobuf.Duration duration = 4; - uint32 limit = 5; - uint32 offset = 6; - Order order = 7; - bool with_total = 8; - optional string source = 9; + message Filter { + bool is_new = 1; + } + + string service = 1; + optional string env = 2; + optional string release = 3; + google.protobuf.Duration duration = 4; + uint32 limit = 5; + uint32 offset = 6; + Order order = 7; + bool with_total = 8; + optional string source = 9; + optional Filter filter = 10; } message GetGroupsResponse { - uint64 total = 1; - repeated Group groups = 2; + uint64 total = 1; + repeated Group groups = 2; } message Group { - uint64 hash = 1; - string message = 2; - uint64 seen_total = 3; - google.protobuf.Timestamp first_seen_at = 4; - google.protobuf.Timestamp last_seen_at = 5; - string source = 6; + uint64 hash = 1; + string message = 2; + uint64 seen_total = 3; + google.protobuf.Timestamp first_seen_at = 4; + google.protobuf.Timestamp last_seen_at = 5; + string source = 6; } message GetHistRequest { - string service = 1; - optional uint64 group_hash = 2; - optional string env = 3; - optional string release = 4; - optional google.protobuf.Duration duration = 5; - optional string source = 6; + string service = 1; + optional uint64 group_hash = 2; + optional string env = 3; + optional string release = 4; + optional google.protobuf.Duration duration = 5; + optional string source = 6; } message GetHistResponse { - repeated Bucket buckets = 1; + repeated Bucket buckets = 1; } message Bucket { - google.protobuf.Timestamp time = 1; - uint64 count = 2; + google.protobuf.Timestamp time = 1; + uint64 count = 2; } message GetDetailsRequest { - string service = 1; - uint64 group_hash = 2; - optional string env = 3; - optional string release = 4; - optional string source = 5; + string service = 1; + uint64 group_hash = 2; + 
optional string env = 3; + optional string release = 4; + optional string source = 5; } message GetDetailsResponse { - message Distribution { - string value = 1; - uint64 percent = 2; - } - - message Distributions { - repeated Distribution by_env = 1; - repeated Distribution by_release = 2; - } - - uint64 group_hash = 1; - string message = 2; - uint64 seen_total = 3; - google.protobuf.Timestamp first_seen_at = 4; - google.protobuf.Timestamp last_seen_at = 5; - Distributions distributions = 6; - map<string, string> log_tags = 7; - string source = 8; + message Distribution { + string value = 1; + uint64 percent = 2; + } + + message Distributions { + repeated Distribution by_env = 1; + repeated Distribution by_release = 2; + } + + uint64 group_hash = 1; + string message = 2; + uint64 seen_total = 3; + google.protobuf.Timestamp first_seen_at = 4; + google.protobuf.Timestamp last_seen_at = 5; + Distributions distributions = 6; + map<string, string> log_tags = 7; + string source = 8; } message GetReleasesRequest { - string service = 1; - reserved 2; - optional string env = 3; + string service = 1; + reserved 2; + optional string env = 3; } message GetReleasesResponse { - repeated string releases = 1; + repeated string releases = 1; } message GetServicesRequest { - string query = 1; - optional string env = 2; - uint32 limit = 3; - uint32 offset = 4; + string query = 1; + optional string env = 2; + uint32 limit = 3; + uint32 offset = 4; } message GetServicesResponse { - repeated string services = 1; + repeated string services = 1; +} + +message DiffByReleasesRequest { + string service = 1; + repeated string releases = 2; + optional string env = 3; + optional string source = 4; + uint32 limit = 5; + uint32 offset = 6; + Order order = 7; + bool with_total = 8; +} + +message DiffByReleasesResponse { + message ReleaseInfo { + uint64 seen_total = 1; + } + + message Group { + uint64 hash = 1; + string message = 2; + google.protobuf.Timestamp first_seen_at = 3; + google.protobuf.Timestamp last_seen_at = 4; + 
string source = 5; + map<string, ReleaseInfo> release_infos = 6; + } + + uint64 total = 1; + repeated Group groups = 2; } diff --git a/internal/api/errorgroups/v1/grpc/diff_by_releases.go b/internal/api/errorgroups/v1/grpc/diff_by_releases.go new file mode 100644 index 0000000..91ae8b7 --- /dev/null +++ b/internal/api/errorgroups/v1/grpc/diff_by_releases.go @@ -0,0 +1,75 @@ +package grpc + +import ( + "context" + + "github.com/ozontech/seq-ui/internal/api/grpcutil" + "github.com/ozontech/seq-ui/internal/app/types" + "github.com/ozontech/seq-ui/pkg/errorgroups/v1" + "github.com/ozontech/seq-ui/tracing" + "go.opentelemetry.io/otel/attribute" + "google.golang.org/protobuf/types/known/timestamppb" +) + +func (a *API) DiffByReleases(ctx context.Context, req *errorgroups.DiffByReleasesRequest) (*errorgroups.DiffByReleasesResponse, error) { + ctx, span := tracing.StartSpan(ctx, "errorgroups_v1_diff_by_releases") + defer span.End() + + attributes := []attribute.KeyValue{ + {Key: "service", Value: attribute.StringValue(req.Service)}, + {Key: "releases", Value: attribute.StringSliceValue(req.Releases)}, + {Key: "limit", Value: attribute.IntValue(int(req.Limit))}, + {Key: "offset", Value: attribute.IntValue(int(req.Offset))}, + {Key: "order", Value: attribute.StringValue(req.Order.String())}, + {Key: "with_total", Value: attribute.BoolValue(req.WithTotal)}, + } + if req.Env != nil { + attributes = append(attributes, attribute.KeyValue{Key: "env", Value: attribute.StringValue(*req.Env)}) + } + if req.Source != nil { + attributes = append(attributes, attribute.KeyValue{Key: "source", Value: attribute.StringValue(*req.Source)}) + } + span.SetAttributes(attributes...) 
+ + groups, total, err := a.service.DiffByReleases(ctx, types.DiffByReleasesRequest{ + Service: req.Service, + Releases: req.Releases, + Env: req.Env, + Source: req.Source, + Limit: req.Limit, + Offset: req.Offset, + Order: types.ErrorGroupsOrder(req.Order), + WithTotal: req.WithTotal, + }) + if err != nil { + return nil, grpcutil.ProcessError(err) + } + + return &errorgroups.DiffByReleasesResponse{ + Total: total, + Groups: diffGroupsToProto(groups), + }, nil +} + +func diffGroupsToProto(source []types.DiffGroup) []*errorgroups.DiffByReleasesResponse_Group { + groups := make([]*errorgroups.DiffByReleasesResponse_Group, 0, len(source)) + + for _, g := range source { + releaseInfos := make(map[string]*errorgroups.DiffByReleasesResponse_ReleaseInfo) + for release, info := range g.ReleaseInfos { + releaseInfos[release] = &errorgroups.DiffByReleasesResponse_ReleaseInfo{ + SeenTotal: info.SeenTotal, + } + } + groups = append(groups, &errorgroups.DiffByReleasesResponse_Group{ + Hash: g.Hash, + Message: g.Message, + FirstSeenAt: timestamppb.New(g.FirstSeenAt), + LastSeenAt: timestamppb.New(g.LastSeenAt), + Source: g.Source, + ReleaseInfos: releaseInfos, + }) + } + + return groups +} diff --git a/internal/api/errorgroups/v1/grpc/get_details.go b/internal/api/errorgroups/v1/grpc/get_details.go index de91570..6ab7df6 100644 --- a/internal/api/errorgroups/v1/grpc/get_details.go +++ b/internal/api/errorgroups/v1/grpc/get_details.go @@ -14,7 +14,7 @@ import ( ) func (a *API) GetDetails(ctx context.Context, req *errorgroups.GetDetailsRequest) (*errorgroups.GetDetailsResponse, error) { - _, span := tracing.StartSpan(ctx, "errorgroups_v1_get_groups") + ctx, span := tracing.StartSpan(ctx, "errorgroups_v1_get_details") defer span.End() attributes := []attribute.KeyValue{ diff --git a/internal/api/errorgroups/v1/grpc/get_groups.go b/internal/api/errorgroups/v1/grpc/get_groups.go index 02555e5..d91ffc4 100644 --- a/internal/api/errorgroups/v1/grpc/get_groups.go +++ 
b/internal/api/errorgroups/v1/grpc/get_groups.go @@ -2,6 +2,7 @@ package grpc import ( "context" + "encoding/json" "time" "go.opentelemetry.io/otel/attribute" @@ -22,6 +23,7 @@ func (a *API) GetGroups(ctx context.Context, req *errorgroups.GetGroupsRequest) {Key: "limit", Value: attribute.IntValue(int(req.Limit))}, {Key: "offset", Value: attribute.IntValue(int(req.Offset))}, {Key: "order", Value: attribute.StringValue(string(req.Order))}, + {Key: "with_total", Value: attribute.BoolValue(req.WithTotal)}, } if req.Env != nil { attributes = append(attributes, attribute.KeyValue{Key: "env", Value: attribute.StringValue(*req.Env)}) @@ -35,6 +37,10 @@ func (a *API) GetGroups(ctx context.Context, req *errorgroups.GetGroupsRequest) if req.Source != nil { attributes = append(attributes, attribute.KeyValue{Key: "source", Value: attribute.StringValue(*req.Source)}) } + if req.Filter != nil { + filterRaw, _ := json.Marshal(req.Filter) + attributes = append(attributes, attribute.KeyValue{Key: "filter", Value: attribute.StringValue(string(filterRaw))}) + } span.SetAttributes(attributes...) 
var duration *time.Duration @@ -54,7 +60,19 @@ func (a *API) GetGroups(ctx context.Context, req *errorgroups.GetGroupsRequest) Order: types.ErrorGroupsOrder(req.Order), WithTotal: req.WithTotal, } - groups, total, err := a.service.GetErrorGroups(ctx, request) + + var ( + groups []types.ErrorGroup + total uint64 + err error + ) + + if req.Filter != nil && req.Filter.IsNew { + groups, total, err = a.service.GetNewErrorGroups(ctx, request) + } else { + groups, total, err = a.service.GetErrorGroups(ctx, request) + } + if err != nil { return nil, grpcutil.ProcessError(err) } diff --git a/internal/api/errorgroups/v1/grpc/get_hist.go b/internal/api/errorgroups/v1/grpc/get_hist.go index 71af29e..cc2b50d 100644 --- a/internal/api/errorgroups/v1/grpc/get_hist.go +++ b/internal/api/errorgroups/v1/grpc/get_hist.go @@ -15,7 +15,7 @@ import ( ) func (a *API) GetHist(ctx context.Context, req *errorgroups.GetHistRequest) (*errorgroups.GetHistResponse, error) { - ctx, span := tracing.StartSpan(ctx, "errorgroups_v1_get_groups") + ctx, span := tracing.StartSpan(ctx, "errorgroups_v1_get_hist") defer span.End() attributes := []attribute.KeyValue{ diff --git a/internal/api/errorgroups/v1/grpc/get_releases.go b/internal/api/errorgroups/v1/grpc/get_releases.go index 7225189..032151c 100644 --- a/internal/api/errorgroups/v1/grpc/get_releases.go +++ b/internal/api/errorgroups/v1/grpc/get_releases.go @@ -12,7 +12,7 @@ import ( ) func (a *API) GetReleases(ctx context.Context, req *errorgroups.GetReleasesRequest) (*errorgroups.GetReleasesResponse, error) { - _, span := tracing.StartSpan(ctx, "errorgroups_v1_get_groups") + ctx, span := tracing.StartSpan(ctx, "errorgroups_v1_get_releases") defer span.End() attributes := []attribute.KeyValue{ diff --git a/internal/api/errorgroups/v1/grpc/get_services.go b/internal/api/errorgroups/v1/grpc/get_services.go index b74836d..69aa0e4 100644 --- a/internal/api/errorgroups/v1/grpc/get_services.go +++ b/internal/api/errorgroups/v1/grpc/get_services.go @@ 
-12,7 +12,7 @@ import ( ) func (a *API) GetServices(ctx context.Context, req *errorgroups.GetServicesRequest) (*errorgroups.GetServicesResponse, error) { - _, span := tracing.StartSpan(ctx, "errorgroups_v1_get_groups") + ctx, span := tracing.StartSpan(ctx, "errorgroups_v1_get_services") defer span.End() attributes := []attribute.KeyValue{ diff --git a/internal/api/errorgroups/v1/http/api.go b/internal/api/errorgroups/v1/http/api.go index 26fb80d..98beb00 100644 --- a/internal/api/errorgroups/v1/http/api.go +++ b/internal/api/errorgroups/v1/http/api.go @@ -27,6 +27,7 @@ func (a *API) Router() chi.Router { mux.Post("/details", a.serveGetDetails) mux.Post("/releases", a.serveGetReleases) mux.Post("/services", a.serveGetServices) + mux.Post("/diff_by_releases", a.serveGetDiffByReleases) return mux } diff --git a/internal/api/errorgroups/v1/http/diff_by_releases.go b/internal/api/errorgroups/v1/http/diff_by_releases.go new file mode 100644 index 0000000..9373795 --- /dev/null +++ b/internal/api/errorgroups/v1/http/diff_by_releases.go @@ -0,0 +1,125 @@ +package http + +import ( + "encoding/json" + "fmt" + "net/http" + "strconv" + "time" + + "github.com/ozontech/seq-ui/internal/api/httputil" + "github.com/ozontech/seq-ui/internal/app/types" + "github.com/ozontech/seq-ui/tracing" + "go.opentelemetry.io/otel/attribute" +) + +// serveDiffByReleases go doc. 
+// +// @Router /errorgroups/v1/diff_by_releases [post] +// @ID errorgroups_v1_diff_by_releases +// @Tags errorgroups_v1 +// @Param body body diffByReleasesRequest true "Request body" +// @Success 200 {object} diffByReleasesResponse "A successful response" +// @Failure default {object} httputil.Error "An unexpected error response" +// @Security bearer +func (a *API) serveGetDiffByReleases(w http.ResponseWriter, r *http.Request) { + ctx, span := tracing.StartSpan(r.Context(), "errorgroups_v1_diff_by_releases") + defer span.End() + + wr := httputil.NewWriter(w) + + var httpReq diffByReleasesRequest + if err := json.NewDecoder(r.Body).Decode(&httpReq); err != nil { + wr.Error(fmt.Errorf("failed to parse request: %w", err), http.StatusBadRequest) + return + } + + attributes := []attribute.KeyValue{ + {Key: "service", Value: attribute.StringValue(httpReq.Service)}, + {Key: "releases", Value: attribute.StringSliceValue(httpReq.Releases)}, + {Key: "limit", Value: attribute.IntValue(int(httpReq.Limit))}, + {Key: "offset", Value: attribute.IntValue(int(httpReq.Offset))}, + {Key: "order", Value: attribute.StringValue(string(httpReq.Order))}, + {Key: "with_total", Value: attribute.BoolValue(httpReq.WithTotal)}, + } + if httpReq.Env != nil { + attributes = append(attributes, attribute.KeyValue{Key: "env", Value: attribute.StringValue(*httpReq.Env)}) + } + if httpReq.Source != nil { + attributes = append(attributes, attribute.KeyValue{Key: "source", Value: attribute.StringValue(*httpReq.Source)}) + } + span.SetAttributes(attributes...) 
+ + groups, total, err := a.service.DiffByReleases(ctx, types.DiffByReleasesRequest{ + Service: httpReq.Service, + Releases: httpReq.Releases, + Env: httpReq.Env, + Source: httpReq.Source, + Limit: httpReq.Limit, + Offset: httpReq.Offset, + Order: httpReq.Order.toDomain(), + WithTotal: httpReq.WithTotal, + }) + if err != nil { + httputil.ProcessError(wr, err) + return + } + + wr.WriteJson(diffByReleasesResponse{ + Total: total, + Groups: newDiffGroups(groups), + }) +} + +type diffByReleasesRequest struct { + Service string `json:"service"` + Releases []string `json:"releases"` + Env *string `json:"env,omitempty"` + Source *string `json:"source,omitempty"` + Limit uint32 `json:"limit"` + Offset uint32 `json:"offset"` + Order order `json:"order"` + WithTotal bool `json:"with_total"` +} // @name errorgroups.v1.DiffByReleasesRequest + +type diffByReleasesResponse struct { + Total uint64 `json:"total"` + Groups []diffGroup `json:"groups"` +} // @name errorgroups.v1.DiffByReleasesResponse + +type diffGroup struct { + Hash string `json:"hash" format:"uint64"` + Message string `json:"message"` + FirstSeenAt time.Time `json:"first_seen_at" format:"date-time"` + LastSeenAt time.Time `json:"last_seen_at" format:"date-time"` + Source string `json:"source"` + + ReleaseInfos map[string]diffReleaseInfo `json:"release_infos"` +} // @name errorgroups.v1.DiffGroup + +type diffReleaseInfo struct { + SeenTotal uint64 `json:"seen_total"` +} // @name errorgroups.v1.DiffReleaseInfo + +func newDiffGroups(source []types.DiffGroup) []diffGroup { + groups := make([]diffGroup, 0, len(source)) + + for _, g := range source { + releaseInfos := make(map[string]diffReleaseInfo) + for release, info := range g.ReleaseInfos { + releaseInfos[release] = diffReleaseInfo{ + SeenTotal: info.SeenTotal, + } + } + groups = append(groups, diffGroup{ + Hash: strconv.FormatUint(g.Hash, 10), + Message: g.Message, + FirstSeenAt: g.FirstSeenAt, + LastSeenAt: g.LastSeenAt, + Source: g.Source, + ReleaseInfos: 
releaseInfos, + }) + } + + return groups +} diff --git a/internal/api/errorgroups/v1/http/get_groups.go b/internal/api/errorgroups/v1/http/get_groups.go index 16ca2d9..fb2c59d 100644 --- a/internal/api/errorgroups/v1/http/get_groups.go +++ b/internal/api/errorgroups/v1/http/get_groups.go @@ -46,6 +46,7 @@ func (a *API) serveGetGroups(w http.ResponseWriter, r *http.Request) { {Key: "limit", Value: attribute.IntValue(int(httpReq.Limit))}, {Key: "offset", Value: attribute.IntValue(int(httpReq.Offset))}, {Key: "order", Value: attribute.StringValue(string(httpReq.Order))}, + {Key: "with_total", Value: attribute.BoolValue(httpReq.WithTotal)}, } if httpReq.Env != nil { attributes = append(attributes, attribute.KeyValue{Key: "env", Value: attribute.StringValue(*httpReq.Env)}) @@ -59,6 +60,10 @@ func (a *API) serveGetGroups(w http.ResponseWriter, r *http.Request) { if httpReq.Source != nil { attributes = append(attributes, attribute.KeyValue{Key: "source", Value: attribute.StringValue(*httpReq.Source)}) } + if httpReq.Filter != nil { + filterRaw, _ := json.Marshal(httpReq.Filter) + attributes = append(attributes, attribute.KeyValue{Key: "filter", Value: attribute.StringValue(string(filterRaw))}) + } span.SetAttributes(attributes...) 
req := types.GetErrorGroupsRequest{ @@ -72,7 +77,17 @@ func (a *API) serveGetGroups(w http.ResponseWriter, r *http.Request) { Order: httpReq.Order.toDomain(), WithTotal: httpReq.WithTotal, } - groups, total, err := a.service.GetErrorGroups(ctx, req) + + var ( + groups []types.ErrorGroup + total uint64 + ) + if httpReq.Filter != nil && httpReq.Filter.IsNew { + groups, total, err = a.service.GetNewErrorGroups(ctx, req) + } else { + groups, total, err = a.service.GetErrorGroups(ctx, req) + } + if err != nil { httputil.ProcessError(wr, err) return @@ -105,6 +120,10 @@ func (o order) toDomain() types.ErrorGroupsOrder { } } +type groupsFilter struct { + IsNew bool `json:"is_new"` +} // @name errorgroups.v1.GroupsFilter + type getGroupsRequest struct { Service string `json:"service"` Env *string `json:"env,omitempty"` @@ -116,6 +135,8 @@ type getGroupsRequest struct { Offset uint32 `json:"offset"` Order order `json:"order"` WithTotal bool `json:"with_total"` + + Filter *groupsFilter `json:"filter,omitempty"` } // @name errorgroups.v1.GetGroupsRequest type getGroupsResponse struct { diff --git a/internal/api/errorgroups/v1/http/get_services.go b/internal/api/errorgroups/v1/http/get_services.go index 1969982..66a040a 100644 --- a/internal/api/errorgroups/v1/http/get_services.go +++ b/internal/api/errorgroups/v1/http/get_services.go @@ -22,7 +22,7 @@ import ( // @Failure default {object} httputil.Error "An unexpected error response" // @Security bearer func (a *API) serveGetServices(w http.ResponseWriter, r *http.Request) { - ctx, span := tracing.StartSpan(r.Context(), "errorgroups_v1_get_releases") + ctx, span := tracing.StartSpan(r.Context(), "errorgroups_v1_get_services") defer span.End() wr := httputil.NewWriter(w) diff --git a/internal/app/auth/mock/jwt.go b/internal/app/auth/mock/auth.go similarity index 57% rename from internal/app/auth/mock/jwt.go rename to internal/app/auth/mock/auth.go index 10453ae..061a821 100644 --- a/internal/app/auth/mock/jwt.go +++ 
b/internal/app/auth/mock/auth.go @@ -1,21 +1,61 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: internal/app/auth/jwt.go +// Source: github.com/ozontech/seq-ui/internal/app/auth (interfaces: OIDCProvider,JWTProvider) // // Generated by this command: // -// mockgen -source=internal/app/auth/jwt.go -destination=internal/app/auth/mock/jwt.go +// mockgen -destination=internal/app/auth/mock/auth.go github.com/ozontech/seq-ui/internal/app/auth OIDCProvider,JWTProvider // // Package mock_auth is a generated GoMock package. package mock_auth import ( + context "context" reflect "reflect" auth "github.com/ozontech/seq-ui/internal/app/auth" gomock "go.uber.org/mock/gomock" ) +// MockOIDCProvider is a mock of OIDCProvider interface. +type MockOIDCProvider struct { + ctrl *gomock.Controller + recorder *MockOIDCProviderMockRecorder + isgomock struct{} +} + +// MockOIDCProviderMockRecorder is the mock recorder for MockOIDCProvider. +type MockOIDCProviderMockRecorder struct { + mock *MockOIDCProvider +} + +// NewMockOIDCProvider creates a new mock instance. +func NewMockOIDCProvider(ctrl *gomock.Controller) *MockOIDCProvider { + mock := &MockOIDCProvider{ctrl: ctrl} + mock.recorder = &MockOIDCProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockOIDCProvider) EXPECT() *MockOIDCProviderMockRecorder { + return m.recorder +} + +// Verify mocks base method. +func (m *MockOIDCProvider) Verify(ctx context.Context, token string) (auth.OIDCToken, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Verify", ctx, token) + ret0, _ := ret[0].(auth.OIDCToken) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Verify indicates an expected call of Verify. 
+func (mr *MockOIDCProviderMockRecorder) Verify(ctx, token any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Verify", reflect.TypeOf((*MockOIDCProvider)(nil).Verify), ctx, token) +} + // MockJWTProvider is a mock of JWTProvider interface. type MockJWTProvider struct { ctrl *gomock.Controller diff --git a/internal/app/auth/mock/oidc.go b/internal/app/auth/mock/oidc.go deleted file mode 100644 index 91f69f6..0000000 --- a/internal/app/auth/mock/oidc.go +++ /dev/null @@ -1,57 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: internal/app/auth/oidc.go -// -// Generated by this command: -// -// mockgen -source=internal/app/auth/oidc.go -destination=internal/app/auth/mock/oidc.go -// - -// Package mock_auth is a generated GoMock package. -package mock_auth - -import ( - context "context" - reflect "reflect" - - auth "github.com/ozontech/seq-ui/internal/app/auth" - gomock "go.uber.org/mock/gomock" -) - -// MockOIDCProvider is a mock of OIDCProvider interface. -type MockOIDCProvider struct { - ctrl *gomock.Controller - recorder *MockOIDCProviderMockRecorder - isgomock struct{} -} - -// MockOIDCProviderMockRecorder is the mock recorder for MockOIDCProvider. -type MockOIDCProviderMockRecorder struct { - mock *MockOIDCProvider -} - -// NewMockOIDCProvider creates a new mock instance. -func NewMockOIDCProvider(ctrl *gomock.Controller) *MockOIDCProvider { - mock := &MockOIDCProvider{ctrl: ctrl} - mock.recorder = &MockOIDCProviderMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockOIDCProvider) EXPECT() *MockOIDCProviderMockRecorder { - return m.recorder -} - -// Verify mocks base method. 
-func (m *MockOIDCProvider) Verify(ctx context.Context, token string) (auth.OIDCToken, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Verify", ctx, token) - ret0, _ := ret[0].(auth.OIDCToken) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Verify indicates an expected call of Verify. -func (mr *MockOIDCProviderMockRecorder) Verify(ctx, token any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Verify", reflect.TypeOf((*MockOIDCProvider)(nil).Verify), ctx, token) -} diff --git a/internal/app/types/errorgroups.go b/internal/app/types/errorgroups.go index 62661a0..959505e 100644 --- a/internal/app/types/errorgroups.go +++ b/internal/app/types/errorgroups.go @@ -89,9 +89,8 @@ type ErrorGroupCounts struct { } type GetErrorGroupReleasesRequest struct { - Service string - GroupHash *uint64 - Env *string + Service string + Env *string } type GetServicesRequest struct { @@ -100,3 +99,27 @@ type GetServicesRequest struct { Limit uint32 Offset uint32 } + +type DiffByReleasesRequest struct { + Service string + Releases []string + Env *string + Source *string + Limit uint32 + Offset uint32 + Order ErrorGroupsOrder + WithTotal bool +} + +type DiffReleaseInfo struct { + SeenTotal uint64 +} + +type DiffGroup struct { + Hash uint64 + Message string + FirstSeenAt time.Time + LastSeenAt time.Time + Source string + ReleaseInfos map[string]DiffReleaseInfo +} diff --git a/internal/pkg/repository_ch/error_groups.go b/internal/pkg/repository_ch/error_groups.go new file mode 100644 index 0000000..22ad1c5 --- /dev/null +++ b/internal/pkg/repository_ch/error_groups.go @@ -0,0 +1,774 @@ +package repositorych + +import ( + "context" + "database/sql" + "errors" + "fmt" + "maps" + "slices" + "time" + + sq "github.com/Masterminds/squirrel" + + "github.com/ozontech/seq-ui/internal/app/types" +) + +func (r *repository) GetErrorGroups( + ctx context.Context, + req types.GetErrorGroupsRequest, +) ([]types.ErrorGroup, error) { + // we 
need this subquery to make query faster, see https://github.com/ClickHouse/ClickHouse/issues/7187 + subQ := sq. + Select("_group_hash"). + From("error_groups"). + Where(sq.Eq{"service": req.Service}). + GroupBy("_group_hash", "service"). + Limit(uint64(req.Limit)). + Offset(uint64(req.Offset)) + + if r.sharded { + subQ = subQ.Distinct() + } + + for col, val := range r.queryFilters() { + subQ = subQ.Where(sq.Eq{col: val}).GroupBy(col) + } + + if req.Env != nil && *req.Env != "" { + subQ = subQ.Where(sq.Eq{"env": req.Env}).GroupBy("env") + } + if req.Release != nil && *req.Release != "" { + subQ = subQ.Where(sq.Eq{"release": req.Release}).GroupBy("release") + } + if req.Duration != nil && *req.Duration != 0 { + subQ = subQ.Having(sq.GtOrEq{"maxMerge(last_seen_at)": r.nowFn().Add(-req.Duration.Abs())}) + } + if req.Source != nil && *req.Source != "" { + subQ = subQ.Where(sq.Eq{"source": req.Source}).GroupBy("source") + } + subQ = orderBy(subQ, req.Order, true) + + subQuery, subArgs := subQ.MustSql() + + in := "IN" + if r.sharded { + in = "GLOBAL IN" + } + q := sq. + Select( + "_group_hash as group_hash", + "source", + "any(message) as message", + "countMerge(seen_total) as seen_total", + "minMerge(first_seen_at) as first_seen_at", + "maxMerge(last_seen_at) as last_seen_at", + ). + From("error_groups"). + Where(fmt.Sprintf("_group_hash %s (%s)", in, subQuery), subArgs...). 
+ GroupBy("_group_hash", "service", "source") + + // using string formatting below because squirrel doesn't support subquery in WHERE clause + q = q.Where(fmt.Sprintf("service = '%s'", req.Service)) + + for col, val := range r.queryFilters() { + q = q.Where(fmt.Sprintf("%s = '%s'", col, val)).GroupBy(col) + } + + if req.Source != nil && *req.Source != "" { + q = q.Where(fmt.Sprintf("source = '%s'", *req.Source)) + } + if req.Env != nil && *req.Env != "" { + q = q.Where(fmt.Sprintf("env = '%s'", *req.Env)).GroupBy("env") + } + if req.Release != nil && *req.Release != "" { + q = q.Where(fmt.Sprintf("release = '%s'", *req.Release)).GroupBy("release") + } + q = orderBy(q, req.Order, false) + + query, args := q.MustSql() + metricLabels := []string{"error_groups", "SELECT"} + rows, err := r.conn.Query(ctx, metricLabels, query, args...) + if err != nil { + incErrorMetric(err, metricLabels) + return nil, fmt.Errorf("failed to get error groups: %w", err) + } + + var errorGroups []types.ErrorGroup + for rows.Next() { + var group types.ErrorGroup + err = rows.Scan( + &group.Hash, + &group.Source, + &group.Message, + &group.SeenTotal, + &group.FirstSeenAt, + &group.LastSeenAt, + ) + if err != nil { + return nil, fmt.Errorf("failed to scan row: %w", err) + } + + errorGroups = append(errorGroups, group) + } + + return errorGroups, nil +} + +func (r *repository) GetErrorGroupsCount( + ctx context.Context, + req types.GetErrorGroupsRequest, +) (uint64, error) { + subQ := sq. + Select("maxMerge(last_seen_at) AS last_seen_at"). + From("error_groups"). + Where(sq.Eq{"service": req.Service}). 
+ GroupBy("_group_hash", "service") + + for col, val := range r.queryFilters() { + subQ = subQ.Where(sq.Eq{col: val}).GroupBy(col) + } + + if req.Env != nil && *req.Env != "" { + subQ = subQ.Where(sq.Eq{"env": req.Env}).GroupBy("env") + } + if req.Release != nil && *req.Release != "" { + subQ = subQ.Where(sq.Eq{"release": req.Release}).GroupBy("release") + } + if req.Duration != nil && *req.Duration != 0 { + subQ = subQ.Having(sq.GtOrEq{"last_seen_at": r.nowFn().Add(-req.Duration.Abs())}) + } + if req.Source != nil && *req.Source != "" { + subQ = subQ.Where(sq.Eq{"source": req.Source}).GroupBy("source") + } + + q := sq.Select("count()").FromSelect(subQ, "subQ") + + query, args := q.MustSql() + metricLabels := []string{"error_groups", "SELECT"} + row := r.conn.QueryRow(ctx, metricLabels, query, args...) + + var total uint64 + if err := row.Scan(&total); err != nil { + if errors.Is(err, sql.ErrNoRows) { + return 0, nil + } + incErrorMetric(err, metricLabels) + return 0, fmt.Errorf("failed to get error groups count: %w", err) + } + + return total, nil +} + +func (r *repository) GetNewErrorGroups( + ctx context.Context, + req types.GetErrorGroupsRequest, +) ([]types.ErrorGroup, error) { + // we need this subquery to make query faster, see https://github.com/ClickHouse/ClickHouse/issues/7187 + subQ := sq. + Select("_group_hash"). + From("error_groups"). + Where(sq.Eq{"service": req.Service}). + GroupBy("_group_hash", "source"). + Limit(uint64(req.Limit)). 
+ Offset(uint64(req.Offset)) + + if r.sharded { + subQ = subQ.Distinct() + } + for col, val := range r.queryFilters() { + subQ = subQ.Where(sq.Eq{col: val}) + } + if req.Env != nil && *req.Env != "" { + subQ = subQ.Where(sq.Eq{"env": req.Env}) + } + if req.Source != nil && *req.Source != "" { + subQ = subQ.Where(sq.Eq{"source": req.Source}) + } + + if req.Release != nil && *req.Release != "" { // new by releases, ignore duration + subQ = subQ.Having(sq.Eq{ + "count()": 1, + "any(release)": *req.Release, + }) + } else if req.Duration != nil && *req.Duration != 0 { // new by duration + subQ = subQ.Having(sq.GtOrEq{"minMerge(first_seen_at)": r.nowFn().Add(-req.Duration.Abs())}) + } + + subQ = orderBy(subQ, req.Order, true) + + subQuery, subArgs := subQ.MustSql() + + in := "IN" + if r.sharded { + in = "GLOBAL IN" + } + q := sq. + Select( + "_group_hash", + "source", + "any(message) as message", + "countMerge(seen_total) as seen_total", + "minMerge(first_seen_at) as first_seen_at", + "maxMerge(last_seen_at) as last_seen_at", + ). + From("error_groups"). + Where(fmt.Sprintf("_group_hash %s (%s)", in, subQuery), subArgs...). + GroupBy("_group_hash", "source") + + // using string formatting below because squirrel doesn't support subquery in WHERE clause + q = q.Where(fmt.Sprintf("service = '%s'", req.Service)) + + for col, val := range r.queryFilters() { + q = q.Where(fmt.Sprintf("%s = '%s'", col, val)) + } + + if req.Source != nil && *req.Source != "" { + q = q.Where(fmt.Sprintf("source = '%s'", *req.Source)) + } + if req.Env != nil && *req.Env != "" { + q = q.Where(fmt.Sprintf("env = '%s'", *req.Env)) + } + q = orderBy(q, req.Order, false) + + query, args := q.MustSql() + metricLabels := []string{"error_groups", "SELECT"} + rows, err := r.conn.Query(ctx, metricLabels, query, args...) 
+ if err != nil { + incErrorMetric(err, metricLabels) + return nil, fmt.Errorf("failed to get new error groups: %w", err) + } + + var errorGroups []types.ErrorGroup + for rows.Next() { + var group types.ErrorGroup + err = rows.Scan( + &group.Hash, + &group.Source, + &group.Message, + &group.SeenTotal, + &group.FirstSeenAt, + &group.LastSeenAt, + ) + if err != nil { + return nil, fmt.Errorf("failed to scan row: %w", err) + } + + errorGroups = append(errorGroups, group) + } + + return errorGroups, nil +} + +func (r *repository) GetNewErrorGroupsCount( + ctx context.Context, + req types.GetErrorGroupsRequest, +) (uint64, error) { + subQ := sq. + Select("_group_hash"). + From("error_groups"). + Where(sq.Eq{"service": req.Service}). + GroupBy("_group_hash", "source") + + for col, val := range r.queryFilters() { + subQ = subQ.Where(sq.Eq{col: val}) + } + if req.Env != nil && *req.Env != "" { + subQ = subQ.Where(sq.Eq{"env": req.Env}) + } + if req.Source != nil && *req.Source != "" { + subQ = subQ.Where(sq.Eq{"source": req.Source}) + } + + if req.Release != nil && *req.Release != "" { // new by releases, ignore duration + subQ = subQ.Having(sq.Eq{ + "count()": 1, + "any(release)": *req.Release, + }) + } else if req.Duration != nil && *req.Duration != 0 { // new by duration + subQ = subQ.Having(sq.GtOrEq{"minMerge(first_seen_at)": r.nowFn().Add(-req.Duration.Abs())}) + } + + q := sq.Select("count()").FromSelect(subQ, "subQ") + + query, args := q.MustSql() + metricLabels := []string{"error_groups", "SELECT"} + row := r.conn.QueryRow(ctx, metricLabels, query, args...) 
+ + var total uint64 + if err := row.Scan(&total); err != nil { + if errors.Is(err, sql.ErrNoRows) { + return 0, nil + } + incErrorMetric(err, metricLabels) + return 0, fmt.Errorf("failed to get new error groups count: %w", err) + } + + return total, nil +} + +func (r *repository) GetErrorHist( + ctx context.Context, + req types.GetErrorHistRequest, +) ([]types.ErrorHistBucket, error) { + startDate := getHistBucketSize(req.Duration) + + q := sq. + Select( + startDate, + "countMerge(counts) as counts", + ). + From("agg_events_10min"). + Where(sq.Eq{"service": req.Service}). + GroupBy(startDate, "service"). + OrderBy(startDate) + + for col, val := range r.queryFilters() { + q = q.Where(sq.Eq{col: val}).GroupBy(col) + } + + if req.GroupHash != nil && *req.GroupHash != 0 { + q = q.Where(sq.Eq{"_group_hash": req.GroupHash}).GroupBy("_group_hash") + } + if req.Env != nil && *req.Env != "" { + q = q.Where(sq.Eq{"env": req.Env}).GroupBy("env") + } + if req.Release != nil && *req.Release != "" { + q = q.Where(sq.Eq{"release": req.Release}).GroupBy("release") + } + if req.Duration != nil && *req.Duration != 0 { + q = q.Where(sq.GtOrEq{startDate: r.nowFn().Add(-req.Duration.Abs())}) + } + if req.Source != nil && *req.Source != "" { + q = q.Where(sq.Eq{"source": req.Source}).GroupBy("source") + } + + query, args := q.MustSql() + metricLabels := []string{"agg_events_10min", "SELECT"} + rows, err := r.conn.Query(ctx, metricLabels, query, args...) 
+ if err != nil { + incErrorMetric(err, metricLabels) + return nil, fmt.Errorf("failed to get error hist: %w", err) + } + + var buckets []types.ErrorHistBucket + for rows.Next() { + var bucket types.ErrorHistBucket + if err := rows.Scan(&bucket.Time, &bucket.Count); err != nil { + return nil, fmt.Errorf("failed to scan row: %w", err) + } + buckets = append(buckets, bucket) + } + + return buckets, nil +} + +func (r *repository) GetErrorDetails( + ctx context.Context, + req types.GetErrorGroupDetailsRequest, +) (types.ErrorGroupDetails, error) { + q := sq. + Select( + "_group_hash as group_hash", + "source", + "any(message) as message", + "countMerge(seen_total) as seen_total", + "minMerge(first_seen_at) as first_seen_at", + "maxMerge(last_seen_at) as last_seen_at", + "max(log_tags) as log_tags", + ). + From("error_groups"). + Where(sq.Eq{ + "service": req.Service, + "_group_hash": req.GroupHash, + }). + GroupBy("_group_hash", "service", "source") + + for col, val := range r.queryFilters() { + q = q.Where(sq.Eq{col: val}).GroupBy(col) + } + + if req.Env != nil && *req.Env != "" { + q = q.Where(sq.Eq{"env": req.Env}).GroupBy("env") + } + if req.Release != nil && *req.Release != "" { + q = q.Where(sq.Eq{"release": req.Release}).GroupBy("release") + } + if req.Source != nil && *req.Source != "" { + q = q.Where(sq.Eq{"source": req.Source}) + } + + var details types.ErrorGroupDetails + + query, args := q.MustSql() + metricLabels := []string{"error_groups", "SELECT"} + row := r.conn.QueryRow(ctx, metricLabels, query, args...) 
+ err := row.Scan( + &details.GroupHash, + &details.Source, + &details.Message, + &details.SeenTotal, + &details.FirstSeenAt, + &details.LastSeenAt, + &details.LogTags, + ) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + incErrorMetric(err, metricLabels) + return details, fmt.Errorf("failed to get error details: %w", err) + } + + return details, nil +} + +func (r *repository) GetErrorCounts( + ctx context.Context, + req types.GetErrorGroupDetailsRequest, +) (types.ErrorGroupCounts, error) { + counts := types.ErrorGroupCounts{ + ByEnv: types.ErrorGroupCount{}, + ByRelease: types.ErrorGroupCount{}, + } + + q := sq. + Select("countMerge(seen_total) as seen_total", "env", "release"). + From("error_groups"). + Where(sq.Eq{ + "service": req.Service, + "_group_hash": req.GroupHash, + }). + GroupBy("_group_hash", "service", "env", "release"). + OrderBy("seen_total DESC") + + for col, val := range r.queryFilters() { + q = q.Where(sq.Eq{col: val}).GroupBy(col) + } + + if req.Env != nil && *req.Env != "" { + q = q.Where(sq.Eq{"env": *req.Env}) + } + if req.Release != nil && *req.Release != "" { + q = q.Where(sq.Eq{"release": *req.Release}) + } + if req.Source != nil && *req.Source != "" { + q = q.Where(sq.Eq{"source": *req.Source}) + } + + query, args := q.MustSql() + metricLabels := []string{"error_groups", "SELECT"} + rows, err := r.conn.Query(ctx, metricLabels, query, args...) + if err != nil { + incErrorMetric(err, metricLabels) + return counts, fmt.Errorf("failed to get error counts: %w", err) + } + + for rows.Next() { + var ( + seen uint64 + env, release string + ) + if err := rows.Scan(&seen, &env, &release); err != nil { + return counts, fmt.Errorf("failed to scan row: %w", err) + } + counts.ByEnv[env] += seen + counts.ByRelease[release] += seen + } + + return counts, nil +} + +func (r *repository) GetErrorReleases( + ctx context.Context, + req types.GetErrorGroupReleasesRequest, +) ([]string, error) { + q := sq. + Select("release").Distinct(). 
+ From("services"). + Where(sq.And{ + sq.Eq{"service": req.Service}, + sq.NotEq{"release": ""}, + }). + OrderBy("ttl DESC") + + for col, val := range r.queryFilters() { + q = q.Where(sq.Eq{col: val}) + } + + if req.Env != nil && *req.Env != "" { + q = q.Where(sq.Eq{"env": req.Env}) + } + + query, args := q.MustSql() + metricLabels := []string{"services", "SELECT"} + rows, err := r.conn.Query(ctx, metricLabels, query, args...) + if err != nil { + incErrorMetric(err, metricLabels) + return nil, fmt.Errorf("failed to get releases: %w", err) + } + + releases := make([]string, 0) + for rows.Next() { + var release string + if err := rows.Scan(&release); err != nil { + return nil, fmt.Errorf("failed to scan row: %w", err) + } + if release == "" { + continue + } + releases = append(releases, release) + } + + return releases, nil +} + +func (r *repository) GetServices( + ctx context.Context, + req types.GetServicesRequest, +) ([]string, error) { + q := sq. + Select("service").Distinct(). + From("services"). + Where("startsWith(service, ?)", req.Query). + Where(sq.NotEq{"service": ""}). + OrderBy("service") + + for col, val := range r.queryFilters() { + q = q.Where(sq.Eq{col: val}) + } + + if req.Env != nil && *req.Env != "" { + q = q.Where(sq.Eq{"env": req.Env}) + } + + if req.Limit > 0 { + q = q.Limit(uint64(req.Limit)) + } + if req.Offset > 0 { + q = q.Offset(uint64(req.Offset)) + } + + query, args := q.MustSql() + metricLabels := []string{"services", "SELECT"} + rows, err := r.conn.Query(ctx, metricLabels, query, args...) 
+ if err != nil { + incErrorMetric(err, metricLabels) + return nil, fmt.Errorf("failed to get services: %w", err) + } + + services := make([]string, 0) + for rows.Next() { + var service string + if err := rows.Scan(&service); err != nil { + return nil, fmt.Errorf("failed to scan row: %w", err) + } + services = append(services, service) + } + + return services, nil +} + +func (r *repository) DiffByReleases( + ctx context.Context, + req types.DiffByReleasesRequest, +) ([]types.DiffGroup, error) { + where := sq.Eq{ + "service": req.Service, + "release": req.Releases, + } + for col, val := range r.queryFilters() { + where[col] = val + } + if req.Env != nil && *req.Env != "" { + where["env"] = *req.Env + } + if req.Source != nil && *req.Source != "" { + where["source"] = *req.Source + } + + groupsQ := sq. + Select( + "_group_hash", + "source", + "any(message) as message", + "minMerge(first_seen_at) as first_seen_at", + "maxMerge(last_seen_at) as last_seen_at", + ). + From("error_groups"). + Where(where). + GroupBy("_group_hash", "source"). + Limit(uint64(req.Limit)). + Offset(uint64(req.Offset)) + + switch req.Order { + case types.OrderFrequent: + groupsQ = groupsQ.OrderBy("countMerge(seen_total) DESC") + case types.OrderLatest: + groupsQ = groupsQ.OrderBy("last_seen_at DESC") + case types.OrderOldest: + groupsQ = groupsQ.OrderBy("first_seen_at") + } + + groupsQuery, args := groupsQ.MustSql() + metricLabels := []string{"error_groups", "SELECT"} + rows, err := r.conn.Query(ctx, metricLabels, groupsQuery, args...) 
+ if err != nil { + incErrorMetric(err, metricLabels) + return nil, fmt.Errorf("failed to get error groups: %w", err) + } + + var ( + diffGroups []types.DiffGroup + idxByHash = map[uint64]int{} + ) + for rows.Next() { + group := types.DiffGroup{ + ReleaseInfos: make(map[string]types.DiffReleaseInfo), + } + for _, r := range req.Releases { + group.ReleaseInfos[r] = types.DiffReleaseInfo{} + } + + err = rows.Scan( + &group.Hash, + &group.Source, + &group.Message, + &group.FirstSeenAt, + &group.LastSeenAt, + ) + if err != nil { + return nil, fmt.Errorf("failed to scan row: %w", err) + } + + diffGroups = append(diffGroups, group) + idxByHash[group.Hash] = len(diffGroups) - 1 + } + + if len(diffGroups) == 0 { + return nil, nil + } + + hashes := slices.Collect(maps.Keys(idxByHash)) + slices.Sort(hashes) + where["_group_hash"] = hashes + + q := sq. + Select( + "_group_hash", + "release", + "countMerge(seen_total) as seen_total", + ). + From("error_groups"). + Where(where). + GroupBy("_group_hash", "release") + + query, args := q.MustSql() + rows, err = r.conn.Query(ctx, metricLabels, query, args...) + if err != nil { + incErrorMetric(err, metricLabels) + return nil, fmt.Errorf("failed to get error groups by release: %w", err) + } + + for rows.Next() { + var ( + hash, seenTotal uint64 + release string + ) + if err := rows.Scan(&hash, &release, &seenTotal); err != nil { + return nil, fmt.Errorf("failed to scan row: %w", err) + } + + if idx, ok := idxByHash[hash]; ok { + diffGroups[idx].ReleaseInfos[release] = types.DiffReleaseInfo{ + SeenTotal: seenTotal, + } + } + } + + return diffGroups, nil +} + +func (r *repository) DiffByReleasesTotal( + ctx context.Context, + req types.DiffByReleasesRequest, +) (uint64, error) { + subQ := sq. + Select("_group_hash"). + From("error_groups"). + Where(sq.Eq{ + "service": req.Service, + "release": req.Releases, + }). 
+ GroupBy("_group_hash") + + for col, val := range r.queryFilters() { + subQ = subQ.Where(sq.Eq{col: val}) + } + if req.Env != nil && *req.Env != "" { + subQ = subQ.Where(sq.Eq{"env": *req.Env}) + } + if req.Source != nil && *req.Source != "" { + subQ = subQ.Where(sq.Eq{"source": *req.Source}) + } + + q := sq.Select("count()").FromSelect(subQ, "subQ") + + query, args := q.MustSql() + metricLabels := []string{"error_groups", "SELECT"} + row := r.conn.QueryRow(ctx, metricLabels, query, args...) + + var total uint64 + if err := row.Scan(&total); err != nil { + if errors.Is(err, sql.ErrNoRows) { + return 0, nil + } + incErrorMetric(err, metricLabels) + return 0, fmt.Errorf("failed to get error groups count: %w", err) + } + + return total, nil +} + +func orderBy(q sq.SelectBuilder, o types.ErrorGroupsOrder, sub bool) sq.SelectBuilder { + seenTotal := "seen_total DESC" + lastSeenAt := "last_seen_at DESC" + firstSeenAt := "first_seen_at" + if sub { + seenTotal = "countMerge(seen_total) DESC" + lastSeenAt = "maxMerge(last_seen_at) DESC" + firstSeenAt = "minMerge(first_seen_at)" + } + + switch o { + case types.OrderFrequent: + q = q.OrderBy(seenTotal) + case types.OrderLatest: + q = q.OrderBy(lastSeenAt) + case types.OrderOldest: + q = q.OrderBy(firstSeenAt) + } + return q +} + +func getHistBucketSize(d *time.Duration) string { + const ( + startDate = "start_date" + startOfHour = "toStartOfHour(start_date)" + startOfDay = "toStartOfDay(start_date)" + day = 24 * time.Hour + ) + + if d == nil { + return startOfDay + } + + duration := *d + switch { + case duration < 7*time.Hour: + return startDate + case duration < 7*day: + return startOfHour + case duration >= 7*day: + return startOfDay + default: + return startOfDay + } +} diff --git a/internal/pkg/repository_ch/error_groups_test.go b/internal/pkg/repository_ch/error_groups_test.go new file mode 100644 index 0000000..e759a0a --- /dev/null +++ b/internal/pkg/repository_ch/error_groups_test.go @@ -0,0 +1,882 @@ +package 
repositorych + +import ( + "context" + "database/sql" + "errors" + "fmt" + "testing" + "time" + + "github.com/ozontech/seq-ui/internal/app/types" + mock "github.com/ozontech/seq-ui/internal/pkg/repository_ch/mock" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func fakeNow(now time.Time) func() time.Time { + return func() time.Time { + return now + } +} + +func TestGetNewErrorGroups(t *testing.T) { + var ( + service = "test-svc" + release = "test-release" + env = "test-env" + source = "test-source" + + fakeNow = fakeNow(time.Now()) + duration = time.Hour * 24 + timeDiff = fakeNow().Add(-duration.Abs()) + + someErr = errors.New("some err") + ) + + type mockRows struct { + count int + scanErr error + } + + type mockConn struct { + query string + args []any + + rows *mockRows + err error + } + + tests := []struct { + name string + + req types.GetErrorGroupsRequest + wantGroups int + wantErr bool + + isSharded bool + queryFilter map[string]string + mockConn *mockConn + }{ + { + name: "ok_by_releases", + + req: types.GetErrorGroupsRequest{ + Service: service, + Release: &release, + Duration: &duration, + Limit: 20, + Offset: 5, + Order: types.OrderFrequent, + }, + wantGroups: 2, + + mockConn: &mockConn{ + query: fmt.Sprintf(""+ + "SELECT _group_hash, source, any(message) as message, countMerge(seen_total) as seen_total, minMerge(first_seen_at) as first_seen_at, maxMerge(last_seen_at) as last_seen_at"+ + " FROM error_groups"+ + " WHERE _group_hash IN (%s) AND service = 'test-svc'"+ + " GROUP BY _group_hash, source"+ + " ORDER BY seen_total DESC", + + "SELECT _group_hash"+ + " FROM error_groups"+ + " WHERE service = ?"+ + " GROUP BY _group_hash, source"+ + " HAVING any(release) = ? 
AND count() = ?"+ + " ORDER BY countMerge(seen_total) DESC"+ + " LIMIT 20 OFFSET 5", + ), + args: []any{service, release, 1}, + + rows: &mockRows{ + count: 2, + }, + }, + }, + { + name: "ok_by_duration", + + req: types.GetErrorGroupsRequest{ + Service: service, + Duration: &duration, + Limit: 10, + Order: types.OrderLatest, + }, + wantGroups: 2, + + mockConn: &mockConn{ + query: fmt.Sprintf(""+ + "SELECT _group_hash, source, any(message) as message, countMerge(seen_total) as seen_total, minMerge(first_seen_at) as first_seen_at, maxMerge(last_seen_at) as last_seen_at"+ + " FROM error_groups"+ + " WHERE _group_hash IN (%s) AND service = 'test-svc'"+ + " GROUP BY _group_hash, source"+ + " ORDER BY last_seen_at DESC", + + "SELECT _group_hash"+ + " FROM error_groups"+ + " WHERE service = ?"+ + " GROUP BY _group_hash, source"+ + " HAVING minMerge(first_seen_at) >= ?"+ + " ORDER BY maxMerge(last_seen_at) DESC"+ + " LIMIT 10 OFFSET 0", + ), + args: []any{service, timeDiff}, + + rows: &mockRows{ + count: 2, + }, + }, + }, + { + name: "ok_full_filters", + + req: types.GetErrorGroupsRequest{ + Service: service, + Env: &env, + Source: &source, + Duration: &duration, + Limit: 10, + Offset: 20, + Order: types.OrderOldest, + }, + wantGroups: 2, + + queryFilter: map[string]string{ + "filter1": "value1", + "filter2": "value2", + }, + mockConn: &mockConn{ + query: fmt.Sprintf(""+ + "SELECT _group_hash, source, any(message) as message, countMerge(seen_total) as seen_total, minMerge(first_seen_at) as first_seen_at, maxMerge(last_seen_at) as last_seen_at"+ + " FROM error_groups"+ + " WHERE _group_hash IN (%s) AND service = 'test-svc' AND filter1 = 'value1' AND filter2 = 'value2' AND source = 'test-source' AND env = 'test-env'"+ + " GROUP BY _group_hash, source"+ + " ORDER BY first_seen_at", + + "SELECT _group_hash"+ + " FROM error_groups"+ + " WHERE service = ? AND filter1 = ? AND filter2 = ? AND env = ? 
AND source = ?"+ + " GROUP BY _group_hash, source"+ + " HAVING minMerge(first_seen_at) >= ?"+ + " ORDER BY minMerge(first_seen_at)"+ + " LIMIT 10 OFFSET 20", + ), + args: []any{service, "value1", "value2", env, source, timeDiff}, + + rows: &mockRows{ + count: 2, + }, + }, + }, + { + name: "ok_sharded", + + req: types.GetErrorGroupsRequest{ + Service: service, + Duration: &duration, + Limit: 10, + Offset: 20, + Order: types.OrderFrequent, + }, + wantGroups: 2, + + isSharded: true, + mockConn: &mockConn{ + query: fmt.Sprintf(""+ + "SELECT _group_hash, source, any(message) as message, countMerge(seen_total) as seen_total, minMerge(first_seen_at) as first_seen_at, maxMerge(last_seen_at) as last_seen_at"+ + " FROM error_groups"+ + " WHERE _group_hash GLOBAL IN (%s) AND service = 'test-svc'"+ + " GROUP BY _group_hash, source"+ + " ORDER BY seen_total DESC", + + "SELECT DISTINCT _group_hash"+ + " FROM error_groups"+ + " WHERE service = ?"+ + " GROUP BY _group_hash, source"+ + " HAVING minMerge(first_seen_at) >= ?"+ + " ORDER BY countMerge(seen_total) DESC"+ + " LIMIT 10 OFFSET 20", + ), + args: []any{service, timeDiff}, + + rows: &mockRows{ + count: 2, + }, + }, + }, + { + name: "ok_no_rows", + + req: types.GetErrorGroupsRequest{}, + wantGroups: 0, + + mockConn: &mockConn{ + rows: &mockRows{ + count: 0, + }, + }, + }, + { + name: "err_query", + + req: types.GetErrorGroupsRequest{}, + wantErr: true, + + mockConn: &mockConn{ + err: someErr, + }, + }, + { + name: "err_scan", + + req: types.GetErrorGroupsRequest{}, + wantErr: true, + + mockConn: &mockConn{ + rows: &mockRows{ + scanErr: someErr, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + mockedConn := mock.NewMockConn(ctrl) + + repo := newRepo(mockedConn, tt.isSharded, tt.queryFilter, fakeNow) + + if tt.mockConn != nil { + mockedRows := mock.NewMockRows(ctrl) + if rows := tt.mockConn.rows; rows != nil { + times := rows.count + if 
rows.scanErr != nil { + times = 1 + } + + mockedRows.EXPECT().Next().Return(true).Times(times) + mockedRows.EXPECT().Scan(gomock.Any()).Return(rows.scanErr).Times(times) + if rows.scanErr == nil { + mockedRows.EXPECT().Next().Return(false).Times(1) + } + } + + if tt.mockConn.query == "" { + mockedConn.EXPECT().Query(gomock.Any(), gomock.Any(), gomock.Any()).Return(mockedRows, tt.mockConn.err).Times(1) + } else { + mockedConn.EXPECT().Query(gomock.Any(), tt.mockConn.query, tt.mockConn.args...).Return(mockedRows, tt.mockConn.err).Times(1) + } + } + + got, err := repo.GetNewErrorGroups(context.Background(), tt.req) + require.Equal(t, tt.wantErr, err != nil) + require.Equal(t, tt.wantGroups, len(got)) + }) + } +} + +func TestGetNewErrorGroupsCount(t *testing.T) { + var ( + service = "test-svc" + release = "test-release" + env = "test-env" + source = "test-source" + + fakeNow = fakeNow(time.Now()) + duration = time.Hour * 24 + timeDiff = fakeNow().Add(-duration.Abs()) + + someErr = errors.New("some err") + ) + + type mockConn struct { + query string + args []any + + scanErr error + } + + tests := []struct { + name string + + req types.GetErrorGroupsRequest + wantErr bool + + queryFilter map[string]string + mockConn *mockConn + }{ + { + name: "ok_by_releases", + + req: types.GetErrorGroupsRequest{ + Service: service, + Release: &release, + Duration: &duration, + }, + + mockConn: &mockConn{ + query: fmt.Sprintf(""+ + "SELECT count() FROM (%s) AS subQ", + + "SELECT _group_hash"+ + " FROM error_groups"+ + " WHERE service = ?"+ + " GROUP BY _group_hash, source"+ + " HAVING any(release) = ? 
AND count() = ?", + ), + args: []any{service, release, 1}, + }, + }, + { + name: "ok_by_duration", + + req: types.GetErrorGroupsRequest{ + Service: service, + Duration: &duration, + }, + + mockConn: &mockConn{ + query: fmt.Sprintf(""+ + "SELECT count() FROM (%s) AS subQ", + + "SELECT _group_hash"+ + " FROM error_groups"+ + " WHERE service = ?"+ + " GROUP BY _group_hash, source"+ + " HAVING minMerge(first_seen_at) >= ?", + ), + args: []any{service, timeDiff}, + }, + }, + { + name: "ok_full_filters", + + req: types.GetErrorGroupsRequest{ + Service: service, + Duration: &duration, + Env: &env, + Source: &source, + }, + + queryFilter: map[string]string{ + "filter1": "value1", + "filter2": "value2", + }, + mockConn: &mockConn{ + query: fmt.Sprintf(""+ + "SELECT count() FROM (%s) AS subQ", + + "SELECT _group_hash"+ + " FROM error_groups"+ + " WHERE service = ? AND filter1 = ? AND filter2 = ? AND env = ? AND source = ?"+ + " GROUP BY _group_hash, source"+ + " HAVING minMerge(first_seen_at) >= ?", + ), + args: []any{service, "value1", "value2", env, source, timeDiff}, + }, + }, + { + name: "ok_no_rows", + + req: types.GetErrorGroupsRequest{}, + + mockConn: &mockConn{ + scanErr: sql.ErrNoRows, + }, + }, + { + name: "err_scan", + + req: types.GetErrorGroupsRequest{}, + wantErr: true, + + mockConn: &mockConn{ + scanErr: someErr, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + mockedConn := mock.NewMockConn(ctrl) + + repo := newRepo(mockedConn, true, tt.queryFilter, fakeNow) + + if tt.mockConn != nil { + mockedRow := mock.NewMockRow(ctrl) + mockedRow.EXPECT().Scan(gomock.Any()).Return(tt.mockConn.scanErr) + + if tt.mockConn.query == "" { + mockedConn.EXPECT().QueryRow(gomock.Any(), gomock.Any(), gomock.Any()).Return(mockedRow).Times(1) + } else { + mockedConn.EXPECT().QueryRow(gomock.Any(), tt.mockConn.query, tt.mockConn.args...).Return(mockedRow).Times(1) + } + } + + got, err := 
repo.GetNewErrorGroupsCount(context.Background(), tt.req) + require.Equal(t, tt.wantErr, err != nil) + if tt.wantErr { + require.Equal(t, uint64(0), got) + } + }) + } +} + +func TestDiffByReleases(t *testing.T) { + var ( + service = "test-svc" + releases = []string{"test-release1", "test-release2"} + env = "test-env" + source = "test-source" + + someErr = errors.New("some err") + ) + + type mockRows struct { + scanFns []func(...any) error + scanErr bool + } + + type mockConn struct { + query string + args []any + + rows *mockRows + err error + } + + tests := []struct { + name string + + req types.DiffByReleasesRequest + wantGroups []types.DiffGroup + wantErr bool + + queryFilter map[string]string + mockConnGroups, mockConn *mockConn + }{ + { + name: "ok", + + req: types.DiffByReleasesRequest{ + Service: service, + Releases: releases, + Limit: 20, + Order: types.OrderFrequent, + }, + wantGroups: []types.DiffGroup{ + { + Hash: 123, + ReleaseInfos: map[string]types.DiffReleaseInfo{ + releases[0]: {SeenTotal: 10}, + releases[1]: {SeenTotal: 20}, + }, + }, + { + Hash: 456, + ReleaseInfos: map[string]types.DiffReleaseInfo{ + releases[0]: {SeenTotal: 0}, + releases[1]: {SeenTotal: 1000}, + }, + }, + }, + + mockConnGroups: &mockConn{ + query: "" + + "SELECT _group_hash, source, any(message) as message, minMerge(first_seen_at) as first_seen_at, maxMerge(last_seen_at) as last_seen_at" + + " FROM error_groups" + + " WHERE release IN (?,?) AND service = ?" 
+ + " GROUP BY _group_hash, source" + + " ORDER BY countMerge(seen_total) DESC" + + " LIMIT 20 OFFSET 0", + args: []any{releases[0], releases[1], service}, + + rows: &mockRows{ + scanFns: []func(...any) error{ + func(args ...any) error { + *args[0].(*uint64) = 123 + return nil + }, + func(args ...any) error { + *args[0].(*uint64) = 456 + return nil + }, + }, + }, + }, + mockConn: &mockConn{ + query: "" + + "SELECT _group_hash, release, countMerge(seen_total) as seen_total" + + " FROM error_groups" + + " WHERE _group_hash IN (?,?) AND release IN (?,?) AND service = ?" + + " GROUP BY _group_hash, release", + args: []any{uint64(123), uint64(456), releases[0], releases[1], service}, + + rows: &mockRows{ + scanFns: []func(...any) error{ + func(args ...any) error { + *args[0].(*uint64) = 123 + *args[1].(*string) = releases[0] + *args[2].(*uint64) = 10 + return nil + }, + func(args ...any) error { + *args[0].(*uint64) = 123 + *args[1].(*string) = releases[1] + *args[2].(*uint64) = 20 + return nil + }, + func(args ...any) error { + *args[0].(*uint64) = 456 + *args[1].(*string) = releases[1] + *args[2].(*uint64) = 1000 + return nil + }, + }, + }, + }, + }, + { + name: "ok_full_filters", + + req: types.DiffByReleasesRequest{ + Service: service, + Releases: releases, + Env: &env, + Source: &source, + Limit: 20, + Offset: 5, + Order: types.OrderLatest, + }, + wantGroups: []types.DiffGroup{ + { + Hash: 123, + ReleaseInfos: map[string]types.DiffReleaseInfo{ + releases[0]: {SeenTotal: 10}, + releases[1]: {SeenTotal: 20}, + }, + }, + { + Hash: 456, + ReleaseInfos: map[string]types.DiffReleaseInfo{ + releases[0]: {SeenTotal: 0}, + releases[1]: {SeenTotal: 1000}, + }, + }, + }, + + queryFilter: map[string]string{ + "filter1": "value1", + "filter2": "value2", + }, + mockConnGroups: &mockConn{ + query: "" + + "SELECT _group_hash, source, any(message) as message, minMerge(first_seen_at) as first_seen_at, maxMerge(last_seen_at) as last_seen_at" + + " FROM error_groups" + + " WHERE env = 
? AND filter1 = ? AND filter2 = ? AND release IN (?,?) AND service = ? AND source = ?" + + " GROUP BY _group_hash, source" + + " ORDER BY last_seen_at DESC" + + " LIMIT 20 OFFSET 5", + args: []any{env, "value1", "value2", releases[0], releases[1], service, source}, + + rows: &mockRows{ + scanFns: []func(...any) error{ + func(args ...any) error { + *args[0].(*uint64) = 123 + return nil + }, + func(args ...any) error { + *args[0].(*uint64) = 456 + return nil + }, + }, + }, + }, + mockConn: &mockConn{ + query: "" + + "SELECT _group_hash, release, countMerge(seen_total) as seen_total" + + " FROM error_groups" + + " WHERE _group_hash IN (?,?) AND env = ? AND filter1 = ? AND filter2 = ? AND release IN (?,?) AND service = ? AND source = ?" + + " GROUP BY _group_hash, release", + args: []any{uint64(123), uint64(456), env, "value1", "value2", releases[0], releases[1], service, source}, + + rows: &mockRows{ + scanFns: []func(...any) error{ + func(args ...any) error { + *args[0].(*uint64) = 123 + *args[1].(*string) = releases[0] + *args[2].(*uint64) = 10 + return nil + }, + func(args ...any) error { + *args[0].(*uint64) = 123 + *args[1].(*string) = releases[1] + *args[2].(*uint64) = 20 + return nil + }, + func(args ...any) error { + *args[0].(*uint64) = 456 + *args[1].(*string) = releases[1] + *args[2].(*uint64) = 1000 + return nil + }, + }, + }, + }, + }, + { + name: "ok_no_rows", + + req: types.DiffByReleasesRequest{}, + wantGroups: nil, + + mockConnGroups: &mockConn{ + rows: &mockRows{}, + }, + }, + { + name: "err_query_groups", + + req: types.DiffByReleasesRequest{}, + wantErr: true, + + mockConnGroups: &mockConn{ + err: someErr, + }, + }, + { + name: "err_scan_groups", + + req: types.DiffByReleasesRequest{}, + wantErr: true, + + mockConnGroups: &mockConn{ + rows: &mockRows{ + scanErr: true, + scanFns: []func(...any) error{ + func(args ...any) error { return someErr }, + }, + }, + }, + }, + { + name: "err_query", + + req: types.DiffByReleasesRequest{}, + wantErr: true, + 
+ mockConnGroups: &mockConn{ + rows: &mockRows{ + scanFns: []func(...any) error{ + func(args ...any) error { + *args[0].(*uint64) = 123 + return nil + }, + }, + }, + }, + mockConn: &mockConn{ + err: someErr, + }, + }, + { + name: "err_scan", + + req: types.DiffByReleasesRequest{}, + wantErr: true, + + mockConnGroups: &mockConn{ + rows: &mockRows{ + scanFns: []func(...any) error{ + func(args ...any) error { + *args[0].(*uint64) = 123 + return nil + }, + }, + }, + }, + mockConn: &mockConn{ + rows: &mockRows{ + scanErr: true, + scanFns: []func(...any) error{ + func(args ...any) error { return someErr }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + mockedConn := mock.NewMockConn(ctrl) + + repo := newRepo(mockedConn, true, tt.queryFilter, time.Now) + + initMockConn := func(mc *mockConn) { + if mc == nil { + return + } + + mockedRows := mock.NewMockRows(ctrl) + if rows := mc.rows; rows != nil { + for _, scanFn := range rows.scanFns { + mockedRows.EXPECT().Next().Return(true) + mockedRows.EXPECT().Scan(gomock.Any()).DoAndReturn(scanFn) + } + if !rows.scanErr { + mockedRows.EXPECT().Next().Return(false) + } + } + + if mc.query == "" { + mockedConn.EXPECT().Query(gomock.Any(), gomock.Any(), gomock.Any()).Return(mockedRows, mc.err).Times(1) + } else { + mockedConn.EXPECT().Query(gomock.Any(), mc.query, mc.args...).Return(mockedRows, mc.err).Times(1) + } + } + + initMockConn(tt.mockConnGroups) + initMockConn(tt.mockConn) + + got, err := repo.DiffByReleases(context.Background(), tt.req) + require.Equal(t, tt.wantErr, err != nil) + if tt.wantErr { + return + } + + require.Equal(t, tt.wantGroups, got) + }) + } +} + +func TestDiffByReleasesTotal(t *testing.T) { + var ( + service = "test-svc" + releases = []string{"test-release1", "test-release2"} + env = "test-env" + source = "test-source" + + someErr = errors.New("some err") + ) + + type mockConn struct { + query string + args 
[]any + + scanErr error + } + + tests := []struct { + name string + + req types.DiffByReleasesRequest + wantErr bool + + queryFilter map[string]string + mockConn *mockConn + }{ + { + name: "ok", + + req: types.DiffByReleasesRequest{ + Service: service, + Releases: releases, + }, + + mockConn: &mockConn{ + query: fmt.Sprintf(""+ + "SELECT count() FROM (%s) AS subQ", + + "SELECT _group_hash"+ + " FROM error_groups"+ + " WHERE release IN (?,?) AND service = ?"+ + " GROUP BY _group_hash", + ), + args: []any{releases[0], releases[1], service}, + }, + }, + { + name: "ok_full_filters", + + req: types.DiffByReleasesRequest{ + Service: service, + Releases: releases, + Env: &env, + Source: &source, + }, + + queryFilter: map[string]string{ + "filter1": "value1", + "filter2": "value2", + }, + mockConn: &mockConn{ + query: fmt.Sprintf(""+ + "SELECT count() FROM (%s) AS subQ", + + "SELECT _group_hash"+ + " FROM error_groups"+ + " WHERE release IN (?,?) AND service = ? AND filter1 = ? AND filter2 = ? AND env = ? 
AND source = ?"+ + " GROUP BY _group_hash", + ), + args: []any{releases[0], releases[1], service, "value1", "value2", env, source}, + }, + }, + { + name: "ok_no_rows", + + req: types.DiffByReleasesRequest{}, + + mockConn: &mockConn{ + scanErr: sql.ErrNoRows, + }, + }, + { + name: "err_scan", + + req: types.DiffByReleasesRequest{}, + wantErr: true, + + mockConn: &mockConn{ + scanErr: someErr, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + mockedConn := mock.NewMockConn(ctrl) + + repo := newRepo(mockedConn, true, tt.queryFilter, time.Now) + + if tt.mockConn != nil { + mockedRow := mock.NewMockRow(ctrl) + mockedRow.EXPECT().Scan(gomock.Any()).Return(tt.mockConn.scanErr) + + if tt.mockConn.query == "" { + mockedConn.EXPECT().QueryRow(gomock.Any(), gomock.Any(), gomock.Any()).Return(mockedRow).Times(1) + } else { + mockedConn.EXPECT().QueryRow(gomock.Any(), tt.mockConn.query, tt.mockConn.args...).Return(mockedRow).Times(1) + } + } + + got, err := repo.DiffByReleasesTotal(context.Background(), tt.req) + require.Equal(t, tt.wantErr, err != nil) + if tt.wantErr { + require.Equal(t, uint64(0), got) + } + }) + } +} diff --git a/internal/pkg/repository_ch/mock/ch_driver.go b/internal/pkg/repository_ch/mock/ch_driver.go new file mode 100644 index 0000000..798470b --- /dev/null +++ b/internal/pkg/repository_ch/mock/ch_driver.go @@ -0,0 +1,443 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ClickHouse/clickhouse-go/v2/lib/driver (interfaces: Conn,Rows,Row) +// +// Generated by this command: +// +// mockgen -destination=internal/pkg/repository_ch/mock/ch_driver.go -package=mock_repositorych github.com/ClickHouse/clickhouse-go/v2/lib/driver Conn,Rows,Row +// + +// Package mock_repositorych is a generated GoMock package. 
+package mock_repositorych + +import ( + context "context" + reflect "reflect" + + driver "github.com/ClickHouse/clickhouse-go/v2/lib/driver" + gomock "go.uber.org/mock/gomock" +) + +// MockConn is a mock of Conn interface. +type MockConn struct { + ctrl *gomock.Controller + recorder *MockConnMockRecorder + isgomock struct{} +} + +// MockConnMockRecorder is the mock recorder for MockConn. +type MockConnMockRecorder struct { + mock *MockConn +} + +// NewMockConn creates a new mock instance. +func NewMockConn(ctrl *gomock.Controller) *MockConn { + mock := &MockConn{ctrl: ctrl} + mock.recorder = &MockConnMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockConn) EXPECT() *MockConnMockRecorder { + return m.recorder +} + +// AsyncInsert mocks base method. +func (m *MockConn) AsyncInsert(ctx context.Context, query string, wait bool, args ...any) error { + m.ctrl.T.Helper() + varargs := []any{ctx, query, wait} + for _, a := range args { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "AsyncInsert", varargs...) + ret0, _ := ret[0].(error) + return ret0 +} + +// AsyncInsert indicates an expected call of AsyncInsert. +func (mr *MockConnMockRecorder) AsyncInsert(ctx, query, wait any, args ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, query, wait}, args...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AsyncInsert", reflect.TypeOf((*MockConn)(nil).AsyncInsert), varargs...) +} + +// Close mocks base method. +func (m *MockConn) Close() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Close") + ret0, _ := ret[0].(error) + return ret0 +} + +// Close indicates an expected call of Close. +func (mr *MockConnMockRecorder) Close() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockConn)(nil).Close)) +} + +// Contributors mocks base method. 
+func (m *MockConn) Contributors() []string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Contributors") + ret0, _ := ret[0].([]string) + return ret0 +} + +// Contributors indicates an expected call of Contributors. +func (mr *MockConnMockRecorder) Contributors() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Contributors", reflect.TypeOf((*MockConn)(nil).Contributors)) +} + +// Exec mocks base method. +func (m *MockConn) Exec(ctx context.Context, query string, args ...any) error { + m.ctrl.T.Helper() + varargs := []any{ctx, query} + for _, a := range args { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "Exec", varargs...) + ret0, _ := ret[0].(error) + return ret0 +} + +// Exec indicates an expected call of Exec. +func (mr *MockConnMockRecorder) Exec(ctx, query any, args ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, query}, args...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Exec", reflect.TypeOf((*MockConn)(nil).Exec), varargs...) +} + +// Ping mocks base method. +func (m *MockConn) Ping(arg0 context.Context) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Ping", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// Ping indicates an expected call of Ping. +func (mr *MockConnMockRecorder) Ping(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Ping", reflect.TypeOf((*MockConn)(nil).Ping), arg0) +} + +// PrepareBatch mocks base method. +func (m *MockConn) PrepareBatch(ctx context.Context, query string, opts ...driver.PrepareBatchOption) (driver.Batch, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, query} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PrepareBatch", varargs...) + ret0, _ := ret[0].(driver.Batch) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PrepareBatch indicates an expected call of PrepareBatch. 
+func (mr *MockConnMockRecorder) PrepareBatch(ctx, query any, opts ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, query}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PrepareBatch", reflect.TypeOf((*MockConn)(nil).PrepareBatch), varargs...) +} + +// Query mocks base method. +func (m *MockConn) Query(ctx context.Context, query string, args ...any) (driver.Rows, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, query} + for _, a := range args { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "Query", varargs...) + ret0, _ := ret[0].(driver.Rows) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Query indicates an expected call of Query. +func (mr *MockConnMockRecorder) Query(ctx, query any, args ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, query}, args...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Query", reflect.TypeOf((*MockConn)(nil).Query), varargs...) +} + +// QueryRow mocks base method. +func (m *MockConn) QueryRow(ctx context.Context, query string, args ...any) driver.Row { + m.ctrl.T.Helper() + varargs := []any{ctx, query} + for _, a := range args { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "QueryRow", varargs...) + ret0, _ := ret[0].(driver.Row) + return ret0 +} + +// QueryRow indicates an expected call of QueryRow. +func (mr *MockConnMockRecorder) QueryRow(ctx, query any, args ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, query}, args...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "QueryRow", reflect.TypeOf((*MockConn)(nil).QueryRow), varargs...) +} + +// Select mocks base method. +func (m *MockConn) Select(ctx context.Context, dest any, query string, args ...any) error { + m.ctrl.T.Helper() + varargs := []any{ctx, dest, query} + for _, a := range args { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "Select", varargs...) 
+ ret0, _ := ret[0].(error) + return ret0 +} + +// Select indicates an expected call of Select. +func (mr *MockConnMockRecorder) Select(ctx, dest, query any, args ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, dest, query}, args...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Select", reflect.TypeOf((*MockConn)(nil).Select), varargs...) +} + +// ServerVersion mocks base method. +func (m *MockConn) ServerVersion() (*driver.ServerVersion, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ServerVersion") + ret0, _ := ret[0].(*driver.ServerVersion) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ServerVersion indicates an expected call of ServerVersion. +func (mr *MockConnMockRecorder) ServerVersion() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ServerVersion", reflect.TypeOf((*MockConn)(nil).ServerVersion)) +} + +// Stats mocks base method. +func (m *MockConn) Stats() driver.Stats { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Stats") + ret0, _ := ret[0].(driver.Stats) + return ret0 +} + +// Stats indicates an expected call of Stats. +func (mr *MockConnMockRecorder) Stats() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Stats", reflect.TypeOf((*MockConn)(nil).Stats)) +} + +// MockRows is a mock of Rows interface. +type MockRows struct { + ctrl *gomock.Controller + recorder *MockRowsMockRecorder + isgomock struct{} +} + +// MockRowsMockRecorder is the mock recorder for MockRows. +type MockRowsMockRecorder struct { + mock *MockRows +} + +// NewMockRows creates a new mock instance. +func NewMockRows(ctrl *gomock.Controller) *MockRows { + mock := &MockRows{ctrl: ctrl} + mock.recorder = &MockRowsMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. 
+func (m *MockRows) EXPECT() *MockRowsMockRecorder { + return m.recorder +} + +// Close mocks base method. +func (m *MockRows) Close() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Close") + ret0, _ := ret[0].(error) + return ret0 +} + +// Close indicates an expected call of Close. +func (mr *MockRowsMockRecorder) Close() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockRows)(nil).Close)) +} + +// ColumnTypes mocks base method. +func (m *MockRows) ColumnTypes() []driver.ColumnType { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ColumnTypes") + ret0, _ := ret[0].([]driver.ColumnType) + return ret0 +} + +// ColumnTypes indicates an expected call of ColumnTypes. +func (mr *MockRowsMockRecorder) ColumnTypes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ColumnTypes", reflect.TypeOf((*MockRows)(nil).ColumnTypes)) +} + +// Columns mocks base method. +func (m *MockRows) Columns() []string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Columns") + ret0, _ := ret[0].([]string) + return ret0 +} + +// Columns indicates an expected call of Columns. +func (mr *MockRowsMockRecorder) Columns() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Columns", reflect.TypeOf((*MockRows)(nil).Columns)) +} + +// Err mocks base method. +func (m *MockRows) Err() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Err") + ret0, _ := ret[0].(error) + return ret0 +} + +// Err indicates an expected call of Err. +func (mr *MockRowsMockRecorder) Err() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Err", reflect.TypeOf((*MockRows)(nil).Err)) +} + +// Next mocks base method. +func (m *MockRows) Next() bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Next") + ret0, _ := ret[0].(bool) + return ret0 +} + +// Next indicates an expected call of Next. 
+func (mr *MockRowsMockRecorder) Next() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Next", reflect.TypeOf((*MockRows)(nil).Next)) +} + +// Scan mocks base method. +func (m *MockRows) Scan(dest ...any) error { + m.ctrl.T.Helper() + varargs := []any{} + for _, a := range dest { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "Scan", varargs...) + ret0, _ := ret[0].(error) + return ret0 +} + +// Scan indicates an expected call of Scan. +func (mr *MockRowsMockRecorder) Scan(dest ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Scan", reflect.TypeOf((*MockRows)(nil).Scan), dest...) +} + +// ScanStruct mocks base method. +func (m *MockRows) ScanStruct(dest any) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ScanStruct", dest) + ret0, _ := ret[0].(error) + return ret0 +} + +// ScanStruct indicates an expected call of ScanStruct. +func (mr *MockRowsMockRecorder) ScanStruct(dest any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ScanStruct", reflect.TypeOf((*MockRows)(nil).ScanStruct), dest) +} + +// Totals mocks base method. +func (m *MockRows) Totals(dest ...any) error { + m.ctrl.T.Helper() + varargs := []any{} + for _, a := range dest { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "Totals", varargs...) + ret0, _ := ret[0].(error) + return ret0 +} + +// Totals indicates an expected call of Totals. +func (mr *MockRowsMockRecorder) Totals(dest ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Totals", reflect.TypeOf((*MockRows)(nil).Totals), dest...) +} + +// MockRow is a mock of Row interface. +type MockRow struct { + ctrl *gomock.Controller + recorder *MockRowMockRecorder + isgomock struct{} +} + +// MockRowMockRecorder is the mock recorder for MockRow. 
+type MockRowMockRecorder struct { + mock *MockRow +} + +// NewMockRow creates a new mock instance. +func NewMockRow(ctrl *gomock.Controller) *MockRow { + mock := &MockRow{ctrl: ctrl} + mock.recorder = &MockRowMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRow) EXPECT() *MockRowMockRecorder { + return m.recorder +} + +// Err mocks base method. +func (m *MockRow) Err() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Err") + ret0, _ := ret[0].(error) + return ret0 +} + +// Err indicates an expected call of Err. +func (mr *MockRowMockRecorder) Err() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Err", reflect.TypeOf((*MockRow)(nil).Err)) +} + +// Scan mocks base method. +func (m *MockRow) Scan(dest ...any) error { + m.ctrl.T.Helper() + varargs := []any{} + for _, a := range dest { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "Scan", varargs...) + ret0, _ := ret[0].(error) + return ret0 +} + +// Scan indicates an expected call of Scan. +func (mr *MockRowMockRecorder) Scan(dest ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Scan", reflect.TypeOf((*MockRow)(nil).Scan), dest...) +} + +// ScanStruct mocks base method. +func (m *MockRow) ScanStruct(dest any) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ScanStruct", dest) + ret0, _ := ret[0].(error) + return ret0 +} + +// ScanStruct indicates an expected call of ScanStruct. 
+func (mr *MockRowMockRecorder) ScanStruct(dest any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ScanStruct", reflect.TypeOf((*MockRow)(nil).ScanStruct), dest) +} diff --git a/internal/pkg/repository_ch/mock/repository.go b/internal/pkg/repository_ch/mock/repository_ch.go similarity index 65% rename from internal/pkg/repository_ch/mock/repository.go rename to internal/pkg/repository_ch/mock/repository_ch.go index 3f88dd7..d89a611 100644 --- a/internal/pkg/repository_ch/mock/repository.go +++ b/internal/pkg/repository_ch/mock/repository_ch.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: internal/pkg/repository_ch/repository.go +// Source: github.com/ozontech/seq-ui/internal/pkg/repository_ch (interfaces: Repository) // // Generated by this command: // -// mockgen -source=internal/pkg/repository_ch/repository.go -destination=internal/pkg/repository_ch/mock/repository.go +// mockgen -destination=internal/pkg/repository_ch/mock/repository_ch.go -package=mock_repositorych github.com/ozontech/seq-ui/internal/pkg/repository_ch Repository // // Package mock_repositorych is a generated GoMock package. @@ -41,6 +41,36 @@ func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { return m.recorder } +// DiffByReleases mocks base method. +func (m *MockRepository) DiffByReleases(arg0 context.Context, arg1 types.DiffByReleasesRequest) ([]types.DiffGroup, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DiffByReleases", arg0, arg1) + ret0, _ := ret[0].([]types.DiffGroup) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DiffByReleases indicates an expected call of DiffByReleases. +func (mr *MockRepositoryMockRecorder) DiffByReleases(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DiffByReleases", reflect.TypeOf((*MockRepository)(nil).DiffByReleases), arg0, arg1) +} + +// DiffByReleasesTotal mocks base method. 
+func (m *MockRepository) DiffByReleasesTotal(arg0 context.Context, arg1 types.DiffByReleasesRequest) (uint64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DiffByReleasesTotal", arg0, arg1) + ret0, _ := ret[0].(uint64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DiffByReleasesTotal indicates an expected call of DiffByReleasesTotal. +func (mr *MockRepositoryMockRecorder) DiffByReleasesTotal(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DiffByReleasesTotal", reflect.TypeOf((*MockRepository)(nil).DiffByReleasesTotal), arg0, arg1) +} + // GetErrorCounts mocks base method. func (m *MockRepository) GetErrorCounts(arg0 context.Context, arg1 types.GetErrorGroupDetailsRequest) (types.ErrorGroupCounts, error) { m.ctrl.T.Helper() @@ -131,6 +161,36 @@ func (mr *MockRepositoryMockRecorder) GetErrorReleases(arg0, arg1 any) *gomock.C return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetErrorReleases", reflect.TypeOf((*MockRepository)(nil).GetErrorReleases), arg0, arg1) } +// GetNewErrorGroups mocks base method. +func (m *MockRepository) GetNewErrorGroups(arg0 context.Context, arg1 types.GetErrorGroupsRequest) ([]types.ErrorGroup, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetNewErrorGroups", arg0, arg1) + ret0, _ := ret[0].([]types.ErrorGroup) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetNewErrorGroups indicates an expected call of GetNewErrorGroups. +func (mr *MockRepositoryMockRecorder) GetNewErrorGroups(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNewErrorGroups", reflect.TypeOf((*MockRepository)(nil).GetNewErrorGroups), arg0, arg1) +} + +// GetNewErrorGroupsCount mocks base method. 
+func (m *MockRepository) GetNewErrorGroupsCount(arg0 context.Context, arg1 types.GetErrorGroupsRequest) (uint64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetNewErrorGroupsCount", arg0, arg1) + ret0, _ := ret[0].(uint64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetNewErrorGroupsCount indicates an expected call of GetNewErrorGroupsCount. +func (mr *MockRepositoryMockRecorder) GetNewErrorGroupsCount(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNewErrorGroupsCount", reflect.TypeOf((*MockRepository)(nil).GetNewErrorGroupsCount), arg0, arg1) +} + // GetServices mocks base method. func (m *MockRepository) GetServices(arg0 context.Context, arg1 types.GetServicesRequest) ([]string, error) { m.ctrl.T.Helper() diff --git a/internal/pkg/repository_ch/repository.go b/internal/pkg/repository_ch/repository.go index 65a2d67..2a667f2 100644 --- a/internal/pkg/repository_ch/repository.go +++ b/internal/pkg/repository_ch/repository.go @@ -2,26 +2,28 @@ package repositorych import ( "context" - "database/sql" - "errors" - "fmt" + "iter" + "maps" + "slices" "time" "github.com/ClickHouse/clickhouse-go/v2/lib/driver" - sq "github.com/Masterminds/squirrel" "github.com/ozontech/seq-ui/internal/app/types" - sqlb "github.com/ozontech/seq-ui/internal/pkg/repository/sql_builder" ) type Repository interface { GetErrorGroups(context.Context, types.GetErrorGroupsRequest) ([]types.ErrorGroup, error) GetErrorGroupsCount(context.Context, types.GetErrorGroupsRequest) (uint64, error) + GetNewErrorGroups(context.Context, types.GetErrorGroupsRequest) ([]types.ErrorGroup, error) + GetNewErrorGroupsCount(context.Context, types.GetErrorGroupsRequest) (uint64, error) GetErrorHist(context.Context, types.GetErrorHistRequest) ([]types.ErrorHistBucket, error) GetErrorDetails(context.Context, types.GetErrorGroupDetailsRequest) (types.ErrorGroupDetails, error) GetErrorCounts(context.Context, 
types.GetErrorGroupDetailsRequest) (types.ErrorGroupCounts, error) GetErrorReleases(context.Context, types.GetErrorGroupReleasesRequest) ([]string, error) GetServices(context.Context, types.GetServicesRequest) ([]string, error) + DiffByReleases(context.Context, types.DiffByReleasesRequest) ([]types.DiffGroup, error) + DiffByReleasesTotal(context.Context, types.DiffByReleasesRequest) (uint64, error) } type repository struct { @@ -29,463 +31,31 @@ type repository struct { sharded bool queryFilter map[string]string + + nowFn func() time.Time // for testing } func New(conn driver.Conn, sharded bool, queryFilter map[string]string) Repository { + return newRepo(conn, sharded, queryFilter, time.Now) +} + +func newRepo(conn driver.Conn, sharded bool, queryFilter map[string]string, nowFn func() time.Time) *repository { return &repository{ conn: newConn(conn), sharded: sharded, queryFilter: queryFilter, + nowFn: nowFn, } } -func (r *repository) GetErrorGroups( - ctx context.Context, - req types.GetErrorGroupsRequest, -) ([]types.ErrorGroup, error) { - // we need this subquery to make query faster, see https://github.com/ClickHouse/ClickHouse/issues/7187 - subQ := sqlb. - Select("_group_hash"). - From("error_groups"). - Where(sq.Eq{"service": req.Service}). - GroupBy("_group_hash", "service"). - Limit(uint64(req.Limit)). 
- Offset(uint64(req.Offset)) - - if r.sharded { - subQ = subQ.Distinct() - } - - for col, val := range r.queryFilter { - subQ = subQ.Where(sq.Eq{col: val}).GroupBy(col) - } - - if req.Env != nil && *req.Env != "" { - subQ = subQ.Where(sq.Eq{"env": req.Env}).GroupBy("env") - } - if req.Release != nil && *req.Release != "" { - subQ = subQ.Where(sq.Eq{"release": req.Release}).GroupBy("release") - } - if req.Duration != nil && *req.Duration != 0 { - subQ = subQ.Having(sq.GtOrEq{"maxMerge(last_seen_at)": time.Now().Add(-req.Duration.Abs())}) - } - if req.Source != nil && *req.Source != "" { - subQ = subQ.Where(sq.Eq{"source": req.Source}).GroupBy("source") - } - subQ = orderBy(subQ, req.Order, true) - - subQuery, subArgs := subQ.MustSql() - - in := "IN" - if r.sharded { - in = "GLOBAL IN" - } - q := sqlb. - Select( - "_group_hash as group_hash", - "source", - "any(message) as message", - "countMerge(seen_total) as seen_total", - "minMerge(first_seen_at) as first_seen_at", - "maxMerge(last_seen_at) as last_seen_at", - ). - From("error_groups"). - Where(fmt.Sprintf("_group_hash %s (%s)", in, subQuery), subArgs...). - GroupBy("_group_hash", "service", "source") - - // using string formatting below because squirrel doesn't support subquery in WHERE clause - q = q.Where(fmt.Sprintf("service = '%s'", req.Service)) - - for col, val := range r.queryFilter { - q = q.Where(fmt.Sprintf("%s = '%s'", col, val)).GroupBy(col) - } - - if req.Source != nil && *req.Source != "" { - q = q.Where(fmt.Sprintf("source = '%s'", *req.Source)) - } - if req.Env != nil && *req.Env != "" { - q = q.Where(fmt.Sprintf("env = '%s'", *req.Env)).GroupBy("env") - } - if req.Release != nil && *req.Release != "" { - q = q.Where(fmt.Sprintf("release = '%s'", *req.Release)).GroupBy("release") - } - q = orderBy(q, req.Order, false) - - query, args := q.MustSql() - metricLabels := []string{"error_groups", "SELECT"} - rows, err := r.conn.Query(ctx, metricLabels, query, args...) 
- if err != nil { - incErrorMetric(err, metricLabels) - return nil, fmt.Errorf("failed to get error groups: %w", err) - } - - var errorGroups []types.ErrorGroup - for rows.Next() { - var group types.ErrorGroup - err = rows.Scan( - &group.Hash, - &group.Source, - &group.Message, - &group.SeenTotal, - &group.FirstSeenAt, - &group.LastSeenAt, - ) - if err != nil { - return nil, fmt.Errorf("failed to scan row: %w", err) - } - - errorGroups = append(errorGroups, group) - } - - return errorGroups, nil -} - -func (r *repository) GetErrorGroupsCount( - ctx context.Context, - req types.GetErrorGroupsRequest, -) (uint64, error) { - subQ := sqlb. - Select("maxMerge(last_seen_at) AS last_seen_at"). - From("error_groups"). - Where(sq.Eq{"service": req.Service}). - GroupBy("_group_hash", "service") - - for col, val := range r.queryFilter { - subQ = subQ.Where(sq.Eq{col: val}).GroupBy(col) - } - - if req.Env != nil && *req.Env != "" { - subQ = subQ.Where(sq.Eq{"env": req.Env}).GroupBy("env") - } - if req.Release != nil && *req.Release != "" { - subQ = subQ.Where(sq.Eq{"release": req.Release}).GroupBy("release") - } - if req.Duration != nil && *req.Duration != 0 { - subQ = subQ.Having(sq.GtOrEq{"last_seen_at": time.Now().Add(-req.Duration.Abs())}) - } - if req.Source != nil && *req.Source != "" { - subQ = subQ.Where(sq.Eq{"source": req.Source}).GroupBy("source") - } - - q := sqlb.Select("count()").FromSelect(subQ, "subQ") - - query, args := q.MustSql() - metricLabels := []string{"error_groups", "SELECT"} - row := r.conn.QueryRow(ctx, metricLabels, query, args...) 
- - var total uint64 - if err := row.Scan(&total); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return 0, nil - } - incErrorMetric(err, metricLabels) - return 0, fmt.Errorf("failed to get error groups count: %w", err) - } - - return total, nil -} - -func (r *repository) GetErrorHist( - ctx context.Context, - req types.GetErrorHistRequest, -) ([]types.ErrorHistBucket, error) { - startDate := getHistBucketSize(req.Duration) - - q := sqlb. - Select( - startDate, - "countMerge(counts) as counts", - ). - From("agg_events_10min"). - Where(sq.Eq{"service": req.Service}). - GroupBy(startDate, "service"). - OrderBy(startDate) - - for col, val := range r.queryFilter { - q = q.Where(sq.Eq{col: val}).GroupBy(col) - } - - if req.GroupHash != nil && *req.GroupHash != 0 { - q = q.Where(sq.Eq{"_group_hash": req.GroupHash}).GroupBy("_group_hash") - } - if req.Env != nil && *req.Env != "" { - q = q.Where(sq.Eq{"env": req.Env}).GroupBy("env") - } - if req.Release != nil && *req.Release != "" { - q = q.Where(sq.Eq{"release": req.Release}).GroupBy("release") - } - if req.Duration != nil && *req.Duration != 0 { - q = q.Where(sq.GtOrEq{startDate: time.Now().Add(-req.Duration.Abs())}) - } - if req.Source != nil && *req.Source != "" { - q = q.Where(sq.Eq{"source": req.Source}).GroupBy("source") - } - - query, args := q.MustSql() - metricLabels := []string{"agg_events_10min", "SELECT"} - rows, err := r.conn.Query(ctx, metricLabels, query, args...) 
- if err != nil { - incErrorMetric(err, metricLabels) - return nil, fmt.Errorf("failed to get error hist: %w", err) - } - - var buckets []types.ErrorHistBucket - for rows.Next() { - var bucket types.ErrorHistBucket - if err := rows.Scan(&bucket.Time, &bucket.Count); err != nil { - return nil, fmt.Errorf("failed to scan row: %w", err) - } - buckets = append(buckets, bucket) - } - - return buckets, nil -} - -func (r *repository) GetErrorDetails( - ctx context.Context, - req types.GetErrorGroupDetailsRequest, -) (types.ErrorGroupDetails, error) { - q := sqlb. - Select( - "_group_hash as group_hash", - "source", - "any(message) as message", - "countMerge(seen_total) as seen_total", - "minMerge(first_seen_at) as first_seen_at", - "maxMerge(last_seen_at) as last_seen_at", - "max(log_tags) as log_tags", - ). - From("error_groups"). - Where(sq.Eq{ - "service": req.Service, - "_group_hash": req.GroupHash, - }). - GroupBy("_group_hash", "service", "source") - - for col, val := range r.queryFilter { - q = q.Where(sq.Eq{col: val}).GroupBy(col) - } - - if req.Env != nil && *req.Env != "" { - q = q.Where(sq.Eq{"env": req.Env}).GroupBy("env") - } - if req.Release != nil && *req.Release != "" { - q = q.Where(sq.Eq{"release": req.Release}).GroupBy("release") - } - if req.Source != nil && *req.Source != "" { - q = q.Where(sq.Eq{"source": req.Source}) - } - - var details types.ErrorGroupDetails - - query, args := q.MustSql() - metricLabels := []string{"error_groups", "SELECT"} - row := r.conn.QueryRow(ctx, metricLabels, query, args...) 
- err := row.Scan( - &details.GroupHash, - &details.Source, - &details.Message, - &details.SeenTotal, - &details.FirstSeenAt, - &details.LastSeenAt, - &details.LogTags, - ) - if err != nil && !errors.Is(err, sql.ErrNoRows) { - incErrorMetric(err, metricLabels) - return details, fmt.Errorf("failed to get error details: %w", err) - } - - return details, nil -} - -func (r *repository) GetErrorCounts( - ctx context.Context, - req types.GetErrorGroupDetailsRequest, -) (types.ErrorGroupCounts, error) { - counts := types.ErrorGroupCounts{ - ByEnv: types.ErrorGroupCount{}, - ByRelease: types.ErrorGroupCount{}, - } - - q := sqlb. - Select("countMerge(seen_total) as seen_total", "env", "release"). - From("error_groups"). - Where(sq.Eq{ - "service": req.Service, - "_group_hash": req.GroupHash, - }). - GroupBy("_group_hash", "service", "env", "release"). - OrderBy("seen_total DESC") - - for col, val := range r.queryFilter { - q = q.Where(sq.Eq{col: val}).GroupBy(col) - } - - if req.Env != nil && *req.Env != "" { - q = q.Where(sq.Eq{"env": *req.Env}) - } - if req.Release != nil && *req.Release != "" { - q = q.Where(sq.Eq{"release": *req.Release}) - } - if req.Source != nil && *req.Source != "" { - q = q.Where(sq.Eq{"source": *req.Source}) - } - - query, args := q.MustSql() - metricLabels := []string{"error_groups", "SELECT"} - rows, err := r.conn.Query(ctx, metricLabels, query, args...) 
- if err != nil { - incErrorMetric(err, metricLabels) - return counts, fmt.Errorf("failed to get error counts: %w", err) - } - - for rows.Next() { - var ( - seen uint64 - env, release string - ) - if err := rows.Scan(&seen, &env, &release); err != nil { - return counts, fmt.Errorf("failed to scan row: %w", err) +func (r *repository) queryFilters() iter.Seq2[string, string] { + keys := slices.Collect(maps.Keys(r.queryFilter)) + slices.Sort(keys) + return func(yield func(string, string) bool) { + for _, key := range keys { + if !yield(key, r.queryFilter[key]) { + return + } } - counts.ByEnv[env] += seen - counts.ByRelease[release] += seen - } - - return counts, nil -} - -func (r *repository) GetErrorReleases( - ctx context.Context, - req types.GetErrorGroupReleasesRequest, -) ([]string, error) { - q := sqlb. - Select("release").Distinct(). - From("services"). - Where(sq.And{ - sq.Eq{"service": req.Service}, - sq.NotEq{"release": ""}, - }). - OrderBy("release") - - for col, val := range r.queryFilter { - q = q.Where(sq.Eq{col: val}) - } - - if req.Env != nil && *req.Env != "" { - q = q.Where(sq.Eq{"env": req.Env}) - } - - query, args := q.MustSql() - metricLabels := []string{"services", "SELECT"} - rows, err := r.conn.Query(ctx, metricLabels, query, args...) - if err != nil { - incErrorMetric(err, metricLabels) - return nil, fmt.Errorf("failed to get releases: %w", err) - } - - releases := make([]string, 0) - for rows.Next() { - var release string - if err := rows.Scan(&release); err != nil { - return nil, fmt.Errorf("failed to scan row: %w", err) - } - if release == "" { - continue - } - releases = append(releases, release) - } - - return releases, nil -} - -func (r *repository) GetServices( - ctx context.Context, - req types.GetServicesRequest, -) ([]string, error) { - q := sqlb. - Select("service").Distinct(). - From("services"). - Where("startsWith(service, ?)", req.Query). - Where(sq.NotEq{"service": ""}). 
- OrderBy("service") - - for col, val := range r.queryFilter { - q = q.Where(sq.Eq{col: val}) - } - - if req.Env != nil && *req.Env != "" { - q = q.Where(sq.Eq{"env": req.Env}) - } - - if req.Limit > 0 { - q = q.Limit(uint64(req.Limit)) - } - if req.Offset > 0 { - q = q.Offset(uint64(req.Offset)) - } - - query, args := q.MustSql() - metricLabels := []string{"services", "SELECT"} - rows, err := r.conn.Query(ctx, metricLabels, query, args...) - if err != nil { - incErrorMetric(err, metricLabels) - return nil, fmt.Errorf("failed to get services: %w", err) - } - - services := make([]string, 0) - for rows.Next() { - var service string - if err := rows.Scan(&service); err != nil { - return nil, fmt.Errorf("failed to scan row: %w", err) - } - services = append(services, service) - } - - return services, nil -} - -func orderBy(q sq.SelectBuilder, o types.ErrorGroupsOrder, sub bool) sq.SelectBuilder { - seenTotal := "seen_total DESC" - lastSeenAt := "last_seen_at DESC" - firstSeenAt := "first_seen_at" - if sub { - seenTotal = "countMerge(seen_total) DESC" - lastSeenAt = "maxMerge(last_seen_at) DESC" - firstSeenAt = "minMerge(first_seen_at)" - } - - switch o { - case types.OrderFrequent: - q = q.OrderBy(seenTotal) - case types.OrderLatest: - q = q.OrderBy(lastSeenAt) - case types.OrderOldest: - q = q.OrderBy(firstSeenAt) - } - return q -} - -func getHistBucketSize(d *time.Duration) string { - const ( - startDate = "start_date" - startOfHour = "toStartOfHour(start_date)" - startOfDay = "toStartOfDay(start_date)" - day = 24 * time.Hour - ) - - if d == nil { - return startOfDay - } - - duration := *d - switch { - case duration < 7*time.Hour: - return startDate - case duration < 7*day: - return startOfHour - case duration >= 7*day: - return startOfDay - default: - return startOfDay } } diff --git a/internal/pkg/service/errorgroups/service.go b/internal/pkg/service/errorgroups/service.go index 6a9752d..f1d9d6c 100644 --- a/internal/pkg/service/errorgroups/service.go +++ 
b/internal/pkg/service/errorgroups/service.go @@ -13,6 +13,10 @@ import ( repositorych "github.com/ozontech/seq-ui/internal/pkg/repository_ch" ) +const ( + defaultLimit uint32 = 25 +) + type Service struct { repo repositorych.Repository logTagsMapping config.LogTagsMapping @@ -28,35 +32,58 @@ func New(repo repositorych.Repository, logTagsMapping config.LogTagsMapping) *Se func (s *Service) GetErrorGroups( ctx context.Context, req types.GetErrorGroupsRequest, +) ([]types.ErrorGroup, uint64, error) { + return getErrorGroups(ctx, req, s.repo.GetErrorGroups, s.repo.GetErrorGroupsCount) +} + +func (s *Service) GetNewErrorGroups( + ctx context.Context, + req types.GetErrorGroupsRequest, +) ([]types.ErrorGroup, uint64, error) { + // If the release and duration are not specified, + // then we are looking for errors for all time and releases. + // In this case, we believe that there are no new errors. + if (req.Release == nil || *req.Release == "") && + (req.Duration == nil || *req.Duration == 0) { + return nil, 0, nil + } + + return getErrorGroups(ctx, req, s.repo.GetNewErrorGroups, s.repo.GetNewErrorGroupsCount) +} + +func getErrorGroups( + ctx context.Context, + req types.GetErrorGroupsRequest, + groupsFn func(ctx context.Context, req types.GetErrorGroupsRequest) ([]types.ErrorGroup, error), + countFn func(ctx context.Context, req types.GetErrorGroupsRequest) (uint64, error), ) ([]types.ErrorGroup, uint64, error) { if req.Service == "" { return nil, 0, types.NewErrInvalidRequestField("'service' must not be empty") } - const defaultLimit uint32 = 25 if req.Limit == 0 { req.Limit = defaultLimit } - group, groupCtx := errgroup.WithContext(ctx) + eg, ctx := errgroup.WithContext(ctx) var groups []types.ErrorGroup - group.Go(func() error { + eg.Go(func() error { var err error - groups, err = s.repo.GetErrorGroups(groupCtx, req) + groups, err = groupsFn(ctx, req) return err }) var total uint64 if req.WithTotal { - group.Go(func() error { + eg.Go(func() error { var err error - 
total, err = s.repo.GetErrorGroupsCount(groupCtx, req) + total, err = countFn(ctx, req) return err }) } - err := group.Wait() + err := eg.Wait() if err != nil { return nil, 0, fmt.Errorf("get error groups failed: %w", err) } @@ -176,3 +203,47 @@ func (s *Service) GetServices( ) ([]string, error) { return s.repo.GetServices(ctx, req) } + +func (s *Service) DiffByReleases( + ctx context.Context, + req types.DiffByReleasesRequest, +) ([]types.DiffGroup, uint64, error) { + if req.Service == "" { + return nil, 0, types.NewErrInvalidRequestField("'service' must be non-empty") + } + if len(req.Releases) < 2 { + return nil, 0, types.NewErrInvalidRequestField("length of'releases' must be at least 2") + } + if slices.Contains(req.Releases, "") { + return nil, 0, types.NewErrInvalidRequestField("each element in 'releases' must be non-empty") + } + + if req.Limit == 0 { + req.Limit = defaultLimit + } + + eg, ctx := errgroup.WithContext(ctx) + + var diffGroups []types.DiffGroup + eg.Go(func() error { + var err error + diffGroups, err = s.repo.DiffByReleases(ctx, req) + return err + }) + + var total uint64 + if req.WithTotal { + eg.Go(func() error { + var err error + total, err = s.repo.DiffByReleasesTotal(ctx, req) + return err + }) + } + + err := eg.Wait() + if err != nil { + return nil, 0, fmt.Errorf("diff by releases failed: %w", err) + } + + return diffGroups, total, err +} diff --git a/pkg/errorgroups/v1/errorgroups.pb.go b/pkg/errorgroups/v1/errorgroups.pb.go index dbff7c0..220df28 100644 --- a/pkg/errorgroups/v1/errorgroups.pb.go +++ b/pkg/errorgroups/v1/errorgroups.pb.go @@ -76,15 +76,16 @@ type GetGroupsRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Service string `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` - Env *string `protobuf:"bytes,2,opt,name=env,proto3,oneof" json:"env,omitempty"` - Release *string `protobuf:"bytes,3,opt,name=release,proto3,oneof" json:"release,omitempty"` - Duration 
*durationpb.Duration `protobuf:"bytes,4,opt,name=duration,proto3" json:"duration,omitempty"` - Limit uint32 `protobuf:"varint,5,opt,name=limit,proto3" json:"limit,omitempty"` - Offset uint32 `protobuf:"varint,6,opt,name=offset,proto3" json:"offset,omitempty"` - Order Order `protobuf:"varint,7,opt,name=order,proto3,enum=errorgroups.v1.Order" json:"order,omitempty"` - WithTotal bool `protobuf:"varint,8,opt,name=with_total,json=withTotal,proto3" json:"with_total,omitempty"` - Source *string `protobuf:"bytes,9,opt,name=source,proto3,oneof" json:"source,omitempty"` + Service string `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` + Env *string `protobuf:"bytes,2,opt,name=env,proto3,oneof" json:"env,omitempty"` + Release *string `protobuf:"bytes,3,opt,name=release,proto3,oneof" json:"release,omitempty"` + Duration *durationpb.Duration `protobuf:"bytes,4,opt,name=duration,proto3" json:"duration,omitempty"` + Limit uint32 `protobuf:"varint,5,opt,name=limit,proto3" json:"limit,omitempty"` + Offset uint32 `protobuf:"varint,6,opt,name=offset,proto3" json:"offset,omitempty"` + Order Order `protobuf:"varint,7,opt,name=order,proto3,enum=errorgroups.v1.Order" json:"order,omitempty"` + WithTotal bool `protobuf:"varint,8,opt,name=with_total,json=withTotal,proto3" json:"with_total,omitempty"` + Source *string `protobuf:"bytes,9,opt,name=source,proto3,oneof" json:"source,omitempty"` + Filter *GetGroupsRequest_Filter `protobuf:"bytes,10,opt,name=filter,proto3,oneof" json:"filter,omitempty"` } func (x *GetGroupsRequest) Reset() { @@ -182,6 +183,13 @@ func (x *GetGroupsRequest) GetSource() string { return "" } +func (x *GetGroupsRequest) GetFilter() *GetGroupsRequest_Filter { + if x != nil { + return x.Filter + } + return nil +} + type GetGroupsResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -915,6 +923,211 @@ func (x *GetServicesResponse) GetServices() []string { return nil } +type DiffByReleasesRequest struct { + state 
protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Service string `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` + Releases []string `protobuf:"bytes,2,rep,name=releases,proto3" json:"releases,omitempty"` + Env *string `protobuf:"bytes,3,opt,name=env,proto3,oneof" json:"env,omitempty"` + Source *string `protobuf:"bytes,4,opt,name=source,proto3,oneof" json:"source,omitempty"` + Limit uint32 `protobuf:"varint,5,opt,name=limit,proto3" json:"limit,omitempty"` + Offset uint32 `protobuf:"varint,6,opt,name=offset,proto3" json:"offset,omitempty"` + Order Order `protobuf:"varint,7,opt,name=order,proto3,enum=errorgroups.v1.Order" json:"order,omitempty"` + WithTotal bool `protobuf:"varint,8,opt,name=with_total,json=withTotal,proto3" json:"with_total,omitempty"` +} + +func (x *DiffByReleasesRequest) Reset() { + *x = DiffByReleasesRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffByReleasesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffByReleasesRequest) ProtoMessage() {} + +func (x *DiffByReleasesRequest) ProtoReflect() protoreflect.Message { + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DiffByReleasesRequest.ProtoReflect.Descriptor instead. 
+func (*DiffByReleasesRequest) Descriptor() ([]byte, []int) { + return file_errorgroups_v1_errorgroups_proto_rawDescGZIP(), []int{12} +} + +func (x *DiffByReleasesRequest) GetService() string { + if x != nil { + return x.Service + } + return "" +} + +func (x *DiffByReleasesRequest) GetReleases() []string { + if x != nil { + return x.Releases + } + return nil +} + +func (x *DiffByReleasesRequest) GetEnv() string { + if x != nil && x.Env != nil { + return *x.Env + } + return "" +} + +func (x *DiffByReleasesRequest) GetSource() string { + if x != nil && x.Source != nil { + return *x.Source + } + return "" +} + +func (x *DiffByReleasesRequest) GetLimit() uint32 { + if x != nil { + return x.Limit + } + return 0 +} + +func (x *DiffByReleasesRequest) GetOffset() uint32 { + if x != nil { + return x.Offset + } + return 0 +} + +func (x *DiffByReleasesRequest) GetOrder() Order { + if x != nil { + return x.Order + } + return Order_ORDER_FREQUENT +} + +func (x *DiffByReleasesRequest) GetWithTotal() bool { + if x != nil { + return x.WithTotal + } + return false +} + +type DiffByReleasesResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Total uint64 `protobuf:"varint,1,opt,name=total,proto3" json:"total,omitempty"` + Groups []*DiffByReleasesResponse_Group `protobuf:"bytes,2,rep,name=groups,proto3" json:"groups,omitempty"` +} + +func (x *DiffByReleasesResponse) Reset() { + *x = DiffByReleasesResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffByReleasesResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffByReleasesResponse) ProtoMessage() {} + +func (x *DiffByReleasesResponse) ProtoReflect() protoreflect.Message { + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DiffByReleasesResponse.ProtoReflect.Descriptor instead. +func (*DiffByReleasesResponse) Descriptor() ([]byte, []int) { + return file_errorgroups_v1_errorgroups_proto_rawDescGZIP(), []int{13} +} + +func (x *DiffByReleasesResponse) GetTotal() uint64 { + if x != nil { + return x.Total + } + return 0 +} + +func (x *DiffByReleasesResponse) GetGroups() []*DiffByReleasesResponse_Group { + if x != nil { + return x.Groups + } + return nil +} + +type GetGroupsRequest_Filter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + IsNew bool `protobuf:"varint,1,opt,name=is_new,json=isNew,proto3" json:"is_new,omitempty"` +} + +func (x *GetGroupsRequest_Filter) Reset() { + *x = GetGroupsRequest_Filter{} + if protoimpl.UnsafeEnabled { + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetGroupsRequest_Filter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetGroupsRequest_Filter) ProtoMessage() {} + +func (x *GetGroupsRequest_Filter) ProtoReflect() protoreflect.Message { + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetGroupsRequest_Filter.ProtoReflect.Descriptor instead. 
+func (*GetGroupsRequest_Filter) Descriptor() ([]byte, []int) { + return file_errorgroups_v1_errorgroups_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *GetGroupsRequest_Filter) GetIsNew() bool { + if x != nil { + return x.IsNew + } + return false +} + type GetDetailsResponse_Distribution struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -927,7 +1140,7 @@ type GetDetailsResponse_Distribution struct { func (x *GetDetailsResponse_Distribution) Reset() { *x = GetDetailsResponse_Distribution{} if protoimpl.UnsafeEnabled { - mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[12] + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -940,7 +1153,7 @@ func (x *GetDetailsResponse_Distribution) String() string { func (*GetDetailsResponse_Distribution) ProtoMessage() {} func (x *GetDetailsResponse_Distribution) ProtoReflect() protoreflect.Message { - mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[12] + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -982,7 +1195,7 @@ type GetDetailsResponse_Distributions struct { func (x *GetDetailsResponse_Distributions) Reset() { *x = GetDetailsResponse_Distributions{} if protoimpl.UnsafeEnabled { - mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[13] + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -995,7 +1208,7 @@ func (x *GetDetailsResponse_Distributions) String() string { func (*GetDetailsResponse_Distributions) ProtoMessage() {} func (x *GetDetailsResponse_Distributions) ProtoReflect() protoreflect.Message { - mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[13] + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1025,6 +1238,140 @@ func (x *GetDetailsResponse_Distributions) GetByRelease() []*GetDetailsResponse_ return nil } +type DiffByReleasesResponse_ReleaseInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + SeenTotal uint64 `protobuf:"varint,1,opt,name=seen_total,json=seenTotal,proto3" json:"seen_total,omitempty"` +} + +func (x *DiffByReleasesResponse_ReleaseInfo) Reset() { + *x = DiffByReleasesResponse_ReleaseInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffByReleasesResponse_ReleaseInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffByReleasesResponse_ReleaseInfo) ProtoMessage() {} + +func (x *DiffByReleasesResponse_ReleaseInfo) ProtoReflect() protoreflect.Message { + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DiffByReleasesResponse_ReleaseInfo.ProtoReflect.Descriptor instead. 
+func (*DiffByReleasesResponse_ReleaseInfo) Descriptor() ([]byte, []int) { + return file_errorgroups_v1_errorgroups_proto_rawDescGZIP(), []int{13, 0} +} + +func (x *DiffByReleasesResponse_ReleaseInfo) GetSeenTotal() uint64 { + if x != nil { + return x.SeenTotal + } + return 0 +} + +type DiffByReleasesResponse_Group struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Hash uint64 `protobuf:"varint,1,opt,name=hash,proto3" json:"hash,omitempty"` + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + FirstSeenAt *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=first_seen_at,json=firstSeenAt,proto3" json:"first_seen_at,omitempty"` + LastSeenAt *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=last_seen_at,json=lastSeenAt,proto3" json:"last_seen_at,omitempty"` + Source string `protobuf:"bytes,5,opt,name=source,proto3" json:"source,omitempty"` + ReleaseInfos map[string]*DiffByReleasesResponse_ReleaseInfo `protobuf:"bytes,6,rep,name=release_infos,json=releaseInfos,proto3" json:"release_infos,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *DiffByReleasesResponse_Group) Reset() { + *x = DiffByReleasesResponse_Group{} + if protoimpl.UnsafeEnabled { + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffByReleasesResponse_Group) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffByReleasesResponse_Group) ProtoMessage() {} + +func (x *DiffByReleasesResponse_Group) ProtoReflect() protoreflect.Message { + mi := &file_errorgroups_v1_errorgroups_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// 
Deprecated: Use DiffByReleasesResponse_Group.ProtoReflect.Descriptor instead. +func (*DiffByReleasesResponse_Group) Descriptor() ([]byte, []int) { + return file_errorgroups_v1_errorgroups_proto_rawDescGZIP(), []int{13, 1} +} + +func (x *DiffByReleasesResponse_Group) GetHash() uint64 { + if x != nil { + return x.Hash + } + return 0 +} + +func (x *DiffByReleasesResponse_Group) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *DiffByReleasesResponse_Group) GetFirstSeenAt() *timestamppb.Timestamp { + if x != nil { + return x.FirstSeenAt + } + return nil +} + +func (x *DiffByReleasesResponse_Group) GetLastSeenAt() *timestamppb.Timestamp { + if x != nil { + return x.LastSeenAt + } + return nil +} + +func (x *DiffByReleasesResponse_Group) GetSource() string { + if x != nil { + return x.Source + } + return "" +} + +func (x *DiffByReleasesResponse_Group) GetReleaseInfos() map[string]*DiffByReleasesResponse_ReleaseInfo { + if x != nil { + return x.ReleaseInfos + } + return nil +} + var File_errorgroups_v1_errorgroups_proto protoreflect.FileDescriptor var file_errorgroups_v1_errorgroups_proto_rawDesc = []byte{ @@ -1035,7 +1382,7 @@ var file_errorgroups_v1_errorgroups_proto_rawDesc = []byte{ 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x22, 0xcf, 0x02, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, + 0x6f, 0x74, 0x6f, 0x22, 0xc1, 0x03, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x15, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x02, 0x20, 0x01, 0x28, 
0x09, 0x48, @@ -1054,137 +1401,198 @@ var file_errorgroups_v1_errorgroups_proto_rawDesc = []byte{ 0x74, 0x68, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x77, 0x69, 0x74, 0x68, 0x54, 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x06, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x88, 0x01, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x65, 0x6e, 0x76, 0x42, 0x0a, - 0x0a, 0x08, 0x5f, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x58, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, - 0x70, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, - 0x74, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, - 0x12, 0x2d, 0x0a, 0x06, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x15, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, - 0x31, 0x2e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x52, 0x06, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x22, - 0xea, 0x01, 0x0a, 0x05, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, - 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x18, 0x0a, - 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x65, 0x6e, 0x5f, - 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, 0x65, 0x65, - 0x6e, 0x54, 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x3e, 0x0a, 0x0d, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, - 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 
0x6d, 0x70, 0x52, 0x0b, 0x66, 0x69, 0x72, 0x73, 0x74, - 0x53, 0x65, 0x65, 0x6e, 0x41, 0x74, 0x12, 0x3c, 0x0a, 0x0c, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x73, - 0x65, 0x65, 0x6e, 0x5f, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x6c, 0x61, 0x73, 0x74, 0x53, 0x65, - 0x65, 0x6e, 0x41, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x98, 0x02, 0x0a, - 0x0e, 0x47, 0x65, 0x74, 0x48, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x22, 0x0a, 0x0a, 0x67, 0x72, 0x6f, - 0x75, 0x70, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x48, 0x00, 0x52, - 0x09, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x48, 0x61, 0x73, 0x68, 0x88, 0x01, 0x01, 0x12, 0x15, 0x0a, - 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x03, 0x65, 0x6e, + 0x72, 0x63, 0x65, 0x88, 0x01, 0x01, 0x12, 0x44, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, + 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, + 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x48, + 0x03, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x88, 0x01, 0x01, 0x1a, 0x1f, 0x0a, 0x06, + 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x15, 0x0a, 0x06, 0x69, 0x73, 0x5f, 0x6e, 0x65, 0x77, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x69, 0x73, 0x4e, 0x65, 0x77, 0x42, 0x06, 0x0a, + 0x04, 0x5f, 0x65, 0x6e, 0x76, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x72, 0x65, 0x6c, 
0x65, 0x61, 0x73, + 0x65, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x42, 0x09, 0x0a, 0x07, + 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0x58, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x47, 0x72, + 0x6f, 0x75, 0x70, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x74, 0x6f, 0x74, + 0x61, 0x6c, 0x12, 0x2d, 0x0a, 0x06, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x18, 0x02, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, + 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x52, 0x06, 0x67, 0x72, 0x6f, 0x75, 0x70, + 0x73, 0x22, 0xea, 0x01, 0x0a, 0x05, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x12, 0x12, 0x0a, 0x04, 0x68, + 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, + 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x65, + 0x6e, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, + 0x65, 0x65, 0x6e, 0x54, 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x3e, 0x0a, 0x0d, 0x66, 0x69, 0x72, 0x73, + 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0b, 0x66, 0x69, 0x72, + 0x73, 0x74, 0x53, 0x65, 0x65, 0x6e, 0x41, 0x74, 0x12, 0x3c, 0x0a, 0x0c, 0x6c, 0x61, 0x73, 0x74, + 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x6c, 0x61, 0x73, 0x74, + 0x53, 0x65, 0x65, 
0x6e, 0x41, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x98, + 0x02, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x48, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x22, 0x0a, 0x0a, 0x67, + 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x48, + 0x00, 0x52, 0x09, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x48, 0x61, 0x73, 0x68, 0x88, 0x01, 0x01, 0x12, + 0x15, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x03, + 0x65, 0x6e, 0x76, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x07, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, + 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x07, 0x72, 0x65, 0x6c, 0x65, 0x61, + 0x73, 0x65, 0x88, 0x01, 0x01, 0x12, 0x3a, 0x0a, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x48, 0x03, 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, + 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x09, 0x48, 0x04, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0d, + 0x0a, 0x0b, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x42, 0x06, 0x0a, + 0x04, 0x5f, 0x65, 0x6e, 0x76, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, + 0x65, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x09, + 0x0a, 0x07, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x43, 0x0a, 0x0f, 0x47, 0x65, 0x74, + 0x48, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 
0x6e, 0x73, 0x65, 0x12, 0x30, 0x0a, 0x07, + 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x42, + 0x75, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x07, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x22, 0x4e, + 0x0a, 0x06, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x2e, 0x0a, 0x04, 0x74, 0x69, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x52, 0x04, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, + 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xbe, + 0x01, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1d, + 0x0a, 0x0a, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x04, 0x52, 0x09, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x48, 0x61, 0x73, 0x68, 0x12, 0x15, 0x0a, + 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x07, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x07, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, - 0x88, 0x01, 0x01, 0x12, 0x3a, 0x0a, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, - 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x48, 0x03, 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 
- 0x1b, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, - 0x04, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0d, 0x0a, 0x0b, - 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x42, 0x06, 0x0a, 0x04, 0x5f, - 0x65, 0x6e, 0x76, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x42, - 0x0b, 0x0a, 0x09, 0x5f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x09, 0x0a, 0x07, - 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x43, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x48, 0x69, - 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x30, 0x0a, 0x07, 0x62, 0x75, - 0x63, 0x6b, 0x65, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x75, 0x63, - 0x6b, 0x65, 0x74, 0x52, 0x07, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x22, 0x4e, 0x0a, 0x06, - 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x2e, 0x0a, 0x04, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x01, + 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x07, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, + 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x88, 0x01, 0x01, + 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x65, 0x6e, 0x76, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x72, 0x65, 0x6c, + 0x65, 0x61, 0x73, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, + 0xcc, 0x05, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, + 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x67, 0x72, 0x6f, 0x75, + 0x70, 0x48, 0x61, 0x73, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 
0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, + 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, 0x65, 0x65, 0x6e, 0x54, 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x3e, + 0x0a, 0x0d, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x61, 0x74, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x52, 0x0b, 0x66, 0x69, 0x72, 0x73, 0x74, 0x53, 0x65, 0x65, 0x6e, 0x41, 0x74, 0x12, 0x3c, + 0x0a, 0x0c, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x04, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xbe, 0x01, 0x0a, - 0x11, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1d, 0x0a, 0x0a, - 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, - 0x52, 0x09, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x48, 0x61, 0x73, 0x68, 0x12, 0x15, 0x0a, 0x03, 0x65, - 0x6e, 0x76, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x88, - 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x07, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x07, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x88, 0x01, - 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 
0x20, 0x01, 0x28, - 0x09, 0x48, 0x02, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x88, 0x01, 0x01, 0x42, 0x06, - 0x0a, 0x04, 0x5f, 0x65, 0x6e, 0x76, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x72, 0x65, 0x6c, 0x65, 0x61, - 0x73, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0xcc, 0x05, - 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x68, 0x61, - 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x48, - 0x61, 0x73, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1d, 0x0a, - 0x0a, 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x04, 0x52, 0x09, 0x73, 0x65, 0x65, 0x6e, 0x54, 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x3e, 0x0a, 0x0d, - 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x61, 0x74, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, - 0x0b, 0x66, 0x69, 0x72, 0x73, 0x74, 0x53, 0x65, 0x65, 0x6e, 0x41, 0x74, 0x12, 0x3c, 0x0a, 0x0c, - 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, - 0x6c, 0x61, 0x73, 0x74, 0x53, 0x65, 0x65, 0x6e, 0x41, 0x74, 0x12, 0x56, 0x0a, 0x0d, 0x64, 0x69, - 0x73, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x30, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, - 0x76, 0x31, 0x2e, 
0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x52, 0x0d, 0x64, 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x12, 0x4a, 0x0a, 0x08, 0x6c, 0x6f, 0x67, 0x5f, 0x74, 0x61, 0x67, 0x73, 0x18, 0x07, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, - 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4c, 0x6f, 0x67, 0x54, 0x61, 0x67, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6c, 0x6f, 0x67, 0x54, 0x61, 0x67, 0x73, 0x12, 0x16, - 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x3e, 0x0a, 0x0c, 0x44, 0x69, 0x73, 0x74, 0x72, 0x69, - 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x18, 0x0a, 0x07, - 0x70, 0x65, 0x72, 0x63, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x07, 0x70, - 0x65, 0x72, 0x63, 0x65, 0x6e, 0x74, 0x1a, 0xa7, 0x01, 0x0a, 0x0d, 0x44, 0x69, 0x73, 0x74, 0x72, - 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x46, 0x0a, 0x06, 0x62, 0x79, 0x5f, 0x65, - 0x6e, 0x76, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, - 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, 0x69, 0x73, - 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x62, 0x79, 0x45, 0x6e, 0x76, - 0x12, 0x4e, 0x0a, 0x0a, 0x62, 0x79, 0x5f, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x72, 
0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, - 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x62, 0x79, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, - 0x1a, 0x3a, 0x0a, 0x0c, 0x4c, 0x6f, 0x67, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x52, 0x0a, 0x6c, 0x61, 0x73, 0x74, 0x53, 0x65, 0x65, 0x6e, 0x41, 0x74, 0x12, 0x56, 0x0a, 0x0d, + 0x64, 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, + 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, 0x75, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x0d, 0x64, 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4a, 0x0a, 0x08, 0x6c, 0x6f, 0x67, 0x5f, 0x74, 0x61, 0x67, 0x73, + 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, + 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, + 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4c, 0x6f, 0x67, 0x54, 0x61, + 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6c, 0x6f, 0x67, 0x54, 0x61, 0x67, 0x73, + 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x3e, 0x0a, 0x0c, 0x44, 0x69, 0x73, 0x74, + 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x18, + 0x0a, 0x07, 0x70, 0x65, 0x72, 0x63, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 
0x52, + 0x07, 0x70, 0x65, 0x72, 0x63, 0x65, 0x6e, 0x74, 0x1a, 0xa7, 0x01, 0x0a, 0x0d, 0x44, 0x69, 0x73, + 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x46, 0x0a, 0x06, 0x62, 0x79, + 0x5f, 0x65, 0x6e, 0x76, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, + 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, + 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x62, 0x79, 0x45, + 0x6e, 0x76, 0x12, 0x4e, 0x0a, 0x0a, 0x62, 0x79, 0x5f, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, + 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, + 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, 0x69, 0x73, 0x74, 0x72, + 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x62, 0x79, 0x52, 0x65, 0x6c, 0x65, 0x61, + 0x73, 0x65, 0x1a, 0x3a, 0x0a, 0x0c, 0x4c, 0x6f, 0x67, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x53, + 0x0a, 0x12, 0x47, 0x65, 0x74, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x15, + 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x03, 0x65, + 0x6e, 0x76, 0x88, 0x01, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x65, 0x6e, 0x76, 0x4a, 0x04, 0x08, + 0x02, 0x10, 0x03, 0x22, 0x31, 
0x0a, 0x13, 0x47, 0x65, 0x74, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, + 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, + 0x6c, 0x65, 0x61, 0x73, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, + 0x6c, 0x65, 0x61, 0x73, 0x65, 0x73, 0x22, 0x77, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x71, 0x75, 0x65, + 0x72, 0x79, 0x12, 0x15, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x00, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x88, 0x01, 0x01, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, + 0x69, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, + 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, + 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x65, 0x6e, 0x76, 0x22, + 0x31, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, + 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, + 0x65, 0x73, 0x22, 0x8e, 0x02, 0x0a, 0x15, 0x44, 0x69, 0x66, 0x66, 0x42, 0x79, 0x52, 0x65, 0x6c, + 0x65, 0x61, 0x73, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, + 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, + 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, + 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, + 0x65, 0x73, 0x12, 0x15, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x00, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x88, 0x01, 0x01, 0x12, 0x1b, 
0x0a, 0x06, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x88, 0x01, 0x01, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x16, 0x0a, 0x06, + 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6f, 0x66, + 0x66, 0x73, 0x65, 0x74, 0x12, 0x2b, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, + 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, + 0x72, 0x12, 0x1d, 0x0a, 0x0a, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, + 0x08, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x77, 0x69, 0x74, 0x68, 0x54, 0x6f, 0x74, 0x61, 0x6c, + 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x65, 0x6e, 0x76, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x22, 0xca, 0x04, 0x0a, 0x16, 0x44, 0x69, 0x66, 0x66, 0x42, 0x79, 0x52, 0x65, + 0x6c, 0x65, 0x61, 0x73, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, + 0x0a, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x74, + 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x44, 0x0a, 0x06, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, + 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x42, 0x79, 0x52, 0x65, 0x6c, 0x65, + 0x61, 0x73, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x47, 0x72, 0x6f, + 0x75, 0x70, 0x52, 0x06, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x1a, 0x2c, 0x0a, 0x0b, 0x52, 0x65, + 0x6c, 0x65, 0x61, 0x73, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x65, + 0x6e, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x73, + 0x65, 
0x65, 0x6e, 0x54, 0x6f, 0x74, 0x61, 0x6c, 0x1a, 0xa5, 0x03, 0x0a, 0x05, 0x47, 0x72, 0x6f, + 0x75, 0x70, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, + 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x12, 0x3e, 0x0a, 0x0d, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x61, + 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, + 0x61, 0x6d, 0x70, 0x52, 0x0b, 0x66, 0x69, 0x72, 0x73, 0x74, 0x53, 0x65, 0x65, 0x6e, 0x41, 0x74, + 0x12, 0x3c, 0x0a, 0x0c, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x73, 0x65, 0x65, 0x6e, 0x5f, 0x61, 0x74, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x52, 0x0a, 0x6c, 0x61, 0x73, 0x74, 0x53, 0x65, 0x65, 0x6e, 0x41, 0x74, 0x12, 0x16, + 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x63, 0x0a, 0x0d, 0x72, 0x65, 0x6c, 0x65, 0x61, 0x73, + 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3e, 0x2e, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x44, + 0x69, 0x66, 0x66, 0x42, 0x79, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x2e, 0x52, 0x65, 0x6c, 0x65, + 0x61, 0x73, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0c, 0x72, + 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x73, 0x1a, 0x73, 0x0a, 0x11, 0x52, + 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x49, 
0x6e, 0x66, 0x6f, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, - 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x53, 0x0a, 0x12, - 0x47, 0x65, 0x74, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x15, 0x0a, 0x03, - 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x03, 0x65, 0x6e, 0x76, - 0x88, 0x01, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x65, 0x6e, 0x76, 0x4a, 0x04, 0x08, 0x02, 0x10, - 0x03, 0x22, 0x31, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x73, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x6c, 0x65, - 0x61, 0x73, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x6c, 0x65, - 0x61, 0x73, 0x65, 0x73, 0x22, 0x77, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, - 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x71, 0x75, - 0x65, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, - 0x12, 0x15, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, - 0x03, 0x65, 0x6e, 0x76, 0x88, 0x01, 0x01, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x16, 0x0a, - 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6f, - 0x66, 0x66, 0x73, 0x65, 0x74, 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x65, 0x6e, 0x76, 0x22, 0x31, 0x0a, - 0x13, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 
0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, + 0x65, 0x79, 0x12, 0x48, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x32, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, + 0x76, 0x31, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x42, 0x79, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, + 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x2a, 0x3f, 0x0a, 0x05, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x0e, 0x4f, 0x52, 0x44, 0x45, 0x52, 0x5f, 0x46, 0x52, 0x45, 0x51, 0x55, 0x45, 0x4e, 0x54, 0x10, 0x00, 0x12, 0x10, 0x0a, 0x0c, 0x4f, 0x52, 0x44, 0x45, 0x52, 0x5f, 0x4c, 0x41, 0x54, 0x45, 0x53, 0x54, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x4f, 0x52, 0x44, 0x45, 0x52, 0x5f, 0x4f, 0x4c, 0x44, 0x45, 0x53, 0x54, 0x10, - 0x02, 0x32, 0xc1, 0x03, 0x0a, 0x12, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, + 0x02, 0x32, 0xa4, 0x04, 0x0a, 0x12, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x52, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x12, 0x20, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, @@ -1212,11 +1620,17 @@ var file_errorgroups_v1_errorgroups_proto_rawDesc = []byte{ 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x3b, 0x5a, 0x39, 0x67, 
0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x7a, 0x6f, 0x6e, 0x74, 0x65, 0x63, 0x68, 0x2f, 0x73, 0x65, 0x71, - 0x2d, 0x75, 0x69, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, - 0x75, 0x70, 0x73, 0x2f, 0x76, 0x31, 0x3b, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, - 0x70, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x61, 0x0a, 0x0e, 0x44, 0x69, 0x66, 0x66, 0x42, 0x79, 0x52, + 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x73, 0x12, 0x25, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, + 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x42, 0x79, 0x52, + 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, + 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2e, 0x76, 0x31, 0x2e, + 0x44, 0x69, 0x66, 0x66, 0x42, 0x79, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x3b, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x7a, 0x6f, 0x6e, 0x74, 0x65, 0x63, 0x68, 0x2f, + 0x73, 0x65, 0x71, 0x2d, 0x75, 0x69, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x2f, 0x76, 0x31, 0x3b, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x67, + 0x72, 0x6f, 0x75, 0x70, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1232,57 +1646,72 @@ func file_errorgroups_v1_errorgroups_proto_rawDescGZIP() []byte { } var file_errorgroups_v1_errorgroups_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_errorgroups_v1_errorgroups_proto_msgTypes = make([]protoimpl.MessageInfo, 15) +var file_errorgroups_v1_errorgroups_proto_msgTypes = make([]protoimpl.MessageInfo, 21) var file_errorgroups_v1_errorgroups_proto_goTypes = []any{ - (Order)(0), // 0: errorgroups.v1.Order - (*GetGroupsRequest)(nil), // 1: errorgroups.v1.GetGroupsRequest - 
(*GetGroupsResponse)(nil), // 2: errorgroups.v1.GetGroupsResponse - (*Group)(nil), // 3: errorgroups.v1.Group - (*GetHistRequest)(nil), // 4: errorgroups.v1.GetHistRequest - (*GetHistResponse)(nil), // 5: errorgroups.v1.GetHistResponse - (*Bucket)(nil), // 6: errorgroups.v1.Bucket - (*GetDetailsRequest)(nil), // 7: errorgroups.v1.GetDetailsRequest - (*GetDetailsResponse)(nil), // 8: errorgroups.v1.GetDetailsResponse - (*GetReleasesRequest)(nil), // 9: errorgroups.v1.GetReleasesRequest - (*GetReleasesResponse)(nil), // 10: errorgroups.v1.GetReleasesResponse - (*GetServicesRequest)(nil), // 11: errorgroups.v1.GetServicesRequest - (*GetServicesResponse)(nil), // 12: errorgroups.v1.GetServicesResponse - (*GetDetailsResponse_Distribution)(nil), // 13: errorgroups.v1.GetDetailsResponse.Distribution - (*GetDetailsResponse_Distributions)(nil), // 14: errorgroups.v1.GetDetailsResponse.Distributions - nil, // 15: errorgroups.v1.GetDetailsResponse.LogTagsEntry - (*durationpb.Duration)(nil), // 16: google.protobuf.Duration - (*timestamppb.Timestamp)(nil), // 17: google.protobuf.Timestamp + (Order)(0), // 0: errorgroups.v1.Order + (*GetGroupsRequest)(nil), // 1: errorgroups.v1.GetGroupsRequest + (*GetGroupsResponse)(nil), // 2: errorgroups.v1.GetGroupsResponse + (*Group)(nil), // 3: errorgroups.v1.Group + (*GetHistRequest)(nil), // 4: errorgroups.v1.GetHistRequest + (*GetHistResponse)(nil), // 5: errorgroups.v1.GetHistResponse + (*Bucket)(nil), // 6: errorgroups.v1.Bucket + (*GetDetailsRequest)(nil), // 7: errorgroups.v1.GetDetailsRequest + (*GetDetailsResponse)(nil), // 8: errorgroups.v1.GetDetailsResponse + (*GetReleasesRequest)(nil), // 9: errorgroups.v1.GetReleasesRequest + (*GetReleasesResponse)(nil), // 10: errorgroups.v1.GetReleasesResponse + (*GetServicesRequest)(nil), // 11: errorgroups.v1.GetServicesRequest + (*GetServicesResponse)(nil), // 12: errorgroups.v1.GetServicesResponse + (*DiffByReleasesRequest)(nil), // 13: errorgroups.v1.DiffByReleasesRequest + 
(*DiffByReleasesResponse)(nil), // 14: errorgroups.v1.DiffByReleasesResponse + (*GetGroupsRequest_Filter)(nil), // 15: errorgroups.v1.GetGroupsRequest.Filter + (*GetDetailsResponse_Distribution)(nil), // 16: errorgroups.v1.GetDetailsResponse.Distribution + (*GetDetailsResponse_Distributions)(nil), // 17: errorgroups.v1.GetDetailsResponse.Distributions + nil, // 18: errorgroups.v1.GetDetailsResponse.LogTagsEntry + (*DiffByReleasesResponse_ReleaseInfo)(nil), // 19: errorgroups.v1.DiffByReleasesResponse.ReleaseInfo + (*DiffByReleasesResponse_Group)(nil), // 20: errorgroups.v1.DiffByReleasesResponse.Group + nil, // 21: errorgroups.v1.DiffByReleasesResponse.Group.ReleaseInfosEntry + (*durationpb.Duration)(nil), // 22: google.protobuf.Duration + (*timestamppb.Timestamp)(nil), // 23: google.protobuf.Timestamp } var file_errorgroups_v1_errorgroups_proto_depIdxs = []int32{ - 16, // 0: errorgroups.v1.GetGroupsRequest.duration:type_name -> google.protobuf.Duration + 22, // 0: errorgroups.v1.GetGroupsRequest.duration:type_name -> google.protobuf.Duration 0, // 1: errorgroups.v1.GetGroupsRequest.order:type_name -> errorgroups.v1.Order - 3, // 2: errorgroups.v1.GetGroupsResponse.groups:type_name -> errorgroups.v1.Group - 17, // 3: errorgroups.v1.Group.first_seen_at:type_name -> google.protobuf.Timestamp - 17, // 4: errorgroups.v1.Group.last_seen_at:type_name -> google.protobuf.Timestamp - 16, // 5: errorgroups.v1.GetHistRequest.duration:type_name -> google.protobuf.Duration - 6, // 6: errorgroups.v1.GetHistResponse.buckets:type_name -> errorgroups.v1.Bucket - 17, // 7: errorgroups.v1.Bucket.time:type_name -> google.protobuf.Timestamp - 17, // 8: errorgroups.v1.GetDetailsResponse.first_seen_at:type_name -> google.protobuf.Timestamp - 17, // 9: errorgroups.v1.GetDetailsResponse.last_seen_at:type_name -> google.protobuf.Timestamp - 14, // 10: errorgroups.v1.GetDetailsResponse.distributions:type_name -> errorgroups.v1.GetDetailsResponse.Distributions - 15, // 11: 
errorgroups.v1.GetDetailsResponse.log_tags:type_name -> errorgroups.v1.GetDetailsResponse.LogTagsEntry - 13, // 12: errorgroups.v1.GetDetailsResponse.Distributions.by_env:type_name -> errorgroups.v1.GetDetailsResponse.Distribution - 13, // 13: errorgroups.v1.GetDetailsResponse.Distributions.by_release:type_name -> errorgroups.v1.GetDetailsResponse.Distribution - 1, // 14: errorgroups.v1.ErrorGroupsService.GetGroups:input_type -> errorgroups.v1.GetGroupsRequest - 4, // 15: errorgroups.v1.ErrorGroupsService.GetHist:input_type -> errorgroups.v1.GetHistRequest - 7, // 16: errorgroups.v1.ErrorGroupsService.GetDetails:input_type -> errorgroups.v1.GetDetailsRequest - 9, // 17: errorgroups.v1.ErrorGroupsService.GetReleases:input_type -> errorgroups.v1.GetReleasesRequest - 11, // 18: errorgroups.v1.ErrorGroupsService.GetServices:input_type -> errorgroups.v1.GetServicesRequest - 2, // 19: errorgroups.v1.ErrorGroupsService.GetGroups:output_type -> errorgroups.v1.GetGroupsResponse - 5, // 20: errorgroups.v1.ErrorGroupsService.GetHist:output_type -> errorgroups.v1.GetHistResponse - 8, // 21: errorgroups.v1.ErrorGroupsService.GetDetails:output_type -> errorgroups.v1.GetDetailsResponse - 10, // 22: errorgroups.v1.ErrorGroupsService.GetReleases:output_type -> errorgroups.v1.GetReleasesResponse - 12, // 23: errorgroups.v1.ErrorGroupsService.GetServices:output_type -> errorgroups.v1.GetServicesResponse - 19, // [19:24] is the sub-list for method output_type - 14, // [14:19] is the sub-list for method input_type - 14, // [14:14] is the sub-list for extension type_name - 14, // [14:14] is the sub-list for extension extendee - 0, // [0:14] is the sub-list for field type_name + 15, // 2: errorgroups.v1.GetGroupsRequest.filter:type_name -> errorgroups.v1.GetGroupsRequest.Filter + 3, // 3: errorgroups.v1.GetGroupsResponse.groups:type_name -> errorgroups.v1.Group + 23, // 4: errorgroups.v1.Group.first_seen_at:type_name -> google.protobuf.Timestamp + 23, // 5: 
errorgroups.v1.Group.last_seen_at:type_name -> google.protobuf.Timestamp + 22, // 6: errorgroups.v1.GetHistRequest.duration:type_name -> google.protobuf.Duration + 6, // 7: errorgroups.v1.GetHistResponse.buckets:type_name -> errorgroups.v1.Bucket + 23, // 8: errorgroups.v1.Bucket.time:type_name -> google.protobuf.Timestamp + 23, // 9: errorgroups.v1.GetDetailsResponse.first_seen_at:type_name -> google.protobuf.Timestamp + 23, // 10: errorgroups.v1.GetDetailsResponse.last_seen_at:type_name -> google.protobuf.Timestamp + 17, // 11: errorgroups.v1.GetDetailsResponse.distributions:type_name -> errorgroups.v1.GetDetailsResponse.Distributions + 18, // 12: errorgroups.v1.GetDetailsResponse.log_tags:type_name -> errorgroups.v1.GetDetailsResponse.LogTagsEntry + 0, // 13: errorgroups.v1.DiffByReleasesRequest.order:type_name -> errorgroups.v1.Order + 20, // 14: errorgroups.v1.DiffByReleasesResponse.groups:type_name -> errorgroups.v1.DiffByReleasesResponse.Group + 16, // 15: errorgroups.v1.GetDetailsResponse.Distributions.by_env:type_name -> errorgroups.v1.GetDetailsResponse.Distribution + 16, // 16: errorgroups.v1.GetDetailsResponse.Distributions.by_release:type_name -> errorgroups.v1.GetDetailsResponse.Distribution + 23, // 17: errorgroups.v1.DiffByReleasesResponse.Group.first_seen_at:type_name -> google.protobuf.Timestamp + 23, // 18: errorgroups.v1.DiffByReleasesResponse.Group.last_seen_at:type_name -> google.protobuf.Timestamp + 21, // 19: errorgroups.v1.DiffByReleasesResponse.Group.release_infos:type_name -> errorgroups.v1.DiffByReleasesResponse.Group.ReleaseInfosEntry + 19, // 20: errorgroups.v1.DiffByReleasesResponse.Group.ReleaseInfosEntry.value:type_name -> errorgroups.v1.DiffByReleasesResponse.ReleaseInfo + 1, // 21: errorgroups.v1.ErrorGroupsService.GetGroups:input_type -> errorgroups.v1.GetGroupsRequest + 4, // 22: errorgroups.v1.ErrorGroupsService.GetHist:input_type -> errorgroups.v1.GetHistRequest + 7, // 23: 
errorgroups.v1.ErrorGroupsService.GetDetails:input_type -> errorgroups.v1.GetDetailsRequest + 9, // 24: errorgroups.v1.ErrorGroupsService.GetReleases:input_type -> errorgroups.v1.GetReleasesRequest + 11, // 25: errorgroups.v1.ErrorGroupsService.GetServices:input_type -> errorgroups.v1.GetServicesRequest + 13, // 26: errorgroups.v1.ErrorGroupsService.DiffByReleases:input_type -> errorgroups.v1.DiffByReleasesRequest + 2, // 27: errorgroups.v1.ErrorGroupsService.GetGroups:output_type -> errorgroups.v1.GetGroupsResponse + 5, // 28: errorgroups.v1.ErrorGroupsService.GetHist:output_type -> errorgroups.v1.GetHistResponse + 8, // 29: errorgroups.v1.ErrorGroupsService.GetDetails:output_type -> errorgroups.v1.GetDetailsResponse + 10, // 30: errorgroups.v1.ErrorGroupsService.GetReleases:output_type -> errorgroups.v1.GetReleasesResponse + 12, // 31: errorgroups.v1.ErrorGroupsService.GetServices:output_type -> errorgroups.v1.GetServicesResponse + 14, // 32: errorgroups.v1.ErrorGroupsService.DiffByReleases:output_type -> errorgroups.v1.DiffByReleasesResponse + 27, // [27:33] is the sub-list for method output_type + 21, // [21:27] is the sub-list for method input_type + 21, // [21:21] is the sub-list for extension type_name + 21, // [21:21] is the sub-list for extension extendee + 0, // [0:21] is the sub-list for field type_name } func init() { file_errorgroups_v1_errorgroups_proto_init() } @@ -1436,7 +1865,7 @@ func file_errorgroups_v1_errorgroups_proto_init() { } } file_errorgroups_v1_errorgroups_proto_msgTypes[12].Exporter = func(v any, i int) any { - switch v := v.(*GetDetailsResponse_Distribution); i { + switch v := v.(*DiffByReleasesRequest); i { case 0: return &v.state case 1: @@ -1448,6 +1877,42 @@ func file_errorgroups_v1_errorgroups_proto_init() { } } file_errorgroups_v1_errorgroups_proto_msgTypes[13].Exporter = func(v any, i int) any { + switch v := v.(*DiffByReleasesResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return 
&v.unknownFields + default: + return nil + } + } + file_errorgroups_v1_errorgroups_proto_msgTypes[14].Exporter = func(v any, i int) any { + switch v := v.(*GetGroupsRequest_Filter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_errorgroups_v1_errorgroups_proto_msgTypes[15].Exporter = func(v any, i int) any { + switch v := v.(*GetDetailsResponse_Distribution); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_errorgroups_v1_errorgroups_proto_msgTypes[16].Exporter = func(v any, i int) any { switch v := v.(*GetDetailsResponse_Distributions); i { case 0: return &v.state @@ -1459,19 +1924,44 @@ func file_errorgroups_v1_errorgroups_proto_init() { return nil } } + file_errorgroups_v1_errorgroups_proto_msgTypes[18].Exporter = func(v any, i int) any { + switch v := v.(*DiffByReleasesResponse_ReleaseInfo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_errorgroups_v1_errorgroups_proto_msgTypes[19].Exporter = func(v any, i int) any { + switch v := v.(*DiffByReleasesResponse_Group); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } } file_errorgroups_v1_errorgroups_proto_msgTypes[0].OneofWrappers = []any{} file_errorgroups_v1_errorgroups_proto_msgTypes[3].OneofWrappers = []any{} file_errorgroups_v1_errorgroups_proto_msgTypes[6].OneofWrappers = []any{} file_errorgroups_v1_errorgroups_proto_msgTypes[8].OneofWrappers = []any{} file_errorgroups_v1_errorgroups_proto_msgTypes[10].OneofWrappers = []any{} + file_errorgroups_v1_errorgroups_proto_msgTypes[12].OneofWrappers = []any{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: 
file_errorgroups_v1_errorgroups_proto_rawDesc, NumEnums: 1, - NumMessages: 15, + NumMessages: 21, NumExtensions: 0, NumServices: 1, }, diff --git a/pkg/errorgroups/v1/errorgroups_grpc.pb.go b/pkg/errorgroups/v1/errorgroups_grpc.pb.go index 3070bdb..ba752fa 100644 --- a/pkg/errorgroups/v1/errorgroups_grpc.pb.go +++ b/pkg/errorgroups/v1/errorgroups_grpc.pb.go @@ -19,11 +19,12 @@ import ( const _ = grpc.SupportPackageIsVersion8 const ( - ErrorGroupsService_GetGroups_FullMethodName = "/errorgroups.v1.ErrorGroupsService/GetGroups" - ErrorGroupsService_GetHist_FullMethodName = "/errorgroups.v1.ErrorGroupsService/GetHist" - ErrorGroupsService_GetDetails_FullMethodName = "/errorgroups.v1.ErrorGroupsService/GetDetails" - ErrorGroupsService_GetReleases_FullMethodName = "/errorgroups.v1.ErrorGroupsService/GetReleases" - ErrorGroupsService_GetServices_FullMethodName = "/errorgroups.v1.ErrorGroupsService/GetServices" + ErrorGroupsService_GetGroups_FullMethodName = "/errorgroups.v1.ErrorGroupsService/GetGroups" + ErrorGroupsService_GetHist_FullMethodName = "/errorgroups.v1.ErrorGroupsService/GetHist" + ErrorGroupsService_GetDetails_FullMethodName = "/errorgroups.v1.ErrorGroupsService/GetDetails" + ErrorGroupsService_GetReleases_FullMethodName = "/errorgroups.v1.ErrorGroupsService/GetReleases" + ErrorGroupsService_GetServices_FullMethodName = "/errorgroups.v1.ErrorGroupsService/GetServices" + ErrorGroupsService_DiffByReleases_FullMethodName = "/errorgroups.v1.ErrorGroupsService/DiffByReleases" ) // ErrorGroupsServiceClient is the client API for ErrorGroupsService service. 
@@ -35,6 +36,7 @@ type ErrorGroupsServiceClient interface { GetDetails(ctx context.Context, in *GetDetailsRequest, opts ...grpc.CallOption) (*GetDetailsResponse, error) GetReleases(ctx context.Context, in *GetReleasesRequest, opts ...grpc.CallOption) (*GetReleasesResponse, error) GetServices(ctx context.Context, in *GetServicesRequest, opts ...grpc.CallOption) (*GetServicesResponse, error) + DiffByReleases(ctx context.Context, in *DiffByReleasesRequest, opts ...grpc.CallOption) (*DiffByReleasesResponse, error) } type errorGroupsServiceClient struct { @@ -95,6 +97,16 @@ func (c *errorGroupsServiceClient) GetServices(ctx context.Context, in *GetServi return out, nil } +func (c *errorGroupsServiceClient) DiffByReleases(ctx context.Context, in *DiffByReleasesRequest, opts ...grpc.CallOption) (*DiffByReleasesResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(DiffByReleasesResponse) + err := c.cc.Invoke(ctx, ErrorGroupsService_DiffByReleases_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + // ErrorGroupsServiceServer is the server API for ErrorGroupsService service. // All implementations should embed UnimplementedErrorGroupsServiceServer // for forward compatibility @@ -104,6 +116,7 @@ type ErrorGroupsServiceServer interface { GetDetails(context.Context, *GetDetailsRequest) (*GetDetailsResponse, error) GetReleases(context.Context, *GetReleasesRequest) (*GetReleasesResponse, error) GetServices(context.Context, *GetServicesRequest) (*GetServicesResponse, error) + DiffByReleases(context.Context, *DiffByReleasesRequest) (*DiffByReleasesResponse, error) } // UnimplementedErrorGroupsServiceServer should be embedded to have forward compatible implementations. 
@@ -125,6 +138,9 @@ func (UnimplementedErrorGroupsServiceServer) GetReleases(context.Context, *GetRe func (UnimplementedErrorGroupsServiceServer) GetServices(context.Context, *GetServicesRequest) (*GetServicesResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetServices not implemented") } +func (UnimplementedErrorGroupsServiceServer) DiffByReleases(context.Context, *DiffByReleasesRequest) (*DiffByReleasesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DiffByReleases not implemented") +} // UnsafeErrorGroupsServiceServer may be embedded to opt out of forward compatibility for this service. // Use of this interface is not recommended, as added methods to ErrorGroupsServiceServer will @@ -227,6 +243,24 @@ func _ErrorGroupsService_GetServices_Handler(srv interface{}, ctx context.Contex return interceptor(ctx, in, info, handler) } +func _ErrorGroupsService_DiffByReleases_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DiffByReleasesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ErrorGroupsServiceServer).DiffByReleases(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ErrorGroupsService_DiffByReleases_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ErrorGroupsServiceServer).DiffByReleases(ctx, req.(*DiffByReleasesRequest)) + } + return interceptor(ctx, in, info, handler) +} + // ErrorGroupsService_ServiceDesc is the grpc.ServiceDesc for ErrorGroupsService service. 
// It's only intended for direct use with grpc.RegisterService, // and not to be introspected or modified (even as a copy) @@ -254,6 +288,10 @@ var ErrorGroupsService_ServiceDesc = grpc.ServiceDesc{ MethodName: "GetServices", Handler: _ErrorGroupsService_GetServices_Handler, }, + { + MethodName: "DiffByReleases", + Handler: _ErrorGroupsService_DiffByReleases_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "errorgroups/v1/errorgroups.proto", diff --git a/swagger/swagger.json b/swagger/swagger.json index a7e1704..3fa7637 100644 --- a/swagger/swagger.json +++ b/swagger/swagger.json @@ -309,6 +309,44 @@ } } }, + "/errorgroups/v1/diff_by_releases": { + "post": { + "security": [ + { + "bearer": [] + } + ], + "tags": [ + "errorgroups_v1" + ], + "operationId": "errorgroups_v1_diff_by_releases", + "parameters": [ + { + "description": "Request body", + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/errorgroups.v1.DiffByReleasesRequest" + } + } + ], + "responses": { + "200": { + "description": "A successful response", + "schema": { + "$ref": "#/definitions/errorgroups.v1.DiffByReleasesResponse" + } + }, + "default": { + "description": "An unexpected error response", + "schema": { + "$ref": "#/definitions/UnexpectedError" + } + } + } + } + }, "/errorgroups/v1/groups": { "post": { "security": [ @@ -1591,6 +1629,89 @@ } } }, + "errorgroups.v1.DiffByReleasesRequest": { + "type": "object", + "properties": { + "env": { + "type": "string" + }, + "limit": { + "type": "integer" + }, + "offset": { + "type": "integer" + }, + "order": { + "$ref": "#/definitions/errorgroups.v1.Order" + }, + "releases": { + "type": "array", + "items": { + "type": "string" + } + }, + "service": { + "type": "string" + }, + "source": { + "type": "string" + }, + "with_total": { + "type": "boolean" + } + } + }, + "errorgroups.v1.DiffByReleasesResponse": { + "type": "object", + "properties": { + "groups": { + "type": "array", + "items": { + "$ref": 
"#/definitions/errorgroups.v1.DiffGroup" + } + }, + "total": { + "type": "integer" + } + } + }, + "errorgroups.v1.DiffGroup": { + "type": "object", + "properties": { + "first_seen_at": { + "type": "string", + "format": "date-time" + }, + "hash": { + "type": "string", + "format": "uint64" + }, + "last_seen_at": { + "type": "string", + "format": "date-time" + }, + "message": { + "type": "string" + }, + "release_infos": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/errorgroups.v1.DiffReleaseInfo" + } + }, + "source": { + "type": "string" + } + } + }, + "errorgroups.v1.DiffReleaseInfo": { + "type": "object", + "properties": { + "seen_total": { + "type": "integer" + } + } + }, "errorgroups.v1.Distribution": { "type": "object", "properties": { @@ -1705,6 +1826,9 @@ "env": { "type": "string" }, + "filter": { + "$ref": "#/definitions/errorgroups.v1.GroupsFilter" + }, "limit": { "type": "integer" }, @@ -1856,6 +1980,14 @@ } } }, + "errorgroups.v1.GroupsFilter": { + "type": "object", + "properties": { + "is_new": { + "type": "boolean" + } + } + }, "errorgroups.v1.Order": { "type": "string", "enum": [