Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)

Fixes #24662. Replaces #24822 and #25708 (although the latter has been merged).

## Background

In the past, Gitea supported issue searching with a keyword and conditions in a less efficient way. It worked by searching for issues with the keyword on the indexer (bleve/elasticsearch/meilisearch) and obtaining a limited number of IDs (since it is too heavy to fetch them all), then querying with conditions on the database to find a subset of the found IDs. This is why the results could be incomplete.

To solve this issue, we need to store all fields that could be used as conditions in the indexer, and support both the keyword and the additional conditions when searching with the indexer.

## Major changes

- Redefine `IndexerData` to include all fields that could be used as filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64, limit, start int, state string)` to `Search(ctx context.Context, options *SearchOptions)`, so it supports more conditions now (a sketch of the new call shape follows below).
- Change the data type stored in `issueIndexerQueue`. Use `IndexerMetadata` instead of `IndexerData`, in case the data has been updated while it is in the queue. This also reduces the storage size of the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, making them fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of the database indexer, but remove `issues.SearchIssueIDsByKeyword` in `models` to avoid confusion about where the entry point to search issues is.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage for the Bleve/Elasticsearch/Meilisearch indexers.

Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
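For a sense of the new API shape, here is a minimal, hedged sketch of a call to the refactored entry point. The option fields are taken from the tests added in this PR; this is not the complete `SearchOptions` definition:

```go
// Hedged sketch of the refactored entry point; see the tests below for
// the fields SearchOptions actually carries.
result, err := indexer.Search(ctx, &internal.SearchOptions{
    Keyword:   "hello",
    RepoIDs:   []int64{1, 4},
    IsPull:    util.OptionalBoolFalse,
    Paginator: &db.ListOptions{Page: 1, PageSize: 20},
})
if err == nil {
    for _, hit := range result.Hits {
        _ = hit.ID // only issue IDs come back; full rows are loaded from the database afterwards
    }
}
```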
Commit 1e76a824bc (parent aba9096999)
@@ -0,0 +1,53 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package bleve

import (
    "github.com/blevesearch/bleve/v2"
    "github.com/blevesearch/bleve/v2/search/query"
)

// NumericEqualityQuery generates a numeric equality query for the given value and field
func NumericEqualityQuery(value int64, field string) *query.NumericRangeQuery {
    f := float64(value)
    tru := true
    q := bleve.NewNumericRangeInclusiveQuery(&f, &f, &tru, &tru)
    q.SetField(field)
    return q
}

// MatchPhraseQuery generates a match phrase query for the given phrase, field and analyzer
func MatchPhraseQuery(matchPhrase, field, analyzer string) *query.MatchPhraseQuery {
    q := bleve.NewMatchPhraseQuery(matchPhrase)
    q.FieldVal = field
    q.Analyzer = analyzer
    return q
}

// BoolFieldQuery generates a bool field query for the given value and field
func BoolFieldQuery(value bool, field string) *query.BoolFieldQuery {
    q := bleve.NewBoolFieldQuery(value)
    q.SetField(field)
    return q
}

// NumericRangeInclusiveQuery generates an inclusive numeric range query for the given bounds and field
func NumericRangeInclusiveQuery(min, max *int64, field string) *query.NumericRangeQuery {
    var minF, maxF *float64
    var minI, maxI *bool
    if min != nil {
        minF = new(float64)
        *minF = float64(*min)
        minI = new(bool)
        *minI = true
    }
    if max != nil {
        maxF = new(float64)
        *maxF = float64(*max)
        maxI = new(bool)
        *maxI = true
    }
    q := bleve.NewNumericRangeInclusiveQuery(minF, maxF, minI, maxI)
    q.SetField(field)
    return q
}
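These helpers all return standard `query.Query` implementations, so they compose with bleve's conjunction queries. A hedged sketch (the field names `repo_id` and `is_pull` are illustrative, not necessarily the ones the issue indexer actually uses):

```go
// Illustrative only: combining the helpers above into one bleve search request.
repoQ := NumericEqualityQuery(5, "repo_id") // hypothetical field name
pullQ := BoolFieldQuery(false, "is_pull")   // hypothetical field name
req := bleve.NewSearchRequest(bleve.NewConjunctionQuery(repoQ, pullQ))
// res, err := index.Search(req) // run against an opened bleve.Index
```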
@@ -0,0 +1,119 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package meilisearch

import (
    "fmt"
    "strings"
)

// Filter represents a filter for meilisearch queries.
// It's just a simple wrapper around a string.
// DO NOT assume that it is a complete implementation.
type Filter interface {
    Statement() string
}

// FilterAnd combines multiple filters with the AND operator.
type FilterAnd struct {
    filters []Filter
}

func (f *FilterAnd) Statement() string {
    var statements []string
    for _, filter := range f.filters {
        if s := filter.Statement(); s != "" {
            statements = append(statements, fmt.Sprintf("(%s)", s))
        }
    }
    return strings.Join(statements, " AND ")
}

func (f *FilterAnd) And(filter Filter) *FilterAnd {
    f.filters = append(f.filters, filter)
    return f
}

// FilterOr combines multiple filters with the OR operator.
type FilterOr struct {
    filters []Filter
}

func (f *FilterOr) Statement() string {
    var statements []string
    for _, filter := range f.filters {
        if s := filter.Statement(); s != "" {
            statements = append(statements, fmt.Sprintf("(%s)", s))
        }
    }
    return strings.Join(statements, " OR ")
}

func (f *FilterOr) Or(filter Filter) *FilterOr {
    f.filters = append(f.filters, filter)
    return f
}

// FilterIn filters for a field whose value is in a given list.
type FilterIn string

// NewFilterIn creates a new FilterIn.
// It supports int64 only, to avoid extra work to handle strings with special characters.
func NewFilterIn[T int64](field string, values ...T) FilterIn {
    if len(values) == 0 {
        return ""
    }
    vs := make([]string, len(values))
    for i, v := range values {
        vs[i] = fmt.Sprintf("%v", v)
    }
    return FilterIn(fmt.Sprintf("%s IN [%v]", field, strings.Join(vs, ", ")))
}

func (f FilterIn) Statement() string {
    return string(f)
}

// FilterEq filters for a field equal to a given value.
type FilterEq string

// NewFilterEq creates a new FilterEq.
// It supports int64 and bool only, to avoid extra work to handle strings with special characters.
func NewFilterEq[T bool | int64](field string, value T) FilterEq {
    return FilterEq(fmt.Sprintf("%s = %v", field, value))
}

func (f FilterEq) Statement() string {
    return string(f)
}

// FilterNot negates another filter.
type FilterNot string

func NewFilterNot(filter Filter) FilterNot {
    return FilterNot(fmt.Sprintf("NOT (%s)", filter.Statement()))
}

func (f FilterNot) Statement() string {
    return string(f)
}

// FilterGte filters for a field greater than or equal to a given value.
type FilterGte string

// NewFilterGte creates a new FilterGte.
// It supports int64 only, to avoid extra work to handle strings with special characters.
func NewFilterGte[T int64](field string, value T) FilterGte {
    return FilterGte(fmt.Sprintf("%s >= %v", field, value))
}

func (f FilterGte) Statement() string {
    return string(f)
}

// FilterLte filters for a field less than or equal to a given value.
type FilterLte string

// NewFilterLte creates a new FilterLte.
// It supports int64 only, to avoid extra work to handle strings with special characters.
func NewFilterLte[T int64](field string, value T) FilterLte {
    return FilterLte(fmt.Sprintf("%s <= %v", field, value))
}

func (f FilterLte) Statement() string {
    return string(f)
}
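For illustration, here is how these types compose into a Meilisearch filter expression (the field names are hypothetical, not the indexer's real schema):

```go
// Illustrative only: building a filter statement with the types above.
f := (&FilterAnd{}).
    And(NewFilterEq("is_pull", false)).       // "is_pull" is a hypothetical field
    And(NewFilterIn[int64]("repo_id", 1, 4)). // so is "repo_id"
    And(NewFilterNot(NewFilterEq("is_closed", true)))
fmt.Println(f.Statement())
// Output: (is_pull = false) AND (repo_id IN [1, 4]) AND (NOT (is_closed = true))
```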
@@ -0,0 +1,41 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package internal

import (
    "math"

    "code.gitea.io/gitea/models/db"
)

// ParsePaginator parses a db.Paginator into a skip and limit
func ParsePaginator(paginator db.Paginator, max ...int) (int, int) {
    // Use a very large number to indicate no limit
    unlimited := math.MaxInt32
    if len(max) > 0 {
        // Some indexer engines have a limit on the page size, respect that
        unlimited = max[0]
    }

    if paginator == nil || paginator.IsListAll() {
        return 0, unlimited
    }

    // Warning: Do not use GetSkipTake() for *db.ListOptions,
    // since its implementation could reset the page size with setting.API.MaxResponseItems
    if listOptions, ok := paginator.(*db.ListOptions); ok {
        if listOptions.Page >= 0 && listOptions.PageSize > 0 {
            var start int
            if listOptions.Page == 0 {
                start = 0
            } else {
                start = (listOptions.Page - 1) * listOptions.PageSize
            }
            return start, listOptions.PageSize
        }
        return 0, unlimited
    }

    return paginator.GetSkipTake()
}
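A quick worked example of the contract, assuming a hypothetical engine cap of 100 items per page:

```go
// Illustrative only.
skip, limit := ParsePaginator(&db.ListOptions{Page: 3, PageSize: 20}, 100)
// skip == 40, limit == 20
skip, limit = ParsePaginator(nil, 100)
// skip == 0, limit == 100 (no paginator means "return as much as the engine allows")
_, _ = skip, limit
```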
@@ -0,0 +1,114 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package db

import (
    "context"
    "fmt"

    "code.gitea.io/gitea/models/db"
    issue_model "code.gitea.io/gitea/models/issues"
    "code.gitea.io/gitea/modules/container"
    "code.gitea.io/gitea/modules/indexer/issues/internal"
)

func ToDBOptions(ctx context.Context, options *internal.SearchOptions) (*issue_model.IssuesOptions, error) {
    convertID := func(id *int64) int64 {
        if id == nil {
            return 0
        }
        if *id == 0 {
            return db.NoConditionID
        }
        return *id
    }
    convertInt64 := func(i *int64) int64 {
        if i == nil {
            return 0
        }
        return *i
    }
    var sortType string
    switch options.SortBy {
    case internal.SortByCreatedAsc:
        sortType = "oldest"
    case internal.SortByUpdatedAsc:
        sortType = "leastupdate"
    case internal.SortByCommentsAsc:
        sortType = "leastcomment"
    case internal.SortByDeadlineAsc:
        sortType = "farduedate"
    case internal.SortByCreatedDesc:
        sortType = "newest"
    case internal.SortByUpdatedDesc:
        sortType = "recentupdate"
    case internal.SortByCommentsDesc:
        sortType = "mostcomment"
    case internal.SortByDeadlineDesc:
        sortType = "nearduedate"
    default:
        sortType = "newest"
    }

    opts := &issue_model.IssuesOptions{
        Paginator:          options.Paginator,
        RepoIDs:            options.RepoIDs,
        RepoCond:           nil,
        AssigneeID:         convertID(options.AssigneeID),
        PosterID:           convertID(options.PosterID),
        MentionedID:        convertID(options.MentionID),
        ReviewRequestedID:  convertID(options.ReviewRequestedID),
        ReviewedID:         convertID(options.ReviewedID),
        SubscriberID:       convertID(options.SubscriberID),
        ProjectID:          convertID(options.ProjectID),
        ProjectBoardID:     convertID(options.ProjectBoardID),
        IsClosed:           options.IsClosed,
        IsPull:             options.IsPull,
        IncludedLabelNames: nil,
        ExcludedLabelNames: nil,
        IncludeMilestones:  nil,
        SortType:           sortType,
        IssueIDs:           nil,
        UpdatedAfterUnix:   convertInt64(options.UpdatedAfterUnix),
        UpdatedBeforeUnix:  convertInt64(options.UpdatedBeforeUnix),
        PriorityRepoID:     0,
        IsArchived:         0,
        Org:                nil,
        Team:               nil,
        User:               nil,
    }

    if len(options.MilestoneIDs) == 1 && options.MilestoneIDs[0] == 0 {
        opts.MilestoneIDs = []int64{db.NoConditionID}
    } else {
        opts.MilestoneIDs = options.MilestoneIDs
    }

    if options.NoLabelOnly {
        opts.LabelIDs = []int64{0} // Be careful: it's zero, not db.NoConditionID
    } else {
        opts.LabelIDs = make([]int64, 0, len(options.IncludedLabelIDs)+len(options.ExcludedLabelIDs))
        opts.LabelIDs = append(opts.LabelIDs, options.IncludedLabelIDs...)
        for _, id := range options.ExcludedLabelIDs {
            opts.LabelIDs = append(opts.LabelIDs, -id)
        }

        if len(options.IncludedLabelIDs) == 0 && len(options.IncludedAnyLabelIDs) > 0 {
            _ = ctx // issue_model.GetLabelsByIDs should be called with ctx; this line can be removed once that is done.
            labels, err := issue_model.GetLabelsByIDs(options.IncludedAnyLabelIDs, "name")
            if err != nil {
                return nil, fmt.Errorf("GetLabelsByIDs: %v", err)
            }
            set := container.Set[string]{}
            for _, label := range labels {
                if !set.Contains(label.Name) {
                    set.Add(label.Name)
                    opts.IncludedLabelNames = append(opts.IncludedLabelNames, label.Name)
                }
            }
        }
    }

    return opts, nil
}
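The `*int64` fields in `SearchOptions` carry three states, which is exactly what `convertID` unpacks above: a nil pointer means no condition, a pointer to 0 means "no value" (translated to `db.NoConditionID`), and anything else filters for that exact ID. A sketch:

```go
// Illustrative only: the three states of a *int64 condition.
var anyAssignee *int64 // nil: do not filter by assignee at all
zero := int64(0)
noAssignee := &zero    // *0: issues with no assignee (becomes db.NoConditionID)
uid := int64(42)
assignedTo42 := &uid   // *42: issues assigned to user 42
_, _, _ = anyAssignee, noAssignee, assignedTo42
```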
@@ -0,0 +1,93 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package issues

import (
    "code.gitea.io/gitea/models/db"
    issues_model "code.gitea.io/gitea/models/issues"
)

func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOptions {
    searchOpt := &SearchOptions{
        Keyword:   keyword,
        RepoIDs:   opts.RepoIDs,
        AllPublic: false,
        IsPull:    opts.IsPull,
        IsClosed:  opts.IsClosed,
    }

    if len(opts.LabelIDs) == 1 && opts.LabelIDs[0] == 0 {
        searchOpt.NoLabelOnly = true
    } else {
        for _, labelID := range opts.LabelIDs {
            if labelID > 0 {
                searchOpt.IncludedLabelIDs = append(searchOpt.IncludedLabelIDs, labelID)
            } else {
                searchOpt.ExcludedLabelIDs = append(searchOpt.ExcludedLabelIDs, -labelID)
            }
        }
        // opts.IncludedLabelNames and opts.ExcludedLabelNames are not supported here.
        // It's not a TODO, it's just unnecessary.
    }

    if len(opts.MilestoneIDs) == 1 && opts.MilestoneIDs[0] == db.NoConditionID {
        searchOpt.MilestoneIDs = []int64{0}
    } else {
        searchOpt.MilestoneIDs = opts.MilestoneIDs
    }

    if opts.AssigneeID > 0 {
        searchOpt.AssigneeID = &opts.AssigneeID
    }
    if opts.PosterID > 0 {
        searchOpt.PosterID = &opts.PosterID
    }
    if opts.MentionedID > 0 {
        searchOpt.MentionID = &opts.MentionedID
    }
    if opts.ReviewedID > 0 {
        searchOpt.ReviewedID = &opts.ReviewedID
    }
    if opts.ReviewRequestedID > 0 {
        searchOpt.ReviewRequestedID = &opts.ReviewRequestedID
    }
    if opts.SubscriberID > 0 {
        searchOpt.SubscriberID = &opts.SubscriberID
    }

    if opts.UpdatedAfterUnix > 0 {
        searchOpt.UpdatedAfterUnix = &opts.UpdatedAfterUnix
    }
    if opts.UpdatedBeforeUnix > 0 {
        searchOpt.UpdatedBeforeUnix = &opts.UpdatedBeforeUnix
    }

    searchOpt.Paginator = opts.Paginator

    switch opts.SortType {
    case "":
        searchOpt.SortBy = SortByCreatedDesc
    case "oldest":
        searchOpt.SortBy = SortByCreatedAsc
    case "recentupdate":
        searchOpt.SortBy = SortByUpdatedDesc
    case "leastupdate":
        searchOpt.SortBy = SortByUpdatedAsc
    case "mostcomment":
        searchOpt.SortBy = SortByCommentsDesc
    case "leastcomment":
        searchOpt.SortBy = SortByCommentsAsc
    case "nearduedate":
        searchOpt.SortBy = SortByDeadlineAsc
    case "farduedate":
        searchOpt.SortBy = SortByDeadlineDesc
    case "priority", "priorityrepo", "project-column-sorting":
        // Unsupported sort type for search
        searchOpt.SortBy = SortByUpdatedDesc
    default:
        searchOpt.SortBy = SortByUpdatedDesc
    }

    return searchOpt
}
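A hedged sketch of a call site, assuming the legacy `IssuesOptions` shape used above (the sign convention on `LabelIDs` is the one this function decodes):

```go
// Illustrative only: converting legacy DB options for the indexer.
opts := &issues_model.IssuesOptions{
    RepoIDs:  []int64{1},
    LabelIDs: []int64{2, -3}, // include label 2, exclude label 3
    SortType: "recentupdate",
}
searchOpt := ToSearchOptions("panic on startup", opts)
// searchOpt.IncludedLabelIDs == []int64{2}
// searchOpt.ExcludedLabelIDs == []int64{3}
// searchOpt.SortBy == SortByUpdatedDesc
```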
@@ -0,0 +1,48 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package elasticsearch

import (
    "fmt"
    "net/http"
    "os"
    "testing"
    "time"

    "code.gitea.io/gitea/modules/indexer/issues/internal/tests"
)

func TestElasticsearchIndexer(t *testing.T) {
    // The elasticsearch instance started by pull-db-tests.yml > test-unit > services > elasticsearch
    url := "http://elastic:changeme@elasticsearch:9200"

    if os.Getenv("CI") == "" {
        // Make it possible to run tests against a local elasticsearch instance
        url = os.Getenv("TEST_ELASTICSEARCH_URL")
        if url == "" {
            t.Skip("TEST_ELASTICSEARCH_URL not set and not running in CI")
            return
        }
    }

    ok := false
    for i := 0; i < 60; i++ {
        resp, err := http.Get(url)
        if err == nil && resp.StatusCode == http.StatusOK {
            ok = true
            break
        }
        t.Logf("Waiting for elasticsearch to be up: %v", err)
        time.Sleep(time.Second)
    }
    if !ok {
        t.Fatalf("Failed to wait for elasticsearch to be up")
        return
    }

    indexer := NewIndexer(url, fmt.Sprintf("test_elasticsearch_indexer_%d", time.Now().Unix()))
    defer indexer.Close()

    tests.TestIndexer(t, indexer)
}
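Outside CI, the same test can be pointed at a local instance with something like `TEST_ELASTICSEARCH_URL=http://elastic:changeme@localhost:9200 go test ./modules/indexer/issues/elasticsearch/...` (the package path here is inferred from the imports above and may differ).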
@@ -0,0 +1,804 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

// This package contains tests for issues indexer modules.
// All the code in this package is only used for testing.
// Do not put any production code in this package to avoid it being included in the final binary.

package tests

import (
    "context"
    "fmt"
    "testing"
    "time"

    "code.gitea.io/gitea/models/db"
    "code.gitea.io/gitea/modules/indexer/issues/internal"
    "code.gitea.io/gitea/modules/timeutil"
    "code.gitea.io/gitea/modules/util"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestIndexer(t *testing.T, indexer internal.Indexer) {
    _, err := indexer.Init(context.Background())
    require.NoError(t, err)

    require.NoError(t, indexer.Ping(context.Background()))

    var (
        ids  []int64
        data = map[int64]*internal.IndexerData{}
    )
    {
        d := generateDefaultIndexerData()
        for _, v := range d {
            ids = append(ids, v.ID)
            data[v.ID] = v
        }
        require.NoError(t, indexer.Index(context.Background(), d...))
        require.NoError(t, waitData(indexer, int64(len(data))))
    }

    defer func() {
        require.NoError(t, indexer.Delete(context.Background(), ids...))
    }()

    for _, c := range cases {
        t.Run(c.Name, func(t *testing.T) {
            if len(c.ExtraData) > 0 {
                require.NoError(t, indexer.Index(context.Background(), c.ExtraData...))
                for _, v := range c.ExtraData {
                    data[v.ID] = v
                }
                require.NoError(t, waitData(indexer, int64(len(data))))
                defer func() {
                    for _, v := range c.ExtraData {
                        require.NoError(t, indexer.Delete(context.Background(), v.ID))
                        delete(data, v.ID)
                    }
                    require.NoError(t, waitData(indexer, int64(len(data))))
                }()
            }

            result, err := indexer.Search(context.Background(), c.SearchOptions)
            require.NoError(t, err)

            if c.Expected != nil {
                c.Expected(t, data, result)
            } else {
                ids := make([]int64, 0, len(result.Hits))
                for _, hit := range result.Hits {
                    ids = append(ids, hit.ID)
                }
                assert.Equal(t, c.ExpectedIDs, ids)
                assert.Equal(t, c.ExpectedTotal, result.Total)
            }
        })
    }
}
var cases = []*testIndexerCase{
    {
        Name:          "default",
        SearchOptions: &internal.SearchOptions{},
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, len(data), len(result.Hits))
            assert.Equal(t, len(data), int(result.Total))
        },
    },
    {
        Name: "empty",
        SearchOptions: &internal.SearchOptions{
            Keyword: "f1dfac73-fda6-4a6b-b8a4-2408fcb8ef69",
        },
        ExpectedIDs:   []int64{},
        ExpectedTotal: 0,
    },
    {
        Name: "with limit",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            assert.Equal(t, len(data), int(result.Total))
        },
    },
    {
        Name: "Keyword",
        ExtraData: []*internal.IndexerData{
            {ID: 1000, Title: "hi hello world"},
            {ID: 1001, Content: "hi hello world"},
            {ID: 1002, Comments: []string{"hi", "hello world"}},
        },
        SearchOptions: &internal.SearchOptions{
            Keyword: "hello",
        },
        ExpectedIDs:   []int64{1002, 1001, 1000},
        ExpectedTotal: 3,
    },
    {
        Name: "RepoIDs",
        ExtraData: []*internal.IndexerData{
            {ID: 1001, Title: "hello world", RepoID: 1, IsPublic: false},
            {ID: 1002, Title: "hello world", RepoID: 1, IsPublic: false},
            {ID: 1003, Title: "hello world", RepoID: 2, IsPublic: true},
            {ID: 1004, Title: "hello world", RepoID: 2, IsPublic: true},
            {ID: 1005, Title: "hello world", RepoID: 3, IsPublic: true},
            {ID: 1006, Title: "hello world", RepoID: 4, IsPublic: false},
            {ID: 1007, Title: "hello world", RepoID: 5, IsPublic: false},
        },
        SearchOptions: &internal.SearchOptions{
            Keyword: "hello",
            RepoIDs: []int64{1, 4},
        },
        ExpectedIDs:   []int64{1006, 1002, 1001},
        ExpectedTotal: 3,
    },
    {
        Name: "RepoIDs and AllPublic",
        ExtraData: []*internal.IndexerData{
            {ID: 1001, Title: "hello world", RepoID: 1, IsPublic: false},
            {ID: 1002, Title: "hello world", RepoID: 1, IsPublic: false},
            {ID: 1003, Title: "hello world", RepoID: 2, IsPublic: true},
            {ID: 1004, Title: "hello world", RepoID: 2, IsPublic: true},
            {ID: 1005, Title: "hello world", RepoID: 3, IsPublic: true},
            {ID: 1006, Title: "hello world", RepoID: 4, IsPublic: false},
            {ID: 1007, Title: "hello world", RepoID: 5, IsPublic: false},
        },
        SearchOptions: &internal.SearchOptions{
            Keyword:   "hello",
            RepoIDs:   []int64{1, 4},
            AllPublic: true,
        },
        ExpectedIDs:   []int64{1006, 1005, 1004, 1003, 1002, 1001},
        ExpectedTotal: 6,
    },
    {
        Name: "issue only",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            IsPull: util.OptionalBoolFalse,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.False(t, data[v.ID].IsPull)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool { return !v.IsPull }), result.Total)
        },
    },
    {
        Name: "pull only",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            IsPull: util.OptionalBoolTrue,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.True(t, data[v.ID].IsPull)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool { return v.IsPull }), result.Total)
        },
    },
    {
        Name: "opened only",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            IsClosed: util.OptionalBoolFalse,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.False(t, data[v.ID].IsClosed)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool { return !v.IsClosed }), result.Total)
        },
    },
    {
        Name: "closed only",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            IsClosed: util.OptionalBoolTrue,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.True(t, data[v.ID].IsClosed)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool { return v.IsClosed }), result.Total)
        },
    },
    {
        Name: "labels",
        ExtraData: []*internal.IndexerData{
            {ID: 1000, Title: "hello a", LabelIDs: []int64{2000, 2001, 2002}},
            {ID: 1001, Title: "hello b", LabelIDs: []int64{2000, 2001}},
            {ID: 1002, Title: "hello c", LabelIDs: []int64{2000, 2001, 2003}},
            {ID: 1003, Title: "hello d", LabelIDs: []int64{2000}},
            {ID: 1004, Title: "hello e", LabelIDs: []int64{}},
        },
        SearchOptions: &internal.SearchOptions{
            Keyword:          "hello",
            IncludedLabelIDs: []int64{2000, 2001},
            ExcludedLabelIDs: []int64{2003},
        },
        ExpectedIDs:   []int64{1001, 1000},
        ExpectedTotal: 2,
    },
    {
        Name: "include any labels",
        ExtraData: []*internal.IndexerData{
            {ID: 1000, Title: "hello a", LabelIDs: []int64{2000, 2001, 2002}},
            {ID: 1001, Title: "hello b", LabelIDs: []int64{2001}},
            {ID: 1002, Title: "hello c", LabelIDs: []int64{2000, 2001, 2003}},
            {ID: 1003, Title: "hello d", LabelIDs: []int64{2002}},
            {ID: 1004, Title: "hello e", LabelIDs: []int64{}},
        },
        SearchOptions: &internal.SearchOptions{
            Keyword:             "hello",
            IncludedAnyLabelIDs: []int64{2001, 2002},
            ExcludedLabelIDs:    []int64{2003},
        },
        ExpectedIDs:   []int64{1003, 1001, 1000},
        ExpectedTotal: 3,
    },
    {
        Name: "MilestoneIDs",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            MilestoneIDs: []int64{1, 2, 6},
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Contains(t, []int64{1, 2, 6}, data[v.ID].MilestoneID)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return v.MilestoneID == 1 || v.MilestoneID == 2 || v.MilestoneID == 6
            }), result.Total)
        },
    },
    {
        Name: "no MilestoneIDs",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            MilestoneIDs: []int64{0},
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Equal(t, int64(0), data[v.ID].MilestoneID)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return v.MilestoneID == 0
            }), result.Total)
        },
    },
    {
        Name: "ProjectID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            ProjectID: func() *int64 {
                id := int64(1)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Equal(t, int64(1), data[v.ID].ProjectID)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return v.ProjectID == 1
            }), result.Total)
        },
    },
    {
        Name: "no ProjectID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            ProjectID: func() *int64 {
                id := int64(0)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Equal(t, int64(0), data[v.ID].ProjectID)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return v.ProjectID == 0
            }), result.Total)
        },
    },
    {
        Name: "ProjectBoardID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            ProjectBoardID: func() *int64 {
                id := int64(1)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Equal(t, int64(1), data[v.ID].ProjectBoardID)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return v.ProjectBoardID == 1
            }), result.Total)
        },
    },
    {
        Name: "no ProjectBoardID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            ProjectBoardID: func() *int64 {
                id := int64(0)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Equal(t, int64(0), data[v.ID].ProjectBoardID)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return v.ProjectBoardID == 0
            }), result.Total)
        },
    },
    {
        Name: "PosterID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            PosterID: func() *int64 {
                id := int64(1)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Equal(t, int64(1), data[v.ID].PosterID)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return v.PosterID == 1
            }), result.Total)
        },
    },
    {
        Name: "AssigneeID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            AssigneeID: func() *int64 {
                id := int64(1)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Equal(t, int64(1), data[v.ID].AssigneeID)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return v.AssigneeID == 1
            }), result.Total)
        },
    },
    {
        Name: "no AssigneeID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            AssigneeID: func() *int64 {
                id := int64(0)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Equal(t, int64(0), data[v.ID].AssigneeID)
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return v.AssigneeID == 0
            }), result.Total)
        },
    },
    {
        Name: "MentionID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            MentionID: func() *int64 {
                id := int64(1)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Contains(t, data[v.ID].MentionIDs, int64(1))
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return util.SliceContains(v.MentionIDs, 1)
            }), result.Total)
        },
    },
    {
        Name: "ReviewedID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            ReviewedID: func() *int64 {
                id := int64(1)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Contains(t, data[v.ID].ReviewedIDs, int64(1))
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return util.SliceContains(v.ReviewedIDs, 1)
            }), result.Total)
        },
    },
    {
        Name: "ReviewRequestedID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            ReviewRequestedID: func() *int64 {
                id := int64(1)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Contains(t, data[v.ID].ReviewRequestedIDs, int64(1))
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return util.SliceContains(v.ReviewRequestedIDs, 1)
            }), result.Total)
        },
    },
    {
        Name: "SubscriberID",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            SubscriberID: func() *int64 {
                id := int64(1)
                return &id
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.Contains(t, data[v.ID].SubscriberIDs, int64(1))
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return util.SliceContains(v.SubscriberIDs, 1)
            }), result.Total)
        },
    },
    {
        Name: "updated",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 5,
            },
            UpdatedAfterUnix: func() *int64 {
                var t int64 = 20
                return &t
            }(),
            UpdatedBeforeUnix: func() *int64 {
                var t int64 = 30
                return &t
            }(),
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, 5, len(result.Hits))
            for _, v := range result.Hits {
                assert.GreaterOrEqual(t, data[v.ID].UpdatedUnix, int64(20))
                assert.LessOrEqual(t, data[v.ID].UpdatedUnix, int64(30))
            }
            assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool {
                return data[v.ID].UpdatedUnix >= 20 && data[v.ID].UpdatedUnix <= 30
            }), result.Total)
        },
    },
    {
        Name: "SortByCreatedDesc",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                ListAll: true,
            },
            SortBy: internal.SortByCreatedDesc,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, len(data), len(result.Hits))
            assert.Equal(t, len(data), int(result.Total))
            for i, v := range result.Hits {
                if i < len(result.Hits)-1 {
                    assert.GreaterOrEqual(t, data[v.ID].CreatedUnix, data[result.Hits[i+1].ID].CreatedUnix)
                }
            }
        },
    },
    {
        Name: "SortByUpdatedDesc",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                ListAll: true,
            },
            SortBy: internal.SortByUpdatedDesc,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, len(data), len(result.Hits))
            assert.Equal(t, len(data), int(result.Total))
            for i, v := range result.Hits {
                if i < len(result.Hits)-1 {
                    assert.GreaterOrEqual(t, data[v.ID].UpdatedUnix, data[result.Hits[i+1].ID].UpdatedUnix)
                }
            }
        },
    },
    {
        Name: "SortByCommentsDesc",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                ListAll: true,
            },
            SortBy: internal.SortByCommentsDesc,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, len(data), len(result.Hits))
            assert.Equal(t, len(data), int(result.Total))
            for i, v := range result.Hits {
                if i < len(result.Hits)-1 {
                    assert.GreaterOrEqual(t, data[v.ID].CommentCount, data[result.Hits[i+1].ID].CommentCount)
                }
            }
        },
    },
    {
        Name: "SortByDeadlineDesc",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                ListAll: true,
            },
            SortBy: internal.SortByDeadlineDesc,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, len(data), len(result.Hits))
            assert.Equal(t, len(data), int(result.Total))
            for i, v := range result.Hits {
                if i < len(result.Hits)-1 {
                    assert.GreaterOrEqual(t, data[v.ID].DeadlineUnix, data[result.Hits[i+1].ID].DeadlineUnix)
                }
            }
        },
    },
    {
        Name: "SortByCreatedAsc",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                ListAll: true,
            },
            SortBy: internal.SortByCreatedAsc,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, len(data), len(result.Hits))
            assert.Equal(t, len(data), int(result.Total))
            for i, v := range result.Hits {
                if i < len(result.Hits)-1 {
                    assert.LessOrEqual(t, data[v.ID].CreatedUnix, data[result.Hits[i+1].ID].CreatedUnix)
                }
            }
        },
    },
    {
        Name: "SortByUpdatedAsc",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                ListAll: true,
            },
            SortBy: internal.SortByUpdatedAsc,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, len(data), len(result.Hits))
            assert.Equal(t, len(data), int(result.Total))
            for i, v := range result.Hits {
                if i < len(result.Hits)-1 {
                    assert.LessOrEqual(t, data[v.ID].UpdatedUnix, data[result.Hits[i+1].ID].UpdatedUnix)
                }
            }
        },
    },
    {
        Name: "SortByCommentsAsc",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                ListAll: true,
            },
            SortBy: internal.SortByCommentsAsc,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, len(data), len(result.Hits))
            assert.Equal(t, len(data), int(result.Total))
            for i, v := range result.Hits {
                if i < len(result.Hits)-1 {
                    assert.LessOrEqual(t, data[v.ID].CommentCount, data[result.Hits[i+1].ID].CommentCount)
                }
            }
        },
    },
    {
        Name: "SortByDeadlineAsc",
        SearchOptions: &internal.SearchOptions{
            Paginator: &db.ListOptions{
                ListAll: true,
            },
            SortBy: internal.SortByDeadlineAsc,
        },
        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
            assert.Equal(t, len(data), len(result.Hits))
            assert.Equal(t, len(data), int(result.Total))
            for i, v := range result.Hits {
                if i < len(result.Hits)-1 {
                    assert.LessOrEqual(t, data[v.ID].DeadlineUnix, data[result.Hits[i+1].ID].DeadlineUnix)
                }
            }
        },
    },
}

type testIndexerCase struct {
    Name      string
    ExtraData []*internal.IndexerData

    SearchOptions *internal.SearchOptions

    Expected      func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) // if nil, use ExpectedIDs, ExpectedTotal
    ExpectedIDs   []int64
    ExpectedTotal int64
}

func generateDefaultIndexerData() []*internal.IndexerData {
    var id int64
    var data []*internal.IndexerData
    for repoID := int64(1); repoID <= 10; repoID++ {
        for issueIndex := int64(1); issueIndex <= 20; issueIndex++ {
            id++

            comments := make([]string, id%4)
            for i := range comments {
                comments[i] = fmt.Sprintf("comment%d", i)
            }

            labelIDs := make([]int64, id%5)
            for i := range labelIDs {
                labelIDs[i] = int64(i) + 1 // LabelID should not be 0
            }
            mentionIDs := make([]int64, id%6)
            for i := range mentionIDs {
                mentionIDs[i] = int64(i) + 1 // MentionID should not be 0
            }
            reviewedIDs := make([]int64, id%7)
            for i := range reviewedIDs {
                reviewedIDs[i] = int64(i) + 1 // ReviewID should not be 0
            }
            reviewRequestedIDs := make([]int64, id%8)
            for i := range reviewRequestedIDs {
                reviewRequestedIDs[i] = int64(i) + 1 // ReviewRequestedID should not be 0
            }
            subscriberIDs := make([]int64, id%9)
            for i := range subscriberIDs {
                subscriberIDs[i] = int64(i) + 1 // SubscriberID should not be 0
            }

            data = append(data, &internal.IndexerData{
                ID:                 id,
                RepoID:             repoID,
                IsPublic:           repoID%2 == 0,
                Title:              fmt.Sprintf("issue%d of repo%d", issueIndex, repoID),
                Content:            fmt.Sprintf("content%d", issueIndex),
                Comments:           comments,
                IsPull:             issueIndex%2 == 0,
                IsClosed:           issueIndex%3 == 0,
                LabelIDs:           labelIDs,
                NoLabel:            len(labelIDs) == 0,
                MilestoneID:        issueIndex % 4,
                ProjectID:          issueIndex % 5,
                ProjectBoardID:     issueIndex % 6,
                PosterID:           id%10 + 1, // PosterID should not be 0
                AssigneeID:         issueIndex % 10,
                MentionIDs:         mentionIDs,
                ReviewedIDs:        reviewedIDs,
                ReviewRequestedIDs: reviewRequestedIDs,
                SubscriberIDs:      subscriberIDs,
                UpdatedUnix:        timeutil.TimeStamp(id + issueIndex),
                CreatedUnix:        timeutil.TimeStamp(id),
                DeadlineUnix:       timeutil.TimeStamp(id + issueIndex + repoID),
                CommentCount:       int64(len(comments)),
            })
        }
    }

    return data
}

func countIndexerData(data map[int64]*internal.IndexerData, f func(v *internal.IndexerData) bool) int64 {
    var count int64
    for _, v := range data {
        if f(v) {
            count++
        }
    }
    return count
}

// waitData waits for the indexer to index all data.
// Some engines like Elasticsearch index data asynchronously, so we need to wait for a while.
func waitData(indexer internal.Indexer, total int64) error {
    var actual int64
    for i := 0; i < 100; i++ {
        result, err := indexer.Search(context.Background(), &internal.SearchOptions{
            Paginator: &db.ListOptions{
                PageSize: 0,
            },
        })
        if err != nil {
            return err
        }
        actual = result.Total
        if actual == total {
            return nil
        }
        time.Sleep(100 * time.Millisecond)
    }
    return fmt.Errorf("waitData: expected %d, actual %d", total, actual)
}
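Extending the harness is a matter of appending another `testIndexerCase` to the table. A hedged sketch (`NoLabelOnly` and `NoLabel` both appear elsewhere in this PR, but this specific case is illustrative, not part of the commit):

```go
// Illustrative only: a possible extra case for the table above.
var noLabelCase = &testIndexerCase{
    Name: "NoLabelOnly",
    SearchOptions: &internal.SearchOptions{
        NoLabelOnly: true,
    },
    Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
        for _, v := range result.Hits {
            assert.True(t, data[v.ID].NoLabel)
        }
        assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool { return v.NoLabel }), result.Total)
    },
}
```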
@@ -0,0 +1,50 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package meilisearch

import (
    "fmt"
    "net/http"
    "os"
    "testing"
    "time"

    "code.gitea.io/gitea/modules/indexer/issues/internal/tests"
)

func TestMeilisearchIndexer(t *testing.T) {
    // The meilisearch instance started by pull-db-tests.yml > test-unit > services > meilisearch
    url := "http://meilisearch:7700"
    key := "" // auth has been disabled in test environment

    if os.Getenv("CI") == "" {
        // Make it possible to run tests against a local meilisearch instance
        url = os.Getenv("TEST_MEILISEARCH_URL")
        if url == "" {
            t.Skip("TEST_MEILISEARCH_URL not set and not running in CI")
            return
        }
        key = os.Getenv("TEST_MEILISEARCH_KEY")
    }

    ok := false
    for i := 0; i < 60; i++ {
        resp, err := http.Get(url)
        if err == nil && resp.StatusCode == http.StatusOK {
            ok = true
            break
        }
        t.Logf("Waiting for meilisearch to be up: %v", err)
        time.Sleep(time.Second)
    }
    if !ok {
        t.Fatalf("Failed to wait for meilisearch to be up")
        return
    }

    indexer := NewIndexer(url, key, fmt.Sprintf("test_meilisearch_indexer_%d", time.Now().Unix()))
    defer indexer.Close()

    tests.TestIndexer(t, indexer)
}
@@ -0,0 +1,173 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package issues

import (
    "context"
    "errors"
    "fmt"

    "code.gitea.io/gitea/models/db"
    issue_model "code.gitea.io/gitea/models/issues"
    "code.gitea.io/gitea/modules/container"
    "code.gitea.io/gitea/modules/indexer/issues/internal"
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/queue"
)

// getIssueIndexerData returns the indexer data of an issue and a bool value indicating whether the issue exists.
func getIssueIndexerData(ctx context.Context, issueID int64) (*internal.IndexerData, bool, error) {
    issue, err := issue_model.GetIssueByID(ctx, issueID)
    if err != nil {
        if issue_model.IsErrIssueNotExist(err) {
            return nil, false, nil
        }
        return nil, false, err
    }

    // FIXME: what if users want to search for a review comment of a pull request?
    // The comment type is CommentTypeCode or CommentTypeReview.
    // But LoadDiscussComments only loads CommentTypeComment.
    if err := issue.LoadDiscussComments(ctx); err != nil {
        return nil, false, err
    }

    comments := make([]string, 0, len(issue.Comments))
    for _, comment := range issue.Comments {
        if comment.Content != "" {
            // Whatever the comment type is, index the content if it is not empty.
            comments = append(comments, comment.Content)
        }
    }

    if err := issue.LoadAttributes(ctx); err != nil {
        return nil, false, err
    }

    labels := make([]int64, 0, len(issue.Labels))
    for _, label := range issue.Labels {
        labels = append(labels, label.ID)
    }

    mentionIDs, err := issue_model.GetIssueMentionIDs(ctx, issueID)
    if err != nil {
        return nil, false, err
    }

    var (
        reviewedIDs        []int64
        reviewRequestedIDs []int64
    )
    {
        reviews, err := issue_model.FindReviews(ctx, issue_model.FindReviewOptions{
            ListOptions: db.ListOptions{
                ListAll: true,
            },
            IssueID:      issueID,
            OfficialOnly: false,
        })
        if err != nil {
            return nil, false, err
        }

        reviewedIDsSet := make(container.Set[int64], len(reviews))
        reviewRequestedIDsSet := make(container.Set[int64], len(reviews))
        for _, review := range reviews {
            if review.Type == issue_model.ReviewTypeRequest {
                reviewRequestedIDsSet.Add(review.ReviewerID)
            } else {
                reviewedIDsSet.Add(review.ReviewerID)
            }
        }
        reviewedIDs = reviewedIDsSet.Values()
        reviewRequestedIDs = reviewRequestedIDsSet.Values()
    }

    subscriberIDs, err := issue_model.GetIssueWatchersIDs(ctx, issue.ID, true)
    if err != nil {
        return nil, false, err
    }

    var projectID int64
    if issue.Project != nil {
        projectID = issue.Project.ID
    }

    return &internal.IndexerData{
        ID:                 issue.ID,
        RepoID:             issue.RepoID,
        IsPublic:           !issue.Repo.IsPrivate,
        Title:              issue.Title,
        Content:            issue.Content,
        Comments:           comments,
        IsPull:             issue.IsPull,
        IsClosed:           issue.IsClosed,
        LabelIDs:           labels,
        NoLabel:            len(labels) == 0,
        MilestoneID:        issue.MilestoneID,
        ProjectID:          projectID,
        ProjectBoardID:     issue.ProjectBoardID(),
        PosterID:           issue.PosterID,
        AssigneeID:         issue.AssigneeID,
        MentionIDs:         mentionIDs,
        ReviewedIDs:        reviewedIDs,
        ReviewRequestedIDs: reviewRequestedIDs,
        SubscriberIDs:      subscriberIDs,
        UpdatedUnix:        issue.UpdatedUnix,
        CreatedUnix:        issue.CreatedUnix,
        DeadlineUnix:       issue.DeadlineUnix,
        CommentCount:       int64(len(issue.Comments)),
    }, true, nil
}

func updateRepoIndexer(ctx context.Context, repoID int64) error {
    ids, err := issue_model.GetIssueIDsByRepoID(ctx, repoID)
    if err != nil {
        return fmt.Errorf("issue_model.GetIssueIDsByRepoID: %w", err)
    }
    for _, id := range ids {
        if err := updateIssueIndexer(id); err != nil {
            return err
        }
    }
    return nil
}

func updateIssueIndexer(issueID int64) error {
    return pushIssueIndexerQueue(&IndexerMetadata{ID: issueID})
}

func deleteRepoIssueIndexer(ctx context.Context, repoID int64) error {
    ids, err := issue_model.GetIssueIDsByRepoID(ctx, repoID)
    if err != nil {
        return fmt.Errorf("issue_model.GetIssueIDsByRepoID: %w", err)
    }

    if len(ids) == 0 {
        return nil
    }
    return pushIssueIndexerQueue(&IndexerMetadata{
        IDs:      ids,
        IsDelete: true,
    })
}

func pushIssueIndexerQueue(data *IndexerMetadata) error {
    if issueIndexerQueue == nil {
        // Some unit tests will trigger indexing, but the queue is not initialized.
        // It's OK to ignore it, but log a warning message in case it's not a unit test.
        log.Warn("Trying to push %+v to issue indexer queue, but the queue is not initialized, it's OK if it's a unit test", data)
        return nil
    }

    err := issueIndexerQueue.Push(data)
    if errors.Is(err, queue.ErrAlreadyInQueue) {
        return nil
    }
    if errors.Is(err, context.DeadlineExceeded) {
        log.Warn("It seems that the issue indexer is slow and the queue is full. Please check the issue indexer or increase the queue size.")
    }
    return err
}
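The queue carries only `IndexerMetadata` (an ID, or IDs plus a delete flag, as used above), so the consumer always re-reads fresh data through `getIssueIndexerData` at processing time. A hedged sketch of what the consuming side might look like; the real handler is defined elsewhere in this PR:

```go
// Illustrative only, assuming the IndexerMetadata fields used above.
func handleIndexerMetadata(ctx context.Context, m *IndexerMetadata, indexer internal.Indexer) error {
    if m.IsDelete {
        return indexer.Delete(ctx, m.IDs...)
    }
    data, existed, err := getIssueIndexerData(ctx, m.ID)
    if err != nil {
        return err
    }
    if !existed {
        // The issue was deleted after the message had been queued.
        return indexer.Delete(ctx, m.ID)
    }
    return indexer.Index(ctx, data)
}
```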