// Copyright 2018 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package issues

import (
	"context"
	"fmt"
	"os"
	"runtime/pprof"
	"sync"
	"time"

	"code.gitea.io/gitea/models/db"
	issues_model "code.gitea.io/gitea/models/issues"
	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/modules/graceful"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/process"
	"code.gitea.io/gitea/modules/queue"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/util"
)

// IndexerData is the data stored in the issue indexer
type IndexerData struct {
	ID       int64    `json:"id"`
	RepoID   int64    `json:"repo_id"`
	Title    string   `json:"title"`
	Content  string   `json:"content"`
	Comments []string `json:"comments"`
	IsDelete bool     `json:"is_delete"`
	IDs      []int64  `json:"ids"`
}
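
// A queued payload takes one of two shapes: an index payload fills ID, RepoID,
// Title, Content and Comments, while a delete payload sets only IsDelete and
// IDs. A minimal sketch of each (hypothetical values):
//
//	_ = &IndexerData{ID: 1, RepoID: 2, Title: "bug", Content: "..."} // index
//	_ = &IndexerData{IDs: []int64{1, 2}, IsDelete: true}             // delete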

// Match represents one search result
type Match struct {
	ID    int64   `json:"id"`
	Score float64 `json:"score"`
}

// SearchResult represents search results
type SearchResult struct {
	Total int64
	Hits  []Match
}

// Indexer defines an interface to index issue contents
type Indexer interface {
	Init() (bool, error)
	Ping() bool
	SetAvailabilityChangeCallback(callback func(bool))
	Index(issues []*IndexerData) error
	Delete(ids ...int64) error
	Search(ctx context.Context, kw string, repoIDs []int64, limit, start int) (*SearchResult, error)
	Close()
}
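
// The concrete implementations selected in InitIssueIndexer below are the
// Bleve, Elasticsearch and DB indexers. A minimal stub satisfying the
// interface could look like this (hypothetical, illustration only):
//
//	type noopIndexer struct{}
//
//	func (noopIndexer) Init() (bool, error)                      { return true, nil }
//	func (noopIndexer) Ping() bool                               { return true }
//	func (noopIndexer) SetAvailabilityChangeCallback(func(bool)) {}
//	func (noopIndexer) Index(issues []*IndexerData) error        { return nil }
//	func (noopIndexer) Delete(ids ...int64) error                { return nil }
//	func (noopIndexer) Search(ctx context.Context, kw string, repoIDs []int64, limit, start int) (*SearchResult, error) {
//		return &SearchResult{}, nil
//	}
//	func (noopIndexer) Close() {}
//
//	var _ Indexer = noopIndexer{}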

// indexerHolder holds the active Indexer behind a condition variable so that
// callers can block until the indexer has been initialized (or cancelled).
type indexerHolder struct {
	indexer   Indexer
	mutex     sync.RWMutex
	cond      *sync.Cond
	cancelled bool
}

func newIndexerHolder() *indexerHolder {
	h := &indexerHolder{}
	h.cond = sync.NewCond(h.mutex.RLocker())
	return h
}

// cancel marks the holder as cancelled and wakes all waiters in get
func (h *indexerHolder) cancel() {
	h.mutex.Lock()
	defer h.mutex.Unlock()
	h.cancelled = true
	h.cond.Broadcast()
}

// set stores the initialized indexer and wakes all waiters in get
func (h *indexerHolder) set(indexer Indexer) {
	h.mutex.Lock()
	defer h.mutex.Unlock()
	h.indexer = indexer
	h.cond.Broadcast()
}

// get blocks until an indexer has been set or the holder has been cancelled,
// then returns the indexer (nil if cancelled)
func (h *indexerHolder) get() Indexer {
	h.mutex.RLock()
	defer h.mutex.RUnlock()
	// wait in a loop, the standard pattern for condition variables
	for h.indexer == nil && !h.cancelled {
		h.cond.Wait()
	}
	return h.indexer
}
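
// Typical holder lifecycle, as used by InitIssueIndexer below (illustrative):
//
//	h := newIndexerHolder()
//	go func() {
//		h.set(indexer) // or h.cancel() if initialization fails
//	}()
//	idx := h.get() // blocks until set or cancel; nil only after cancel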

var (
	// issueIndexerQueue queue of issue ids to be updated
	issueIndexerQueue queue.Queue
	holder            = newIndexerHolder()
)

// InitIssueIndexer initializes the issue indexer; if syncReindex is true it
// blocks until reindexing of all issues is done
func InitIssueIndexer(syncReindex bool) {
	ctx, _, finished := process.GetManager().AddTypedContext(context.Background(), "Service: IssueIndexer", process.SystemProcessType, false)

	waitChannel := make(chan time.Duration, 1)

	// Create the Queue
	switch setting.Indexer.IssueType {
	case "bleve", "elasticsearch":
		handler := func(data ...queue.Data) []queue.Data {
			indexer := holder.get()
			if indexer == nil {
				log.Error("Issue indexer handler: unable to get indexer!")
				return data
			}

			iData := make([]*IndexerData, 0, len(data))
			unhandled := make([]queue.Data, 0, len(data))
			for _, datum := range data {
				indexerData, ok := datum.(*IndexerData)
				if !ok {
					log.Error("Unable to process provided datum: %v - not possible to cast to IndexerData", datum)
					continue
				}
				log.Trace("IndexerData Process: %d %v %t", indexerData.ID, indexerData.IDs, indexerData.IsDelete)
				if indexerData.IsDelete {
					if err := indexer.Delete(indexerData.IDs...); err != nil {
						log.Error("Error whilst deleting from index: %v Error: %v", indexerData.IDs, err)
						if indexer.Ping() {
							continue
						}
						// Add back to queue
						unhandled = append(unhandled, datum)
					}
					continue
				}
				iData = append(iData, indexerData)
			}
			if len(unhandled) > 0 {
				for _, indexerData := range iData {
					unhandled = append(unhandled, indexerData)
				}
				return unhandled
			}
			if err := indexer.Index(iData); err != nil {
				log.Error("Error whilst indexing: %v Error: %v", iData, err)
				if indexer.Ping() {
					return nil
				}
				// Add back to queue
				for _, indexerData := range iData {
					unhandled = append(unhandled, indexerData)
				}
				return unhandled
			}
			return nil
		}
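
		// The handler's return value is the redelivery contract: returning nil
		// (or an empty slice) marks every item as handled, while returned items
		// are pushed back to the queue to be retried later, as done above when
		// the indexer cannot be pinged.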

		issueIndexerQueue = queue.CreateQueue("issue_indexer", handler, &IndexerData{})

		if issueIndexerQueue == nil {
			log.Fatal("Unable to create issue indexer queue")
		}
	default:
		issueIndexerQueue = &queue.DummyQueue{}
	}
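
	// With the "db" indexer the DummyQueue simply discards pushed payloads:
	// searches query the issue table directly, so there is no external index
	// to keep in sync.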

	// Create the Indexer
	go func() {
		pprof.SetGoroutineLabels(ctx)
		start := time.Now()
		log.Info("PID %d: Initializing Issue Indexer: %s", os.Getpid(), setting.Indexer.IssueType)
		var populate bool
		switch setting.Indexer.IssueType {
		case "bleve":
			defer func() {
				if err := recover(); err != nil {
					log.Error("PANIC whilst initializing issue indexer: %v\nStacktrace: %s", err, log.Stack(2))
					log.Error("The indexer files are likely corrupted and may need to be deleted")
					log.Error("You can completely remove the %q directory to make Gitea recreate the indexes", setting.Indexer.IssuePath)
					holder.cancel()
					log.Fatal("PID: %d Unable to initialize the Bleve Issue Indexer at path: %s Error: %v", os.Getpid(), setting.Indexer.IssuePath, err)
				}
			}()
			issueIndexer := NewBleveIndexer(setting.Indexer.IssuePath)
			exist, err := issueIndexer.Init()
			if err != nil {
				holder.cancel()
				log.Fatal("Unable to initialize Bleve Issue Indexer at path: %s Error: %v", setting.Indexer.IssuePath, err)
			}
			populate = !exist
			holder.set(issueIndexer)
			graceful.GetManager().RunAtTerminate(func() {
				log.Debug("Closing issue indexer")
				issueIndexer := holder.get()
				if issueIndexer != nil {
					issueIndexer.Close()
				}
				finished()
				log.Info("PID: %d Issue Indexer closed", os.Getpid())
			})
			log.Debug("Created Bleve Indexer")
		case "elasticsearch":
			graceful.GetManager().RunWithShutdownFns(func(_, atTerminate func(func())) {
				pprof.SetGoroutineLabels(ctx)
				issueIndexer, err := NewElasticSearchIndexer(setting.Indexer.IssueConnStr, setting.Indexer.IssueIndexerName)
				if err != nil {
					log.Fatal("Unable to initialize Elastic Search Issue Indexer at connection: %s Error: %v", setting.Indexer.IssueConnStr, err)
				}
				exist, err := issueIndexer.Init()
				if err != nil {
					log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", setting.Indexer.IssueConnStr, err)
				}
				populate = !exist
				holder.set(issueIndexer)
				atTerminate(finished)
			})
		case "db":
			issueIndexer := &DBIndexer{}
			holder.set(issueIndexer)
			graceful.GetManager().RunAtTerminate(finished)
		default:
			holder.cancel()
			log.Fatal("Unknown issue indexer type: %s", setting.Indexer.IssueType)
		}

		if pausable, ok := issueIndexerQueue.(queue.Pausable); ok {
			holder.get().SetAvailabilityChangeCallback(func(available bool) {
				if !available {
					log.Info("Issue index queue paused")
					pausable.Pause()
				} else {
					log.Info("Issue index queue resumed")
					pausable.Resume()
				}
			})
		}

		// Start processing the queue
		go graceful.GetManager().RunWithShutdownFns(issueIndexerQueue.Run)

		// Populate the index
		if populate {
			if syncReindex {
				graceful.GetManager().RunWithShutdownContext(populateIssueIndexer)
			} else {
				go graceful.GetManager().RunWithShutdownContext(populateIssueIndexer)
			}
		}
		waitChannel <- time.Since(start)
		close(waitChannel)
	}()

	if syncReindex {
		select {
		case <-waitChannel:
		case <-graceful.GetManager().IsShutdown():
		}
	} else if setting.Indexer.StartupTimeout > 0 {
		go func() {
			pprof.SetGoroutineLabels(ctx)
			timeout := setting.Indexer.StartupTimeout
			if graceful.GetManager().IsChild() && setting.GracefulHammerTime > 0 {
				timeout += setting.GracefulHammerTime
			}
			select {
			case duration := <-waitChannel:
				log.Info("Issue Indexer Initialization took %v", duration)
			case <-graceful.GetManager().IsShutdown():
				log.Warn("Shutdown occurred before issue index initialisation was complete")
			case <-time.After(timeout):
				if shutdownable, ok := issueIndexerQueue.(queue.Shutdownable); ok {
					shutdownable.Terminate()
				}
				log.Fatal("Issue Indexer Initialization timed-out after: %v", timeout)
			}
		}()
	}
}
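
// The indexer type and its parameters are read from the [indexer] section of
// app.ini; for example (illustrative values, see the config cheat sheet for
// the full key list):
//
//	[indexer]
//	ISSUE_INDEXER_TYPE = bleve
//	ISSUE_INDEXER_PATH = indexers/issues.bleve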

// populateIssueIndexer populates the issue indexer with issue data
func populateIssueIndexer(ctx context.Context) {
	ctx, _, finished := process.GetManager().AddTypedContext(ctx, "Service: PopulateIssueIndexer", process.SystemProcessType, true)
	defer finished()
	for page := 1; ; page++ {
		select {
		case <-ctx.Done():
			log.Warn("Issue Indexer population shutdown before completion")
			return
		default:
		}
		repos, _, err := repo_model.SearchRepositoryByName(ctx, &repo_model.SearchRepoOptions{
			ListOptions: db.ListOptions{Page: page, PageSize: repo_model.RepositoryListDefaultPageSize},
			OrderBy:     db.SearchOrderByID,
			Private:     true,
			Collaborate: util.OptionalBoolFalse,
		})
		if err != nil {
			log.Error("SearchRepositoryByName: %v", err)
			continue
		}
		if len(repos) == 0 {
			log.Debug("Issue Indexer population complete")
			return
		}

		for _, repo := range repos {
			select {
			case <-ctx.Done():
				log.Info("Issue Indexer population shutdown before completion")
				return
			default:
			}
			UpdateRepoIndexer(ctx, repo)
		}
	}
}
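
// Population walks all non-collaborative repositories in ID order, one page at
// a time, and stops at the first empty page. Note that a page whose repository
// search fails is logged and skipped, since continue still advances the page
// counter.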

// UpdateRepoIndexer adds/updates all issues of the repository in the issue indexer
func UpdateRepoIndexer(ctx context.Context, repo *repo_model.Repository) {
	is, err := issues_model.Issues(ctx, &issues_model.IssuesOptions{
		RepoID:   repo.ID,
		IsClosed: util.OptionalBoolNone,
		IsPull:   util.OptionalBoolNone,
	})
	if err != nil {
		log.Error("Issues: %v", err)
		return
	}
	if err = issues_model.IssueList(is).LoadDiscussComments(ctx); err != nil {
		log.Error("LoadDiscussComments: %v", err)
		return
	}
	for _, issue := range is {
		UpdateIssueIndexer(issue)
	}
}

// UpdateIssueIndexer adds/updates an issue in the issue indexer
func UpdateIssueIndexer(issue *issues_model.Issue) {
	var comments []string
	for _, comment := range issue.Comments {
		if comment.Type == issues_model.CommentTypeComment {
			comments = append(comments, comment.Content)
		}
	}
	indexerData := &IndexerData{
		ID:       issue.ID,
		RepoID:   issue.RepoID,
		Title:    issue.Title,
		Content:  issue.Content,
		Comments: comments,
	}
	log.Debug("Adding to channel: %v", indexerData)
	if err := issueIndexerQueue.Push(indexerData); err != nil {
		log.Error("Unable to push to issue indexer: %v: Error: %v", indexerData, err)
	}
}
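
// Only comments already loaded on the issue are indexed, which is why
// UpdateRepoIndexer above calls LoadDiscussComments before invoking
// UpdateIssueIndexer.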

// DeleteRepoIssueIndexer removes all of the repository's issues from the issue indexer
func DeleteRepoIssueIndexer(ctx context.Context, repo *repo_model.Repository) {
	ids, err := issues_model.GetIssueIDsByRepoID(ctx, repo.ID)
	if err != nil {
		log.Error("GetIssueIDsByRepoID failed: %v", err)
		return
	}

	if len(ids) == 0 {
		return
	}
	indexerData := &IndexerData{
		IDs:      ids,
		IsDelete: true,
	}
	if err := issueIndexerQueue.Push(indexerData); err != nil {
		log.Error("Unable to push to issue indexer: %v: Error: %v", indexerData, err)
	}
}

// SearchIssuesByKeyword searches issue IDs by keyword within the given repositories
// WARNING: You have to ensure the user has permission to visit the repoIDs' issues
func SearchIssuesByKeyword(ctx context.Context, repoIDs []int64, keyword string) ([]int64, error) {
	var issueIDs []int64
	indexer := holder.get()

	if indexer == nil {
		log.Error("SearchIssuesByKeyword(): unable to get indexer!")
		return nil, fmt.Errorf("unable to get issue indexer")
	}
	res, err := indexer.Search(ctx, keyword, repoIDs, 50, 0)
	if err != nil {
		return nil, err
	}
	for _, r := range res.Hits {
		issueIDs = append(issueIDs, r.ID)
	}
	return issueIDs, nil
}
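
// An illustrative call, searching a single repository (hypothetical variables):
//
//	issueIDs, err := SearchIssuesByKeyword(ctx, []int64{repo.ID}, "crash")
//	if err != nil {
//		log.Error("SearchIssuesByKeyword: %v", err)
//	}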

// IsAvailable checks if the issue indexer is available
func IsAvailable() bool {
	indexer := holder.get()
	if indexer == nil {
		log.Error("IsAvailable(): unable to get indexer!")
		return false
	}

	return indexer.Ping()
}