2014-04-13 09:57:42 +04:00
|
|
|
// Copyright 2014 The Gogs Authors. All rights reserved.
|
2019-02-08 19:45:43 +03:00
|
|
|
// Copyright 2019 The Gitea Authors. All rights reserved.
|
2022-11-27 21:20:29 +03:00
|
|
|
// SPDX-License-Identifier: MIT
|
2014-04-13 09:57:42 +04:00
|
|
|
|
|
|
|
package user
|
|
|
|
|
|
|
|
import (
|
2014-11-23 10:33:47 +03:00
|
|
|
"bytes"
|
2014-04-13 09:57:42 +04:00
|
|
|
"fmt"
|
2021-04-05 18:30:52 +03:00
|
|
|
"net/http"
|
2019-12-02 06:50:36 +03:00
|
|
|
"regexp"
|
2017-12-04 07:39:01 +03:00
|
|
|
"sort"
|
2019-12-02 06:50:36 +03:00
|
|
|
"strconv"
|
2019-01-23 07:10:38 +03:00
|
|
|
"strings"
|
2014-04-13 09:57:42 +04:00
|
|
|
|
2022-08-25 05:31:57 +03:00
|
|
|
activities_model "code.gitea.io/gitea/models/activities"
|
2021-12-10 11:14:24 +03:00
|
|
|
asymkey_model "code.gitea.io/gitea/models/asymkey"
|
2021-09-24 14:32:56 +03:00
|
|
|
"code.gitea.io/gitea/models/db"
|
2022-04-08 12:11:15 +03:00
|
|
|
issues_model "code.gitea.io/gitea/models/issues"
|
2022-03-29 09:29:02 +03:00
|
|
|
"code.gitea.io/gitea/models/organization"
|
2021-12-10 04:27:50 +03:00
|
|
|
repo_model "code.gitea.io/gitea/models/repo"
|
2021-11-09 22:57:58 +03:00
|
|
|
"code.gitea.io/gitea/models/unit"
|
2021-11-24 12:49:20 +03:00
|
|
|
user_model "code.gitea.io/gitea/models/user"
|
2016-11-10 19:24:48 +03:00
|
|
|
"code.gitea.io/gitea/modules/base"
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
"code.gitea.io/gitea/modules/container"
|
2016-11-10 19:24:48 +03:00
|
|
|
"code.gitea.io/gitea/modules/context"
|
2020-02-29 09:52:05 +03:00
|
|
|
issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
|
2021-07-24 19:03:58 +03:00
|
|
|
"code.gitea.io/gitea/modules/json"
|
2019-10-08 20:55:16 +03:00
|
|
|
"code.gitea.io/gitea/modules/log"
|
2021-04-20 01:25:08 +03:00
|
|
|
"code.gitea.io/gitea/modules/markup"
|
2019-12-15 17:20:08 +03:00
|
|
|
"code.gitea.io/gitea/modules/markup/markdown"
|
2016-11-10 19:24:48 +03:00
|
|
|
"code.gitea.io/gitea/modules/setting"
|
2017-01-25 05:43:02 +03:00
|
|
|
"code.gitea.io/gitea/modules/util"
|
2023-04-27 09:06:45 +03:00
|
|
|
"code.gitea.io/gitea/routers/web/feed"
|
|
|
|
context_service "code.gitea.io/gitea/services/context"
|
2020-05-15 01:55:43 +03:00
|
|
|
issue_service "code.gitea.io/gitea/services/issue"
|
2020-04-10 14:26:37 +03:00
|
|
|
pull_service "code.gitea.io/gitea/services/pull"
|
2017-12-26 02:25:16 +03:00
|
|
|
|
2019-04-14 19:43:56 +03:00
|
|
|
"github.com/keybase/go-crypto/openpgp"
|
|
|
|
"github.com/keybase/go-crypto/openpgp/armor"
|
2020-03-31 10:47:00 +03:00
|
|
|
"xorm.io/builder"
|
2014-04-13 09:57:42 +04:00
|
|
|
)
|
|
|
|
|
2014-06-23 07:11:12 +04:00
|
|
|
// Template names for the pages rendered by this package.
const (
	tplDashboard  base.TplName = "user/dashboard/dashboard"  // activity feed dashboard
	tplIssues     base.TplName = "user/dashboard/issues"     // cross-repository issues overview
	tplMilestones base.TplName = "user/dashboard/milestones" // cross-repository milestones overview
	tplProfile    base.TplName = "user/profile"              // user profile page
)
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// getDashboardContextUser finds out which context user dashboard is being viewed as .
|
2021-11-24 12:49:20 +03:00
|
|
|
func getDashboardContextUser(ctx *context.Context) *user_model.User {
|
2022-03-22 10:03:22 +03:00
|
|
|
ctxUser := ctx.Doer
|
2014-07-27 07:53:16 +04:00
|
|
|
orgName := ctx.Params(":org")
|
|
|
|
if len(orgName) > 0 {
|
2021-11-19 14:41:40 +03:00
|
|
|
ctxUser = ctx.Org.Organization.AsUser()
|
|
|
|
ctx.Data["Teams"] = ctx.Org.Teams
|
2015-08-25 17:58:34 +03:00
|
|
|
}
|
|
|
|
ctx.Data["ContextUser"] = ctxUser
|
|
|
|
|
2022-08-25 05:31:57 +03:00
|
|
|
orgs, err := organization.GetUserOrgsList(ctx.Doer)
|
2021-06-14 15:18:09 +03:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetUserOrgsList", err)
|
2015-08-25 17:58:34 +03:00
|
|
|
return nil
|
|
|
|
}
|
2021-06-14 15:18:09 +03:00
|
|
|
ctx.Data["Orgs"] = orgs
|
2015-08-25 17:58:34 +03:00
|
|
|
|
|
|
|
return ctxUser
|
|
|
|
}
|
|
|
|
|
2020-08-17 06:07:38 +03:00
|
|
|
// Dashboard render the dashboard page.
//
// It renders the activity feed for the context user (the doer, or an
// organization being viewed), including the heatmap (when enabled) and a
// paginated list of feed entries.
func Dashboard(ctx *context.Context) {
	ctxUser := getDashboardContextUser(ctx)
	if ctx.Written() {
		// getDashboardContextUser already rendered an error response.
		return
	}

	var (
		date = ctx.FormString("date")
		page = ctx.FormInt("page")
	)

	// Make sure page number is at least 1. Will be posted to ctx.Data.
	if page <= 1 {
		page = 1
	}

	ctx.Data["Title"] = ctxUser.DisplayName() + " - " + ctx.Tr("dashboard")
	ctx.Data["PageIsDashboard"] = true
	ctx.Data["PageIsNews"] = true
	// Count is best-effort: an error here only affects the displayed number.
	cnt, _ := organization.GetOrganizationCount(ctx, ctxUser)
	ctx.Data["UserOrgsCount"] = cnt
	ctx.Data["MirrorsEnabled"] = setting.Mirror.Enabled
	ctx.Data["Date"] = date

	var uid int64
	if ctxUser != nil {
		uid = ctxUser.ID
	}

	// Parameters consumed by the frontend repo-list component.
	ctx.PageData["dashboardRepoList"] = map[string]any{
		"searchLimit": setting.UI.User.RepoPagingNum,
		"uid":         uid,
	}

	if setting.Service.EnableUserHeatmap {
		data, err := activities_model.GetUserHeatmapDataByUserTeam(ctxUser, ctx.Org.Team, ctx.Doer)
		if err != nil {
			ctx.ServerError("GetUserHeatmapDataByUserTeam", err)
			return
		}
		ctx.Data["HeatmapData"] = data
		ctx.Data["HeatmapTotalContributions"] = activities_model.GetTotalContributionsInHeatmap(data)
	}

	// Fetch one page of activity feed entries visible to the doer.
	feeds, count, err := activities_model.GetFeeds(ctx, activities_model.GetFeedsOptions{
		RequestedUser:   ctxUser,
		RequestedTeam:   ctx.Org.Team,
		Actor:           ctx.Doer,
		IncludePrivate:  true,
		OnlyPerformedBy: false,
		IncludeDeleted:  false,
		Date:            ctx.FormString("date"),
		ListOptions: db.ListOptions{
			Page:     page,
			PageSize: setting.UI.FeedPagingNum,
		},
	})
	if err != nil {
		ctx.ServerError("GetFeeds", err)
		return
	}

	ctx.Data["Feeds"] = feeds

	pager := context.NewPagination(int(count), setting.UI.FeedPagingNum, page, 5)
	// Preserve the date filter across pagination links.
	pager.AddParam(ctx, "date", "Date")
	ctx.Data["Page"] = pager

	ctx.HTML(http.StatusOK, tplDashboard)
}
|
|
|
|
|
2019-12-15 17:20:08 +03:00
|
|
|
// Milestones render the user milestones page.
//
// It lists milestones across all repositories the context user can access,
// optionally restricted to a set of repository IDs given via the "repos"
// query parameter, filtered by state ("open"/"closed"), keyword, and sort
// order, and renders the result with pagination.
func Milestones(ctx *context.Context) {
	// The page is meaningless if both issues and pull requests are disabled
	// instance-wide, since milestones belong to those units.
	if unit.TypeIssues.UnitGlobalDisabled() && unit.TypePullRequests.UnitGlobalDisabled() {
		log.Debug("Milestones overview page not available as both issues and pull requests are globally disabled")
		ctx.Status(http.StatusNotFound)
		return
	}

	ctx.Data["Title"] = ctx.Tr("milestones")
	ctx.Data["PageIsMilestonesDashboard"] = true

	ctxUser := getDashboardContextUser(ctx)
	if ctx.Written() {
		return
	}

	// Base repository search: everything the context user owns or can see,
	// limited to repos that actually have milestones.
	repoOpts := repo_model.SearchRepoOptions{
		Actor:         ctxUser,
		OwnerID:       ctxUser.ID,
		Private:       true,
		AllPublic:     false, // Include also all public repositories of users and public organisations
		AllLimited:    false, // Include also all public repositories of limited organisations
		Archived:      util.OptionalBoolFalse,
		HasMilestones: util.OptionalBoolTrue, // Just needs display repos has milestones
	}

	if ctxUser.IsOrganization() && ctx.Org.Team != nil {
		// Narrow to repositories the current team can access.
		repoOpts.TeamID = ctx.Org.Team.ID
	}

	var (
		userRepoCond = repo_model.SearchRepositoryCondition(&repoOpts) // all repo condition user could visit
		repoCond     = userRepoCond                                    // may be further narrowed by the "repos" filter
		repoIDs      []int64

		reposQuery   = ctx.FormString("repos")
		isShowClosed = ctx.FormString("state") == "closed"
		sortType     = ctx.FormString("sort")
		page         = ctx.FormInt("page")
		keyword      = ctx.FormTrim("q")
	)

	if page <= 1 {
		page = 1
	}

	// Parse the "repos" filter, e.g. "[1,2,3]"; entries that are empty, zero,
	// or unparsable are ignored rather than treated as an error.
	if len(reposQuery) != 0 {
		if issueReposQueryPattern.MatchString(reposQuery) {
			// remove "[" and "]" from string
			reposQuery = reposQuery[1 : len(reposQuery)-1]
			// Parse each comma-separated ID into repoIDs.

			for _, rID := range strings.Split(reposQuery, ",") {
				// Ensure nonempty string entries
				if rID != "" && rID != "0" {
					rIDint64, err := strconv.ParseInt(rID, 10, 64)
					// If the repo id specified by query is not parseable or not accessible by user, just ignore it.
					if err == nil {
						repoIDs = append(repoIDs, rIDint64)
					}
				}
			}
			if len(repoIDs) > 0 {
				// Don't just let repoCond = builder.In("id", repoIDs) because user may has no permission on repoIDs
				// But the original repoCond has a limitation
				repoCond = repoCond.And(builder.In("id", repoIDs))
			}
		} else {
			log.Warn("issueReposQueryPattern not match with query")
		}
	}

	// Per-repo milestone counts over the unrestricted condition, used for the
	// repository sidebar.
	counts, err := issues_model.CountMilestonesByRepoCondAndKw(userRepoCond, keyword, isShowClosed)
	if err != nil {
		ctx.ServerError("CountMilestonesByRepoIDs", err)
		return
	}

	milestones, err := issues_model.SearchMilestones(repoCond, page, isShowClosed, sortType, keyword)
	if err != nil {
		ctx.ServerError("SearchMilestones", err)
		return
	}

	showRepos, _, err := repo_model.SearchRepositoryByCondition(ctx, &repoOpts, userRepoCond, false)
	if err != nil {
		ctx.ServerError("SearchRepositoryByCondition", err)
		return
	}
	sort.Sort(showRepos)

	// Attach each milestone's repository, render its description, and load
	// tracked time where applicable. Milestones whose repository cannot be
	// found are dropped from the list (note: i is only advanced when the
	// element is kept).
	for i := 0; i < len(milestones); {
		for _, repo := range showRepos {
			if milestones[i].RepoID == repo.ID {
				milestones[i].Repo = repo
				break
			}
		}
		if milestones[i].Repo == nil {
			log.Warn("Cannot find milestone %d 's repository %d", milestones[i].ID, milestones[i].RepoID)
			milestones = append(milestones[:i], milestones[i+1:]...)
			continue
		}

		milestones[i].RenderedContent, err = markdown.RenderString(&markup.RenderContext{
			URLPrefix: milestones[i].Repo.Link(),
			Metas:     milestones[i].Repo.ComposeMetas(),
			Ctx:       ctx,
		}, milestones[i].Content)
		if err != nil {
			ctx.ServerError("RenderString", err)
			return
		}

		if milestones[i].Repo.IsTimetrackerEnabled(ctx) {
			err := milestones[i].LoadTotalTrackedTime()
			if err != nil {
				ctx.ServerError("LoadTotalTrackedTime", err)
				return
			}
		}
		i++
	}

	// Stats for the currently filtered repository set.
	milestoneStats, err := issues_model.GetMilestonesStatsByRepoCondAndKw(repoCond, keyword)
	if err != nil {
		ctx.ServerError("GetMilestoneStats", err)
		return
	}

	// Stats over all accessible repositories; equal to milestoneStats when no
	// repo filter is active, so the extra query is skipped in that case.
	var totalMilestoneStats *issues_model.MilestonesStats
	if len(repoIDs) == 0 {
		totalMilestoneStats = milestoneStats
	} else {
		totalMilestoneStats, err = issues_model.GetMilestonesStatsByRepoCondAndKw(userRepoCond, keyword)
		if err != nil {
			ctx.ServerError("GetMilestoneStats", err)
			return
		}
	}

	var pagerCount int
	if isShowClosed {
		ctx.Data["State"] = "closed"
		ctx.Data["Total"] = totalMilestoneStats.ClosedCount
		pagerCount = int(milestoneStats.ClosedCount)
	} else {
		ctx.Data["State"] = "open"
		ctx.Data["Total"] = totalMilestoneStats.OpenCount
		pagerCount = int(milestoneStats.OpenCount)
	}

	ctx.Data["Milestones"] = milestones
	ctx.Data["Repos"] = showRepos
	ctx.Data["Counts"] = counts
	ctx.Data["MilestoneStats"] = milestoneStats
	ctx.Data["SortType"] = sortType
	ctx.Data["Keyword"] = keyword
	// Only expose the repo filter when it actually narrowed the result set.
	if milestoneStats.Total() != totalMilestoneStats.Total() {
		ctx.Data["RepoIDs"] = repoIDs
	}
	ctx.Data["IsShowClosed"] = isShowClosed

	pager := context.NewPagination(pagerCount, setting.UI.IssuePagingNum, page, 5)
	// Preserve active filters across pagination links.
	pager.AddParam(ctx, "q", "Keyword")
	pager.AddParam(ctx, "repos", "RepoIDs")
	pager.AddParam(ctx, "sort", "SortType")
	pager.AddParam(ctx, "state", "State")
	ctx.Data["Page"] = pager

	ctx.HTML(http.StatusOK, tplMilestones)
}
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// Pulls renders the user's pull request overview page
|
|
|
|
func Pulls(ctx *context.Context) {
|
2021-11-09 22:57:58 +03:00
|
|
|
if unit.TypePullRequests.UnitGlobalDisabled() {
|
2021-01-13 07:19:17 +03:00
|
|
|
log.Debug("Pull request overview page not available as it is globally disabled.")
|
2022-03-23 07:54:07 +03:00
|
|
|
ctx.Status(http.StatusNotFound)
|
2021-01-13 07:19:17 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
ctx.Data["Title"] = ctx.Tr("pull_requests")
|
|
|
|
ctx.Data["PageIsPulls"] = true
|
2022-08-22 15:51:48 +03:00
|
|
|
ctx.Data["SingleRepoAction"] = "pull"
|
2021-11-09 22:57:58 +03:00
|
|
|
buildIssueOverview(ctx, unit.TypePullRequests)
|
2021-01-13 07:19:17 +03:00
|
|
|
}
|
2019-12-02 06:50:36 +03:00
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// Issues renders the user's issues overview page
|
2016-03-11 19:56:52 +03:00
|
|
|
func Issues(ctx *context.Context) {
|
2021-11-09 22:57:58 +03:00
|
|
|
if unit.TypeIssues.UnitGlobalDisabled() {
|
2021-01-13 07:19:17 +03:00
|
|
|
log.Debug("Issues overview page not available as it is globally disabled.")
|
2022-03-23 07:54:07 +03:00
|
|
|
ctx.Status(http.StatusNotFound)
|
2021-01-13 07:19:17 +03:00
|
|
|
return
|
|
|
|
}
|
2020-01-17 10:34:37 +03:00
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
ctx.Data["Title"] = ctx.Tr("issues")
|
|
|
|
ctx.Data["PageIsIssues"] = true
|
2022-08-22 15:51:48 +03:00
|
|
|
ctx.Data["SingleRepoAction"] = "issue"
|
2021-11-09 22:57:58 +03:00
|
|
|
buildIssueOverview(ctx, unit.TypeIssues)
|
2021-01-13 07:19:17 +03:00
|
|
|
}
|
2020-01-17 10:34:37 +03:00
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// issueReposQueryPattern matches the "repos" query parameter: a bracketed,
// comma-separated list of decimal repository IDs, e.g. "[1,2,3]".
// A single trailing comma before the closing bracket is tolerated.
var issueReposQueryPattern = regexp.MustCompile(`^\[\d+(,\d+)*,?\]$`)
|
|
|
|
|
2021-11-09 22:57:58 +03:00
|
|
|
func buildIssueOverview(ctx *context.Context, unitType unit.Type) {
|
2021-01-13 07:19:17 +03:00
|
|
|
// ----------------------------------------------------
|
|
|
|
// Determine user; can be either user or organization.
|
|
|
|
// Return with NotFound or ServerError if unsuccessful.
|
|
|
|
// ----------------------------------------------------
|
2015-08-25 17:58:34 +03:00
|
|
|
|
|
|
|
ctxUser := getDashboardContextUser(ctx)
|
|
|
|
if ctx.Written() {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
var (
|
|
|
|
viewType string
|
2021-08-11 03:31:13 +03:00
|
|
|
sortType = ctx.FormString("sort")
|
2022-03-24 01:57:09 +03:00
|
|
|
filterMode int
|
2015-08-25 17:58:34 +03:00
|
|
|
)
|
2017-02-14 17:15:18 +03:00
|
|
|
|
2023-02-17 10:13:35 +03:00
|
|
|
// Default to recently updated, unlike repository issues list
|
|
|
|
if sortType == "" {
|
|
|
|
sortType = "recentupdate"
|
|
|
|
}
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// --------------------------------------------------------------------------------
|
|
|
|
// Distinguish User from Organization.
|
|
|
|
// Org:
|
|
|
|
// - Remember pre-determined viewType string for later. Will be posted to ctx.Data.
|
|
|
|
// Organization does not have view type and filter mode.
|
|
|
|
// User:
|
2021-08-11 03:31:13 +03:00
|
|
|
// - Use ctx.FormString("type") to determine filterMode.
|
2021-01-13 07:19:17 +03:00
|
|
|
// The type is set when clicking for example "assigned to me" on the overview page.
|
|
|
|
// - Remember either this or a fallback. Will be posted to ctx.Data.
|
|
|
|
// --------------------------------------------------------------------------------
|
|
|
|
|
|
|
|
// TODO: distinguish during routing
|
|
|
|
|
2021-08-11 03:31:13 +03:00
|
|
|
viewType = ctx.FormString("type")
|
2021-01-03 20:29:12 +03:00
|
|
|
switch viewType {
|
|
|
|
case "assigned":
|
2022-06-13 12:37:59 +03:00
|
|
|
filterMode = issues_model.FilterModeAssign
|
2021-01-03 20:29:12 +03:00
|
|
|
case "created_by":
|
2022-06-13 12:37:59 +03:00
|
|
|
filterMode = issues_model.FilterModeCreate
|
2021-01-03 20:29:12 +03:00
|
|
|
case "mentioned":
|
2022-06-13 12:37:59 +03:00
|
|
|
filterMode = issues_model.FilterModeMention
|
2021-01-17 19:34:19 +03:00
|
|
|
case "review_requested":
|
2022-06-13 12:37:59 +03:00
|
|
|
filterMode = issues_model.FilterModeReviewRequested
|
2023-02-25 05:55:50 +03:00
|
|
|
case "reviewed_by":
|
|
|
|
filterMode = issues_model.FilterModeReviewed
|
2022-03-24 01:57:09 +03:00
|
|
|
case "your_repositories":
|
|
|
|
fallthrough
|
2021-01-03 20:29:12 +03:00
|
|
|
default:
|
2022-06-13 12:37:59 +03:00
|
|
|
filterMode = issues_model.FilterModeYourRepositories
|
2019-12-03 09:01:29 +03:00
|
|
|
viewType = "your_repositories"
|
2015-08-25 17:58:34 +03:00
|
|
|
}
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// --------------------------------------------------------------------------
|
|
|
|
// Build opts (IssuesOptions), which contains filter information.
|
|
|
|
// Will eventually be used to retrieve issues relevant for the overview page.
|
|
|
|
// Note: Non-final states of opts are used in-between, namely for:
|
|
|
|
// - Keyword search
|
|
|
|
// - Count Issues by repo
|
|
|
|
// --------------------------------------------------------------------------
|
2017-02-14 17:15:18 +03:00
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// Get repository IDs where User/Org/Team has access.
|
2022-03-29 09:29:02 +03:00
|
|
|
var team *organization.Team
|
|
|
|
var org *organization.Organization
|
2021-01-13 07:19:17 +03:00
|
|
|
if ctx.Org != nil {
|
2021-12-29 16:02:12 +03:00
|
|
|
org = ctx.Org.Organization
|
2021-01-13 07:19:17 +03:00
|
|
|
team = ctx.Org.Team
|
2017-02-17 03:58:19 +03:00
|
|
|
}
|
2021-12-29 16:02:12 +03:00
|
|
|
|
|
|
|
isPullList := unitType == unit.TypePullRequests
|
2022-06-13 12:37:59 +03:00
|
|
|
opts := &issues_model.IssuesOptions{
|
2021-12-29 16:02:12 +03:00
|
|
|
IsPull: util.OptionalBoolOf(isPullList),
|
|
|
|
SortType: sortType,
|
|
|
|
IsArchived: util.OptionalBoolFalse,
|
|
|
|
Org: org,
|
|
|
|
Team: team,
|
2022-03-22 10:03:22 +03:00
|
|
|
User: ctx.Doer,
|
2019-10-08 20:55:16 +03:00
|
|
|
}
|
|
|
|
|
2022-03-24 01:57:09 +03:00
|
|
|
// Search all repositories which
|
|
|
|
//
|
|
|
|
// As user:
|
|
|
|
// - Owns the repository.
|
|
|
|
// - Have collaborator permissions in repository.
|
|
|
|
//
|
|
|
|
// As org:
|
|
|
|
// - Owns the repository.
|
|
|
|
//
|
|
|
|
// As team:
|
|
|
|
// - Team org's owns the repository.
|
|
|
|
// - Team has read permission to repository.
|
2022-06-06 11:01:49 +03:00
|
|
|
repoOpts := &repo_model.SearchRepoOptions{
|
2022-03-24 01:57:09 +03:00
|
|
|
Actor: ctx.Doer,
|
|
|
|
OwnerID: ctx.Doer.ID,
|
|
|
|
Private: true,
|
|
|
|
AllPublic: false,
|
|
|
|
AllLimited: false,
|
|
|
|
}
|
|
|
|
|
2022-05-16 12:49:17 +03:00
|
|
|
if team != nil {
|
|
|
|
repoOpts.TeamID = team.ID
|
2022-03-24 01:57:09 +03:00
|
|
|
}
|
|
|
|
|
2017-02-14 17:15:18 +03:00
|
|
|
switch filterMode {
|
2022-06-13 12:37:59 +03:00
|
|
|
case issues_model.FilterModeAll:
|
|
|
|
case issues_model.FilterModeYourRepositories:
|
|
|
|
case issues_model.FilterModeAssign:
|
2022-03-22 10:03:22 +03:00
|
|
|
opts.AssigneeID = ctx.Doer.ID
|
2022-06-13 12:37:59 +03:00
|
|
|
case issues_model.FilterModeCreate:
|
2022-03-22 10:03:22 +03:00
|
|
|
opts.PosterID = ctx.Doer.ID
|
2022-06-13 12:37:59 +03:00
|
|
|
case issues_model.FilterModeMention:
|
2022-03-22 10:03:22 +03:00
|
|
|
opts.MentionedID = ctx.Doer.ID
|
2022-06-13 12:37:59 +03:00
|
|
|
case issues_model.FilterModeReviewRequested:
|
2022-03-22 10:03:22 +03:00
|
|
|
opts.ReviewRequestedID = ctx.Doer.ID
|
2023-02-25 05:55:50 +03:00
|
|
|
case issues_model.FilterModeReviewed:
|
|
|
|
opts.ReviewedID = ctx.Doer.ID
|
2021-01-03 20:29:12 +03:00
|
|
|
}
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// keyword holds the search term entered into the search field.
|
2021-08-11 03:31:13 +03:00
|
|
|
keyword := strings.Trim(ctx.FormString("q"), " ")
|
2021-01-13 07:19:17 +03:00
|
|
|
ctx.Data["Keyword"] = keyword
|
2020-02-29 09:52:05 +03:00
|
|
|
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
accessibleRepos := container.Set[int64]{}
|
|
|
|
{
|
|
|
|
ids, err := issues_model.GetRepoIDsForIssuesOptions(opts, ctxUser)
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetRepoIDsForIssuesOptions", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
for _, id := range ids {
|
|
|
|
accessibleRepos.Add(id)
|
|
|
|
}
|
2021-01-13 07:19:17 +03:00
|
|
|
}
|
2020-02-29 09:52:05 +03:00
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// Educated guess: Do or don't show closed issues.
|
2021-08-11 03:31:13 +03:00
|
|
|
isShowClosed := ctx.FormString("state") == "closed"
|
2020-02-29 09:52:05 +03:00
|
|
|
opts.IsClosed = util.OptionalBoolOf(isShowClosed)
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// Filter repos and count issues in them. Count will be used later.
|
|
|
|
// USING NON-FINAL STATE OF opts FOR A QUERY.
|
|
|
|
var issueCountByRepo map[int64]int64
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
{
|
|
|
|
issueIDs, err := issueIDsFromSearch(ctx, keyword, opts)
|
2020-02-29 09:52:05 +03:00
|
|
|
if err != nil {
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
ctx.ServerError("issueIDsFromSearch", err)
|
2020-02-29 09:52:05 +03:00
|
|
|
return
|
|
|
|
}
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
if len(issueIDs) > 0 { // else, no issues found, just leave issueCountByRepo empty
|
|
|
|
opts.IssueIDs = issueIDs
|
|
|
|
issueCountByRepo, err = issues_model.CountIssuesByRepo(ctx, opts)
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("CountIssuesByRepo", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
opts.IssueIDs = nil // reset, the opts will be used later
|
|
|
|
}
|
2019-10-08 20:55:16 +03:00
|
|
|
}
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// Make sure page number is at least 1. Will be posted to ctx.Data.
|
2021-07-29 04:42:15 +03:00
|
|
|
page := ctx.FormInt("page")
|
2021-01-13 07:19:17 +03:00
|
|
|
if page <= 1 {
|
|
|
|
page = 1
|
|
|
|
}
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
opts.Paginator = &db.ListOptions{
|
|
|
|
Page: page,
|
|
|
|
PageSize: setting.UI.IssuePagingNum,
|
|
|
|
}
|
2021-01-13 07:19:17 +03:00
|
|
|
|
|
|
|
// Get IDs for labels (a filter option for issues/pulls).
|
|
|
|
// Required for IssuesOptions.
|
2019-01-23 07:10:38 +03:00
|
|
|
var labelIDs []int64
|
2021-08-11 03:31:13 +03:00
|
|
|
selectedLabels := ctx.FormString("labels")
|
2021-01-13 07:19:17 +03:00
|
|
|
if len(selectedLabels) > 0 && selectedLabels != "0" {
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
var err error
|
2021-01-13 07:19:17 +03:00
|
|
|
labelIDs, err = base.StringsToInt64s(strings.Split(selectedLabels, ","))
|
2019-01-23 07:10:38 +03:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("StringsToInt64s", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
opts.LabelIDs = labelIDs
|
2018-10-28 09:55:01 +03:00
|
|
|
|
2021-08-11 03:31:13 +03:00
|
|
|
// Parse ctx.FormString("repos") and remember matched repo IDs for later.
|
2021-01-13 07:19:17 +03:00
|
|
|
// Gets set when clicking filters on the issues overview page.
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
repoIDs := getRepoIDs(ctx.FormString("repos"))
|
|
|
|
if len(repoIDs) == 0 {
|
|
|
|
repoIDs = accessibleRepos.Values()
|
|
|
|
} else {
|
|
|
|
// Remove repo IDs that are not accessible to the user.
|
|
|
|
repoIDs = util.SliceRemoveAllFunc(repoIDs, func(v int64) bool {
|
|
|
|
return !accessibleRepos.Contains(v)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
opts.RepoIDs = repoIDs
|
2019-12-02 06:50:36 +03:00
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// ------------------------------
|
|
|
|
// Get issues as defined by opts.
|
|
|
|
// ------------------------------
|
|
|
|
|
|
|
|
// Slice of Issues that will be displayed on the overview page
|
|
|
|
// USING FINAL STATE OF opts FOR A QUERY.
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
var issues issues_model.IssueList
|
|
|
|
{
|
|
|
|
issueIDs, err := issueIDsFromSearch(ctx, keyword, opts)
|
2020-02-29 09:52:05 +03:00
|
|
|
if err != nil {
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
ctx.ServerError("issueIDsFromSearch", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
issues, err = issues_model.GetIssuesByIDs(ctx, issueIDs, true)
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetIssuesByIDs", err)
|
2020-02-29 09:52:05 +03:00
|
|
|
return
|
|
|
|
}
|
2017-02-17 03:58:19 +03:00
|
|
|
}
|
2015-09-02 23:18:09 +03:00
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// ----------------------------------
|
|
|
|
// Add repository pointers to Issues.
|
|
|
|
// ----------------------------------
|
2019-10-08 20:55:16 +03:00
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// showReposMap maps repository IDs to their Repository pointers.
|
2021-12-29 16:02:12 +03:00
|
|
|
showReposMap, err := loadRepoByIDs(ctxUser, issueCountByRepo, unitType)
|
2021-01-13 07:19:17 +03:00
|
|
|
if err != nil {
|
2021-12-10 04:27:50 +03:00
|
|
|
if repo_model.IsErrRepoNotExist(err) {
|
2021-01-13 07:19:17 +03:00
|
|
|
ctx.NotFound("GetRepositoryByID", err)
|
|
|
|
return
|
2019-10-08 20:55:16 +03:00
|
|
|
}
|
2021-12-29 16:02:12 +03:00
|
|
|
ctx.ServerError("loadRepoByIDs", err)
|
2021-01-13 07:19:17 +03:00
|
|
|
return
|
2019-10-08 20:55:16 +03:00
|
|
|
}
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// a RepositoryList
|
2022-06-06 11:01:49 +03:00
|
|
|
showRepos := repo_model.RepositoryListOfMap(showReposMap)
|
2017-12-04 07:39:01 +03:00
|
|
|
sort.Sort(showRepos)
|
2015-08-25 18:22:05 +03:00
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// maps pull request IDs to their CommitStatus. Will be posted to ctx.Data.
|
2017-08-03 08:09:16 +03:00
|
|
|
for _, issue := range issues {
|
2021-12-29 16:02:12 +03:00
|
|
|
if issue.Repo == nil {
|
|
|
|
issue.Repo = showReposMap[issue.RepoID]
|
|
|
|
}
|
2021-04-15 20:34:43 +03:00
|
|
|
}
|
2019-04-02 22:54:29 +03:00
|
|
|
|
2022-04-27 01:40:01 +03:00
|
|
|
commitStatuses, lastStatus, err := pull_service.GetIssuesAllCommitStatus(ctx, issues)
|
2021-04-15 20:34:43 +03:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetIssuesLastCommitStatus", err)
|
|
|
|
return
|
2017-08-03 08:09:16 +03:00
|
|
|
}
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// -------------------------------
|
|
|
|
// Fill stats to post to ctx.Data.
|
|
|
|
// -------------------------------
|
2022-06-13 12:37:59 +03:00
|
|
|
var issueStats *issues_model.IssueStats
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
{
|
2023-05-19 17:17:48 +03:00
|
|
|
statsOpts := issues_model.IssuesOptions{
|
|
|
|
User: ctx.Doer,
|
|
|
|
IsPull: util.OptionalBoolOf(isPullList),
|
|
|
|
IsClosed: util.OptionalBoolOf(isShowClosed),
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
IssueIDs: nil,
|
2021-12-29 16:02:12 +03:00
|
|
|
IsArchived: util.OptionalBoolFalse,
|
|
|
|
LabelIDs: opts.LabelIDs,
|
|
|
|
Org: org,
|
|
|
|
Team: team,
|
2022-07-24 19:53:40 +03:00
|
|
|
RepoCond: opts.RepoCond,
|
2020-02-29 09:52:05 +03:00
|
|
|
}
|
2022-05-16 12:49:17 +03:00
|
|
|
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
if keyword != "" {
|
|
|
|
statsOpts.RepoIDs = opts.RepoIDs
|
|
|
|
allIssueIDs, err := issueIDsFromSearch(ctx, keyword, &statsOpts)
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("issueIDsFromSearch", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
statsOpts.IssueIDs = allIssueIDs
|
|
|
|
}
|
|
|
|
|
|
|
|
if keyword != "" && len(statsOpts.IssueIDs) == 0 {
|
|
|
|
// So it did search with the keyword, but no issue found.
|
|
|
|
// Just set issueStats to empty.
|
|
|
|
issueStats = &issues_model.IssueStats{}
|
|
|
|
} else {
|
|
|
|
// So it did search with the keyword, and found some issues. It needs to get issueStats of these issues.
|
|
|
|
// Or the keyword is empty, so it doesn't need issueIDs as filter, just get issueStats with statsOpts.
|
|
|
|
issueStats, err = issues_model.GetUserIssueStats(filterMode, statsOpts)
|
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("GetUserIssueStats", err)
|
|
|
|
return
|
|
|
|
}
|
2020-02-29 09:52:05 +03:00
|
|
|
}
|
2019-12-02 06:50:36 +03:00
|
|
|
}
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
// Will be posted to ctx.Data.
|
2019-12-02 06:50:36 +03:00
|
|
|
var shownIssues int
|
2015-08-25 18:22:05 +03:00
|
|
|
if !isShowClosed {
|
2021-12-29 16:02:12 +03:00
|
|
|
shownIssues = int(issueStats.OpenCount)
|
2015-08-25 18:22:05 +03:00
|
|
|
} else {
|
2021-12-29 16:02:12 +03:00
|
|
|
shownIssues = int(issueStats.ClosedCount)
|
2015-08-25 18:22:05 +03:00
|
|
|
}
|
2023-05-19 17:17:48 +03:00
|
|
|
if len(opts.RepoIDs) != 0 {
|
2022-06-04 23:12:14 +03:00
|
|
|
shownIssues = 0
|
2023-05-19 17:17:48 +03:00
|
|
|
for _, repoID := range opts.RepoIDs {
|
2022-06-04 23:12:14 +03:00
|
|
|
shownIssues += int(issueCountByRepo[repoID])
|
|
|
|
}
|
|
|
|
}
|
2022-08-17 16:13:41 +03:00
|
|
|
|
|
|
|
var allIssueCount int64
|
|
|
|
for _, issueCount := range issueCountByRepo {
|
|
|
|
allIssueCount += issueCount
|
|
|
|
}
|
|
|
|
ctx.Data["TotalIssueCount"] = allIssueCount
|
|
|
|
|
2023-05-19 17:17:48 +03:00
|
|
|
if len(opts.RepoIDs) == 1 {
|
|
|
|
repo := showReposMap[opts.RepoIDs[0]]
|
2022-08-09 14:30:09 +03:00
|
|
|
if repo != nil {
|
|
|
|
ctx.Data["SingleRepoLink"] = repo.Link()
|
|
|
|
}
|
|
|
|
}
|
2015-08-25 18:22:05 +03:00
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
ctx.Data["IsShowClosed"] = isShowClosed
|
|
|
|
|
2022-01-20 20:46:10 +03:00
|
|
|
ctx.Data["IssueRefEndNames"], ctx.Data["IssueRefURLs"] = issue_service.GetRefEndNamesAndURLs(issues, ctx.FormString("RepoLink"))
|
2020-05-15 01:55:43 +03:00
|
|
|
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
if err := issues.LoadAttributes(ctx); err != nil {
|
|
|
|
ctx.ServerError("issues.LoadAttributes", err)
|
|
|
|
return
|
|
|
|
}
|
2015-08-25 17:58:34 +03:00
|
|
|
ctx.Data["Issues"] = issues
|
2021-01-13 07:19:17 +03:00
|
|
|
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
approvalCounts, err := issues.GetApprovalCounts(ctx)
|
2021-01-13 07:19:17 +03:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("ApprovalCounts", err)
|
|
|
|
return
|
|
|
|
}
|
2020-03-06 06:44:06 +03:00
|
|
|
ctx.Data["ApprovalCounts"] = func(issueID int64, typ string) int64 {
|
|
|
|
counts, ok := approvalCounts[issueID]
|
|
|
|
if !ok || len(counts) == 0 {
|
|
|
|
return 0
|
|
|
|
}
|
2022-06-13 12:37:59 +03:00
|
|
|
reviewTyp := issues_model.ReviewTypeApprove
|
2020-03-06 06:44:06 +03:00
|
|
|
if typ == "reject" {
|
2022-06-13 12:37:59 +03:00
|
|
|
reviewTyp = issues_model.ReviewTypeReject
|
2020-04-06 19:33:34 +03:00
|
|
|
} else if typ == "waiting" {
|
2022-06-13 12:37:59 +03:00
|
|
|
reviewTyp = issues_model.ReviewTypeRequest
|
2020-03-06 06:44:06 +03:00
|
|
|
}
|
|
|
|
for _, count := range counts {
|
|
|
|
if count.Type == reviewTyp {
|
|
|
|
return count.Count
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return 0
|
|
|
|
}
|
2022-04-27 01:40:01 +03:00
|
|
|
ctx.Data["CommitLastStatus"] = lastStatus
|
|
|
|
ctx.Data["CommitStatuses"] = commitStatuses
|
2017-02-14 17:15:18 +03:00
|
|
|
ctx.Data["Repos"] = showRepos
|
2021-01-13 07:19:17 +03:00
|
|
|
ctx.Data["Counts"] = issueCountByRepo
|
2021-12-29 16:02:12 +03:00
|
|
|
ctx.Data["IssueStats"] = issueStats
|
2015-08-25 17:58:34 +03:00
|
|
|
ctx.Data["ViewType"] = viewType
|
2015-11-04 20:50:02 +03:00
|
|
|
ctx.Data["SortType"] = sortType
|
2023-05-19 17:17:48 +03:00
|
|
|
ctx.Data["RepoIDs"] = opts.RepoIDs
|
2015-08-25 17:58:34 +03:00
|
|
|
ctx.Data["IsShowClosed"] = isShowClosed
|
2021-01-13 07:19:17 +03:00
|
|
|
ctx.Data["SelectLabels"] = selectedLabels
|
2017-02-14 17:15:18 +03:00
|
|
|
|
2015-08-25 17:58:34 +03:00
|
|
|
if isShowClosed {
|
|
|
|
ctx.Data["State"] = "closed"
|
|
|
|
} else {
|
|
|
|
ctx.Data["State"] = "open"
|
|
|
|
}
|
|
|
|
|
2019-12-02 06:50:36 +03:00
|
|
|
// Convert []int64 to string
|
2023-05-19 17:17:48 +03:00
|
|
|
reposParam, _ := json.Marshal(opts.RepoIDs)
|
2019-12-02 06:50:36 +03:00
|
|
|
|
|
|
|
ctx.Data["ReposParam"] = string(reposParam)
|
|
|
|
|
|
|
|
pager := context.NewPagination(shownIssues, setting.UI.IssuePagingNum, page, 5)
|
2020-02-29 09:52:05 +03:00
|
|
|
pager.AddParam(ctx, "q", "Keyword")
|
2019-04-20 07:15:19 +03:00
|
|
|
pager.AddParam(ctx, "type", "ViewType")
|
2019-12-02 06:50:36 +03:00
|
|
|
pager.AddParam(ctx, "repos", "ReposParam")
|
2019-04-20 07:15:19 +03:00
|
|
|
pager.AddParam(ctx, "sort", "SortType")
|
|
|
|
pager.AddParam(ctx, "state", "State")
|
|
|
|
pager.AddParam(ctx, "labels", "SelectLabels")
|
|
|
|
pager.AddParam(ctx, "milestone", "MilestoneID")
|
|
|
|
pager.AddParam(ctx, "assignee", "AssigneeID")
|
|
|
|
ctx.Data["Page"] = pager
|
|
|
|
|
2021-04-05 18:30:52 +03:00
|
|
|
ctx.HTML(http.StatusOK, tplIssues)
|
2015-08-25 17:58:34 +03:00
|
|
|
}
|
|
|
|
|
2021-01-13 07:19:17 +03:00
|
|
|
func getRepoIDs(reposQuery string) []int64 {
|
2021-03-12 06:06:33 +03:00
|
|
|
if len(reposQuery) == 0 || reposQuery == "[]" {
|
2021-01-13 07:19:17 +03:00
|
|
|
return []int64{}
|
|
|
|
}
|
|
|
|
if !issueReposQueryPattern.MatchString(reposQuery) {
|
|
|
|
log.Warn("issueReposQueryPattern does not match query")
|
|
|
|
return []int64{}
|
|
|
|
}
|
|
|
|
|
|
|
|
var repoIDs []int64
|
|
|
|
// remove "[" and "]" from string
|
|
|
|
reposQuery = reposQuery[1 : len(reposQuery)-1]
|
2022-01-20 20:46:10 +03:00
|
|
|
// for each ID (delimiter ",") add to int to repoIDs
|
2021-01-13 07:19:17 +03:00
|
|
|
for _, rID := range strings.Split(reposQuery, ",") {
|
|
|
|
// Ensure nonempty string entries
|
|
|
|
if rID != "" && rID != "0" {
|
|
|
|
rIDint64, err := strconv.ParseInt(rID, 10, 64)
|
|
|
|
if err == nil {
|
|
|
|
repoIDs = append(repoIDs, rIDint64)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return repoIDs
|
|
|
|
}
|
|
|
|
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
func issueIDsFromSearch(ctx *context.Context, keyword string, opts *issues_model.IssuesOptions) ([]int64, error) {
|
|
|
|
ids, _, err := issue_indexer.SearchIssues(ctx, issue_indexer.ToSearchOptions(keyword, opts))
|
2021-01-13 07:19:17 +03:00
|
|
|
if err != nil {
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
return nil, fmt.Errorf("SearchIssues: %w", err)
|
2021-01-13 07:19:17 +03:00
|
|
|
}
|
Refactor and enhance issue indexer to support both searching, filtering and paging (#26012)
Fix #24662.
Replace #24822 and #25708 (although it has been merged)
## Background
In the past, Gitea supported issue searching with a keyword and
conditions in a less efficient way. It worked by searching for issues
with the keyword and obtaining limited IDs (as it is heavy to get all)
on the indexer (bleve/elasticsearch/meilisearch), and then querying with
conditions on the database to find a subset of the found IDs. This is
why the results could be incomplete.
To solve this issue, we need to store all fields that could be used as
conditions in the indexer and support both keyword and additional
conditions when searching with the indexer.
## Major changes
- Redefine `IndexerData` to include all fields that could be used as
filter conditions.
- Refactor `Search(ctx context.Context, kw string, repoIDs []int64,
limit, start int, state string)` to `Search(ctx context.Context, options
*SearchOptions)`, so it supports more conditions now.
- Change the data type stored in `issueIndexerQueue`. Use
`IndexerMetadata` instead of `IndexerData` in case the data has been
updated while it is in the queue. This also reduces the storage size of
the queue.
- Enhance searching with Bleve/Elasticsearch/Meilisearch, make them
fully support `SearchOptions`. Also, update the data versions.
- Keep most logic of database indexer, but remove
`issues.SearchIssueIDsByKeyword` in `models` to avoid confusion where is
the entry point to search issues.
- Start a Meilisearch instance to test it in unit tests.
- Add unit tests with almost full coverage to test
Bleve/Elasticsearch/Meilisearch indexer.
---------
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-07-31 09:28:53 +03:00
|
|
|
return ids, nil
|
2021-01-13 07:19:17 +03:00
|
|
|
}
|
|
|
|
|
2021-12-29 16:02:12 +03:00
|
|
|
func loadRepoByIDs(ctxUser *user_model.User, issueCountByRepo map[int64]int64, unitType unit.Type) (map[int64]*repo_model.Repository, error) {
|
2022-01-20 20:46:10 +03:00
|
|
|
totalRes := make(map[int64]*repo_model.Repository, len(issueCountByRepo))
|
|
|
|
repoIDs := make([]int64, 0, 500)
|
2021-01-13 07:19:17 +03:00
|
|
|
for id := range issueCountByRepo {
|
|
|
|
if id <= 0 {
|
|
|
|
continue
|
|
|
|
}
|
2021-12-29 16:02:12 +03:00
|
|
|
repoIDs = append(repoIDs, id)
|
|
|
|
if len(repoIDs) == 500 {
|
|
|
|
if err := repo_model.FindReposMapByIDs(repoIDs, totalRes); err != nil {
|
2021-01-13 07:19:17 +03:00
|
|
|
return nil, err
|
|
|
|
}
|
2021-12-29 16:02:12 +03:00
|
|
|
repoIDs = repoIDs[:0]
|
2021-01-13 07:19:17 +03:00
|
|
|
}
|
2021-12-29 16:02:12 +03:00
|
|
|
}
|
|
|
|
if len(repoIDs) > 0 {
|
|
|
|
if err := repo_model.FindReposMapByIDs(repoIDs, totalRes); err != nil {
|
|
|
|
return nil, err
|
2021-01-13 07:19:17 +03:00
|
|
|
}
|
|
|
|
}
|
2021-12-29 16:02:12 +03:00
|
|
|
return totalRes, nil
|
2021-01-13 07:19:17 +03:00
|
|
|
}
|
|
|
|
|
2016-11-27 14:59:12 +03:00
|
|
|
// ShowSSHKeys output all the ssh keys of user by uid
|
2022-03-26 12:04:22 +03:00
|
|
|
func ShowSSHKeys(ctx *context.Context) {
|
|
|
|
keys, err := asymkey_model.ListPublicKeys(ctx.ContextUser.ID, db.ListOptions{})
|
2014-11-23 10:33:47 +03:00
|
|
|
if err != nil {
|
2018-01-11 00:34:17 +03:00
|
|
|
ctx.ServerError("ListPublicKeys", err)
|
2014-11-23 10:33:47 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
var buf bytes.Buffer
|
|
|
|
for i := range keys {
|
|
|
|
buf.WriteString(keys[i].OmitEmail())
|
2015-06-08 10:40:38 +03:00
|
|
|
buf.WriteString("\n")
|
2014-11-23 10:33:47 +03:00
|
|
|
}
|
2021-12-15 09:59:57 +03:00
|
|
|
ctx.PlainTextBytes(http.StatusOK, buf.Bytes())
|
2014-11-23 10:33:47 +03:00
|
|
|
}
|
|
|
|
|
2019-04-14 19:43:56 +03:00
|
|
|
// ShowGPGKeys output all the public GPG keys of user by uid
|
2022-03-26 12:04:22 +03:00
|
|
|
func ShowGPGKeys(ctx *context.Context) {
|
|
|
|
keys, err := asymkey_model.ListGPGKeys(ctx, ctx.ContextUser.ID, db.ListOptions{})
|
2019-04-14 19:43:56 +03:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("ListGPGKeys", err)
|
|
|
|
return
|
|
|
|
}
|
2022-03-02 19:32:18 +03:00
|
|
|
|
2019-04-14 19:43:56 +03:00
|
|
|
entities := make([]*openpgp.Entity, 0)
|
|
|
|
failedEntitiesID := make([]string, 0)
|
|
|
|
for _, k := range keys {
|
2021-12-10 11:14:24 +03:00
|
|
|
e, err := asymkey_model.GPGKeyToEntity(k)
|
2019-04-14 19:43:56 +03:00
|
|
|
if err != nil {
|
2021-12-10 11:14:24 +03:00
|
|
|
if asymkey_model.IsErrGPGKeyImportNotExist(err) {
|
2019-04-14 19:43:56 +03:00
|
|
|
failedEntitiesID = append(failedEntitiesID, k.KeyID)
|
2022-01-20 20:46:10 +03:00
|
|
|
continue // Skip previous import without backup of imported armored key
|
2019-04-14 19:43:56 +03:00
|
|
|
}
|
|
|
|
ctx.ServerError("ShowGPGKeys", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
entities = append(entities, e)
|
|
|
|
}
|
|
|
|
var buf bytes.Buffer
|
|
|
|
|
|
|
|
headers := make(map[string]string)
|
2022-01-20 20:46:10 +03:00
|
|
|
if len(failedEntitiesID) > 0 { // If some key need re-import to be exported
|
2019-04-14 19:43:56 +03:00
|
|
|
headers["Note"] = fmt.Sprintf("The keys with the following IDs couldn't be exported and need to be reuploaded %s", strings.Join(failedEntitiesID, ", "))
|
2022-03-02 19:32:18 +03:00
|
|
|
} else if len(entities) == 0 {
|
|
|
|
headers["Note"] = "This user hasn't uploaded any GPG keys."
|
2019-04-14 19:43:56 +03:00
|
|
|
}
|
|
|
|
writer, _ := armor.Encode(&buf, "PGP PUBLIC KEY BLOCK", headers)
|
|
|
|
for _, e := range entities {
|
2022-01-20 20:46:10 +03:00
|
|
|
err = e.Serialize(writer) // TODO find why key are exported with a different cipherTypeByte as original (should not be blocking but strange)
|
2019-04-14 19:43:56 +03:00
|
|
|
if err != nil {
|
|
|
|
ctx.ServerError("ShowGPGKeys", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
writer.Close()
|
2021-12-15 09:59:57 +03:00
|
|
|
ctx.PlainTextBytes(http.StatusOK, buf.Bytes())
|
2019-04-14 19:43:56 +03:00
|
|
|
}
|
2023-04-27 09:06:45 +03:00
|
|
|
|
|
|
|
func UsernameSubRoute(ctx *context.Context) {
|
|
|
|
// WORKAROUND to support usernames with "." in it
|
|
|
|
// https://github.com/go-chi/chi/issues/781
|
|
|
|
username := ctx.Params("username")
|
|
|
|
reloadParam := func(suffix string) (success bool) {
|
|
|
|
ctx.SetParams("username", strings.TrimSuffix(username, suffix))
|
|
|
|
context_service.UserAssignmentWeb()(ctx)
|
|
|
|
return !ctx.Written()
|
|
|
|
}
|
|
|
|
switch {
|
|
|
|
case strings.HasSuffix(username, ".png"):
|
|
|
|
if reloadParam(".png") {
|
|
|
|
AvatarByUserName(ctx)
|
|
|
|
}
|
|
|
|
case strings.HasSuffix(username, ".keys"):
|
|
|
|
if reloadParam(".keys") {
|
|
|
|
ShowSSHKeys(ctx)
|
|
|
|
}
|
|
|
|
case strings.HasSuffix(username, ".gpg"):
|
|
|
|
if reloadParam(".gpg") {
|
|
|
|
ShowGPGKeys(ctx)
|
|
|
|
}
|
|
|
|
case strings.HasSuffix(username, ".rss"):
|
|
|
|
if !setting.Other.EnableFeed {
|
|
|
|
ctx.Error(http.StatusNotFound)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if reloadParam(".rss") {
|
|
|
|
context_service.UserAssignmentWeb()(ctx)
|
|
|
|
feed.ShowUserFeedRSS(ctx)
|
|
|
|
}
|
|
|
|
case strings.HasSuffix(username, ".atom"):
|
|
|
|
if !setting.Other.EnableFeed {
|
|
|
|
ctx.Error(http.StatusNotFound)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if reloadParam(".atom") {
|
|
|
|
feed.ShowUserFeedAtom(ctx)
|
|
|
|
}
|
|
|
|
default:
|
|
|
|
context_service.UserAssignmentWeb()(ctx)
|
|
|
|
if !ctx.Written() {
|
|
|
|
ctx.Data["EnableFeed"] = setting.Other.EnableFeed
|
2023-07-06 21:59:24 +03:00
|
|
|
OwnerProfile(ctx)
|
2023-04-27 09:06:45 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|