2017-01-25 03:43:02 +01:00
|
|
|
// Copyright 2016 The Gitea Authors. All rights reserved.
|
|
|
|
// Use of this source code is governed by a MIT-style
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
2019-12-10 14:29:40 +01:00
|
|
|
package code
|
2017-01-25 03:43:02 +01:00
|
|
|
|
|
|
|
import (
|
2019-12-24 08:26:34 +01:00
|
|
|
"context"
|
|
|
|
"os"
|
2020-08-30 18:08:01 +02:00
|
|
|
"strconv"
|
|
|
|
"strings"
|
2019-12-23 13:31:16 +01:00
|
|
|
"time"
|
2017-09-16 22:16:21 +02:00
|
|
|
|
2021-09-19 13:49:59 +02:00
|
|
|
"code.gitea.io/gitea/models/db"
|
2021-12-10 02:27:50 +01:00
|
|
|
repo_model "code.gitea.io/gitea/models/repo"
|
2019-12-23 13:31:16 +01:00
|
|
|
"code.gitea.io/gitea/modules/graceful"
|
|
|
|
"code.gitea.io/gitea/modules/log"
|
2020-09-07 17:05:08 +02:00
|
|
|
"code.gitea.io/gitea/modules/queue"
|
2018-02-05 19:29:17 +01:00
|
|
|
"code.gitea.io/gitea/modules/setting"
|
2020-02-20 20:53:55 +01:00
|
|
|
"code.gitea.io/gitea/modules/timeutil"
|
2019-12-23 13:31:16 +01:00
|
|
|
)
|
2018-02-05 19:29:17 +01:00
|
|
|
|
2019-12-23 13:31:16 +01:00
|
|
|
// SearchResult result of performing a search in a repo
type SearchResult struct {
	// RepoID is the ID of the repository the match was found in.
	RepoID int64
	// StartIndex is the start offset of the match within Content
	// (assumed byte offset — confirm against the indexer backend).
	StartIndex int
	// EndIndex is the end offset of the match within Content.
	EndIndex int
	// Filename is the path of the matched file within the repository.
	Filename string
	// Content is the matched file content fragment.
	Content string
	// CommitID is the commit the indexed content was taken from.
	CommitID string
	// UpdatedUnix is when the indexed entry was last updated.
	UpdatedUnix timeutil.TimeStamp
	// Language is the detected programming language of the file.
	Language string
	// Color is the display color associated with Language.
	Color string
}
|
|
|
|
|
|
|
|
// SearchResultLanguages result of top languages count in search results
type SearchResultLanguages struct {
	// Language is the programming language name.
	Language string
	// Color is the display color associated with Language.
	Color string
	// Count is the number of search results in this language.
	Count int
}
|
|
|
|
|
2020-09-07 17:05:08 +02:00
|
|
|
// Indexer defines an interface to index and search code contents
type Indexer interface {
	// Index updates the index entries for the given repository at commit sha
	// using the supplied set of changed files.
	Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *repoChanges) error
	// Delete removes all index entries belonging to the given repository.
	Delete(repoID int64) error
	// Search performs a paged keyword search limited to the given repositories
	// and (optionally) language, returning the total hit count, the page of
	// results, and the per-language result counts.
	Search(repoIDs []int64, language, keyword string, page, pageSize int, isMatch bool) (int64, []*SearchResult, []*SearchResultLanguages, error)
	// Close releases any resources held by the indexer.
	Close()
}
|
|
|
|
|
2020-08-30 18:08:01 +02:00
|
|
|
func filenameIndexerID(repoID int64, filename string) string {
|
|
|
|
return indexerID(repoID) + "_" + filename
|
|
|
|
}
|
|
|
|
|
2020-09-14 12:40:07 +02:00
|
|
|
// indexerID converts a database ID to the compact base-36 string form
// used as (part of) an indexer document ID.
func indexerID(id int64) string {
	const idBase = 36
	return strconv.FormatInt(id, idBase)
}
|
|
|
|
|
2020-08-30 18:08:01 +02:00
|
|
|
func parseIndexerID(indexerID string) (int64, string) {
|
|
|
|
index := strings.IndexByte(indexerID, '_')
|
|
|
|
if index == -1 {
|
|
|
|
log.Error("Unexpected ID in repo indexer: %s", indexerID)
|
|
|
|
}
|
2020-09-14 12:40:07 +02:00
|
|
|
repoID, _ := strconv.ParseInt(indexerID[:index], 36, 64)
|
2020-08-30 18:08:01 +02:00
|
|
|
return repoID, indexerID[index+1:]
|
|
|
|
}
|
|
|
|
|
|
|
|
func filenameOfIndexerID(indexerID string) string {
|
|
|
|
index := strings.IndexByte(indexerID, '_')
|
|
|
|
if index == -1 {
|
|
|
|
log.Error("Unexpected ID in repo indexer: %s", indexerID)
|
|
|
|
}
|
|
|
|
return indexerID[index+1:]
|
|
|
|
}
|
|
|
|
|
2020-09-07 17:05:08 +02:00
|
|
|
// IndexerData represents data stored in the code indexer
type IndexerData struct {
	// RepoID is the ID of the repository whose code should be (re)indexed.
	RepoID int64
}
|
|
|
|
|
2022-01-20 18:46:10 +01:00
|
|
|
// indexerQueue feeds repositories to the code indexer; it is created in Init
// and pushed to by UpdateRepoIndexer and populateRepoIndexer.
var indexerQueue queue.UniqueQueue
|
2020-09-07 17:05:08 +02:00
|
|
|
|
2022-01-20 00:26:57 +01:00
|
|
|
// index (re)indexes the code of a single repository at its default branch
// head. If the repository no longer exists, its index entries are deleted
// instead. On success the repository's indexer status is updated to the
// indexed commit.
func index(ctx context.Context, indexer Indexer, repoID int64) error {
	repo, err := repo_model.GetRepositoryByID(repoID)
	if repo_model.IsErrRepoNotExist(err) {
		// Repository was deleted after being queued: remove its entries.
		return indexer.Delete(repoID)
	}
	if err != nil {
		return err
	}

	sha, err := getDefaultBranchSha(ctx, repo)
	if err != nil {
		return err
	}
	// nil changes means there is nothing to index for this commit
	// (presumably already up to date — see getRepoChanges).
	changes, err := getRepoChanges(ctx, repo, sha)
	if err != nil {
		return err
	} else if changes == nil {
		return nil
	}

	if err := indexer.Index(ctx, repo, sha, changes); err != nil {
		return err
	}

	// Record the indexed commit so subsequent runs only process new changes.
	return repo_model.UpdateIndexerStatus(repo, repo_model.RepoIndexerTypeCode, sha)
}
|
|
|
|
|
2019-12-23 13:31:16 +01:00
|
|
|
// Init initializes the repo indexer: it registers shutdown handling, creates
// the indexer queue with its handler, and starts a goroutine that builds the
// configured backend (bleve or elasticsearch), starts queue processing, and
// optionally populates the index. If a startup timeout is configured, another
// goroutine enforces it.
func Init() {
	if !setting.Indexer.RepoIndexerEnabled {
		// Indexing disabled: mark the indexer holder closed and do nothing else.
		indexer.Close()
		return
	}

	ctx, cancel := context.WithCancel(context.Background())

	// On graceful terminate, cancel the context (unless already cancelled)
	// and close the indexer.
	graceful.GetManager().RunAtTerminate(func() {
		select {
		case <-ctx.Done():
			return
		default:
		}
		cancel()
		log.Debug("Closing repository indexer")
		indexer.Close()
		log.Info("PID: %d Repository Indexer closed", os.Getpid())
	})

	// waitChannel reports how long initialization took; closed when the init
	// goroutine finishes (or on failure, before log.Fatal).
	waitChannel := make(chan time.Duration)

	// Create the Queue
	switch setting.Indexer.RepoType {
	case "bleve", "elasticsearch":
		handler := func(data ...queue.Data) {
			// The indexer must be ready before any queued item is processed.
			idx, err := indexer.get()
			if idx == nil || err != nil {
				log.Error("Codes indexer handler: unable to get indexer!")
				return
			}

			for _, datum := range data {
				indexerData, ok := datum.(*IndexerData)
				if !ok {
					log.Error("Unable to process provided datum: %v - not possible to cast to IndexerData", datum)
					continue
				}
				log.Trace("IndexerData Process Repo: %d", indexerData.RepoID)

				if err := index(ctx, indexer, indexerData.RepoID); err != nil {
					log.Error("index: %v", err)
					continue
				}
			}
		}

		indexerQueue = queue.CreateUniqueQueue("code_indexer", handler, &IndexerData{})
		if indexerQueue == nil {
			log.Fatal("Unable to create codes indexer queue")
		}
	default:
		log.Fatal("Unknown codes indexer type; %s", setting.Indexer.RepoType)
	}

	// Build the backend indexer asynchronously so startup is not blocked.
	go func() {
		start := time.Now()
		var (
			rIndexer Indexer
			populate bool
			err      error
		)
		switch setting.Indexer.RepoType {
		case "bleve":
			log.Info("PID: %d Initializing Repository Indexer at: %s", os.Getpid(), setting.Indexer.RepoPath)
			// Bleve can panic on corrupted index files; surface a helpful hint.
			defer func() {
				if err := recover(); err != nil {
					log.Error("PANIC whilst initializing repository indexer: %v\nStacktrace: %s", err, log.Stack(2))
					log.Error("The indexer files are likely corrupted and may need to be deleted")
					log.Error("You can completely remove the \"%s\" directory to make Gitea recreate the indexes", setting.Indexer.RepoPath)
				}
			}()

			rIndexer, populate, err = NewBleveIndexer(setting.Indexer.RepoPath)
			if err != nil {
				// Clean up before log.Fatal terminates the process.
				cancel()
				indexer.Close()
				close(waitChannel)
				log.Fatal("PID: %d Unable to initialize the bleve Repository Indexer at path: %s Error: %v", os.Getpid(), setting.Indexer.RepoPath, err)
			}
		case "elasticsearch":
			log.Info("PID: %d Initializing Repository Indexer at: %s", os.Getpid(), setting.Indexer.RepoConnStr)
			defer func() {
				if err := recover(); err != nil {
					log.Error("PANIC whilst initializing repository indexer: %v\nStacktrace: %s", err, log.Stack(2))
					log.Error("The indexer files are likely corrupted and may need to be deleted")
					log.Error("You can completely remove the \"%s\" index to make Gitea recreate the indexes", setting.Indexer.RepoConnStr)
				}
			}()

			rIndexer, populate, err = NewElasticSearchIndexer(setting.Indexer.RepoConnStr, setting.Indexer.RepoIndexerName)
			if err != nil {
				cancel()
				indexer.Close()
				close(waitChannel)
				log.Fatal("PID: %d Unable to initialize the elasticsearch Repository Indexer connstr: %s Error: %v", os.Getpid(), setting.Indexer.RepoConnStr, err)
			}
		default:
			log.Fatal("PID: %d Unknown Indexer type: %s", os.Getpid(), setting.Indexer.RepoType)
		}

		// Publish the ready indexer so the queue handler can fetch it.
		indexer.set(rIndexer)

		// Start processing the queue
		go graceful.GetManager().RunWithShutdownFns(indexerQueue.Run)

		// Populate only on first-time index creation (reported by the backend
		// constructor).
		if populate {
			go graceful.GetManager().RunWithShutdownContext(populateRepoIndexer)
		}
		// Report the init duration, unless shutdown happens first.
		select {
		case waitChannel <- time.Since(start):
		case <-graceful.GetManager().IsShutdown():
		}

		close(waitChannel)
	}()

	if setting.Indexer.StartupTimeout > 0 {
		go func() {
			timeout := setting.Indexer.StartupTimeout
			// Graceful restarts get extra time before the hammer falls.
			if graceful.GetManager().IsChild() && setting.GracefulHammerTime > 0 {
				timeout += setting.GracefulHammerTime
			}
			select {
			case <-graceful.GetManager().IsShutdown():
				log.Warn("Shutdown before Repository Indexer completed initialization")
				cancel()
				indexer.Close()
			case duration, ok := <-waitChannel:
				// A closed channel (ok == false) means init failed before
				// reporting a duration.
				if !ok {
					log.Warn("Repository Indexer Initialization failed")
					cancel()
					indexer.Close()
					return
				}
				log.Info("Repository Indexer Initialization took %v", duration)
			case <-time.After(timeout):
				cancel()
				indexer.Close()
				log.Fatal("Repository Indexer Initialization Timed-Out after: %v", timeout)
			}
		}()
	}
}
|
2020-09-07 17:05:08 +02:00
|
|
|
|
|
|
|
// UpdateRepoIndexer update a repository's entries in the indexer
|
2021-12-10 02:27:50 +01:00
|
|
|
func UpdateRepoIndexer(repo *repo_model.Repository) {
|
2020-09-07 17:05:08 +02:00
|
|
|
indexData := &IndexerData{RepoID: repo.ID}
|
|
|
|
if err := indexerQueue.Push(indexData); err != nil {
|
|
|
|
log.Error("Update repo index data %v failed: %v", indexData, err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// populateRepoIndexer populate the repo indexer with pre-existing data. This
// should only be run when the indexer is created for the first time.
// It walks repository IDs from the highest downwards, queueing unindexed
// repositories in batches of 50, and stops early on context cancellation.
func populateRepoIndexer(ctx context.Context) {
	log.Info("Populating the repo indexer with existing repositories")

	// Nothing to do if there are no repositories at all.
	exist, err := db.IsTableNotEmpty("repository")
	if err != nil {
		log.Fatal("System error: %v", err)
	} else if !exist {
		return
	}

	// if there is any existing repo indexer metadata in the DB, delete it
	// since we are starting afresh. Also, xorm requires deletes to have a
	// condition, and we want to delete everything, thus 1=1.
	if err := db.DeleteAllRecords("repo_indexer_status"); err != nil {
		log.Fatal("System error: %v", err)
	}

	var maxRepoID int64
	if maxRepoID, err = db.GetMaxID("repository"); err != nil {
		log.Fatal("System error: %v", err)
	}

	// start with the maximum existing repo ID and work backwards, so that we
	// don't include repos that are created after gitea starts; such repos will
	// already be added to the indexer, and we don't need to add them again.
	for maxRepoID > 0 {
		// Check for shutdown before fetching the next batch.
		select {
		case <-ctx.Done():
			log.Info("Repository Indexer population shutdown before completion")
			return
		default:
		}
		ids, err := repo_model.GetUnindexedRepos(repo_model.RepoIndexerTypeCode, maxRepoID, 0, 50)
		if err != nil {
			log.Error("populateRepoIndexer: %v", err)
			return
		} else if len(ids) == 0 {
			break
		}
		for _, id := range ids {
			// Check for shutdown before queueing each repository.
			select {
			case <-ctx.Done():
				log.Info("Repository Indexer population shutdown before completion")
				return
			default:
			}
			if err := indexerQueue.Push(&IndexerData{RepoID: id}); err != nil {
				log.Error("indexerQueue.Push: %v", err)
				return
			}
			// Continue the scan below the lowest ID queued so far.
			maxRepoID = id - 1
		}
	}
	log.Info("Done (re)populating the repo indexer with existing repositories")
}
|