2017-10-27 08:10:54 +02:00
|
|
|
// Copyright 2017 The Gitea Authors. All rights reserved.
|
2022-11-27 19:20:29 +01:00
|
|
|
// SPDX-License-Identifier: MIT
|
2017-10-27 08:10:54 +02:00
|
|
|
|
2019-12-23 13:31:16 +01:00
|
|
|
package code
|
2017-10-27 08:10:54 +02:00
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
2022-01-27 09:30:51 +01:00
|
|
|
"context"
|
2023-12-17 15:38:54 +01:00
|
|
|
"html/template"
|
2017-10-27 08:10:54 +02:00
|
|
|
"strings"
|
|
|
|
|
|
|
|
"code.gitea.io/gitea/modules/highlight"
|
Refactor indexer (#25174)
Refactor `modules/indexer` to make it more maintainable. And it can be
easier to support more features. I'm trying to solve some of issue
searching, this is a precursor to making functional changes.
Current supported engines and the index versions:
| engines | issues | code |
| - | - | - |
| db | Just a wrapper for database queries, doesn't need version | - |
| bleve | The version of index is **2** | The version of index is **6**
|
| elasticsearch | The old index has no version, will be treated as
version **0** in this PR | The version of index is **1** |
| meilisearch | The old index has no version, will be treated as version
**0** in this PR | - |
## Changes
### Split
Splited it into mutiple packages
```text
indexer
├── internal
│ ├── bleve
│ ├── db
│ ├── elasticsearch
│ └── meilisearch
├── code
│ ├── bleve
│ ├── elasticsearch
│ └── internal
└── issues
├── bleve
├── db
├── elasticsearch
├── internal
└── meilisearch
```
- `indexer/interanal`: Internal shared package for indexer.
- `indexer/interanal/[engine]`: Internal shared package for each engine
(bleve/db/elasticsearch/meilisearch).
- `indexer/code`: Implementations for code indexer.
- `indexer/code/internal`: Internal shared package for code indexer.
- `indexer/code/[engine]`: Implementation via each engine for code
indexer.
- `indexer/issues`: Implementations for issues indexer.
### Deduplication
- Combine `Init/Ping/Close` for code indexer and issues indexer.
- ~Combine `issues.indexerHolder` and `code.wrappedIndexer` to
`internal.IndexHolder`.~ Remove it, use dummy indexer instead when the
indexer is not ready.
- Duplicate two copies of creating ES clients.
- Duplicate two copies of `indexerID()`.
### Enhancement
- [x] Support index version for elasticsearch issues indexer, the old
index without version will be treated as version 0.
- [x] Fix spell of `elastic_search/ElasticSearch`, it should be
`Elasticsearch`.
- [x] Improve versioning of ES index. We don't need `Aliases`:
- Gitea does't need aliases for "Zero Downtime" because it never delete
old indexes.
- The old code of issues indexer uses the orignal name to create issue
index, so it's tricky to convert it to an alias.
- [x] Support index version for meilisearch issues indexer, the old
index without version will be treated as version 0.
- [x] Do "ping" only when `Ping` has been called, don't ping
periodically and cache the status.
- [x] Support the context parameter whenever possible.
- [x] Fix outdated example config.
- [x] Give up the requeue logic of issues indexer: When indexing fails,
call Ping to check if it was caused by the engine being unavailable, and
only requeue the task if the engine is unavailable.
- It is fragile and tricky, could cause data losing (It did happen when
I was doing some tests for this PR). And it works for ES only.
- Just always requeue the failed task, if it caused by bad data, it's a
bug of Gitea which should be fixed.
---------
Co-authored-by: Giteabot <teabot@gitea.io>
2023-06-23 14:37:56 +02:00
|
|
|
"code.gitea.io/gitea/modules/indexer/code/internal"
|
2020-02-20 20:53:55 +01:00
|
|
|
"code.gitea.io/gitea/modules/timeutil"
|
2024-08-06 07:57:25 +02:00
|
|
|
"code.gitea.io/gitea/services/gitdiff"
|
2017-10-27 08:10:54 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
// Result is a single code search hit prepared for display: identification of
// the repository/file/commit the match came from, plus the highlighted lines
// to render.
type Result struct {
	RepoID int64
	Filename string
	CommitID string
	// UpdatedUnix is the timestamp carried over from the index entry.
	UpdatedUnix timeutil.TimeStamp
	// Language is the detected language name; Color is its associated display color.
	Language string
	Color string
	// Lines holds the excerpt lines with search matches highlighted
	// (built by HighlightSearchResultCode).
	Lines []ResultLine
}
|
|
|
|
|
|
|
|
// ResultLine is one line of a search result: its 1-based line number in the
// source file and the pre-rendered, syntax-highlighted HTML for that line.
type ResultLine struct {
	Num int
	FormattedContent template.HTML
}
|
|
|
|
|
Refactor indexer (#25174)
Refactor `modules/indexer` to make it more maintainable. And it can be
easier to support more features. I'm trying to solve some of issue
searching, this is a precursor to making functional changes.
Current supported engines and the index versions:
| engines | issues | code |
| - | - | - |
| db | Just a wrapper for database queries, doesn't need version | - |
| bleve | The version of index is **2** | The version of index is **6**
|
| elasticsearch | The old index has no version, will be treated as
version **0** in this PR | The version of index is **1** |
| meilisearch | The old index has no version, will be treated as version
**0** in this PR | - |
## Changes
### Split
Splited it into mutiple packages
```text
indexer
├── internal
│ ├── bleve
│ ├── db
│ ├── elasticsearch
│ └── meilisearch
├── code
│ ├── bleve
│ ├── elasticsearch
│ └── internal
└── issues
├── bleve
├── db
├── elasticsearch
├── internal
└── meilisearch
```
- `indexer/interanal`: Internal shared package for indexer.
- `indexer/interanal/[engine]`: Internal shared package for each engine
(bleve/db/elasticsearch/meilisearch).
- `indexer/code`: Implementations for code indexer.
- `indexer/code/internal`: Internal shared package for code indexer.
- `indexer/code/[engine]`: Implementation via each engine for code
indexer.
- `indexer/issues`: Implementations for issues indexer.
### Deduplication
- Combine `Init/Ping/Close` for code indexer and issues indexer.
- ~Combine `issues.indexerHolder` and `code.wrappedIndexer` to
`internal.IndexHolder`.~ Remove it, use dummy indexer instead when the
indexer is not ready.
- Duplicate two copies of creating ES clients.
- Duplicate two copies of `indexerID()`.
### Enhancement
- [x] Support index version for elasticsearch issues indexer, the old
index without version will be treated as version 0.
- [x] Fix spell of `elastic_search/ElasticSearch`, it should be
`Elasticsearch`.
- [x] Improve versioning of ES index. We don't need `Aliases`:
- Gitea does't need aliases for "Zero Downtime" because it never delete
old indexes.
- The old code of issues indexer uses the orignal name to create issue
index, so it's tricky to convert it to an alias.
- [x] Support index version for meilisearch issues indexer, the old
index without version will be treated as version 0.
- [x] Do "ping" only when `Ping` has been called, don't ping
periodically and cache the status.
- [x] Support the context parameter whenever possible.
- [x] Fix outdated example config.
- [x] Give up the requeue logic of issues indexer: When indexing fails,
call Ping to check if it was caused by the engine being unavailable, and
only requeue the task if the engine is unavailable.
- It is fragile and tricky, could cause data losing (It did happen when
I was doing some tests for this PR). And it works for ES only.
- Just always requeue the failed task, if it caused by bad data, it's a
bug of Gitea which should be fixed.
---------
Co-authored-by: Giteabot <teabot@gitea.io>
2023-06-23 14:37:56 +02:00
|
|
|
// SearchResultLanguages is re-exported from the internal code indexer package
// so callers of this package do not have to import it directly.
type SearchResultLanguages = internal.SearchResultLanguages
|
|
|
|
|
2024-03-16 11:32:45 +01:00
|
|
|
// SearchOptions is re-exported from the internal code indexer package; it
// carries the parameters accepted by PerformSearch.
type SearchOptions = internal.SearchOptions
|
|
|
|
|
2024-12-22 13:24:29 +01:00
|
|
|
// CodeSearchOptions lists the supported code search modes ("exact" and "fuzzy").
var CodeSearchOptions = [2]string{"exact", "fuzzy"}
|
|
|
|
|
2017-10-27 08:10:54 +02:00
|
|
|
// indices widens the byte span [selectionStartIndex, selectionEndIndex) so
// that it additionally covers one full line of context before and after the
// selection. The returned start sits right after the second newline preceding
// the selection (or 0), and the returned end sits on the second newline
// following it (or len(content)).
func indices(content string, selectionStartIndex, selectionEndIndex int) (int, int) {
	// Look backwards for up to two newlines; the span starts just after the
	// second one, which places it at the beginning of the previous line.
	start := 0
	if nl := strings.LastIndex(content[:selectionStartIndex], "\n"); nl >= 0 {
		if prev := strings.LastIndex(content[:nl], "\n"); prev >= 0 {
			start = prev + 1
		}
	}

	// Look forwards for up to two newlines; the span ends on the second one,
	// which keeps the whole following line but drops its trailing newline.
	end := len(content)
	if nl := strings.Index(content[selectionEndIndex:], "\n"); nl >= 0 {
		first := selectionEndIndex + nl
		if next := strings.Index(content[first+1:], "\n"); next >= 0 {
			end = first + 1 + next
		}
	}

	return start, end
}
|
|
|
|
|
|
|
|
func writeStrings(buf *bytes.Buffer, strs ...string) error {
|
|
|
|
for _, s := range strs {
|
|
|
|
_, err := buf.WriteString(s)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2024-08-06 07:57:25 +02:00
|
|
|
const (
	// highlightTagStart/highlightTagEnd wrap the matched substring of a search
	// result line. HighlightSearchResultCode inserts them via placeholder runes
	// so they survive syntax highlighting.
	highlightTagStart = "<span class=\"search-highlight\">"
	highlightTagEnd = "</span>"
)
|
|
|
|
|
|
|
|
// HighlightSearchResultCode syntax-highlights a code excerpt and wraps each
// matched search range in a "search-highlight" span.
//
// lineNums holds the original line number for each excerpt line (1-based, as
// produced by searchResult); each highlightRanges entry has the form
// [line index, start pos, end pos] and marks the matched part of that line.
// The highlight tags are first inserted as placeholder runes (registered in
// hcd.PlaceholderTokenMap) so the highlighter cannot mangle them, and are
// expanded back into real HTML at the end via hcd.Recover.
func HighlightSearchResultCode(filename string, lineNums []int, highlightRanges [][3]int, code string) []ResultLine {
	hcd := gitdiff.NewHighlightCodeDiff()
	hcd.CollectUsedRunes(code)
	// reserve two placeholder runes to stand in for the open/close tags
	startTag, endTag := hcd.NextPlaceholder(), hcd.NextPlaceholder()
	hcd.PlaceholderTokenMap[startTag] = highlightTagStart
	hcd.PlaceholderTokenMap[endTag] = highlightTagEnd

	// we should highlight the whole code block first, otherwise it doesn't work well with multiple line highlighting
	hl, _ := highlight.Code(filename, "", code)
	// swap the highlighter's own HTML tags for placeholder runes as well, so
	// the rune-position bookkeeping below can skip over them
	conv := hcd.ConvertToPlaceholders(string(hl))
	convLines := strings.Split(conv, "\n")

	// each highlightRange is of the form [line number, start pos, end pos]
	for _, highlightRange := range highlightRanges {
		ln, start, end := highlightRange[0], highlightRange[1], highlightRange[2]
		line := convLines[ln]
		// skip ranges that no longer fit the (placeholder-converted) line
		if line == "" || len(line) <= start || len(line) < end {
			continue
		}

		sb := strings.Builder{}
		// count is the position among non-placeholder runes seen so far;
		// start/end are compared against it so that placeholder runes (the
		// highlighter's tags) do not shift the match position
		count := -1
		isOpen := false
		for _, r := range line {
			if token, ok := hcd.PlaceholderTokenMap[r];
			// token was not found
			!ok ||
				// token was marked as used
				token == "" ||
				// the token is not an valid html tag emitted by chroma
				!(len(token) > 6 && (token[0:5] == "<span" || token[0:6] == "</span")) {
				count++
			} else if !isOpen {
				// open the tag only after all other placeholders
				sb.WriteRune(r)
				continue
			} else if isOpen && count < end {
				// if the tag is open, but a placeholder exists in between
				// close the tag
				sb.WriteRune(endTag)
				// write the placeholder
				sb.WriteRune(r)
				// reopen the tag
				sb.WriteRune(startTag)
				continue
			}

			switch count {
			case end:
				// if tag is not open, no need to close
				if !isOpen {
					break
				}
				sb.WriteRune(endTag)
				isOpen = false
			case start:
				// if tag is open, do not open again
				if isOpen {
					break
				}
				isOpen = true
				sb.WriteRune(startTag)
			}

			sb.WriteRune(r)
		}
		// the match may run to the end of the line; close any dangling tag
		if isOpen {
			sb.WriteRune(endTag)
		}
		convLines[ln] = sb.String()
	}
	conv = strings.Join(convLines, "\n")

	highlightedLines := strings.Split(hcd.Recover(conv), "\n")
	// The lineNums outputted by highlight.Code might not match the original lineNums, because "highlight" removes the last `\n`
	lines := make([]ResultLine, min(len(highlightedLines), len(lineNums)))
	for i := 0; i < len(lines); i++ {
		lines[i].Num = lineNums[i]
		lines[i].FormattedContent = template.HTML(highlightedLines[i])
	}
	return lines
}
|
|
|
|
|
Refactor indexer (#25174)
Refactor `modules/indexer` to make it more maintainable. And it can be
easier to support more features. I'm trying to solve some of issue
searching, this is a precursor to making functional changes.
Current supported engines and the index versions:
| engines | issues | code |
| - | - | - |
| db | Just a wrapper for database queries, doesn't need version | - |
| bleve | The version of index is **2** | The version of index is **6**
|
| elasticsearch | The old index has no version, will be treated as
version **0** in this PR | The version of index is **1** |
| meilisearch | The old index has no version, will be treated as version
**0** in this PR | - |
## Changes
### Split
Splited it into mutiple packages
```text
indexer
├── internal
│ ├── bleve
│ ├── db
│ ├── elasticsearch
│ └── meilisearch
├── code
│ ├── bleve
│ ├── elasticsearch
│ └── internal
└── issues
├── bleve
├── db
├── elasticsearch
├── internal
└── meilisearch
```
- `indexer/interanal`: Internal shared package for indexer.
- `indexer/interanal/[engine]`: Internal shared package for each engine
(bleve/db/elasticsearch/meilisearch).
- `indexer/code`: Implementations for code indexer.
- `indexer/code/internal`: Internal shared package for code indexer.
- `indexer/code/[engine]`: Implementation via each engine for code
indexer.
- `indexer/issues`: Implementations for issues indexer.
### Deduplication
- Combine `Init/Ping/Close` for code indexer and issues indexer.
- ~Combine `issues.indexerHolder` and `code.wrappedIndexer` to
`internal.IndexHolder`.~ Remove it, use dummy indexer instead when the
indexer is not ready.
- Duplicate two copies of creating ES clients.
- Duplicate two copies of `indexerID()`.
### Enhancement
- [x] Support index version for elasticsearch issues indexer, the old
index without version will be treated as version 0.
- [x] Fix spell of `elastic_search/ElasticSearch`, it should be
`Elasticsearch`.
- [x] Improve versioning of ES index. We don't need `Aliases`:
- Gitea does't need aliases for "Zero Downtime" because it never delete
old indexes.
- The old code of issues indexer uses the orignal name to create issue
index, so it's tricky to convert it to an alias.
- [x] Support index version for meilisearch issues indexer, the old
index without version will be treated as version 0.
- [x] Do "ping" only when `Ping` has been called, don't ping
periodically and cache the status.
- [x] Support the context parameter whenever possible.
- [x] Fix outdated example config.
- [x] Give up the requeue logic of issues indexer: When indexing fails,
call Ping to check if it was caused by the engine being unavailable, and
only requeue the task if the engine is unavailable.
- It is fragile and tricky, could cause data losing (It did happen when
I was doing some tests for this PR). And it works for ES only.
- Just always requeue the failed task, if it caused by bad data, it's a
bug of Gitea which should be fixed.
---------
Co-authored-by: Giteabot <teabot@gitea.io>
2023-06-23 14:37:56 +02:00
|
|
|
// searchResult converts a raw index hit into a display Result. startIndex and
// endIndex bound the excerpt of result.Content to show (typically widened by
// indices() to add a line of context on each side), while result.StartIndex
// and result.EndIndex delimit the keyword match itself.
func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Result, error) {
	// 1-based line number of the first excerpt line
	startLineNum := 1 + strings.Count(result.Content[:startIndex], "\n")

	var formattedLinesBuffer bytes.Buffer

	// SplitAfter keeps the trailing "\n" on each line, so summing len(line)
	// below walks exact byte offsets through result.Content
	contentLines := strings.SplitAfter(result.Content[startIndex:endIndex], "\n")
	lineNums := make([]int, 0, len(contentLines))
	// index is the absolute offset of the current line within result.Content
	index := startIndex
	var highlightRanges [][3]int
	for i, line := range contentLines {
		var err error
		// does this line overlap the matched [StartIndex, EndIndex) span?
		if index < result.EndIndex &&
			result.StartIndex < index+len(line) &&
			result.StartIndex < result.EndIndex {
			// clamp the match to this line, in line-local offsets
			openActiveIndex := max(result.StartIndex-index, 0)
			closeActiveIndex := min(result.EndIndex-index, len(line))
			highlightRanges = append(highlightRanges, [3]int{i, openActiveIndex, closeActiveIndex})
			// the three segments concatenate back to the whole line; the actual
			// tag insertion happens later in HighlightSearchResultCode using
			// the collected highlightRanges
			err = writeStrings(&formattedLinesBuffer,
				line[:openActiveIndex],
				line[openActiveIndex:closeActiveIndex],
				line[closeActiveIndex:],
			)
		} else {
			err = writeStrings(&formattedLinesBuffer, line)
		}
		if err != nil {
			return nil, err
		}

		lineNums = append(lineNums, startLineNum+i)
		index += len(line)
	}

	return &Result{
		RepoID: result.RepoID,
		Filename: result.Filename,
		CommitID: result.CommitID,
		UpdatedUnix: result.UpdatedUnix,
		Language: result.Language,
		Color: result.Color,
		Lines: HighlightSearchResultCode(result.Filename, lineNums, highlightRanges, formattedLinesBuffer.String()),
	}, nil
}
|
|
|
|
|
|
|
|
// PerformSearch perform a search on a repository
|
2024-03-09 02:39:27 +01:00
|
|
|
// if isFuzzy is true set the Damerau-Levenshtein distance from 0 to 2
|
2024-03-16 11:32:45 +01:00
|
|
|
func PerformSearch(ctx context.Context, opts *SearchOptions) (int, []*Result, []*SearchResultLanguages, error) {
|
|
|
|
if opts == nil || len(opts.Keyword) == 0 {
|
2020-02-20 20:53:55 +01:00
|
|
|
return 0, nil, nil, nil
|
2017-10-27 08:10:54 +02:00
|
|
|
}
|
|
|
|
|
2024-03-16 11:32:45 +01:00
|
|
|
total, results, resultLanguages, err := (*globalIndexer.Load()).Search(ctx, opts)
|
2017-10-27 08:10:54 +02:00
|
|
|
if err != nil {
|
2020-02-20 20:53:55 +01:00
|
|
|
return 0, nil, nil, err
|
2017-10-27 08:10:54 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
displayResults := make([]*Result, len(results))
|
|
|
|
|
|
|
|
for i, result := range results {
|
|
|
|
startIndex, endIndex := indices(result.Content, result.StartIndex, result.EndIndex)
|
|
|
|
displayResults[i], err = searchResult(result, startIndex, endIndex)
|
|
|
|
if err != nil {
|
2020-02-20 20:53:55 +01:00
|
|
|
return 0, nil, nil, err
|
2017-10-27 08:10:54 +02:00
|
|
|
}
|
|
|
|
}
|
2020-02-20 20:53:55 +01:00
|
|
|
return int(total), displayResults, resultLanguages, nil
|
2017-10-27 08:10:54 +02:00
|
|
|
}
|