feat: filepath filter for code search (#6143)
Added support for searching content in a specific directory or file.

Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/6143
Reviewed-by: Gusted <gusted@noreply.codeberg.org>
Reviewed-by: 0ko <0ko@noreply.codeberg.org>
Co-authored-by: Shiny Nematoda <snematoda.751k2@aleeas.com>
Co-committed-by: Shiny Nematoda <snematoda.751k2@aleeas.com>
This commit is contained in:
parent bb88e1daf8
commit ee214cb886

19 changed files with 342 additions and 61 deletions
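The new filter is threaded from a single "path" query parameter into both search backends changed below: SearchOptions.Filename for the indexer-backed search and GrepOptions.Filename for the git-grep fallback. A minimal sketch of calling the grep side with the new field; this is illustrative only (not part of the commit), and the repository handle and search term are placeholders:

	import (
		"context"
		"fmt"

		"code.gitea.io/gitea/modules/git"
	)

	// searchInDir scopes a git-grep code search to one directory via the
	// new GrepOptions.Filename field. repo is a placeholder *git.Repository.
	func searchInDir(ctx context.Context, repo *git.Repository) error {
		results, err := git.GrepSearch(ctx, repo, "HelloWorld", git.GrepOptions{
			ContextLineNumber: 1,
			Filename:          "java-hello/", // only match files under this directory
		})
		if err != nil {
			return err
		}
		for _, res := range results {
			fmt.Println(res.Filename, res.LineNumbers)
		}
		return nil
	}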
@@ -36,13 +36,15 @@ const (
 	RegExpGrepMode
 )
 
+var GrepSearchOptions = [3]string{"exact", "union", "regexp"}
+
 type GrepOptions struct {
 	RefName           string
 	MaxResultLimit    int
 	MatchesPerFile    int // >= git 2.38
 	ContextLineNumber int
 	Mode              grepMode
-	PathSpec          []setting.Glob
+	Filename          string
 }
 
 func (opts *GrepOptions) ensureDefaults() {
@@ -112,13 +114,39 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO
 	}
 
 	// pathspec
-	files := make([]string, 0,
-		len(setting.Indexer.IncludePatterns)+
-			len(setting.Indexer.ExcludePatterns)+
-			len(opts.PathSpec))
-	for _, expr := range append(setting.Indexer.IncludePatterns, opts.PathSpec...) {
-		files = append(files, ":"+expr.Pattern())
-	}
+	includeLen := len(setting.Indexer.IncludePatterns)
+	if len(opts.Filename) > 0 {
+		includeLen = 1
+	}
+	files := make([]string, 0, len(setting.Indexer.ExcludePatterns)+includeLen)
+	if len(opts.Filename) > 0 && len(setting.Indexer.IncludePatterns) > 0 {
+		// if both a global include pattern and a per-search path are defined,
+		// we only include results where the path matches the globally set pattern
+		// (eg, global pattern = "src/**" and path = "node_modules/")
+
+		// FIXME: this is a bit too restrictive, and fails to consider cases where the
+		// globally set include pattern refers to a file rather than a directory
+		// (eg, global pattern = "**.go" and path = "modules/git")
+		exprMatched := false
+		for _, expr := range setting.Indexer.IncludePatterns {
+			if expr.Match(opts.Filename) {
+				files = append(files, ":(literal)"+opts.Filename)
+				exprMatched = true
+				break
+			}
+		}
+		if !exprMatched {
+			log.Warn("git-grep: filepath %s does not match any include pattern", opts.Filename)
+		}
+	} else if len(opts.Filename) > 0 {
+		// if only the path is set, we just include results that match it
+		files = append(files, ":(literal)"+opts.Filename)
+	} else {
+		// otherwise, if global include patterns are set, include results that strictly match them
+		for _, expr := range setting.Indexer.IncludePatterns {
+			files = append(files, ":"+expr.Pattern())
+		}
+	}
 	for _, expr := range setting.Indexer.ExcludePatterns {
 		files = append(files, ":^"+expr.Pattern())
 	}
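For context, ":(literal)" and ":^" are git pathspec magic: ":(literal)" disables glob expansion so the user-supplied path is matched verbatim, and ":^" excludes matching paths. A self-contained sketch of the pathspec list the code above ends up passing to git grep; the exclude pattern is a hypothetical configuration value:

	package main

	import "fmt"

	// buildPathspec mirrors, in simplified form, the selection logic above:
	// a literal pathspec for the requested path plus global exclude patterns.
	func buildPathspec(filename string, excludes []string) []string {
		files := make([]string, 0, len(excludes)+1)
		if filename != "" {
			// ":(literal)" turns off glob interpretation, so "java-hello/"
			// only ever matches that exact directory prefix
			files = append(files, ":(literal)"+filename)
		}
		for _, pat := range excludes {
			files = append(files, ":^"+pat) // exclude paths matching pat
		}
		return files
	}

	func main() {
		// roughly what git sees: git grep ... -- <these pathspecs>
		fmt.Println(buildPathspec("java-hello/", []string{"vendor/**"}))
		// Output: [:(literal)java-hello/ :^vendor/**]
	}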
@@ -89,6 +89,20 @@ func TestGrepSearch(t *testing.T) {
 		},
 	}, res)
 
+	res, err = GrepSearch(context.Background(), repo, "world", GrepOptions{
+		MatchesPerFile: 1,
+		Filename:       "java-hello/",
+	})
+	require.NoError(t, err)
+	assert.Equal(t, []*GrepResult{
+		{
+			Filename:          "java-hello/main.java",
+			LineNumbers:       []int{1},
+			LineCodes:         []string{"public class HelloWorld"},
+			HighlightedRanges: [][3]int{{0, 18, 23}},
+		},
+	}, res)
+
 	res, err = GrepSearch(context.Background(), repo, "no-such-content", GrepOptions{})
 	require.NoError(t, err)
 	assert.Empty(t, res)
@@ -17,6 +17,7 @@ import (
 	"code.gitea.io/gitea/modules/charset"
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/gitrepo"
+	tokenizer_hierarchy "code.gitea.io/gitea/modules/indexer/code/bleve/tokenizer/hierarchy"
 	"code.gitea.io/gitea/modules/indexer/code/internal"
 	indexer_internal "code.gitea.io/gitea/modules/indexer/internal"
 	inner_bleve "code.gitea.io/gitea/modules/indexer/internal/bleve"
@@ -56,6 +57,7 @@ func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error {
 type RepoIndexerData struct {
 	RepoID    int64
 	CommitID  string
+	Filename  string
 	Content   string
 	Language  string
 	UpdatedAt time.Time
@@ -69,7 +71,8 @@ func (d *RepoIndexerData) Type() string {
 const (
 	repoIndexerAnalyzer      = "repoIndexerAnalyzer"
 	repoIndexerDocType       = "repoIndexerDocType"
-	repoIndexerLatestVersion = 6
+	pathHierarchyAnalyzer    = "pathHierarchyAnalyzer"
+	repoIndexerLatestVersion = 7
 )
 
 // generateBleveIndexMapping generates a bleve index mapping for the repo indexer
@@ -89,6 +92,11 @@ func generateBleveIndexMapping() (mapping.IndexMapping, error) {
 	docMapping.AddFieldMappingsAt("Language", termFieldMapping)
 	docMapping.AddFieldMappingsAt("CommitID", termFieldMapping)
 
+	pathFieldMapping := bleve.NewTextFieldMapping()
+	pathFieldMapping.IncludeInAll = false
+	pathFieldMapping.Analyzer = pathHierarchyAnalyzer
+	docMapping.AddFieldMappingsAt("Filename", pathFieldMapping)
+
 	timeFieldMapping := bleve.NewDateTimeFieldMapping()
 	timeFieldMapping.IncludeInAll = false
 	docMapping.AddFieldMappingsAt("UpdatedAt", timeFieldMapping)
@@ -103,6 +111,13 @@ func generateBleveIndexMapping() (mapping.IndexMapping, error) {
 		"token_filters": []string{unicodeNormalizeName, camelcase.Name, lowercase.Name},
 	}); err != nil {
 		return nil, err
+	} else if err := mapping.AddCustomAnalyzer(pathHierarchyAnalyzer, map[string]any{
+		"type":          analyzer_custom.Name,
+		"char_filters":  []string{},
+		"tokenizer":     tokenizer_hierarchy.Name,
+		"token_filters": []string{unicodeNormalizeName},
+	}); err != nil {
+		return nil, err
 	}
 	mapping.DefaultAnalyzer = repoIndexerAnalyzer
 	mapping.AddDocumentMapping(repoIndexerDocType, docMapping)
@@ -178,6 +193,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
 	return batch.Index(id, &RepoIndexerData{
 		RepoID:    repo.ID,
 		CommitID:  commitSha,
+		Filename:  update.Filename,
 		Content:   string(charset.ToUTF8DropErrors(fileContents, charset.ConvertOpts{})),
 		Language:  analyze.GetCodeLanguage(update.Filename, fileContents),
 		UpdatedAt: time.Now().UTC(),
@@ -266,22 +282,30 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 		indexerQuery = keywordQuery
 	}
 
+	opts.Filename = strings.Trim(opts.Filename, "/")
+	if len(opts.Filename) > 0 {
+		// we use a keyword analyzer for the query, rather than the path
+		// hierarchy analyzer, to match only the exact path:
+		// eg, a query for modules/indexer/code
+		// should not provide results for modules/ nor modules/indexer
+		indexerQuery = bleve.NewConjunctionQuery(
+			indexerQuery,
+			inner_bleve.MatchQuery(opts.Filename, "Filename", analyzer_keyword.Name, 0),
+		)
+	}
+
 	// Save for reuse without language filter
 	facetQuery := indexerQuery
 	if len(opts.Language) > 0 {
-		languageQuery := bleve.NewMatchQuery(opts.Language)
-		languageQuery.FieldVal = "Language"
-		languageQuery.Analyzer = analyzer_keyword.Name
-
 		indexerQuery = bleve.NewConjunctionQuery(
 			indexerQuery,
-			languageQuery,
+			inner_bleve.MatchQuery(opts.Language, "Language", analyzer_keyword.Name, 0),
 		)
 	}
 
 	from, pageSize := opts.GetSkipTake()
 	searchRequest := bleve.NewSearchRequestOptions(indexerQuery, pageSize, from, false)
-	searchRequest.Fields = []string{"Content", "RepoID", "Language", "CommitID", "UpdatedAt"}
+	searchRequest.Fields = []string{"Content", "RepoID", "Filename", "Language", "CommitID", "UpdatedAt"}
 	searchRequest.IncludeLocations = true
 
 	if len(opts.Language) == 0 {
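The asymmetry above is deliberate: the Filename field is indexed with the path hierarchy analyzer, so every ancestor directory of a file becomes its own token, while the query side uses the keyword analyzer so the requested path stays a single token. A small sketch against the tokenizer introduced by this commit (runnable only inside the Forgejo source tree) shows why a directory query matches everything beneath it:

	package main

	import (
		"fmt"

		"code.gitea.io/gitea/modules/indexer/code/bleve/tokenizer/hierarchy"
	)

	func main() {
		// index side: the hierarchy tokenizer emits one token per ancestor path
		toks := (&hierarchy.PathHierarchyTokenizer{}).Tokenize([]byte("modules/indexer/code/search.go"))
		for _, tk := range toks {
			fmt.Println(string(tk.Term))
		}
		// modules
		// modules/indexer
		// modules/indexer/code
		// modules/indexer/code/search.go
		//
		// query side: the keyword analyzer keeps "modules/indexer" as one
		// token, which equals the second token above exactly, so this file
		// is found; a query for "modules/index" matches no token at all.
	}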
@@ -320,7 +344,7 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 			RepoID:      int64(hit.Fields["RepoID"].(float64)),
 			StartIndex:  startIndex,
 			EndIndex:    endIndex,
-			Filename:    internal.FilenameOfIndexerID(hit.ID),
+			Filename:    hit.Fields["Filename"].(string),
 			Content:     hit.Fields["Content"].(string),
 			CommitID:    hit.Fields["CommitID"].(string),
 			UpdatedUnix: updatedUnix,
@@ -333,7 +357,7 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 	if len(opts.Language) > 0 {
 		// Use separate query to go get all language counts
 		facetRequest := bleve.NewSearchRequestOptions(facetQuery, 1, 0, false)
-		facetRequest.Fields = []string{"Content", "RepoID", "Language", "CommitID", "UpdatedAt"}
+		facetRequest.Fields = []string{"Content", "RepoID", "Filename", "Language", "CommitID", "UpdatedAt"}
 		facetRequest.IncludeLocations = true
 		facetRequest.AddFacet("languages", bleve.NewFacetRequest("Language", 10))
modules/indexer/code/bleve/tokenizer/hierarchy/hierarchy.go (new file, 69 lines)

@@ -0,0 +1,69 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hierarchy
+
+import (
+	"bytes"
+
+	"github.com/blevesearch/bleve/v2/analysis"
+	"github.com/blevesearch/bleve/v2/registry"
+)
+
+const Name = "path_hierarchy"
+
+type PathHierarchyTokenizer struct{}
+
+// Similar to elastic's path_hierarchy tokenizer
+// This tokenizes a given path into all the possible hierarchies
+// For example,
+// modules/indexer/code/search.go =>
+//
+//	modules/
+//	modules/indexer
+//	modules/indexer/code
+//	modules/indexer/code/search.go
+func (t *PathHierarchyTokenizer) Tokenize(input []byte) analysis.TokenStream {
+	// trim any extra slashes
+	input = bytes.Trim(input, "/")
+
+	// zero allocations until the nested directories exceed a depth of 8 (which is unlikely)
+	rv := make(analysis.TokenStream, 0, 8)
+	count, off := 1, 0
+
+	// iterate over all directory separators
+	for i := bytes.IndexRune(input[off:], '/'); i != -1; i = bytes.IndexRune(input[off:], '/') {
+		// the index is relative to input[offset...];
+		// add it to the accumulated offset to get the index of the current separator in input[0...]
+		off += i
+		rv = append(rv, &analysis.Token{
+			Term:     input[:off], // take the slice input[0...index of separator]
+			Start:    0,
+			End:      off,
+			Position: count,
+			Type:     analysis.AlphaNumeric,
+		})
+		// increment the offset past the separator
+		off++
+		count++
+	}
+
+	// the entire file path should always be the last token
+	rv = append(rv, &analysis.Token{
+		Term:     input,
+		Start:    0,
+		End:      len(input),
+		Position: count,
+		Type:     analysis.AlphaNumeric,
+	})
+
+	return rv
+}
+
+func TokenizerConstructor(config map[string]any, cache *registry.Cache) (analysis.Tokenizer, error) {
+	return &PathHierarchyTokenizer{}, nil
+}
+
+func init() {
+	registry.RegisterTokenizer(Name, TokenizerConstructor)
+}
modules/indexer/code/bleve/tokenizer/hierarchy/hierarchy_test.go (new file, 59 lines)

@@ -0,0 +1,59 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hierarchy
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestIndexerBleveHierarchyTokenizer(t *testing.T) {
+	tokenizer := &PathHierarchyTokenizer{}
+	keywords := []struct {
+		Term    string
+		Results []string
+	}{
+		{
+			Term: "modules/indexer/code/search.go",
+			Results: []string{
+				"modules",
+				"modules/indexer",
+				"modules/indexer/code",
+				"modules/indexer/code/search.go",
+			},
+		},
+		{
+			Term: "/tmp/forgejo/",
+			Results: []string{
+				"tmp",
+				"tmp/forgejo",
+			},
+		},
+		{
+			Term: "a/b/c/d/e/f/g/h/i/j",
+			Results: []string{
+				"a",
+				"a/b",
+				"a/b/c",
+				"a/b/c/d",
+				"a/b/c/d/e",
+				"a/b/c/d/e/f",
+				"a/b/c/d/e/f/g",
+				"a/b/c/d/e/f/g/h",
+				"a/b/c/d/e/f/g/h/i",
+				"a/b/c/d/e/f/g/h/i/j",
+			},
+		},
+	}
+
+	for _, kw := range keywords {
+		tokens := tokenizer.Tokenize([]byte(kw.Term))
+		assert.Len(t, tokens, len(kw.Results))
+		for i, token := range tokens {
+			assert.Equal(t, i+1, token.Position)
+			assert.Equal(t, kw.Results[i], string(token.Term))
+		}
+	}
+}
@@ -30,7 +30,7 @@ import (
 )
 
 const (
-	esRepoIndexerLatestVersion = 1
+	esRepoIndexerLatestVersion = 2
 	// multi-match-types, currently only 2 types are used
 	// Reference: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-multi-match-query.html#multi-match-types
 	esMultiMatchTypeBestFields = "best_fields"
@@ -57,6 +57,21 @@ func NewIndexer(url, indexerName string) *Indexer {
 
 const (
 	defaultMapping = `{
+		"settings": {
+			"analysis": {
+				"analyzer": {
+					"custom_path_tree": {
+						"tokenizer": "custom_hierarchy"
+					}
+				},
+				"tokenizer": {
+					"custom_hierarchy": {
+						"type": "path_hierarchy",
+						"delimiter": "/"
+					}
+				}
+			}
+		},
 		"mappings": {
 			"properties": {
 				"repo_id": {
@@ -72,6 +87,15 @@ const (
 					"type": "keyword",
 					"index": true
 				},
+				"filename": {
+					"type": "text",
+					"fields": {
+						"tree": {
+							"type": "text",
+							"analyzer": "custom_path_tree"
+						}
+					}
+				},
 				"language": {
 					"type": "keyword",
 					"index": true
@@ -138,6 +162,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
 			"repo_id":    repo.ID,
 			"content":    string(charset.ToUTF8DropErrors(fileContents, charset.ConvertOpts{})),
 			"commit_id":  sha,
+			"filename":   update.Filename,
 			"language":   analyze.GetCodeLanguage(update.Filename, fileContents),
 			"updated_at": timeutil.TimeStampNow(),
 		}),
@@ -267,7 +292,6 @@ func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int)
 			panic(fmt.Sprintf("2===%#v", hit.Highlight))
 		}
 
-		repoID, fileName := internal.ParseIndexerID(hit.Id)
 		res := make(map[string]any)
 		if err := json.Unmarshal(hit.Source, &res); err != nil {
 			return 0, nil, nil, err
@@ -276,8 +300,8 @@ func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int)
 		language := res["language"].(string)
 
 		hits = append(hits, &internal.SearchResult{
-			RepoID:      repoID,
-			Filename:    fileName,
+			RepoID:      int64(res["repo_id"].(float64)),
+			Filename:    res["filename"].(string),
 			CommitID:    res["commit_id"].(string),
 			Content:     res["content"].(string),
 			UpdatedUnix: timeutil.TimeStamp(res["updated_at"].(float64)),
@@ -326,6 +350,9 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 		repoQuery := elastic.NewTermsQuery("repo_id", repoStrs...)
 		query = query.Must(repoQuery)
 	}
+	if len(opts.Filename) > 0 {
+		query = query.Filter(elastic.NewTermsQuery("filename.tree", opts.Filename))
+	}
 
 	var (
 		start, pageSize = opts.GetSkipTake()
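Elasticsearch gets the same behaviour from the filename.tree sub-field declared in the mapping above: path_hierarchy runs at index time, so a plain terms filter on the raw directory string selects every file beneath it. A sketch of the query this builds, assuming the olivere/elastic v7 client the indexer already uses; the search term and path are placeholders:

	package main

	import (
		"encoding/json"
		"fmt"

		elastic "github.com/olivere/elastic/v7"
	)

	func main() {
		// hypothetical search for "hello" restricted to modules/indexer/,
		// mirroring the Filter added in the hunk above
		q := elastic.NewBoolQuery().
			Must(elastic.NewMatchQuery("content", "hello")).
			Filter(elastic.NewTermsQuery("filename.tree", "modules/indexer"))

		src, _ := q.Source() // the JSON body sent to Elasticsearch
		out, _ := json.MarshalIndent(src, "", "  ")
		fmt.Println(string(out))
	}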
@@ -38,6 +38,7 @@ func testIndexer(name string, t *testing.T, indexer internal.Indexer) {
 		Keyword  string
 		IDs      []int64
 		Langs    int
+		Filename string
 	}{
 		{
 			RepoIDs: nil,
@@ -51,6 +52,20 @@ func testIndexer(name string, t *testing.T, indexer internal.Indexer) {
 			IDs:     []int64{},
 			Langs:   0,
 		},
+		{
+			RepoIDs:  nil,
+			Keyword:  "Description",
+			IDs:      []int64{},
+			Langs:    0,
+			Filename: "NOT-README.md",
+		},
+		{
+			RepoIDs:  nil,
+			Keyword:  "Description",
+			IDs:      []int64{repoID},
+			Langs:    1,
+			Filename: "README.md",
+		},
 		{
 			RepoIDs: nil,
 			Keyword: "Description for",
@@ -86,6 +101,7 @@ func testIndexer(name string, t *testing.T, indexer internal.Indexer) {
 				Page:     1,
 				PageSize: 10,
 			},
+			Filename:       kw.Filename,
 			IsKeywordFuzzy: true,
 		})
 		require.NoError(t, err)
@@ -24,6 +24,7 @@ type SearchOptions struct {
 	RepoIDs  []int64
 	Keyword  string
 	Language string
+	Filename string
 
 	IsKeywordFuzzy bool
 
@@ -3,30 +3,8 @@
 
 package internal
 
-import (
-	"strings"
-
-	"code.gitea.io/gitea/modules/indexer/internal"
-	"code.gitea.io/gitea/modules/log"
-)
+import "code.gitea.io/gitea/modules/indexer/internal"
 
 func FilenameIndexerID(repoID int64, filename string) string {
 	return internal.Base36(repoID) + "_" + filename
 }
-
-func ParseIndexerID(indexerID string) (int64, string) {
-	index := strings.IndexByte(indexerID, '_')
-	if index == -1 {
-		log.Error("Unexpected ID in repo indexer: %s", indexerID)
-	}
-	repoID, _ := internal.ParseBase36(indexerID[:index])
-	return repoID, indexerID[index+1:]
-}
-
-func FilenameOfIndexerID(indexerID string) string {
-	index := strings.IndexByte(indexerID, '_')
-	if index == -1 {
-		log.Error("Unexpected ID in repo indexer: %s", indexerID)
-	}
-	return indexerID[index+1:]
-}
@@ -35,6 +35,8 @@ type SearchResultLanguages = internal.SearchResultLanguages
 
 type SearchOptions = internal.SearchOptions
 
+var CodeSearchOptions = [2]string{"exact", "fuzzy"}
+
 func indices(content string, selectionStartIndex, selectionEndIndex int) (int, int) {
 	startIndex := selectionStartIndex
 	numLinesBefore := 0
@@ -35,6 +35,7 @@ func Code(ctx *context.Context) {
 
 	language := ctx.FormTrim("l")
 	keyword := ctx.FormTrim("q")
+	path := ctx.FormTrim("path")
 
 	isFuzzy := ctx.FormOptionalBool("fuzzy").ValueOrDefault(true)
 	if mode := ctx.FormTrim("mode"); len(mode) > 0 {
@@ -91,6 +92,7 @@ func Code(ctx *context.Context) {
 		Keyword:        keyword,
 		IsKeywordFuzzy: isFuzzy,
 		Language:       language,
+		Filename:       path,
 		Paginator: &db.ListOptions{
 			Page:     page,
 			PageSize: setting.UI.RepoSearchPagingNum,
@@ -54,6 +54,7 @@ func Search(ctx *context.Context) {
 	language := ctx.FormTrim("l")
 	keyword := ctx.FormTrim("q")
 
+	path := ctx.FormTrim("path")
 	mode := ExactSearchMode
 	if modeStr := ctx.FormString("mode"); len(modeStr) > 0 {
 		mode = searchModeFromString(modeStr)
@@ -63,6 +64,7 @@ func Search(ctx *context.Context) {
 
 	ctx.Data["Keyword"] = keyword
 	ctx.Data["Language"] = language
+	ctx.Data["CodeSearchPath"] = path
 	ctx.Data["CodeSearchMode"] = mode.String()
 	ctx.Data["PageIsViewCode"] = true
 
@@ -86,6 +88,7 @@ func Search(ctx *context.Context) {
 			Keyword:        keyword,
 			IsKeywordFuzzy: mode == FuzzySearchMode,
 			Language:       language,
+			Filename:       path,
 			Paginator: &db.ListOptions{
 				Page:     page,
 				PageSize: setting.UI.RepoSearchPagingNum,
@@ -100,11 +103,12 @@ func Search(ctx *context.Context) {
 		} else {
 			ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable(ctx)
 		}
-		ctx.Data["CodeSearchOptions"] = []string{"exact", "fuzzy"}
+		ctx.Data["CodeSearchOptions"] = code_indexer.CodeSearchOptions
 	} else {
 		grepOpt := git.GrepOptions{
 			ContextLineNumber: 1,
 			RefName:           ctx.Repo.RefName,
+			Filename:          path,
 		}
 		switch mode {
 		case FuzzySearchMode:
@@ -130,10 +134,12 @@ func Search(ctx *context.Context) {
 				// UpdatedUnix: not supported yet
 				// Language:    not supported yet
 				// Color:       not supported yet
-				Lines: code_indexer.HighlightSearchResultCode(r.Filename, r.LineNumbers, r.HighlightedRanges, strings.Join(r.LineCodes, "\n")),
+				Lines: code_indexer.HighlightSearchResultCode(
+					r.Filename, r.LineNumbers, r.HighlightedRanges,
+					strings.Join(r.LineCodes, "\n")),
 			})
 		}
-		ctx.Data["CodeSearchOptions"] = []string{"exact", "union", "regexp"}
+		ctx.Data["CodeSearchOptions"] = git.GrepSearchOptions
 	}
 
 	ctx.Data["CodeIndexerDisabled"] = !setting.Indexer.RepoIndexerEnabled
@@ -39,6 +39,7 @@ import (
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/gitrepo"
 	"code.gitea.io/gitea/modules/highlight"
+	code_indexer "code.gitea.io/gitea/modules/indexer/code"
 	"code.gitea.io/gitea/modules/lfs"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/markup"
@@ -1152,6 +1153,12 @@ PostRecentBranchCheck:
 	ctx.Data["TreeNames"] = treeNames
 	ctx.Data["BranchLink"] = branchLink
 	ctx.Data["CodeIndexerDisabled"] = !setting.Indexer.RepoIndexerEnabled
+	if setting.Indexer.RepoIndexerEnabled {
+		ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable(ctx)
+		ctx.Data["CodeSearchOptions"] = code_indexer.CodeSearchOptions
+	} else {
+		ctx.Data["CodeSearchOptions"] = git.GrepSearchOptions
+	}
 	ctx.HTML(http.StatusOK, tplRepoHome)
 }
 
@@ -39,6 +39,7 @@ func CodeSearch(ctx *context.Context) {
 
 	language := ctx.FormTrim("l")
 	keyword := ctx.FormTrim("q")
+	path := ctx.FormTrim("path")
 
 	isFuzzy := ctx.FormOptionalBool("fuzzy").ValueOrDefault(true)
 	if mode := ctx.FormTrim("mode"); len(mode) > 0 {
@@ -88,6 +89,7 @@ func CodeSearch(ctx *context.Context) {
 		Keyword:        keyword,
 		IsKeywordFuzzy: isFuzzy,
 		Language:       language,
+		Filename:       path,
 		Paginator: &db.ListOptions{
 			Page:     page,
 			PageSize: setting.UI.RepoSearchPagingNum,
@@ -11,12 +11,6 @@
 			{{if $description}}<span class="description">{{$description | RenderCodeBlock}}</span>{{else}}<span class="no-description text-italic">{{ctx.Locale.Tr "repo.no_desc"}}</span>{{end}}
 			{{if .Repository.Website}}<a class="link" href="{{.Repository.Website}}">{{.Repository.Website}}</a>{{end}}
 		</div>
-		<form class="ignore-dirty" action="{{.RepoLink}}/search/{{if .CodeIndexerDisabled}}{{.BranchNameSubURL}}{{end}}" method="get" data-test-tag="codesearch">
-			<div class="ui small action input">
-				<input name="q" value="{{.Keyword}}" placeholder="{{ctx.Locale.Tr "search.code_kind"}}">
-				{{template "shared/search/button"}}
-			</div>
-		</form>
 	</div>
 	<div class="tw-flex tw-items-center tw-flex-wrap tw-gap-2 tw-my-2" id="repo-topics">
 	{{/* it should match the code in issue-home.js */}}
@@ -158,6 +152,22 @@
 		{{else if .IsBlame}}
 			{{template "repo/blame" .}}
 		{{else}}{{/* IsViewDirectory */}}
+			{{/* display the search bar only if */}}
+			{{$isCommit := StringUtils.HasPrefix .BranchNameSubURL "commit"}}
+			{{if and (not $isCommit) (or .CodeIndexerDisabled (and (not .TagName) (eq .Repository.DefaultBranch .BranchName)))}}
+				<div class="code-search tw-w-full tw-py-2 tw-px-2 tw-bg-box-header tw-rounded-t tw-border tw-border-secondary tw-border-b-0">
+					<form class="ui form ignore-dirty" action="{{.RepoLink}}/search/{{if .CodeIndexerDisabled}}{{.BranchNameSubURL}}{{end}}" method="get" data-test-tag="codesearch">
+						<input type="hidden" name="path" value="{{.TreePath | PathEscapeSegments}}">
+						{{template "shared/search/combo_multi"
+							dict
+								"Value" .Keyword
+								"Disabled" .CodeIndexerUnavailable
+								"Placeholder" (ctx.Locale.Tr "search.code_kind")
+								"Selected" (index .CodeSearchOptions 0)
+								"Options" .CodeSearchOptions}}
+					</form>
+				</div>
+			{{end}}
 			{{template "repo/view_list" .}}
 		{{end}}
 	</div>
@@ -1,7 +1,7 @@
 <div class="flex-text-block tw-flex-wrap">
 	{{range $term := .SearchResultLanguages}}
 	<a class="ui {{if eq $.Language $term.Language}}primary{{end}} basic label tw-m-0"
-		href="?q={{$.Keyword}}{{if ne $.Language $term.Language}}&l={{$term.Language}}{{end}}&mode={{$.CodeSearchMode}}">
+		href="?q={{$.Keyword}}{{if ne $.Language $term.Language}}&l={{$term.Language}}{{end}}&mode={{$.CodeSearchMode}}&path={{$.CodeSearchPath}}">
 		<i class="color-icon tw-mr-2" style="background-color: {{$term.Color}}"></i>
 		{{$term.Language}}
 		<div class="detail">{{$term.Count}}</div>
@@ -13,7 +13,7 @@
 	{{$repo := or $.Repo (index $.RepoMaps .RepoID)}}
 	<details class="tw-group diff-file-box diff-box file-content non-diff-file-content repo-search-result" open>
 		<summary class="tw-list-none">
-			<h4 class="ui top attached header tw-font-normal tw-flex tw-flex-wrap tw-transform-reset">
+			<h4 class="ui top attached header tw-font-normal tw-flex tw-items-center tw-flex-wrap tw-transform-reset">
 				<span class="tw-h-4 tw-transition -tw-rotate-90 group-open:tw-rotate-0">
 					{{svg "octicon-chevron-down"}}
 				</span>
@@ -1,4 +1,5 @@
 <form class="ui form ignore-dirty">
+	<input type="hidden" name="path" value="{{.CodeSearchPath}}">
 	{{template "shared/search/combo_multi"
 		dict
 			"Value" .Keyword
@@ -14,6 +15,23 @@
 		<p>{{ctx.Locale.Tr "search.code_search_unavailable"}}</p>
 	</div>
 {{else}}
+	{{if .CodeSearchPath}}
+		<div class="tw-mb-4">
+			<span class="breadcrumb">
+				<a class="section" href="?q={{.Keyword}}&mode={{.CodeSearchMode}}">@</a>
+				{{$href := ""}}
+				{{- range $i, $path := StringUtils.Split .CodeSearchPath "/" -}}
+					{{if eq $i 0}}
+						{{$href = $path}}
+					{{else}}
+						{{$href = StringUtils.Join (StringUtils.Make $href $path) "/"}}
+					{{end}}
+					<span class="breadcrumb-divider">/</span>
+					<span class="section"><a href="?q={{$.Keyword}}&mode={{$.CodeSearchMode}}&path={{$href}}">{{$path}}</a></span>
+				{{- end -}}
+			</span>
+		</div>
+	{{end}}
 	{{if .CodeIndexerDisabled}}
 		<div class="ui message" data-test-tag="grep">
 			<p>{{ctx.Locale.Tr "search.code_search_by_git_grep"}}</p>
@@ -1009,16 +1009,29 @@ func TestRepoCodeSearchForm(t *testing.T) {
 		resp := MakeRequest(t, req, http.StatusOK)
 
 		htmlDoc := NewHTMLParser(t, resp.Body)
-		action, exists := htmlDoc.doc.Find("form[data-test-tag=codesearch]").Attr("action")
+		formEl := htmlDoc.doc.Find("form[data-test-tag=codesearch]")
+
+		action, exists := formEl.Attr("action")
 		assert.True(t, exists)
 
 		branchSubURL := "/branch/master"
 
 		if indexer {
 			assert.NotContains(t, action, branchSubURL)
 		} else {
 			assert.Contains(t, action, branchSubURL)
 		}
+
+		filepath, exists := formEl.Find("input[name=path]").Attr("value")
+		assert.True(t, exists)
+		assert.Empty(t, filepath)
+
+		req = NewRequest(t, "GET", "/user2/glob/src/branch/master/x/y")
+		resp = MakeRequest(t, req, http.StatusOK)
+
+		filepath, exists = NewHTMLParser(t, resp.Body).doc.
+			Find("form[data-test-tag=codesearch] input[name=path]").
+			Attr("value")
+		assert.True(t, exists)
+		assert.Equal(t, "x/y", filepath)
 	}
 
 	t.Run("indexer disabled", func(t *testing.T) {
@@ -389,6 +389,11 @@ td .commit-summary {
 	cursor: default;
 }
 
+.code-search + #repo-files-table {
+	border-top-left-radius: 0;
+	border-top-right-radius: 0;
+}
+
 .view-raw {
 	display: flex;
 	justify-content: center;