Add support for indexing arch files

Author: dragon
Date: 2024-12-06 14:51:49 +08:00
parent c287a15996
commit 37a300b105
5 changed files with 96 additions and 29 deletions

View file

@@ -4,6 +4,7 @@
 package arch
 
 import (
+	"archive/tar"
 	"bufio"
 	"bytes"
 	"encoding/hex"
@@ -26,6 +27,8 @@ import (
 const (
 	PropertyDescription  = "arch.description"
+	PropertyFiles        = "arch.files"
+
 	PropertyArch         = "arch.architecture"
 	PropertyDistribution = "arch.distribution"
@@ -85,6 +88,8 @@ type FileMetadata struct {
 	Packager  string `json:"packager"`
 	Arch      string `json:"arch"`
 	PgpSigned string `json:"pgp"`
+
+	Files []string `json:"files,omitempty"`
 }
 
 // ParsePackage Function that receives arch package archive data and returns it's metadata.
@@ -127,6 +132,8 @@ func ParsePackage(r *packages.HashedBuffer) (*Package, error) {
 	var pkg *Package
 	var mTree bool
+
+	files := make([]string, 0)
 
 	for {
 		f, err := tarball.Read()
 		if err == io.EOF {
@@ -135,6 +142,11 @@ func ParsePackage(r *packages.HashedBuffer) (*Package, error) {
 		if err != nil {
 			return nil, err
 		}
+
+		// ref: https://gitlab.archlinux.org/pacman/pacman/-/blob/91546004903eea5d5267d59898a6029ba1d64031/lib/libalpm/add.c#L529-L533
+		if !strings.HasPrefix(f.Name(), ".") {
+			files = append(files, (f.Header.(*tar.Header)).Name)
+		}
 		switch f.Name() {
 		case ".PKGINFO":
 			pkg, err = ParsePackageInfo(tarballType, f)
@@ -155,7 +167,7 @@ func ParsePackage(r *packages.HashedBuffer) (*Package, error) {
 	if !mTree {
 		return nil, util.NewInvalidArgumentErrorf(".MTREE file not found")
 	}
-
+	pkg.FileMetadata.Files = files
 	pkg.FileMetadata.CompressedSize = r.Size()
 	pkg.FileMetadata.MD5 = hex.EncodeToString(md5)
 	pkg.FileMetadata.SHA256 = hex.EncodeToString(sha256)
@@ -339,3 +351,12 @@ func (p *Package) Desc() string {
 	}
 	return buf.String()
 }
+
+func (p *Package) Files() string {
+	var buf bytes.Buffer
+	buf.WriteString("%FILES%\n")
+	for _, item := range p.FileMetadata.Files {
+		_, _ = fmt.Fprintf(&buf, "%s\n", item)
+	}
+	return buf.String()
+}
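
The filter above mirrors pacman's own rule (see the linked add.c): dot-prefixed archive entries such as .PKGINFO and .MTREE are package metadata, not payload, so only the remaining names end up in the %FILES% list. A minimal standalone sketch of that filtering plus the rendering the new Files() method performs; buildFilesSection is a hypothetical helper written for illustration, not part of this commit:

package main

import (
	"bytes"
	"fmt"
	"strings"
)

// buildFilesSection reproduces the logic of the Files() method added above:
// it renders collected entry names as pacman's %FILES% section, one path per line.
func buildFilesSection(files []string) string {
	var buf bytes.Buffer
	buf.WriteString("%FILES%\n")
	for _, item := range files {
		_, _ = fmt.Fprintf(&buf, "%s\n", item)
	}
	return buf.String()
}

func main() {
	// Entry names as they might appear while walking a package tarball.
	entries := []string{".PKGINFO", ".MTREE", "usr/", "usr/bin/", "usr/bin/zstd"}
	files := make([]string, 0)
	for _, name := range entries {
		// Same filter as the diff: skip dot-prefixed metadata entries.
		if !strings.HasPrefix(name, ".") {
			files = append(files, name)
		}
	}
	fmt.Print(buildFilesSection(files))
	// Prints:
	// %FILES%
	// usr/
	// usr/bin/
	// usr/bin/zstd
}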

View file

@@ -344,8 +344,8 @@ func TestValidatePackageSpec(t *testing.T) {
 	})
 }
 
-func TestDescString(t *testing.T) {
-	const pkgdesc = `%FILENAME%
+func TestDescAndFileString(t *testing.T) {
+	const pkgDesc = `%FILENAME%
 zstd-1.5.5-1-x86_64.pkg.tar.zst
 %NAME%
@@ -415,6 +415,12 @@ ninja
 dummy5
 dummy6
+`
+
+	const pkgFiles = `%FILES%
+usr/
+usr/bin/
+usr/bin/zstd
 `
 
 	md := &Package{
@@ -441,7 +447,9 @@ dummy6
 			BuildDate: 1681646714,
 			Packager:  "Jelle van der Waa <jelle@archlinux.org>",
 			Arch:      "x86_64",
+			Files:     []string{"usr/", "usr/bin/", "usr/bin/zstd"},
 		},
 	}
 
-	require.Equal(t, pkgdesc, md.Desc())
+	require.Equal(t, pkgDesc, md.Desc())
+	require.Equal(t, pkgFiles, md.Files())
 }

View file

@@ -26,7 +26,7 @@ import (
 var (
 	archPkgOrSig = regexp.MustCompile(`^.*\.pkg\.tar\.\w+(\.sig)*$`)
-	archDBOrSig  = regexp.MustCompile(`^.*.db(\.tar\.gz)*(\.sig)*$`)
+	archDBOrSig  = regexp.MustCompile(`^.*.(db|files)(\.tar\.gz)*(\.sig)*$`)
 
 	locker = sync.NewExclusivePool()
 )
@@ -115,6 +115,7 @@ func PushPackage(ctx *context.Context) {
 	properties := map[string]string{
 		arch_module.PropertyDescription:  p.Desc(),
+		arch_module.PropertyFiles:        p.Files(),
 		arch_module.PropertyArch:         p.FileMetadata.Arch,
 		arch_module.PropertyDistribution: group,
 	}
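
The widened archDBOrSig pattern is what lets the router serve the new files indexes alongside the db ones. A quick standalone sanity check of which names it accepts; the only assumption is the standard library, with the pattern copied verbatim from the diff:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Pattern from the diff: now matches both "db" and "files" indexes,
	// each optionally suffixed with .tar.gz and/or .sig.
	archDBOrSig := regexp.MustCompile(`^.*.(db|files)(\.tar\.gz)*(\.sig)*$`)

	for _, name := range []string{
		"base.db", "base.db.tar.gz", "base.db.sig",
		"base.files", "base.files.tar.gz", "base.files.tar.gz.sig",
	} {
		fmt.Printf("%-24s %v\n", name, archDBOrSig.MatchString(name)) // all print true
	}
}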

View file

@@ -225,24 +225,46 @@ func createDB(ctx context.Context, ownerID int64, group, arch string) (*packages
 			if err != nil {
 				return nil, err
 			}
-			if len(pps) >= 1 {
-				meta := []byte(pps[0].Value)
+			if len(pps) == 0 {
+				continue
+			}
+
+			pkgDesc := []byte(pps[0].Value)
 			header := &tar.Header{
 				Name: pkg.Name + "-" + ver.Version + "/desc",
-				Size: int64(len(meta)),
+				Size: int64(len(pkgDesc)),
 				Mode: int64(os.ModePerm),
 			}
 			if err = tw.WriteHeader(header); err != nil {
 				return nil, err
 			}
-			if _, err := tw.Write(meta); err != nil {
+			if _, err := tw.Write(pkgDesc); err != nil {
 				return nil, err
 			}
+
+			pfs, err := packages_model.GetPropertiesByName(
+				ctx, packages_model.PropertyTypeFile, pf.ID, arch_module.PropertyFiles,
+			)
+			if err != nil {
+				return nil, err
+			}
+			if len(pfs) >= 1 {
+				pkgFiles := []byte(pfs[0].Value)
+				header := &tar.Header{
+					Name: pkg.Name + "-" + ver.Version + "/files",
+					Size: int64(len(pkgFiles)),
+					Mode: int64(os.ModePerm),
+				}
+				if err = tw.WriteHeader(header); err != nil {
+					return nil, err
+				}
+				if _, err := tw.Write(pkgFiles); err != nil {
+					return nil, err
+				}
+			}
+
 			count++
 			break
 		}
 	}
-	}
 
 	if count == 0 {
 		return nil, io.EOF
 	}
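
createDB now emits two tar entries per package version, <name>-<version>/desc and <name>-<version>/files, into the same gzipped tarball. A hedged sketch of what consuming that output looks like; dumpFilesDB is a hypothetical helper, and the local file name base.files.tar.gz is an assumption about where the downloaded index was saved:

package main

import (
	"archive/tar"
	"compress/gzip"
	"fmt"
	"io"
	"os"
	"strings"
)

// dumpFilesDB walks a pacman-style database tarball (gzipped tar with one
// "<name>-<version>/files" entry per package) and prints each %FILES% list.
func dumpFilesDB(r io.Reader) error {
	gz, err := gzip.NewReader(r)
	if err != nil {
		return err
	}
	defer gz.Close()

	tr := tar.NewReader(gz)
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}
		// Skip the /desc entries; only the /files entries are of interest here.
		if !strings.HasSuffix(hdr.Name, "/files") {
			continue
		}
		data, err := io.ReadAll(tr)
		if err != nil {
			return err
		}
		fmt.Printf("== %s ==\n%s", hdr.Name, data)
	}
}

func main() {
	f, err := os.Open("base.files.tar.gz") // hypothetical local copy of the index
	if err != nil {
		panic(err)
	}
	defer f.Close()
	if err := dumpFilesDB(f); err != nil {
		panic(err)
	}
}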

View file

@@ -223,8 +223,14 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
 		t.Run(fmt.Sprintf("RepositoryDB[%s]", group), func(t *testing.T) {
 			defer tests.PrintCurrentTest(t)()
 
-			req := NewRequest(t, "GET", rootURL+"/repository.key")
-			respPub := MakeRequest(t, req, http.StatusOK)
+			req := NewRequest(t, "GET", groupURL+"/x86_64/base.db.tar.gz")
+			MakeRequest(t, req, http.StatusOK)
+
+			req = NewRequest(t, "GET", groupURL+"/x86_64/base.files")
+			MakeRequest(t, req, http.StatusOK)
+
+			req = NewRequest(t, "GET", groupURL+"/x86_64/base.files.tar.gz")
+			MakeRequest(t, req, http.StatusOK)
 
 			req = NewRequest(t, "GET", groupURL+"/x86_64/base.db")
 			respPkg := MakeRequest(t, req, http.StatusOK)
@@ -232,13 +238,17 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
 			req = NewRequest(t, "GET", groupURL+"/x86_64/base.db.sig")
 			respSig := MakeRequest(t, req, http.StatusOK)
+
+			req = NewRequest(t, "GET", rootURL+"/repository.key")
+			respPub := MakeRequest(t, req, http.StatusOK)
 
 			if err := gpgVerify(respPub.Body.Bytes(), respSig.Body.Bytes(), respPkg.Body.Bytes()); err != nil {
 				t.Fatal(err)
 			}
 
 			files, err := listTarGzFiles(respPkg.Body.Bytes())
 			require.NoError(t, err)
-			require.Len(t, files, 1)
+			require.Len(t, files, 2)
 
 			for s, d := range files {
+				if strings.HasSuffix(s, "/desc") {
 					name := getProperty(string(d.Data), "NAME")
 					ver := getProperty(string(d.Data), "VERSION")
 					require.Equal(t, name+"-"+ver+"/desc", s)
@@ -249,6 +259,11 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
 					decodeString, err := base64.StdEncoding.DecodeString(pgp)
 					require.NoError(t, err)
 					require.Equal(t, respSig.Body.Bytes(), decodeString)
+				} else if strings.HasSuffix(s, "/files") {
+					require.True(t, strings.HasPrefix(string(d.Data), "%FILES%"))
+				} else {
+					require.Failf(t, "unknown item", "fileName:%s", s)
+				}
 			}
 		})
@@ -275,7 +290,7 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
 			respPkg := MakeRequest(t, req, http.StatusOK)
 
 			files, err := listTarGzFiles(respPkg.Body.Bytes())
 			require.NoError(t, err)
-			require.Len(t, files, 1)
+			require.Len(t, files, 2)
 
 			req = NewRequestWithBody(t, "DELETE", groupURL+"/test2/1.0.0-1/any", nil).
 				AddBasicAuth(user.Name)
@@ -347,7 +362,7 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
 			files, err := listTarGzFiles(respPkg.Body.Bytes())
 			require.NoError(t, err)
-			require.Len(t, files, 1)
+			require.Len(t, files, 2)
 
 			req = NewRequestWithBody(t, "PUT", rootURL, bytes.NewReader(pkgs["otherXZ"])).
 				AddBasicAuth(user.Name)
@@ -358,7 +373,7 @@ HMhNSS1IzUsBcpJAPFAwwUXSM0u4BjoaR8EoGAWjgGQAAILFeyQADAAA
 			files, err = listTarGzFiles(respPkg.Body.Bytes())
 			require.NoError(t, err)
-			require.Len(t, files, 2)
+			require.Len(t, files, 4)
 		})
 }
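
On the client side, these are the indexes that pacman's file queries consume: pacman -Fy downloads each repository's .files database, and pacman -F <path> then searches the %FILES% lists built above. This is stated from general pacman behavior, not from anything in this commit.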