diff --git a/go.mod b/go.mod
index 7b7a51efbb..7d783a7524 100644
--- a/go.mod
+++ b/go.mod
@@ -15,7 +15,9 @@ require (
gitea.com/lunny/levelqueue v0.4.2-0.20220729054728-f020868cc2f7
github.com/42wim/sshsig v0.0.0-20211121163825-841cf5bbc121
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
+ github.com/DataDog/zstd v1.4.5
github.com/NYTimes/gziphandler v1.1.1
+ github.com/ProtonMail/gopenpgp/v2 v2.7.1
github.com/PuerkitoBio/goquery v1.8.1
github.com/alecthomas/chroma/v2 v2.7.0
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb
@@ -131,12 +133,12 @@ require (
git.sr.ht/~mariusor/go-xsd-duration v0.0.0-20220703122237-02e73435a078 // indirect
github.com/ClickHouse/ch-go v0.55.0 // indirect
github.com/ClickHouse/clickhouse-go/v2 v2.9.1 // indirect
- github.com/DataDog/zstd v1.4.5 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver/v3 v3.2.0 // indirect
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
github.com/Microsoft/go-winio v0.6.1 // indirect
github.com/ProtonMail/go-crypto v0.0.0-20230528122434-6f98819771a1 // indirect
+ github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect
github.com/RoaringBitmap/roaring v1.2.3 // indirect
github.com/acomagu/bufpipe v1.0.4 // indirect
github.com/andybalholm/brotli v1.0.5 // indirect
diff --git a/go.sum b/go.sum
index c3ac719f3f..686b5505c3 100644
--- a/go.sum
+++ b/go.sum
@@ -105,8 +105,13 @@ github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cq
github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8/go.mod h1:I0gYDMZ6Z5GRU7l58bNFSkPTFN6Yl12dsUlAZ8xy98g=
+github.com/ProtonMail/go-crypto v0.0.0-20230321155629-9a39f2531310/go.mod h1:8TI4H3IbrackdNgv+92dI+rhpCaLqM0IfpgCgenFvRE=
github.com/ProtonMail/go-crypto v0.0.0-20230528122434-6f98819771a1 h1:JMDGhoQvXNTqH6Y3MC0IUw6tcZvaUdujNqzK2HYWZc8=
github.com/ProtonMail/go-crypto v0.0.0-20230528122434-6f98819771a1/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0=
+github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k=
+github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw=
+github.com/ProtonMail/gopenpgp/v2 v2.7.1 h1:Awsg7MPc2gD3I7IFac2qE3Gdls0lZW8SzrFZ3k1oz0s=
+github.com/ProtonMail/gopenpgp/v2 v2.7.1/go.mod h1:/BU5gfAVwqyd8EfC3Eu7zmuhwYQpKs+cGD8M//iiaxs=
github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM=
github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
diff --git a/models/packages/descriptor.go b/models/packages/descriptor.go
index ee35ffe0f2..4a03006de4 100644
--- a/models/packages/descriptor.go
+++ b/models/packages/descriptor.go
@@ -13,6 +13,7 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/packages/alpine"
+ "code.gitea.io/gitea/modules/packages/arch"
"code.gitea.io/gitea/modules/packages/cargo"
"code.gitea.io/gitea/modules/packages/chef"
"code.gitea.io/gitea/modules/packages/composer"
@@ -140,6 +141,8 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc
switch p.Type {
case TypeAlpine:
metadata = &alpine.VersionMetadata{}
+ case TypeArch:
+ metadata = &arch.Metadata{}
case TypeCargo:
metadata = &cargo.Metadata{}
case TypeChef:
diff --git a/models/packages/package.go b/models/packages/package.go
index 380a076f9d..99c478fa79 100644
--- a/models/packages/package.go
+++ b/models/packages/package.go
@@ -31,6 +31,7 @@ type Type string
// List of supported packages
const (
TypeAlpine Type = "alpine"
+ TypeArch Type = "arch"
TypeCargo Type = "cargo"
TypeChef Type = "chef"
TypeComposer Type = "composer"
@@ -55,6 +56,7 @@ const (
var TypeList = []Type{
TypeAlpine,
+ TypeArch,
TypeCargo,
TypeChef,
TypeComposer,
@@ -82,6 +84,8 @@ func (pt Type) Name() string {
switch pt {
case TypeAlpine:
return "Alpine"
+ case TypeArch:
+ return "Arch"
case TypeCargo:
return "Cargo"
case TypeChef:
@@ -131,6 +135,8 @@ func (pt Type) SVGName() string {
switch pt {
case TypeAlpine:
return "gitea-alpine"
+ case TypeArch:
+ return "gitea-arch"
case TypeCargo:
return "gitea-cargo"
case TypeChef:
diff --git a/modules/packages/arch/metadata.go b/modules/packages/arch/metadata.go
new file mode 100644
index 0000000000..5433138a67
--- /dev/null
+++ b/modules/packages/arch/metadata.go
@@ -0,0 +1,255 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package arch
+
+import (
+ "crypto/md5"
+ "crypto/sha256"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io/fs"
+ "os"
+ "path"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/DataDog/zstd"
+ "github.com/mholt/archiver/v3"
+)
+
+// Metadata represents the metadata of an arch package.
+type Metadata struct {
+ Filename string
+ Name string
+ Base string
+ Version string
+ Description string
+ CompressedSize int64
+ CompressedSizeMib string
+ InstalledSize int64
+ InstalledSizeMib string
+ MD5 string
+ SHA256 string
+ URL string
+ BuildDate int64
+ BuildDateStr string
+ BaseDomain string
+ Packager string
+ Provides []string
+ License []string
+ Arch []string
+ Depends []string
+ OptDepends []string
+ MakeDepends []string
+ CheckDepends []string
+ Backup []string
+}
+
+// EjectMetadata parses the .PKGINFO contained in an arch package archive and returns its metadata.
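+//
+// The archive is a zstd-compressed tar whose .PKGINFO entry is a plain list of
+// "key = value" lines, for example (illustrative values only):
+//
+//	pkgname = mypkg
+//	pkgbase = mypkg
+//	pkgver = 1.0.0-1
+//	pkgdesc = An example package
+//	size = 1337
+//	builddate = 1678886400
+//	depend = glibc
+//	depend = bash
+//
+// ejectString, ejectStrings and ejectInt64 below scan this text for the
+// corresponding fields.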
+func EjectMetadata(filename, domain string, pkg []byte) (*Metadata, error) {
+ d, err := zstd.Decompress(nil, pkg)
+ if err != nil {
+ return nil, err
+ }
+ splt := strings.Split(string(d), "PKGINFO")
+ if len(splt) < 2 {
+ return nil, errors.New("unable to eject .PKGINFO from archive")
+ }
+ raw := splt[1]
+ if len(raw) > 10000 {
+ raw = raw[:10000]
+ }
+ compsize := int64(len(pkg))
+ inssize := ejectInt64(raw, "size")
+ unixbuilddate := ejectInt64(raw, "builddate")
+ return &Metadata{
+ Filename: filename,
+ Name: ejectString(raw, "pkgname"),
+ Base: ejectString(raw, "pkgbase"),
+ Version: ejectString(raw, "pkgver"),
+ Description: ejectString(raw, "pkgdesc"),
+ CompressedSize: compsize,
+ CompressedSizeMib: ByteCountSI(compsize),
+ InstalledSize: inssize,
+ InstalledSizeMib: ByteCountSI(inssize),
+ MD5: md5sum(pkg),
+ SHA256: sha256sum(pkg),
+ URL: ejectString(raw, "url"),
+ BuildDate: unixbuilddate,
+ BuildDateStr: ReadableTime(unixbuilddate),
+ BaseDomain: domain,
+ Packager: ejectString(raw, "packager"),
+ Provides: ejectStrings(raw, "provides"),
+ License: ejectStrings(raw, "license"),
+ Arch: ejectStrings(raw, "arch"),
+ Depends: ejectStrings(raw, "depend"),
+ OptDepends: ejectStrings(raw, "optdepend"),
+ MakeDepends: ejectStrings(raw, "makedepend"),
+ CheckDepends: ejectStrings(raw, "checkdepend"),
+ Backup: ejectStrings(raw, "backup"),
+ }, nil
+}
+
+func ejectString(raw, field string) string {
+ splitted := strings.Split(raw, "\n"+field+" = ")
+ if len(splitted) < 2 {
+ return ``
+ }
+ return strings.Split(splitted[1], "\n")[0]
+}
+
+func ejectStrings(raw, field string) []string {
+ splitted := strings.Split(raw, "\n"+field+" = ")
+ if len(splitted) < 2 {
+ return nil
+ }
+ var rez []string
+ for i, v := range splitted {
+ if i == 0 {
+ continue
+ }
+ rez = append(rez, strings.Split(v, "\n")[0])
+ }
+ return rez
+}
+
+func ejectInt64(raw, field string) int64 {
+ splitted := strings.Split(raw, "\n"+field+" = ")
+ if len(splitted) < 2 {
+ return 0
+ }
+ i, err := strconv.ParseInt(strings.Split(splitted[1], "\n")[0], 10, 64)
+ if err != nil {
+ return 0
+ }
+ return i
+}
+
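+// ByteCountSI formats a byte count using SI (decimal) units,
+// e.g. ByteCountSI(1500000) == "1.5 MB".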
+func ByteCountSI(b int64) string {
+ const unit = 1000
+ if b < unit {
+ return fmt.Sprintf("%d B", b)
+ }
+ div, exp := int64(unit), 0
+ for n := b / unit; n >= unit; n /= unit {
+ div *= unit
+ exp++
+ }
+ return fmt.Sprintf("%.1f %cB", float64(b)/float64(div), "kMGTPE"[exp])
+}
+
+func ReadableTime(unix int64) string {
+ return time.Unix(unix, 0).Format(time.DateTime)
+}
+
+func md5sum(data []byte) string {
+ sum := md5.Sum(data)
+ return hex.EncodeToString(sum[:])
+}
+
+func sha256sum(data []byte) string {
+ sum := sha256.Sum256(data)
+ return hex.EncodeToString(sum[:])
+}
+
+// GetDbDesc returns the pacman package description in the raw (unarchived)
+// database format.
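+//
+// Example output (illustrative values only):
+//
+//	%FILENAME%
+//	mypkg-1.0.0-1-x86_64.pkg.tar.zst
+//
+//	%NAME%
+//	mypkg
+//
+//	%VERSION%
+//	1.0.0-1
+//
+//	...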
+func (m *Metadata) GetDbDesc() string {
+ return strings.Join(rmEmptyStrings([]string{
+ formatField("FILENAME", m.Filename),
+ formatField("NAME", m.Name),
+ formatField("BASE", m.Base),
+ formatField("VERSION", m.Version),
+ formatField("DESC", m.Description),
+ formatField("CSIZE", m.CompressedSize),
+ formatField("ISIZE", m.InstalledSize),
+ formatField("MD5SUM", m.MD5),
+ formatField("SHA256SUM", m.SHA256),
+ formatField("URL", m.URL),
+ formatField("LICENSE", m.License),
+ formatField("ARCH", m.Arch),
+ formatField("BUILDDATE", m.BuildDate),
+ formatField("PACKAGER", m.Packager),
+ formatField("PROVIDES", m.Provides),
+ formatField("DEPENDS", m.Depends),
+ formatField("OPTDEPENDS", m.OptDepends),
+ formatField("MAKEDEPENDS", m.MakeDepends),
+ formatField("CHECKDEPENDS", m.CheckDepends),
+ }), "\n\n") + "\n\n"
+}
+
+func formatField(field string, value any) string {
+ switch value := value.(type) {
+ case []string:
+ if value == nil {
+ return ``
+ }
+ val := strings.Join(value, "\n")
+ return fmt.Sprintf("%%%s%%\n%s", field, val)
+ case string:
+ return fmt.Sprintf("%%%s%%\n%s", field, value)
+ case int64:
+ return fmt.Sprintf("%%%s%%\n%d", field, value)
+ }
+ return ``
+}
+
+func rmEmptyStrings(s []string) []string {
+ var r []string
+ for _, str := range s {
+ if str != "" {
+ r = append(r, str)
+ }
+ }
+ return r
+}
+
+// PutToDb takes the path to a directory holding an unpacked pacman database
+// and writes the description file for the current package metadata into it.
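+// The resulting layout matches an unpacked pacman database, e.g. for a
+// hypothetical package: <dir>/mypkg-1.0.0-1/desc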
+func (m *Metadata) PutToDb(dir string, mode fs.FileMode) error {
+ descdir := path.Join(dir, m.Name+"-"+m.Version)
+ err := os.MkdirAll(descdir, mode)
+ if err != nil {
+ return err
+ }
+ return os.WriteFile(path.Join(descdir, "desc"), []byte(m.GetDbDesc()), mode)
+}
+
+// UnpackDb takes the path to a raw database archive and a destination
+// directory and unpacks the database contents into that directory.
+func UnpackDb(src, dst string) error {
+ return archiver.DefaultTarGz.Unarchive(src, dst)
+}
+
+// PackDb takes the path to a source directory with raw pacman description
+// files, creates a db.tar.gz archive and a related symlink at the provided
+// destination path.
+func PackDb(src, dst string) error {
+ if !strings.HasSuffix(dst, ".db.tar.gz") {
+ return fmt.Errorf("dst should end with '.db.tar.gz': %s", dst)
+ }
+ symlink := strings.TrimSuffix(dst, ".tar.gz")
+ if _, err := os.Stat(dst); err == nil {
+ err = os.RemoveAll(dst)
+ if err != nil {
+ return err
+ }
+ err = os.RemoveAll(symlink)
+ if err != nil {
+ return err
+ }
+ }
+ des, err := os.ReadDir(src)
+ if err != nil {
+ return err
+ }
+ var pkgdescs []string
+ for _, de := range des {
+ pkgdescs = append(pkgdescs, path.Join(src, de.Name()))
+ }
+ err = archiver.DefaultTarGz.Archive(pkgdescs, dst)
+ if err != nil {
+ return err
+ }
+ return os.Symlink(dst, symlink)
+}
diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini
index 6cab7c0cbb..8d9b17c1e9 100644
--- a/options/locale/locale_en-US.ini
+++ b/options/locale/locale_en-US.ini
@@ -3254,6 +3254,10 @@ alpine.repository = Repository Info
alpine.repository.branches = Branches
alpine.repository.repositories = Repositories
alpine.repository.architectures = Architectures
+arch.pacmanconf = Add the server to your pacman.conf:
+arch.pacmansync = Sync the package with pacman:
+arch.documentation = For more information on the Arch registry, see the documentation.
+arch.properties = Package properties
cargo.registry = Setup this registry in the Cargo configuration file (for example ~/.cargo/config.toml):
cargo.install = To install the package using Cargo, run the following command:
cargo.documentation = For more information on the Cargo registry, see the documentation.
diff --git a/public/img/svg/gitea-arch.svg b/public/img/svg/gitea-arch.svg
new file mode 100644
index 0000000000..ac50ffdf26
--- /dev/null
+++ b/public/img/svg/gitea-arch.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/routers/api/packages/api.go b/routers/api/packages/api.go
index fa7f66f3ab..bbc6ce2782 100644
--- a/routers/api/packages/api.go
+++ b/routers/api/packages/api.go
@@ -15,6 +15,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/routers/api/packages/alpine"
+ "code.gitea.io/gitea/routers/api/packages/arch"
"code.gitea.io/gitea/routers/api/packages/cargo"
"code.gitea.io/gitea/routers/api/packages/chef"
"code.gitea.io/gitea/routers/api/packages/composer"
@@ -752,3 +753,16 @@ func ContainerRoutes() *web.Route {
return r
}
+
+// ArchRoutes provides routes for the arch package registry.
+func ArchRoutes() *web.Route {
+ r := web.NewRoute()
+
+ r.Use(context.PackageContexter())
+
+ r.Put("/push", arch.Push)
+ r.Get("/{distro}/{arch}/{owner}/{file}", arch.Get)
+ r.Get("/{distro}/{arch}/{file}", arch.Get)
+
+ return r
+}
diff --git a/routers/api/packages/arch/arch.go b/routers/api/packages/arch/arch.go
new file mode 100644
index 0000000000..1948bbee07
--- /dev/null
+++ b/routers/api/packages/arch/arch.go
@@ -0,0 +1,416 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package arch
+
+import (
+ "bytes"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "path"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/context"
+ "code.gitea.io/gitea/modules/json"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ arch_module "code.gitea.io/gitea/modules/packages/arch"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/routers/api/packages/helper"
+ packages_service "code.gitea.io/gitea/services/packages"
+
+ "github.com/ProtonMail/gopenpgp/v2/crypto"
+ "github.com/google/uuid"
+)
+
+// Push reads an arch package from the request body, verifies its detached GPG
+// signature and stores the package file, its signature and the updated pacman
+// database in the registry.
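+//
+// The client sends the package archive as the request body and passes
+// metadata in headers, for example (hypothetical values):
+//
+//	curl -X PUT https://gitea.example.com/api/packages/arch/push \
+//	  -H "filename: mypkg-1.0.0-1-x86_64.pkg.tar.zst" \
+//	  -H "email: user@example.com" \
+//	  -H "owner: myorg" \
+//	  -H "distro: archlinux" \
+//	  -H "sign: <hex-encoded detached GPG signature>" \
+//	  --data-binary @mypkg-1.0.0-1-x86_64.pkg.tar.zst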
+func Push(ctx *context.Context) {
+ // Create a connector that helps with GPG keys and package blobs.
+ connector := Connector{ctx: ctx}
+
+ // Read package related information from the request headers.
+ filename := ctx.Req.Header.Get("filename")
+ email := ctx.Req.Header.Get("email")
+ sign := ctx.Req.Header.Get("sign")
+ owner := ctx.Req.Header.Get("owner")
+ distro := ctx.Req.Header.Get("distro")
+
+ // Decoding package signature.
+ sigdata, err := hex.DecodeString(sign)
+ if err != nil {
+ apiError(ctx, http.StatusBadRequest, err)
+ return
+ }
+ pgpsig := crypto.NewPGPSignature(sigdata)
+
+ // Validating that user is allowed to push to specified namespace.
+ err = connector.ValidateNamespace(owner, email)
+ if err != nil {
+ apiError(ctx, http.StatusBadRequest, err)
+ return
+ }
+
+ // Get the GPG keys of the user. After the keys have been received, find the
+ // keyring whose identity matches the email provided in the request.
+ armoredKeys, err := connector.GetValidKeys(email)
+ if err != nil {
+ apiError(ctx, http.StatusBadRequest, err)
+ return
+ }
+ var matchedKeyring *crypto.KeyRing
+ for _, armor := range armoredKeys {
+ pgpkey, err := crypto.NewKeyFromArmored(armor)
+ if err != nil {
+ apiError(ctx, http.StatusBadRequest, err)
+ return
+ }
+ keyring, err := crypto.NewKeyRing(pgpkey)
+ if err != nil {
+ apiError(ctx, http.StatusBadRequest, err)
+ return
+ }
+ for _, idnt := range keyring.GetIdentities() {
+ if idnt.Email == email {
+ matchedKeyring = keyring
+ break
+ }
+ }
+ if matchedKeyring != nil {
+ break
+ }
+ }
+ if matchedKeyring == nil {
+ msg := "GPG key related to " + email + " not found"
+ apiError(ctx, http.StatusBadRequest, msg)
+ return
+ }
+
+ // Read package to memory and create plain GPG message to validate signature.
+ pkgdata, err := io.ReadAll(ctx.Req.Body)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ defer ctx.Req.Body.Close()
+
+ pgpmes := crypto.NewPlainMessage(pkgdata)
+
+ // Validate the package signature with the user's GPG key matching the provided email.
+ err = matchedKeyring.VerifyDetached(pgpmes, pgpsig, crypto.GetUnixTime())
+ if err != nil {
+ apiError(ctx, http.StatusUnauthorized, "unable to validate package signature")
+ return
+ }
+
+ // Create temporary directory for arch database operations.
+ tmpdir := path.Join(setting.Repository.Upload.TempPath, uuid.New().String())
+ err = os.MkdirAll(tmpdir, os.ModePerm)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, "unable to create tmp path")
+ return
+ }
+ defer os.RemoveAll(tmpdir)
+
+ // Parse metadata contained in arch package archive.
+ md, err := arch_module.EjectMetadata(filename, setting.Domain, pkgdata)
+ if err != nil {
+ apiError(ctx, http.StatusBadRequest, err)
+ return
+ }
+
+ // Arch database related file names, paths and folders.
+ dbname := Join(owner, distro, setting.Domain, "db.tar.gz")
+ dbpath := path.Join(tmpdir, dbname)
+ dbfolder := path.Join(tmpdir, dbname) + ".folder"
+ dbsymlink := strings.TrimSuffix(dbname, ".tar.gz")
+ dbsymlinkpath := path.Join(tmpdir, dbsymlink)
+
+ // Get the existing arch package database for this namespace from file
+ // storage, save it on disk and unpack its contents into the related folder.
+ // If the database is not found in storage, create an empty directory to
+ // store package related information.
+ dbdata, err := connector.Get(dbname)
+ if err == nil {
+ err = os.WriteFile(dbpath, dbdata, os.ModePerm)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ err = arch_module.UnpackDb(dbpath, dbfolder)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ }
+ if err != nil {
+ err = os.MkdirAll(dbfolder, os.ModePerm)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ }
+
+ // Update database folder with metadata for new package.
+ err = md.PutToDb(dbfolder, os.ModePerm)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ // Create database archive and related symlink.
+ err = arch_module.PackDb(dbfolder, dbpath)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ // Save the namespace related arch repository database archive (.db.tar.gz).
+ f, err := os.Open(dbpath)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ defer f.Close()
+ dbfi, err := f.Stat()
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ err = connector.Save(dbname, f, dbfi.Size())
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ // Save the namespace related arch repository database file (.db).
+ f, err = os.Open(dbsymlinkpath)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ defer f.Close()
+ dbarchivefi, err := f.Stat()
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ err = connector.Save(dbsymlink, f, dbarchivefi.Size())
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ // Create package in database.
+ pkg, err := packages_model.TryInsertPackage(ctx, &packages_model.Package{
+ OwnerID: connector.user.ID,
+ Type: packages_model.TypeArch,
+ Name: md.Name,
+ LowerName: strings.ToLower(md.Name),
+ })
+ if errors.Is(err, packages_model.ErrDuplicatePackage) {
+ pkg, err = packages_model.GetPackageByName(
+ ctx, connector.user.ID,
+ packages_model.TypeArch, md.Name,
+ )
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ }
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ // Check if repository for package with provided owner exists.
+ repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, md.Name)
+ if err == nil {
+ err = packages_model.SetRepositoryLink(ctx, pkg.ID, repo.ID)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ }
+
+ // Create new package version in database.
+ rawjsonmetadata, err := json.Marshal(&md)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ ver, err := packages_model.GetOrInsertVersion(ctx, &packages_model.PackageVersion{
+ PackageID: pkg.ID,
+ CreatorID: connector.user.ID,
+ Version: md.Version,
+ LowerVersion: strings.ToLower(md.Version),
+ CreatedUnix: timeutil.TimeStampNow(),
+ MetadataJSON: string(rawjsonmetadata),
+ })
+ if err != nil {
+ if errors.Is(err, packages_model.ErrDuplicatePackageVersion) {
+ apiError(ctx, http.StatusConflict, err)
+ return
+ }
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ // Create the package blob and the package file entry for the uploaded archive.
+ pkgreader := bytes.NewReader(pkgdata)
+ fbuf, err := packages_module.CreateHashedBufferFromReader(pkgreader)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ defer fbuf.Close()
+
+ filepb, ok, err := packages_model.GetOrInsertBlob(
+ ctx, packages_service.NewPackageBlob(fbuf),
+ )
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, fmt.Errorf("%v %t", err, ok))
+ return
+ }
+ err = connector.Save(filepb.HashSHA256, fbuf, filepb.Size)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ _, err = packages_model.TryInsertFile(ctx, &packages_model.PackageFile{
+ VersionID: ver.ID,
+ BlobID: filepb.ID,
+ Name: filename,
+ LowerName: strings.ToLower(filename),
+ CompositeKey: distro + "-" + filename,
+ IsLead: true,
+ CreatedUnix: timeutil.TimeStampNow(),
+ })
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ // Create package blob for package signature.
+ sigreader := bytes.NewReader(sigdata)
+ sbuf, err := packages_module.CreateHashedBufferFromReader(sigreader)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+ defer sbuf.Close()
+
+ sigpb, ok, err := packages_model.GetOrInsertBlob(
+ ctx, packages_service.NewPackageBlob(sbuf),
+ )
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, fmt.Errorf("%v %t", err, ok))
+ return
+ }
+ err = connector.Save(sigpb.HashSHA256, sbuf, sigpb.Size)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ _, err = packages_model.TryInsertFile(ctx, &packages_model.PackageFile{
+ VersionID: ver.ID,
+ BlobID: sigpb.ID,
+ Name: filename + ".sig",
+ LowerName: strings.ToLower(filename + ".sig"),
+ CompositeKey: distro + "-" + filename + ".sig",
+ IsLead: false,
+ CreatedUnix: timeutil.TimeStampNow(),
+ })
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ ctx.Status(http.StatusOK)
+}
+
+// Get retrieves a package, signature or database file from the arch package registry.
+func Get(ctx *context.Context) {
+ filename := ctx.Params("file")
+ owner := ctx.Params("owner")
+ distro := ctx.Params("distro")
+ // arch := ctx.Params("arch")
+
+ cs := packages_module.NewContentStore()
+
+ if strings.HasSuffix(filename, "tar.zst") ||
+ strings.HasSuffix(filename, "zst.sig") {
+ db := db.GetEngine(ctx)
+
+ pkgfile := &packages_model.PackageFile{
+ CompositeKey: distro + "-" + filename,
+ }
+ ok, err := db.Get(pkgfile)
+ if err != nil || !ok {
+ apiError(
+ ctx, http.StatusInternalServerError,
+ fmt.Errorf("%+v %t", err, ok),
+ )
+ return
+ }
+
+ blob, err := packages_model.GetBlobByID(ctx, pkgfile.BlobID)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ obj, err := cs.Get(packages_module.BlobHash256Key(blob.HashSHA256))
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ data, err := io.ReadAll(obj)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ ctx.Resp.WriteHeader(http.StatusOK)
+ _, err = ctx.Resp.Write(data)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ return
+ }
+ obj, err := cs.Get(packages_module.BlobHash256Key(Join(owner, distro, filename)))
+ if err != nil {
+ apiError(ctx, http.StatusNotFound, err)
+ return
+ }
+
+ data, err := io.ReadAll(obj)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+
+ ctx.Resp.WriteHeader(http.StatusOK)
+ _, err = ctx.Resp.Write(data)
+ if err != nil {
+ apiError(ctx, http.StatusInternalServerError, err)
+ return
+ }
+}
+
+func apiError(ctx *context.Context, status int, obj interface{}) {
+ helper.LogAndProcessError(ctx, status, obj, func(message string) {
+ ctx.PlainText(status, message)
+ })
+}
diff --git a/routers/api/packages/arch/connector.go b/routers/api/packages/arch/connector.go
new file mode 100644
index 0000000000..e19215c5a1
--- /dev/null
+++ b/routers/api/packages/arch/connector.go
@@ -0,0 +1,133 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package arch
+
+import (
+ "errors"
+ "io"
+
+ "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ organization_model "code.gitea.io/gitea/models/organization"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/context"
+ "code.gitea.io/gitea/modules/log"
+ packages_module "code.gitea.io/gitea/modules/packages"
+)
+
+// Connector helps to retrieve GPG keys for package validation and to manage
+// blobs in user specific spaces:
+// 1 - check if the user is allowed to push a package to a specific namespace,
+// 2 - retrieve the GPG keys related to the provided email,
+// 3 - get/put arch package, signature and database files from/to the
+// connected file storage.
+type Connector struct {
+ ctx *context.Context
+ user *user_model.User
+ org *organization_model.Organization
+}
+
+// ValidateNamespace finds the user related to the provided email address and
+// checks whether that user is allowed to push packages to the given namespace
+// (a user, an organization, or the empty namespace which is reserved for
+// admin users).
+func (c *Connector) ValidateNamespace(namespace, email string) error {
+ var err error
+ c.user, err = user_model.GetUserByEmail(c.ctx, email)
+ if err != nil {
+ log.Error("unable to get user with email: %s %v", email, err)
+ return err
+ }
+
+ if namespace == "" && c.user.IsAdmin {
+ c.org = (*organization_model.Organization)(c.user)
+ return nil
+ }
+
+ if c.user.Name != namespace && c.org == nil {
+ c.org, err = organization_model.GetOrgByName(c.ctx, namespace)
+ if err != nil {
+ log.Error("unable to organization: %s %v", namespace, err)
+ return err
+ }
+ ismember, err := c.org.IsOrgMember(c.user.ID)
+ if err != nil {
+ log.Error(
+ "unable to check if user belongs to organization: %s %s %v",
+ c.user.Name, email, err,
+ )
+ return err
+ }
+ if !ismember {
+ log.Error("user %s is not member of organization: %s", c.user.Name, email)
+ return errors.New("user is not member of organization: " + namespace)
+ }
+ } else {
+ c.org = (*organization_model.Organization)(c.user)
+ }
+ return nil
+}
+
+// GetValidKeys returns the armored GPG keys of the user resolved by
+// ValidateNamespace. The caller matches one of the returned keys against the
+// email provided in the request.
+func (c *Connector) GetValidKeys(email string) ([]string, error) {
+ keys, err := asymkey.ListGPGKeys(c.ctx, c.user.ID, db.ListOptions{
+ ListAll: true,
+ })
+ if err != nil {
+ log.Error("unable to get keys related to user: %v", err)
+ return nil, errors.New("unable to get public keys")
+ }
+ if len(keys) == 0 {
+ log.Error("no keys related to user")
+ return nil, errors.New("no keys for user with email: " + email)
+ }
+
+ var keyarmors []string
+ for _, key := range keys {
+ k, err := asymkey.GetGPGImportByKeyID(key.KeyID)
+ if err != nil {
+ log.Error("unable to import GPG key by ID: %v", err)
+ return nil, errors.New("internal error")
+ }
+ keyarmors = append(keyarmors, k.Content)
+ }
+
+ return keyarmors, nil
+}
+
+// Get specific file content from content storage.
+func (c *Connector) Get(key string) ([]byte, error) {
+ cs := packages_module.NewContentStore()
+ obj, err := cs.Get(packages_module.BlobHash256Key(key))
+ if err != nil {
+ return nil, err
+ }
+ return io.ReadAll(obj)
+}
+
+// Save contents related to specific arch package.
+func (c *Connector) Save(key string, content io.Reader, size int64) error {
+ cs := packages_module.NewContentStore()
+ return cs.Save(packages_module.BlobHash256Key(key), content, size)
+}
+
+// Join joins database or package name parts with dots, skipping empty
+// strings, to prevent collisions between equally named packages in different
+// user spaces.
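+//
+// For example (hypothetical values):
+//
+//	Join("owner", "archlinux", "example.com", "db.tar.gz") == "owner.archlinux.example.com.db.tar.gz"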
+func Join(s ...string) string {
+ rez := ""
+ for i, v := range s {
+ if v == "" {
+ continue
+ }
+ if i+1 == len(s) {
+ rez += v
+ continue
+ }
+ rez += v + "."
+ }
+ return rez
+}
diff --git a/routers/init.go b/routers/init.go
index 54e8d2b8b3..668efdcb85 100644
--- a/routers/init.go
+++ b/routers/init.go
@@ -190,6 +190,8 @@ func NormalRoutes() *web.Route {
r.Mount("/api/packages", packages_router.CommonRoutes())
// This implements the OCI API (Note this is not preceded by /api but is instead /v2)
r.Mount("/v2", packages_router.ContainerRoutes())
+ // Arch package routes
+ r.Mount("/api/packages/arch", packages_router.ArchRoutes())
}
if setting.Actions.Enabled {
diff --git a/templates/package/content/arch.tmpl b/templates/package/content/arch.tmpl
new file mode 100644
index 0000000000..0ecce35509
--- /dev/null
+++ b/templates/package/content/arch.tmpl
@@ -0,0 +1,105 @@
+{{if eq .PackageDescriptor.Package.Type "arch"}}
+	<h4 class="ui top attached header">{{.locale.Tr "packages.installation"}}</h4>
+	<div class="ui attached segment">
+		<div class="ui form">
+			<div class="field">
+				<label>{{.locale.Tr "packages.arch.pacmanconf"}}</label>
+				<div class="markup"><pre class="code-block"><code>[{{.PackageDescriptor.Owner.LowerName}}.{{.PackageDescriptor.Metadata.BaseDomain}}]
+Server = https://{{.PackageDescriptor.Metadata.BaseDomain}}/api/packages/arch/(distribution)/(architecture)/{{.PackageDescriptor.Owner.LowerName}}</code></pre></div>
+			</div>
+			<div class="field">
+				<label>{{.locale.Tr "packages.arch.pacmansync"}}</label>
+				<div class="markup"><pre class="code-block"><code>pacman -S {{.PackageDescriptor.Package.LowerName}}</code></pre></div>
+			</div>
+			<div class="field">
+				<label>{{.locale.Tr "packages.arch.documentation" | Safe}}</label>
+			</div>
+		</div>
+	</div>
+
+	<h4 class="ui top attached header">{{.locale.Tr "packages.arch.properties"}}</h4>
+	<div class="ui attached segment">
+		<table class="ui single line very basic table">
+			<tbody>
+				<tr><td class="collapsing"><h5>Description</h5></td><td>{{.PackageDescriptor.Metadata.Description}}</td></tr>
+				<tr><td class="collapsing"><h5>Compressed size</h5></td><td>{{.PackageDescriptor.Metadata.CompressedSizeMib}}</td></tr>
+				<tr><td class="collapsing"><h5>Installed size</h5></td><td>{{.PackageDescriptor.Metadata.InstalledSizeMib}}</td></tr>
+				<tr><td class="collapsing"><h5>Official URL</h5></td><td><a href="{{.PackageDescriptor.Metadata.URL}}" target="_blank" rel="noopener noreferrer">{{.PackageDescriptor.Metadata.URL}}</a></td></tr>
+				<tr><td class="collapsing"><h5>Build date</h5></td><td>{{.PackageDescriptor.Metadata.BuildDateStr}}</td></tr>
+				<tr><td class="collapsing"><h5>Packager</h5></td><td>{{.PackageDescriptor.Metadata.Packager}}</td></tr>
+				{{range $key := .PackageDescriptor.Metadata.Provides}}
+					<tr><td class="collapsing"><h5>Provides</h5></td><td>{{$key}}</td></tr>
+				{{end}}
+				{{range $key := .PackageDescriptor.Metadata.Arch}}
+					<tr><td class="collapsing"><h5>Architecture</h5></td><td>{{$key}}</td></tr>
+				{{end}}
+				{{range $key := .PackageDescriptor.Metadata.Depends}}
+					<tr><td class="collapsing"><h5>Depends</h5></td><td>{{$key}}</td></tr>
+				{{end}}
+				{{range $key := .PackageDescriptor.Metadata.OptDepends}}
+					<tr><td class="collapsing"><h5>Optional depends</h5></td><td>{{$key}}</td></tr>
+				{{end}}
+				{{range $key := .PackageDescriptor.Metadata.MakeDepends}}
+					<tr><td class="collapsing"><h5>Make depends</h5></td><td>{{$key}}</td></tr>
+				{{end}}
+				{{range $key := .PackageDescriptor.Metadata.CheckDepends}}
+					<tr><td class="collapsing"><h5>Check depends</h5></td><td>{{$key}}</td></tr>
+				{{end}}
+				{{range $key := .PackageDescriptor.Metadata.Backup}}
+					<tr><td class="collapsing"><h5>Backup file</h5></td><td>{{$key}}</td></tr>
+				{{end}}
+			</tbody>
+		</table>
+	</div>
+{{end}}