Better release writer?

Tyler 2017-09-10 22:58:50 -04:00
parent 150a33579e
commit a35d776c32
8 changed files with 219 additions and 80 deletions

View File

@@ -6,9 +6,10 @@ build:
- cd $CI_PROJECT_DIR/src/meow.tf/deb-simple
- glide install
- cd $CI_PROJECT_DIR
- go build -o simple
- GOOS=linux GOARCH=arm GOARM=7 go build -o simple-arm
- mkdir -p build/i386 build/amd64 build/arm
- go build -o build/amd64/deb-simple
- GOOS=linux GOARCH=386 go build -o build/i386/deb-simple
- GOOS=linux GOARCH=arm GOARM=7 go build -o build/arm/deb-simple
artifacts:
paths:
- simple
- simple-arm
- build
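
The new script cross-compiles the server once per target instead of producing the loose simple and simple-arm binaries, so the single build artifact carries every architecture in an arch-keyed layout, roughly:

build/
  i386/deb-simple
  amd64/deb-simple
  arm/deb-simple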

View File

@@ -18,6 +18,7 @@ import (
"encoding/hex"
"golang.org/x/crypto/openpgp"
"time"
"meow.tf/deb-simple/deb/release"
)
func inspectPackage(filename string) (string, error) {
@@ -89,7 +90,7 @@ func inspectPackageControl(filename bytes.Buffer) (string, error) {
}
func createRelease(config Conf, distro, arch string) error {
func createRelease(config Conf, distro string) error {
outfile, err := os.Create(filepath.Join(config.DistPath(distro), "Release"))
if err != nil {
@@ -98,94 +99,100 @@ func createRelease(config Conf, distro, arch string) error {
defer outfile.Close()
var packBuf bytes.Buffer
fmt.Fprintf(&packBuf, "Suite: %s\n", distro)
fmt.Fprintf(&packBuf, "Architectures: %s\n", arch)
fmt.Fprint(&packBuf, "Components: main\n")
fmt.Fprintf(&packBuf, "Date: %s\n", time.Now().In(time.UTC).Format("Mon, 02 Jan 2006 15:04:05 -0700"))
basePath := filepath.Join("main", "binary-" + arch)
dirList, err := ioutil.ReadDir(filepath.Join(config.DistPath(distro), "main", "binary-" + arch))
if err != nil {
return fmt.Errorf("scanning: %s: %s", config.PoolPath(distro, arch), err)
r := &release.Release{
Suite: distro,
Architectures: config.Repo.ArchitectureNames(),
Components: []string{"main"},
Date: time.Now(),
}
var md5Buf bytes.Buffer
var sha1Buf bytes.Buffer
var sha256Buf bytes.Buffer
for _, arch := range config.Repo.ArchitectureNames() {
absolutePath := filepath.Join(config.DistPath(distro), "main", "binary-" + arch)
for _, file := range dirList {
filePath := filepath.Join(config.DistPath(distro), basePath, file.Name())
fileLocalPath := filepath.Join(basePath, file.Name())
fileLocalPath = strings.Replace(fileLocalPath, "\\", "/", -1)
f, err := os.Open(filePath)
list, err := ioutil.ReadDir(absolutePath)
if err != nil {
return err
continue
}
var size int64 = file.Size()
basePath := filepath.Join("main", "binary-" + arch)
if size == 0 {
if stat, err := os.Stat(filePath); err == nil {
size = stat.Size()
for _, file := range list {
filePath := filepath.Join(absolutePath, file.Name())
fileLocalPath := filepath.Join(basePath, file.Name())
fileLocalPath = strings.Replace(fileLocalPath, "\\", "/", -1)
f, err := os.Open(filePath)
if err != nil {
return err
}
var size int64 = file.Size()
if size == 0 {
if stat, err := os.Stat(filePath); err == nil {
size = stat.Size()
}
}
var (
md5hash = md5.New()
sha1hash = sha1.New()
sha256hash = sha256.New()
)
w := io.MultiWriter(md5hash, sha1hash, sha256hash)
io.Copy(w, f)
repoFile := &release.File{
Name: fileLocalPath,
Size: size,
MD5Sum: hex.EncodeToString(md5hash.Sum(nil)),
SHA1: hex.EncodeToString(sha1hash.Sum(nil)),
SHA256: hex.EncodeToString(sha256hash.Sum(nil)),
}
r.Append(repoFile)
f.Close()
f = nil
}
var (
md5hash = md5.New()
sha1hash = sha1.New()
sha256hash = sha256.New()
)
f.Seek(0, 0)
if _, err := io.Copy(md5hash, f); err != nil {
log.Println("error with the md5 hashing: ", err)
}
fmt.Fprintf(&md5Buf, " %s %d %s\n", hex.EncodeToString(md5hash.Sum(nil)), size, fileLocalPath)
f.Seek(0, 0)
if _, err := io.Copy(sha1hash, f); err != nil {
log.Println("error with the sha1 hashing: ", err)
}
fmt.Fprintf(&sha1Buf, " %s %d %s\n", hex.EncodeToString(sha1hash.Sum(nil)), size, fileLocalPath)
f.Seek(0, 0)
if _, err := io.Copy(sha256hash, f); err != nil {
log.Println("error with the sha256 hashing: ", err)
}
fmt.Fprintf(&sha256Buf, " %s %d %s\n", hex.EncodeToString(sha256hash.Sum(nil)), size, fileLocalPath)
f.Close()
f = nil
}
fmt.Fprintf(&packBuf, "MD5Sum:\n%s", string(md5Buf.Bytes()))
fmt.Fprintf(&packBuf, "SHA1:\n%s", string(sha1Buf.Bytes()))
fmt.Fprintf(&packBuf, "SHA256:\n%s", string(sha256Buf.Bytes()))
outfile.Write(packBuf.Bytes())
r.Write(outfile)
if pgpEntity != nil {
gpgfile, err := os.Create(filepath.Join(config.DistPath(distro), "Release.gpg"))
if err != nil {
return fmt.Errorf("failed to create Release.gpg: %s", err)
}
defer gpgfile.Close()
byteReader := bytes.NewReader(packBuf.Bytes())
if err := openpgp.ArmoredDetachSignText(gpgfile, pgpEntity, byteReader, nil); err != nil {
return fmt.Errorf("failed to sign Release: %s", err)
}
return signRelease(config, distro)
}
return nil
}
func signRelease(config Conf, distro string) error {
distPath := config.DistPath(distro)
f, err := os.Open(filepath.Join(distPath, "Release"))
if err != nil {
return fmt.Errorf("failed to read Release: %s", err)
}
defer f.Close()
gpgfile, err := os.Create(filepath.Join(distPath, "Release.gpg"))
if err != nil {
return fmt.Errorf("failed to create Release.gpg: %s", err)
}
defer gpgfile.Close()
if err := openpgp.ArmoredDetachSignText(gpgfile, pgpEntity, f, nil); err != nil {
return fmt.Errorf("failed to sign Release: %s", err)
}
return nil
}
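
For reference, the detached signature written by signRelease can be checked with the same openpgp package. A minimal verification sketch, not part of this commit, assuming the public half of the signing key has been exported as an armored file (all paths here are illustrative):

package main

import (
	"log"
	"os"

	"golang.org/x/crypto/openpgp"
)

func main() {
	pub, err := os.Open("signing-key.asc") // hypothetical exported public key
	if err != nil {
		log.Fatal(err)
	}
	defer pub.Close()

	keyring, err := openpgp.ReadArmoredKeyRing(pub)
	if err != nil {
		log.Fatal(err)
	}

	signed, err := os.Open("dists/stable/Release")
	if err != nil {
		log.Fatal(err)
	}
	defer signed.Close()

	sig, err := os.Open("dists/stable/Release.gpg")
	if err != nil {
		log.Fatal(err)
	}
	defer sig.Close()

	// CheckArmoredDetachedSignature is the counterpart of ArmoredDetachSignText.
	if _, err := openpgp.CheckArmoredDetachedSignature(keyring, signed, sig); err != nil {
		log.Fatalf("Release signature did not verify: %s", err)
	}
	log.Println("Release signature OK")
}

apt performs the equivalent check (via gpgv) once the repository key is installed on the client.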

View File

@@ -0,0 +1,42 @@
package release
import (
"time"
"io"
"bufio"
)
type Release struct {
Architectures []string
Origin string
Label string
Suite string
Codename string
Version string
Components []string
Date time.Time
Files []*File
}
func (r *Release) Append(f *File) {
if r.Files == nil {
r.Files = make([]*File, 0)
}
r.Files = append(r.Files, f)
}
func (r *Release) Write(w io.Writer) {
bufw := &writer{bufio.NewWriter(w)}
bufw.WriteValue("Suite", r.Suite)
bufw.WriteSlice("Architectures", r.Architectures)
bufw.WriteSlice("Components", r.Components)
bufw.WriteValue("Date", r.Date.In(time.UTC).Format("Mon, 02 Jan 2006 15:04:05 -0700"))
bufw.WriteHashTable("MD5Sum", r.Files)
bufw.WriteHashTable("SHA1", r.Files)
bufw.WriteHashTable("SHA256", r.Files)
bufw.Flush()
}
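
A minimal standalone sketch of the intended use, mirroring what createRelease does above (the import path is the one added in this commit; the file entry, size and digest are placeholders):

package main

import (
	"os"
	"time"

	"meow.tf/deb-simple/deb/release"
)

func main() {
	r := &release.Release{
		Suite:         "stable",
		Architectures: []string{"amd64"},
		Components:    []string{"main"},
		Date:          time.Now(),
	}

	// One File per indexed file; size and digest below are placeholder values.
	r.Append(&release.File{
		Name:   "main/binary-amd64/Packages",
		Size:   1234,
		MD5Sum: "d41d8cd98f00b204e9800998ecf8427e",
	})

	// Write renders the Release body to any io.Writer.
	r.Write(os.Stdout)
}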

View File

@@ -0,0 +1,20 @@
package release
type File struct {
Name string
Size int64
MD5Sum string
SHA1 string
SHA256 string
}
func (f *File) Hash(key string) string {
if key == "MD5Sum" {
return f.MD5Sum
} else if key == "SHA1" {
return f.SHA1
} else if key == "SHA256" {
return f.SHA256
}
return ""
}
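
The lookup key deliberately matches the section names the writer emits (MD5Sum, SHA1, SHA256), so the same string selects both the table header and the per-file digest. A short fragment inside the release package (the digest is a placeholder):

f := &File{Name: "main/binary-i386/Packages", Size: 2048, SHA256: "placeholder-sha256-digest"}

f.Hash("SHA256") // returns the digest set above
f.Hash("MD5Sum") // returns "" since no MD5 digest was set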

View File

@@ -0,0 +1,22 @@
package release
import (
"testing"
"bytes"
"time"
)
func TestRelease_Write(t *testing.T) {
r := &Release{
Architectures: []string{"all", "i386", "amd64"},
Suite: "stable",
Components: []string{"main"},
Date: time.Now(),
}
var b bytes.Buffer
r.Write(&b)
t.Log(b.String())
}

View File

@@ -0,0 +1,47 @@
package release
import (
"strings"
"fmt"
"bufio"
)
type writer struct {
writer *bufio.Writer
}
func (r *writer) WriteValue(key, value string) (int, error) {
if len(value) == 0 {
return 0, nil
}
return r.writer.WriteString(key + ": " + value + "\n")
}
func (r *writer) WriteSlice(key string, s []string) (int, error) {
if len(s) == 0 {
return 0, nil
}
return r.writer.WriteString(key + ": " + strings.Join(s, " ") + "\n")
}
func (r *writer) WriteHashTable(key string, files []*File) {
if files == nil || len(files) == 0 {
return
}
r.writer.WriteString(key + ":\n")
var hash string
for _, f := range files {
if hash = f.Hash(key); hash != "" {
fmt.Fprintf(r.writer, " %s %d %s\n", hash, f.Size, f.Name)
}
}
}
func (r *writer) Flush() {
r.writer.Flush()
}
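
Put together, WriteValue, WriteSlice and WriteHashTable yield a Debian-style Release body. For a populated Release the output looks roughly like this (digests, sizes and paths are placeholders; empty optional fields such as Origin, Label, Codename and Version are simply skipped by WriteValue):

Suite: stable
Architectures: all i386 amd64
Components: main
Date: Mon, 11 Sep 2017 02:58:50 +0000
MD5Sum:
 <md5 digest> <size> main/binary-amd64/Packages
SHA1:
 <sha1 digest> <size> main/binary-amd64/Packages
SHA256:
 <sha256 digest> <size> main/binary-amd64/Packages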

View File

@@ -167,7 +167,7 @@ func uploadHandler(w http.ResponseWriter, r *http.Request) {
return
}
err = createRelease(conf, distroName, archType)
err = createRelease(conf, distroName)
if err != nil {
httpErrorf(w, "error creating package: %s", err)
@@ -231,7 +231,7 @@ func deleteHandler(w http.ResponseWriter, r *http.Request) {
return
}
if err := createRelease(conf, req.DistroName, req.Arch); err != nil {
if err := createRelease(conf, req.DistroName); err != nil {
httpErrorf(w, "failed to delete package: %s", err)
return
}

View File

@@ -109,7 +109,7 @@ func scanInitialPackages(config Conf, dist *Distro) {
log.Println("Generating packages file for", dist.Name, arch)
createPackagesCached(config, dist.Name, arch, files)
createRelease(config, dist.Name, arch)
createRelease(config, dist.Name)
}
saveCache(dist)