From e26ef30ab9a92cccea7ccfa85a6a6cc65a129bca Mon Sep 17 00:00:00 2001 From: Erik Geiser Date: Thu, 2 Sep 2021 20:45:53 +0200 Subject: [PATCH 1/5] Add compression option for deb. --- deb/deb.go | 85 +++++++++++++++++++++++++++++---------- nfpm.go | 9 +++-- www/docs/configuration.md | 7 ++-- 3 files changed, 72 insertions(+), 29 deletions(-) diff --git a/deb/deb.go b/deb/deb.go index eb85ce90..68b5886d 100644 --- a/deb/deb.go +++ b/deb/deb.go @@ -19,6 +19,7 @@ import ( "github.com/blakesmith/ar" "github.com/goreleaser/chglog" + "github.com/ulikunitz/xz" "github.com/goreleaser/nfpm/v2" "github.com/goreleaser/nfpm/v2/files" @@ -90,7 +91,7 @@ func (*Deb) Package(info *nfpm.Info, deb io.Writer) (err error) { // nolint: fun return err } - dataTarGz, md5sums, instSize, err := createDataTarGz(info) + dataTarGz, md5sums, instSize, dataTarballName, err := createDataTarball(info) if err != nil { return err } @@ -115,7 +116,7 @@ func (*Deb) Package(info *nfpm.Info, deb io.Writer) (err error) { // nolint: fun return fmt.Errorf("cannot add control.tar.gz to deb: %w", err) } - if err := addArFile(w, "data.tar.gz", dataTarGz); err != nil { + if err := addArFile(w, dataTarballName, dataTarGz); err != nil { return fmt.Errorf("cannot add data.tar.gz to deb: %w", err) } @@ -164,34 +165,72 @@ func addArFile(w *ar.Writer, name string, body []byte) error { return err } -func createDataTarGz(info *nfpm.Info) (dataTarGz, md5sums []byte, instSize int64, err error) { - var buf bytes.Buffer - compress := gzip.NewWriter(&buf) - out := tar.NewWriter(compress) +type nopCloser struct { + io.Writer +} - // the writers are properly closed later, this is just in case that we have +func (nopCloser) Close() error { return nil } + +func createDataTarball(info *nfpm.Info) (dataTarGz, md5sums []byte, instSize int64, name string, err error) { + var ( + dataTarball bytes.Buffer + dataTarballWriteCloser io.WriteCloser + ) + + switch info.Deb.Compression { + case "", "gzip": // the default for now + dataTarballWriteCloser = gzip.NewWriter(&dataTarball) + name = "data.tar.gz" + case "xz": + dataTarballWriteCloser, err = xz.NewWriter(&dataTarball) + if err != nil { + return nil, nil, 0, "", err + } + name = "data.tar.xz" + case "none": + dataTarballWriteCloser = nopCloser{Writer: &dataTarball} + name = "data.tar" + default: + return nil, nil, 0, "", fmt.Errorf("unknown compression algorithm: %s", info.Deb.Compression) + } + + // the writer is properly closed later, this is just in case that we error out + defer dataTarballWriteCloser.Close() // nolint: errcheck + + md5sums, instSize, err = fillDataTar(info, dataTarballWriteCloser) + if err != nil { + return nil, nil, 0, "", err + } + + if err := dataTarballWriteCloser.Close(); err != nil { + return nil, nil, 0, "", fmt.Errorf("closing data tarball: %w", err) + } + + return dataTarball.Bytes(), md5sums, instSize, name, nil +} + +func fillDataTar(info *nfpm.Info, w io.Writer) (md5sums []byte, instSize int64, err error) { + out := tar.NewWriter(w) + + // the writer is properly closed later, this is just in case that we have // an error in another part of the code. 
- defer out.Close() // nolint: errcheck - defer compress.Close() // nolint: errcheck + defer out.Close() // nolint: errcheck created := map[string]bool{} - if err = createEmptyFoldersInsideTarGz(info, out, created); err != nil { - return nil, nil, 0, err + if err = createEmptyFoldersInsideDataTar(info, out, created); err != nil { + return nil, 0, err } - md5buf, instSize, err := createFilesInsideTarGz(info, out, created) + md5buf, instSize, err := createFilesInsideDataTar(info, out, created) if err != nil { - return nil, nil, 0, err + return nil, 0, err } if err := out.Close(); err != nil { - return nil, nil, 0, fmt.Errorf("closing data.tar.gz: %w", err) - } - if err := compress.Close(); err != nil { - return nil, nil, 0, fmt.Errorf("closing data.tar.gz: %w", err) + return nil, 0, fmt.Errorf("closing data.tar.gz: %w", err) } - return buf.Bytes(), md5buf.Bytes(), instSize, nil + return md5buf.Bytes(), instSize, nil } func createSymlinkInsideTarGz(file *files.Content, out *tar.Writer) error { @@ -204,7 +243,8 @@ func createSymlinkInsideTarGz(file *files.Content, out *tar.Writer) error { }) } -func createFilesInsideTarGz(info *nfpm.Info, tw *tar.Writer, created map[string]bool) (md5buf bytes.Buffer, instSize int64, err error) { +func createFilesInsideDataTar(info *nfpm.Info, tw *tar.Writer, + created map[string]bool) (md5buf bytes.Buffer, instSize int64, err error) { for _, file := range info.Contents { if file.Packager != "" && file.Packager != packagerName { continue @@ -246,7 +286,7 @@ func createFilesInsideTarGz(info *nfpm.Info, tw *tar.Writer, created map[string] } if info.Changelog != "" { - size, err := createChangelogInsideTarGz(tw, &md5buf, created, info) + size, err := createChangelogInsideDataTar(tw, &md5buf, created, info) if err != nil { return md5buf, 0, err } @@ -257,7 +297,7 @@ func createFilesInsideTarGz(info *nfpm.Info, tw *tar.Writer, created map[string] return md5buf, instSize, nil } -func createEmptyFoldersInsideTarGz(info *nfpm.Info, out *tar.Writer, created map[string]bool) error { +func createEmptyFoldersInsideDataTar(info *nfpm.Info, out *tar.Writer, created map[string]bool) error { for _, folder := range info.EmptyFolders { // this .nope is actually not created, because createTree ignore the // last part of the path, assuming it is a file. @@ -297,7 +337,8 @@ func copyToTarAndDigest(file *files.Content, tw *tar.Writer, md5w io.Writer) (in return file.Size(), nil } -func createChangelogInsideTarGz(tarw *tar.Writer, md5w io.Writer, created map[string]bool, info *nfpm.Info) (int64, error) { +func createChangelogInsideDataTar(tarw *tar.Writer, md5w io.Writer, + created map[string]bool, info *nfpm.Info) (int64, error) { var buf bytes.Buffer out := gzip.NewWriter(&buf) // the writers are properly closed later, this is just in case that we have diff --git a/nfpm.go b/nfpm.go index 963a781b..061906c2 100644 --- a/nfpm.go +++ b/nfpm.go @@ -306,10 +306,11 @@ type APKScripts struct { // Deb is custom configs that are only available on deb packages. 
type Deb struct { - Scripts DebScripts `yaml:"scripts,omitempty" jsonschema:"title=scripts"` - Triggers DebTriggers `yaml:"triggers,omitempty" jsonschema:"title=triggers"` - Breaks []string `yaml:"breaks,omitempty" jsonschema:"title=breaks"` - Signature DebSignature `yaml:"signature,omitempty" jsonschema:"title=signature"` + Scripts DebScripts `yaml:"scripts,omitempty" jsonschema:"title=scripts"` + Triggers DebTriggers `yaml:"triggers,omitempty" jsonschema:"title=triggers"` + Breaks []string `yaml:"breaks,omitempty" jsonschema:"title=breaks"` + Signature DebSignature `yaml:"signature,omitempty" jsonschema:"title=signature"` + Compression string `yaml:"compression,omitempty" jsonschema:"title=compression algorithm to be used,enum=gzip,enum=xz,enum=none,default=gzip"` } type DebSignature struct { diff --git a/www/docs/configuration.md b/www/docs/configuration.md index d0a945e7..a903e5c2 100644 --- a/www/docs/configuration.md +++ b/www/docs/configuration.md @@ -165,7 +165,6 @@ contents: owner: notRoot group: notRoot - # Empty folders your package may need created. (overridable) empty_folders: - /var/log/foo @@ -211,7 +210,7 @@ rpm: # description, but can be explicitly provided here. summary: Explicit Summary for Sample Package - # Compression algorithm. + # Compression algorithm (gzip (default), lzma or xz). compression: lzma # The package is signed if a key_file is set @@ -255,6 +254,9 @@ deb: breaks: - some-package + # Compression algorithm (gzip (default), xz or none). + compression: lzma + # The package is signed if a key_file is set signature: # PGP secret key (can also be ASCII-armored). The passphrase is taken @@ -293,7 +295,6 @@ Templating is not and will not be supported. If you really need it, you can build on top of nFPM, use `envsubst`, `jsonnet` or apply some other templating on top of it. - ## JSON Schema nFPM also has a [jsonschema][] file which you can use to have better editor support: From 48a033855325149619279c7e8019e7be5fd4d84a Mon Sep 17 00:00:00 2001 From: Erik Geiser Date: Thu, 2 Sep 2021 20:46:14 +0200 Subject: [PATCH 2/5] Add deb compression tests. 
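
The new TestCompressionAlgorithms below packages the example info once per
supported value of deb.compression and checks that the expected archive member
(data.tar.gz, data.tar.xz or data.tar) ends up in the resulting ar file and can
be decompressed and read back. For context, this is roughly how the option is
used through the library API (a minimal sketch, assuming the usual nfpm entry
points: nfpm.Get, nfpm.WithDefaults, and the deb packager registering itself on
import; none of these are changed by this series):

    package main

    import (
        "log"
        "os"

        "github.com/goreleaser/nfpm/v2"
        _ "github.com/goreleaser/nfpm/v2/deb" // registers the "deb" packager
        "github.com/goreleaser/nfpm/v2/files"
    )

    func main() {
        info := nfpm.WithDefaults(&nfpm.Info{
            Name:    "foo",
            Arch:    "amd64",
            Version: "1.2.3",
            Overridables: nfpm.Overridables{
                Contents: files.Contents{
                    {Source: "./testdata/fake", Destination: "/usr/local/bin/fake"},
                },
                Deb: nfpm.Deb{
                    // "gzip" (or empty) is the current default; "xz" and
                    // "none" are the values added by this series.
                    Compression: "xz",
                },
            },
        })

        pkg, err := nfpm.Get("deb")
        if err != nil {
            log.Fatal(err)
        }

        out, err := os.Create("foo_1.2.3_amd64.deb")
        if err != nil {
            log.Fatal(err)
        }
        defer out.Close()

        if err := pkg.Package(info, out); err != nil {
            log.Fatal(err)
        }
    }

The acceptance configurations added in the next patch exercise the same three
values end to end.
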
--- deb/deb_test.go | 289 +++++++++++++++++++++++++++++------------------- 1 file changed, 178 insertions(+), 111 deletions(-) diff --git a/deb/deb_test.go b/deb/deb_test.go index f3f4240c..9bfa67e1 100644 --- a/deb/deb_test.go +++ b/deb/deb_test.go @@ -22,6 +22,7 @@ import ( "github.com/goreleaser/chglog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/xi2/xz" "github.com/goreleaser/nfpm/v2" "github.com/goreleaser/nfpm/v2/files" @@ -512,11 +513,9 @@ func TestDebChangelogControl(t *testing.T) { controlTarGz, err := createControl(0, []byte{}, info) require.NoError(t, err) - controlChangelog, err := extractFileFromTarGz(controlTarGz, "changelog") - require.NoError(t, err) + controlChangelog := extractFileFromTar(t, inflate(t, "gz", controlTarGz), "changelog") - goldenChangelog, err := readAndFormatAsDebChangelog(info.Changelog, info.Name) - require.NoError(t, err) + goldenChangelog := readAndFormatAsDebChangelog(t, info.Changelog, info.Name) assert.Equal(t, goldenChangelog, string(controlChangelog)) } @@ -534,8 +533,7 @@ func TestDebNoChangelogControlWithoutChangelogConfigured(t *testing.T) { controlTarGz, err := createControl(0, []byte{}, info) require.NoError(t, err) - _, err = extractFileFromTarGz(controlTarGz, "changelog") - assert.EqualError(t, err, os.ErrNotExist.Error()) + assert.False(t, tarContains(t, inflate(t, "gz", controlTarGz), "changelog")) } func TestDebChangelogData(t *testing.T) { @@ -549,18 +547,15 @@ func TestDebChangelogData(t *testing.T) { err := info.Validate() require.NoError(t, err) - dataTarGz, _, _, err := createDataTarGz(info) + dataTarGz, _, _, dataTarballName, err := createDataTarball(info) require.NoError(t, err) changelogName := fmt.Sprintf("/usr/share/doc/%s/changelog.gz", info.Name) - dataChangelogGz, err := extractFileFromTarGz(dataTarGz, changelogName) - require.NoError(t, err) + dataChangelogGz := extractFileFromTar(t, + inflate(t, dataTarballName, dataTarGz), changelogName) - dataChangelog, err := gzipInflate(dataChangelogGz) - require.NoError(t, err) - - goldenChangelog, err := readAndFormatAsDebChangelog(info.Changelog, info.Name) - require.NoError(t, err) + dataChangelog := inflate(t, "gz", dataChangelogGz) + goldenChangelog := readAndFormatAsDebChangelog(t, info.Changelog, info.Name) assert.Equal(t, goldenChangelog, string(dataChangelog)) } @@ -575,12 +570,12 @@ func TestDebNoChangelogDataWithoutChangelogConfigured(t *testing.T) { err := info.Validate() require.NoError(t, err) - dataTarGz, _, _, err := createDataTarGz(info) + dataTarGz, _, _, dataTarballName, err := createDataTarball(info) require.NoError(t, err) changelogName := fmt.Sprintf("/usr/share/doc/%s/changelog.gz", info.Name) - _, err = extractFileFromTarGz(dataTarGz, changelogName) - assert.EqualError(t, err, os.ErrNotExist.Error()) + + assert.False(t, tarContains(t, inflate(t, dataTarballName, dataTarGz), changelogName)) } func TestDebTriggers(t *testing.T) { @@ -608,8 +603,7 @@ func TestDebTriggers(t *testing.T) { controlTarGz, err := createControl(0, []byte{}, info) require.NoError(t, err) - controlTriggers, err := extractFileFromTarGz(controlTarGz, "triggers") - require.NoError(t, err) + controlTriggers := extractFileFromTar(t, inflate(t, "gz", controlTarGz), "triggers") goldenTriggers := createTriggers(info) @@ -648,8 +642,7 @@ func TestDebNoTriggersInControlIfNoneProvided(t *testing.T) { controlTarGz, err := createControl(0, []byte{}, info) require.NoError(t, err) - _, err = extractFileFromTarGz(controlTarGz, "triggers") - assert.EqualError(t, 
err, os.ErrNotExist.Error()) + assert.False(t, tarContains(t, inflate(t, "gz", controlTarGz), "triggers")) } func TestSymlinkInFiles(t *testing.T) { @@ -678,11 +671,11 @@ func TestSymlinkInFiles(t *testing.T) { realSymlinkTarget, err := ioutil.ReadFile(symlinkTarget) require.NoError(t, err) - dataTarGz, _, _, err := createDataTarGz(info) + dataTarGz, _, _, dataTarballName, err := createDataTarball(info) require.NoError(t, err) - packagedSymlinkTarget, err := extractFileFromTarGz(dataTarGz, packagedTarget) - require.NoError(t, err) + packagedSymlinkTarget := extractFileFromTar(t, + inflate(t, dataTarballName, dataTarGz), packagedTarget) assert.Equal(t, string(realSymlinkTarget), string(packagedSymlinkTarget)) } @@ -716,11 +709,11 @@ func TestSymlink(t *testing.T) { err := info.Validate() require.NoError(t, err) - dataTarGz, _, _, err := createDataTarGz(info) + dataTarGz, _, _, dataTarballName, err := createDataTarball(info) require.NoError(t, err) - packagedSymlinkHeader, err := extractFileHeaderFromTarGz(dataTarGz, symlink) - require.NoError(t, err) + packagedSymlinkHeader := extractFileHeaderFromTar(t, + inflate(t, dataTarballName, dataTarGz), symlink) assert.Equal(t, symlink, path.Join("/", packagedSymlinkHeader.Name)) // nolint:gosec assert.Equal(t, uint8(tar.TypeSymlink), packagedSymlinkHeader.Typeflag) @@ -740,13 +733,13 @@ func TestEnsureRelativePrefixInTarGzFiles(t *testing.T) { err := info.Validate() require.NoError(t, err) - dataTarGz, md5sums, instSize, err := createDataTarGz(info) + dataTarGz, md5sums, instSize, tarballName, err := createDataTarball(info) require.NoError(t, err) - testRelativePathPrefixInTarGzFiles(t, dataTarGz) + testRelativePathPrefixInTar(t, inflate(t, tarballName, dataTarGz)) controlTarGz, err := createControl(instSize, md5sums, info) require.NoError(t, err) - testRelativePathPrefixInTarGzFiles(t, controlTarGz) + testRelativePathPrefixInTar(t, inflate(t, "gz", controlTarGz)) } func TestMD5Sums(t *testing.T) { @@ -762,38 +755,34 @@ func TestMD5Sums(t *testing.T) { } } - dataTarGz, md5sums, instSize, err := createDataTarGz(info) + dataTarGz, md5sums, instSize, tarballName, err := createDataTarball(info) require.NoError(t, err) controlTarGz, err := createControl(instSize, md5sums, info) require.NoError(t, err) - md5sumsFile, err := extractFileFromTarGz(controlTarGz, "./md5sums") - require.NoError(t, err) + md5sumsFile := extractFileFromTar(t, inflate(t, "gz", controlTarGz), "./md5sums") lines := strings.Split(strings.TrimRight(string(md5sumsFile), "\n"), "\n") require.Equal(t, nFiles, len(lines)) + dataTar := inflate(t, tarballName, dataTarGz) + for _, line := range lines { parts := strings.Fields(line) require.Equal(t, len(parts), 2) md5sum, fileName := parts[0], parts[1] - fileContent, err := extractFileFromTarGz(dataTarGz, fileName) - require.NoError(t, err) - digest := md5.New() // nolint:gosec - _, err = digest.Write(fileContent) + _, err = digest.Write(extractFileFromTar(t, dataTar, fileName)) require.NoError(t, err) assert.Equal(t, md5sum, hex.EncodeToString(digest.Sum(nil))) } } -func testRelativePathPrefixInTarGzFiles(t *testing.T, tarGzFile []byte) { - t.Helper() - tarFile, err := gzipInflate(tarGzFile) - require.NoError(t, err) +func testRelativePathPrefixInTar(tb testing.TB, tarFile []byte) { + tb.Helper() tr := tar.NewReader(bytes.NewReader(tarFile)) for { @@ -801,9 +790,9 @@ func testRelativePathPrefixInTarGzFiles(t *testing.T, tarGzFile []byte) { if errors.Is(err, io.EOF) { break // End of archive } - require.NoError(t, err) + require.NoError(tb, 
err) - assert.True(t, strings.HasPrefix(hdr.Name, "./"), "%s does not start with './'", hdr.Name) + assert.True(tb, strings.HasPrefix(hdr.Name, "./"), "%s does not start with './'", hdr.Name) } } @@ -816,17 +805,10 @@ func TestDebsigsSignature(t *testing.T) { err := Default.Package(info, &deb) require.NoError(t, err) - debBinary, err := extractFileFromAr(deb.Bytes(), "debian-binary") - require.NoError(t, err) - - controlTarGz, err := extractFileFromAr(deb.Bytes(), "control.tar.gz") - require.NoError(t, err) - - dataTarGz, err := extractFileFromAr(deb.Bytes(), "data.tar.gz") - require.NoError(t, err) - - signature, err := extractFileFromAr(deb.Bytes(), "_gpgorigin") - require.NoError(t, err) + debBinary := extractFileFromAr(t, deb.Bytes(), "debian-binary") + controlTarGz := extractFileFromAr(t, deb.Bytes(), "control.tar.gz") + dataTarGz := extractFileFromAr(t, deb.Bytes(), findDataTarball(t, deb.Bytes())) + signature := extractFileFromAr(t, deb.Bytes(), "_gpgorigin") message := io.MultiReader(bytes.NewReader(debBinary), bytes.NewReader(controlTarGz), bytes.NewReader(dataTarGz)) @@ -858,54 +840,81 @@ func TestDisableGlobbing(t *testing.T) { } require.NoError(t, info.Validate()) - dataTarGz, _, _, err := createDataTarGz(info) + dataTarGz, _, _, tarballName, err := createDataTarball(info) require.NoError(t, err) expectedContent, err := ioutil.ReadFile("../testdata/{file}[") require.NoError(t, err) - actualContent, err := extractFileFromTarGz(dataTarGz, "/test/{file}[") - require.NoError(t, err) + actualContent := extractFileFromTar(t, inflate(t, tarballName, dataTarGz), "/test/{file}[") assert.Equal(t, expectedContent, actualContent) } -func extractFileFromTarGz(tarGzFile []byte, filename string) ([]byte, error) { - tarFile, err := gzipInflate(tarGzFile) - if err != nil { - return nil, err +func TestCompressionAlgorithms(t *testing.T) { + testCases := []struct { + algorithm string + dataTarballName string + }{ + {"gzip", "data.tar.gz"}, + {"", "data.tar.gz"}, // test current default + {"xz", "data.tar.xz"}, + {"none", "data.tar"}, } + for _, testCase := range testCases { + testCase := testCase + + t.Run(testCase.algorithm, func(t *testing.T) { + info := exampleInfo() + info.Deb.Compression = testCase.algorithm + + var deb bytes.Buffer + + err := Default.Package(info, &deb) + require.NoError(t, err) + + dataTarballName := findDataTarball(t, deb.Bytes()) + assert.Equal(t, dataTarballName, testCase.dataTarballName) + + dataTarGz := extractFileFromAr(t, deb.Bytes(), dataTarballName) + dataTar := inflate(t, dataTarballName, dataTarGz) + + for _, file := range info.Contents { + tarContains(t, dataTar, file.Destination) + } + }) + } +} + +func extractFileFromTar(tb testing.TB, tarFile []byte, filename string) []byte { + tb.Helper() + tr := tar.NewReader(bytes.NewReader(tarFile)) for { hdr, err := tr.Next() if errors.Is(err, io.EOF) { break // End of archive } - if err != nil { - return nil, err - } + require.NoError(tb, err) - if path.Join("/", hdr.Name) != path.Join("/", filename) { // nolint:gosec + if path.Join("/", hdr.Name) != path.Join("/", filename) { continue } fileContents, err := ioutil.ReadAll(tr) - if err != nil { - return nil, err - } + require.NoError(tb, err) - return fileContents, nil + return fileContents } - return nil, os.ErrNotExist + tb.Fatalf("file %q does not exist in tar", filename) + + return nil } -func extractFileHeaderFromTarGz(tarGzFile []byte, filename string) (*tar.Header, error) { - tarFile, err := gzipInflate(tarGzFile) - if err != nil { - return nil, err - } +func 
tarContains(tb testing.TB, tarFile []byte, filename string) bool { + tb.Helper() tr := tar.NewReader(bytes.NewReader(tarFile)) for { @@ -913,94 +922,152 @@ func extractFileHeaderFromTarGz(tarGzFile []byte, filename string) (*tar.Header, if errors.Is(err, io.EOF) { break // End of archive } - if err != nil { - return nil, err + require.NoError(tb, err) + + if path.Join("/", hdr.Name) == path.Join("/", filename) { // nolint:gosec + return true + } + } + + return false +} + +func extractFileHeaderFromTar(tb testing.TB, tarFile []byte, filename string) *tar.Header { + tb.Helper() + + tr := tar.NewReader(bytes.NewReader(tarFile)) + for { + hdr, err := tr.Next() + if errors.Is(err, io.EOF) { + break // End of archive } + require.NoError(tb, err) if path.Join("/", hdr.Name) != path.Join("/", filename) { // nolint:gosec continue } - return hdr, nil + return hdr } - return nil, os.ErrNotExist + tb.Fatalf("file %q does not exist in tar", filename) + + return nil } -func gzipInflate(data []byte) ([]byte, error) { - gzr, err := gzip.NewReader(bytes.NewReader(data)) - if err != nil { - return nil, err - } +func inflate(tb testing.TB, nameOrType string, data []byte) []byte { + tb.Helper() - inflatedData, err := ioutil.ReadAll(gzr) - if err != nil { - return nil, err + ext := filepath.Ext(nameOrType) + if ext == "" { + ext = nameOrType + } else { + ext = strings.TrimPrefix(ext, ".") } - if err = gzr.Close(); err != nil { - return nil, err + dataReader := bytes.NewReader(data) + + var ( + inflateReadCloser io.ReadCloser + err error + ) + + switch ext { + case "gz", "gzip": + inflateReadCloser, err = gzip.NewReader(dataReader) + require.NoError(tb, err) + case "xz": + r, err := xz.NewReader(dataReader, 0) + require.NoError(tb, err) + inflateReadCloser = io.NopCloser(r) + case "tar", "": // no compression + inflateReadCloser = io.NopCloser(dataReader) + default: + tb.Fatalf("invalid inflation type: %s", ext) } - return inflatedData, nil + inflatedData, err := ioutil.ReadAll(inflateReadCloser) + require.NoError(tb, err) + + err = inflateReadCloser.Close() + require.NoError(tb, err) + + return inflatedData } -func readAndFormatAsDebChangelog(changelogFileName, packageName string) (string, error) { +func readAndFormatAsDebChangelog(tb testing.TB, changelogFileName, packageName string) string { + tb.Helper() + changelogEntries, err := chglog.Parse(changelogFileName) - if err != nil { - return "", err - } + require.NoError(tb, err) tpl, err := chglog.DebTemplate() - if err != nil { - return "", err - } + require.NoError(tb, err) debChangelog, err := chglog.FormatChangelog(&chglog.PackageChangeLog{ Name: packageName, Entries: changelogEntries, }, tpl) - if err != nil { - return "", err - } + require.NoError(tb, err) - return strings.TrimSpace(debChangelog) + "\n", nil + return strings.TrimSpace(debChangelog) + "\n" } func symlinkTo(tb testing.TB, fileName string) string { tb.Helper() target, err := filepath.Abs(fileName) - assert.NoError(tb, err) + require.NoError(tb, err) symlinkName := filepath.Join(tb.TempDir(), "symlink") err = os.Symlink(target, symlinkName) - assert.NoError(tb, err) + require.NoError(tb, err) return files.ToNixPath(symlinkName) } -func extractFileFromAr(arFile []byte, filename string) ([]byte, error) { +func findDataTarball(tb testing.TB, arFile []byte) string { + tb.Helper() + tr := ar.NewReader(bytes.NewReader(arFile)) for { hdr, err := tr.Next() if errors.Is(err, io.EOF) { break // End of archive } - if err != nil { - return nil, err + require.NoError(tb, err) + + if 
strings.HasPrefix(path.Join("/", hdr.Name), "/data.tar") { + return hdr.Name } + } + + tb.Fatalf("data taball does not exist in ar") + + return "" +} + +func extractFileFromAr(tb testing.TB, arFile []byte, filename string) []byte { + tb.Helper() + + tr := ar.NewReader(bytes.NewReader(arFile)) + for { + hdr, err := tr.Next() + if errors.Is(err, io.EOF) { + break // End of archive + } + require.NoError(tb, err) if path.Join("/", hdr.Name) != path.Join("/", filename) { continue } fileContents, err := ioutil.ReadAll(tr) - if err != nil { - return nil, err - } + require.NoError(tb, err) - return fileContents, nil + return fileContents } - return nil, os.ErrNotExist + tb.Fatalf("file %q does not exist in ar", filename) + + return nil } From 65a0bc10c4c79e18719e146819c8999b9d018c20 Mon Sep 17 00:00:00 2001 From: Erik Geiser Date: Thu, 2 Sep 2021 20:46:30 +0200 Subject: [PATCH 3/5] Add deb compression acceptance tests. --- acceptance_test.go | 30 ++++++++++++++++++- testdata/acceptance/deb.dockerfile | 7 +++++ testdata/acceptance/deb.gzip.compression.yaml | 19 ++++++++++++ testdata/acceptance/deb.none.compression.yaml | 19 ++++++++++++ testdata/acceptance/deb.xz.compression.yaml | 19 ++++++++++++ 5 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 testdata/acceptance/deb.gzip.compression.yaml create mode 100644 testdata/acceptance/deb.none.compression.yaml create mode 100644 testdata/acceptance/deb.xz.compression.yaml diff --git a/acceptance_test.go b/acceptance_test.go index 8992b63b..c7d8fb92 100644 --- a/acceptance_test.go +++ b/acceptance_test.go @@ -127,7 +127,7 @@ func TestUpgrade(t *testing.T) { } } -func TestCompression(t *testing.T) { +func TestRPMCompression(t *testing.T) { t.Parallel() format := "rpm" compressFormats := []string{"gzip", "xz", "lzma"} @@ -156,6 +156,34 @@ func TestCompression(t *testing.T) { } } +func TestDebCompression(t *testing.T) { + t.Parallel() + format := "deb" + compressFormats := []string{"gzip", "xz", "none"} + for _, arch := range formatArchs[format] { + for _, compFormat := range compressFormats { + func(tt *testing.T, testCompFormat, testArch string) { + tt.Run(fmt.Sprintf("%s/%s/%s", format, testArch, testCompFormat), func(ttt *testing.T) { + ttt.Parallel() + if testArch == "ppc64le" && os.Getenv("NO_TEST_PPC64LE") == "true" { + ttt.Skip("ppc64le arch not supported in pipeline") + } + accept(ttt, acceptParms{ + Name: fmt.Sprintf("%s_compression_%s", testCompFormat, testArch), + Conf: fmt.Sprintf("deb.%s.compression.yaml", testCompFormat), + Format: format, + Docker: dockerParams{ + File: fmt.Sprintf("%s.dockerfile", format), + Target: "compression", + Arch: testArch, + }, + }) + }) + }(t, compFormat, arch) + } + } +} + func TestRPMSpecific(t *testing.T) { t.Parallel() format := "rpm" diff --git a/testdata/acceptance/deb.dockerfile b/testdata/acceptance/deb.dockerfile index 7e6d4e42..a761a54e 100644 --- a/testdata/acceptance/deb.dockerfile +++ b/testdata/acceptance/deb.dockerfile @@ -146,6 +146,13 @@ RUN dpkg -i /tmp/foo.deb 2>&1 | grep "foo breaks dummy" RUN dpkg -r dummy RUN dpkg -i /tmp/foo.deb +# ---- compression test ---- +FROM min AS compression +RUN test -e /usr/local/bin/fake +RUN test -f /etc/foo/whatever.conf +RUN echo wat >> /etc/foo/whatever.conf +RUN dpkg -r foo +RUN test ! 
-f /usr/local/bin/fake # ---- upgrade test ---- FROM test_base AS upgrade diff --git a/testdata/acceptance/deb.gzip.compression.yaml b/testdata/acceptance/deb.gzip.compression.yaml new file mode 100644 index 00000000..28af9789 --- /dev/null +++ b/testdata/acceptance/deb.gzip.compression.yaml @@ -0,0 +1,19 @@ +name: "foo" +arch: "${BUILD_ARCH}" +platform: "linux" +version: "v1.2.3" +maintainer: "Foo Bar" +description: | + Foo bar + Multiple lines +vendor: "foobar" +homepage: "https://foobar.org" +license: "MIT" +contents: + - src: ./testdata/fake + dst: /usr/local/bin/fake + - src: ./testdata/whatever.conf + dst: /etc/foo/whatever.conf + type: config +deb: + compression: "gzip" diff --git a/testdata/acceptance/deb.none.compression.yaml b/testdata/acceptance/deb.none.compression.yaml new file mode 100644 index 00000000..631fbf98 --- /dev/null +++ b/testdata/acceptance/deb.none.compression.yaml @@ -0,0 +1,19 @@ +name: "foo" +arch: "${BUILD_ARCH}" +platform: "linux" +version: "v1.2.3" +maintainer: "Foo Bar" +description: | + Foo bar + Multiple lines +vendor: "foobar" +homepage: "https://foobar.org" +license: "MIT" +contents: + - src: ./testdata/fake + dst: /usr/local/bin/fake + - src: ./testdata/whatever.conf + dst: /etc/foo/whatever.conf + type: config +deb: + compression: "none" diff --git a/testdata/acceptance/deb.xz.compression.yaml b/testdata/acceptance/deb.xz.compression.yaml new file mode 100644 index 00000000..268e5bb0 --- /dev/null +++ b/testdata/acceptance/deb.xz.compression.yaml @@ -0,0 +1,19 @@ +name: "foo" +arch: "${BUILD_ARCH}" +platform: "linux" +version: "v1.2.3" +maintainer: "Foo Bar" +description: | + Foo bar + Multiple lines +vendor: "foobar" +homepage: "https://foobar.org" +license: "MIT" +contents: + - src: ./testdata/fake + dst: /usr/local/bin/fake + - src: ./testdata/whatever.conf + dst: /etc/foo/whatever.conf + type: config +deb: + compression: "xz" From 8278268767ad3622efd7d673e0078bbee4274879 Mon Sep 17 00:00:00 2001 From: Erik Geiser Date: Thu, 2 Sep 2021 23:05:29 +0200 Subject: [PATCH 4/5] Fix compression value in docs. --- www/docs/configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/www/docs/configuration.md b/www/docs/configuration.md index a903e5c2..890928e1 100644 --- a/www/docs/configuration.md +++ b/www/docs/configuration.md @@ -255,7 +255,7 @@ deb: - some-package # Compression algorithm (gzip (default), xz or none). - compression: lzma + compression: xz # The package is signed if a key_file is set signature: From 2c18ac9d0f664ac8c3fcc01154be7f0a383aed77 Mon Sep 17 00:00:00 2001 From: Erik Geiser Date: Fri, 3 Sep 2021 16:26:14 +0200 Subject: [PATCH 5/5] Consistent tarball naming. 
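
Now that the data member is not always gzip-compressed, helpers that only ever
write into a plain tar stream lose the misleading Gz suffix (newItemInsideTar,
newFileInsideTar, newFilePathInsideTar, createSymlinkInsideTar), and dataTarGz
variables become dataTarball. Only the genuinely gzip-specific parts, namely
control.tar.gz and the gzipped changelog, keep their names. The member name
itself follows the configured compression; a sketch with a hypothetical
dataMemberName helper that restates the switch in createDataTarball (not code
added by this commit):

    package deb

    import "fmt"

    // dataMemberName restates the naming rule the renames reflect: the data
    // member in the ar archive is named after the configured compression.
    // Hypothetical helper for illustration only; the real logic lives inline
    // in createDataTarball.
    func dataMemberName(compression string) (string, error) {
        switch compression {
        case "", "gzip": // current default
            return "data.tar.gz", nil
        case "xz":
            return "data.tar.xz", nil
        case "none":
            return "data.tar", nil
        default:
            return "", fmt.Errorf("unknown compression algorithm: %s", compression)
        }
    }
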
--- deb/deb.go | 31 ++++++++++++++++--------------- deb/deb_test.go | 42 +++++++++++++++++++++--------------------- 2 files changed, 37 insertions(+), 36 deletions(-) diff --git a/deb/deb.go b/deb/deb.go index 68b5886d..90f714f2 100644 --- a/deb/deb.go +++ b/deb/deb.go @@ -91,7 +91,7 @@ func (*Deb) Package(info *nfpm.Info, deb io.Writer) (err error) { // nolint: fun return err } - dataTarGz, md5sums, instSize, dataTarballName, err := createDataTarball(info) + dataTarball, md5sums, instSize, dataTarballName, err := createDataTarball(info) if err != nil { return err } @@ -116,14 +116,14 @@ func (*Deb) Package(info *nfpm.Info, deb io.Writer) (err error) { // nolint: fun return fmt.Errorf("cannot add control.tar.gz to deb: %w", err) } - if err := addArFile(w, dataTarballName, dataTarGz); err != nil { + if err := addArFile(w, dataTarballName, dataTarball); err != nil { return fmt.Errorf("cannot add data.tar.gz to deb: %w", err) } // TODO: refactor this if info.Deb.Signature.KeyFile != "" { data := io.MultiReader(bytes.NewReader(debianBinary), bytes.NewReader(controlTarGz), - bytes.NewReader(dataTarGz)) + bytes.NewReader(dataTarball)) sig, err := sign.PGPArmoredDetachSignWithKeyID(data, info.Deb.Signature.KeyFile, info.Deb.Signature.KeyPassphrase, info.Deb.Signature.KeyID) if err != nil { @@ -171,7 +171,8 @@ type nopCloser struct { func (nopCloser) Close() error { return nil } -func createDataTarball(info *nfpm.Info) (dataTarGz, md5sums []byte, instSize int64, name string, err error) { +func createDataTarball(info *nfpm.Info) (dataTarBall, md5sums []byte, + instSize int64, name string, err error) { var ( dataTarball bytes.Buffer dataTarballWriteCloser io.WriteCloser @@ -233,8 +234,8 @@ func fillDataTar(info *nfpm.Info, w io.Writer) (md5sums []byte, instSize int64, return md5buf.Bytes(), instSize, nil } -func createSymlinkInsideTarGz(file *files.Content, out *tar.Writer) error { - return newItemInsideTarGz(out, []byte{}, &tar.Header{ +func createSymlinkInsideTar(file *files.Content, out *tar.Writer) error { + return newItemInsideTar(out, []byte{}, &tar.Header{ Name: normalizePath(file.Destination), Linkname: file.Source, Typeflag: tar.TypeSymlink, @@ -259,7 +260,7 @@ func createFilesInsideDataTar(info *nfpm.Info, tw *tar.Writer, // skip ghost files in apk continue case "symlink": - err = createSymlinkInsideTarGz(file, tw) + err = createSymlinkInsideTar(file, tw) case "doc": // nolint:gocritic // ignoring `emptyFallthrough: remove empty case containing only fallthrough to default case` @@ -375,7 +376,7 @@ func createChangelogInsideDataTar(tarw *tar.Writer, md5w io.Writer, return 0, err } - if err = newFileInsideTarGz(tarw, changelogName, changelogData); err != nil { + if err = newFileInsideTar(tarw, changelogName, changelogData); err != nil { return 0, err } @@ -440,7 +441,7 @@ func createControl(instSize int64, md5sums []byte, info *nfpm.Info) (controlTarG } for name, content := range filesToCreate { - if err := newFileInsideTarGz(out, name, content); err != nil { + if err := newFileInsideTar(out, name, content); err != nil { return nil, err } } @@ -482,7 +483,7 @@ func createControl(instSize int64, md5sums []byte, info *nfpm.Info) (controlTarG for path, destMode := range specialFiles { if path != "" { - if err := newFilePathInsideTarGz(out, path, destMode.fileName, destMode.mode); err != nil { + if err := newFilePathInsideTar(out, path, destMode.fileName, destMode.mode); err != nil { return nil, err } } @@ -497,7 +498,7 @@ func createControl(instSize int64, md5sums []byte, info *nfpm.Info) 
(controlTarG return buf.Bytes(), nil } -func newItemInsideTarGz(out *tar.Writer, content []byte, header *tar.Header) error { +func newItemInsideTar(out *tar.Writer, content []byte, header *tar.Header) error { if err := out.WriteHeader(header); err != nil { return fmt.Errorf("cannot write header of %s file to control.tar.gz: %w", header.Name, err) } @@ -507,8 +508,8 @@ func newItemInsideTarGz(out *tar.Writer, content []byte, header *tar.Header) err return nil } -func newFileInsideTarGz(out *tar.Writer, name string, content []byte) error { - return newItemInsideTarGz(out, content, &tar.Header{ +func newFileInsideTar(out *tar.Writer, name string, content []byte) error { + return newItemInsideTar(out, content, &tar.Header{ Name: normalizePath(name), Size: int64(len(content)), Mode: 0o644, @@ -518,7 +519,7 @@ func newFileInsideTarGz(out *tar.Writer, name string, content []byte) error { }) } -func newFilePathInsideTarGz(out *tar.Writer, path, dest string, mode int64) error { +func newFilePathInsideTar(out *tar.Writer, path, dest string, mode int64) error { file, err := os.Open(path) //nolint:gosec if err != nil { return err @@ -527,7 +528,7 @@ func newFilePathInsideTarGz(out *tar.Writer, path, dest string, mode int64) erro if err != nil { return err } - return newItemInsideTarGz(out, content, &tar.Header{ + return newItemInsideTar(out, content, &tar.Header{ Name: normalizePath(dest), Size: int64(len(content)), Mode: mode, diff --git a/deb/deb_test.go b/deb/deb_test.go index 9bfa67e1..c064abcf 100644 --- a/deb/deb_test.go +++ b/deb/deb_test.go @@ -209,8 +209,8 @@ func TestSpecialFiles(t *testing.T) { var w bytes.Buffer out := tar.NewWriter(&w) filePath := "testdata/templates.golden" - assert.Error(t, newFilePathInsideTarGz(out, "doesnotexit", "templates", 0o644)) - require.NoError(t, newFilePathInsideTarGz(out, filePath, "templates", 0o644)) + assert.Error(t, newFilePathInsideTar(out, "doesnotexit", "templates", 0o644)) + require.NoError(t, newFilePathInsideTar(out, filePath, "templates", 0o644)) in := tar.NewReader(&w) header, err := in.Next() require.NoError(t, err) @@ -547,12 +547,12 @@ func TestDebChangelogData(t *testing.T) { err := info.Validate() require.NoError(t, err) - dataTarGz, _, _, dataTarballName, err := createDataTarball(info) + dataTarball, _, _, dataTarballName, err := createDataTarball(info) require.NoError(t, err) changelogName := fmt.Sprintf("/usr/share/doc/%s/changelog.gz", info.Name) dataChangelogGz := extractFileFromTar(t, - inflate(t, dataTarballName, dataTarGz), changelogName) + inflate(t, dataTarballName, dataTarball), changelogName) dataChangelog := inflate(t, "gz", dataChangelogGz) goldenChangelog := readAndFormatAsDebChangelog(t, info.Changelog, info.Name) @@ -570,12 +570,12 @@ func TestDebNoChangelogDataWithoutChangelogConfigured(t *testing.T) { err := info.Validate() require.NoError(t, err) - dataTarGz, _, _, dataTarballName, err := createDataTarball(info) + dataTarball, _, _, dataTarballName, err := createDataTarball(info) require.NoError(t, err) changelogName := fmt.Sprintf("/usr/share/doc/%s/changelog.gz", info.Name) - assert.False(t, tarContains(t, inflate(t, dataTarballName, dataTarGz), changelogName)) + assert.False(t, tarContains(t, inflate(t, dataTarballName, dataTarball), changelogName)) } func TestDebTriggers(t *testing.T) { @@ -671,11 +671,11 @@ func TestSymlinkInFiles(t *testing.T) { realSymlinkTarget, err := ioutil.ReadFile(symlinkTarget) require.NoError(t, err) - dataTarGz, _, _, dataTarballName, err := createDataTarball(info) + dataTarball, _, _, 
dataTarballName, err := createDataTarball(info) require.NoError(t, err) packagedSymlinkTarget := extractFileFromTar(t, - inflate(t, dataTarballName, dataTarGz), packagedTarget) + inflate(t, dataTarballName, dataTarball), packagedTarget) assert.Equal(t, string(realSymlinkTarget), string(packagedSymlinkTarget)) } @@ -709,18 +709,18 @@ func TestSymlink(t *testing.T) { err := info.Validate() require.NoError(t, err) - dataTarGz, _, _, dataTarballName, err := createDataTarball(info) + dataTarball, _, _, dataTarballName, err := createDataTarball(info) require.NoError(t, err) packagedSymlinkHeader := extractFileHeaderFromTar(t, - inflate(t, dataTarballName, dataTarGz), symlink) + inflate(t, dataTarballName, dataTarball), symlink) assert.Equal(t, symlink, path.Join("/", packagedSymlinkHeader.Name)) // nolint:gosec assert.Equal(t, uint8(tar.TypeSymlink), packagedSymlinkHeader.Typeflag) assert.Equal(t, symlinkTarget, packagedSymlinkHeader.Linkname) } -func TestEnsureRelativePrefixInTarGzFiles(t *testing.T) { +func TestEnsureRelativePrefixInTarballs(t *testing.T) { info := exampleInfo() info.Contents = []*files.Content{ { @@ -733,9 +733,9 @@ func TestEnsureRelativePrefixInTarGzFiles(t *testing.T) { err := info.Validate() require.NoError(t, err) - dataTarGz, md5sums, instSize, tarballName, err := createDataTarball(info) + dataTarball, md5sums, instSize, tarballName, err := createDataTarball(info) require.NoError(t, err) - testRelativePathPrefixInTar(t, inflate(t, tarballName, dataTarGz)) + testRelativePathPrefixInTar(t, inflate(t, tarballName, dataTarball)) controlTarGz, err := createControl(instSize, md5sums, info) require.NoError(t, err) @@ -755,7 +755,7 @@ func TestMD5Sums(t *testing.T) { } } - dataTarGz, md5sums, instSize, tarballName, err := createDataTarball(info) + dataTarball, md5sums, instSize, tarballName, err := createDataTarball(info) require.NoError(t, err) controlTarGz, err := createControl(instSize, md5sums, info) @@ -766,7 +766,7 @@ func TestMD5Sums(t *testing.T) { lines := strings.Split(strings.TrimRight(string(md5sumsFile), "\n"), "\n") require.Equal(t, nFiles, len(lines)) - dataTar := inflate(t, tarballName, dataTarGz) + dataTar := inflate(t, tarballName, dataTarball) for _, line := range lines { parts := strings.Fields(line) @@ -807,11 +807,11 @@ func TestDebsigsSignature(t *testing.T) { debBinary := extractFileFromAr(t, deb.Bytes(), "debian-binary") controlTarGz := extractFileFromAr(t, deb.Bytes(), "control.tar.gz") - dataTarGz := extractFileFromAr(t, deb.Bytes(), findDataTarball(t, deb.Bytes())) + dataTarball := extractFileFromAr(t, deb.Bytes(), findDataTarball(t, deb.Bytes())) signature := extractFileFromAr(t, deb.Bytes(), "_gpgorigin") message := io.MultiReader(bytes.NewReader(debBinary), - bytes.NewReader(controlTarGz), bytes.NewReader(dataTarGz)) + bytes.NewReader(controlTarGz), bytes.NewReader(dataTarball)) err = sign.PGPVerify(message, signature, "../internal/sign/testdata/pubkey.asc") require.NoError(t, err) @@ -840,13 +840,13 @@ func TestDisableGlobbing(t *testing.T) { } require.NoError(t, info.Validate()) - dataTarGz, _, _, tarballName, err := createDataTarball(info) + dataTarball, _, _, tarballName, err := createDataTarball(info) require.NoError(t, err) expectedContent, err := ioutil.ReadFile("../testdata/{file}[") require.NoError(t, err) - actualContent := extractFileFromTar(t, inflate(t, tarballName, dataTarGz), "/test/{file}[") + actualContent := extractFileFromTar(t, inflate(t, tarballName, dataTarball), "/test/{file}[") assert.Equal(t, expectedContent, 
actualContent) } @@ -877,8 +877,8 @@ func TestCompressionAlgorithms(t *testing.T) { dataTarballName := findDataTarball(t, deb.Bytes()) assert.Equal(t, dataTarballName, testCase.dataTarballName) - dataTarGz := extractFileFromAr(t, deb.Bytes(), dataTarballName) - dataTar := inflate(t, dataTarballName, dataTarGz) + dataTarball := extractFileFromAr(t, deb.Bytes(), dataTarballName) + dataTar := inflate(t, dataTarballName, dataTarball) for _, file := range info.Contents { tarContains(t, dataTar, file.Destination)