Add support for ignoring directories/files; fix compression of archives

Dane Everitt
2020-04-18 23:26:23 -07:00
parent 8eaf590f78
commit df6d98bbda
10 changed files with 211 additions and 41 deletions
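Only a subset of the 10 changed files is rendered below: the new archiver, the IncludedFiles container, and the backup implementations that now accept it. The directory walk that actually applies the ignore rules is not shown in these hunks, so the following is only a rough sketch of how a caller might filter paths against an ignore list before handing them to a backup; collectFiles and its prefix matching are illustrative assumptions, not code from this commit.

package backup

import (
	"os"
	"path/filepath"
	"strings"
)

// collectFiles is a hypothetical helper (not part of this commit) showing how a
// caller might honor a list of ignored relative paths while building the set of
// files that eventually gets handed to a Backup implementation.
func collectFiles(root string, ignored []string) (*IncludedFiles, error) {
	inc := &IncludedFiles{}

	err := filepath.Walk(root, func(p string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}

		rel := strings.TrimPrefix(p, root+"/")
		for _, ig := range ignored {
			if rel == ig || strings.HasPrefix(rel, ig+"/") {
				// Skip the entire subtree when an ignored directory is hit.
				if info.IsDir() {
					return filepath.SkipDir
				}
				return nil
			}
		}

		inc.Push(&info, p)
		return nil
	})

	return inc, err
}

A LocalBackup could then be driven with something like b.Backup(included, root+"/") so that the server root is trimmed from every archived path.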

server/backup/archiver.go Normal file

@@ -0,0 +1,111 @@
package backup
import (
"archive/tar"
"context"
gzip "github.com/klauspost/pgzip"
"github.com/remeh/sizedwaitgroup"
"go.uber.org/zap"
"golang.org/x/sync/errgroup"
"io"
"os"
"strings"
"sync"
)
type Archive struct {
sync.Mutex
TrimPrefix string
Files *IncludedFiles
}
// Creates an archive at dest with all of the files defined in the included files struct.
func (a *Archive) Create(dest string, ctx context.Context) error {
f, err := os.OpenFile(dest, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
if err != nil {
return err
}
defer f.Close()
gzw := gzip.NewWriter(f)
defer gzw.Close()
tw := tar.NewWriter(gzw)
defer tw.Close()
wg := sizedwaitgroup.New(10)
g, ctx := errgroup.WithContext(ctx)
// Iterate over all of the files to be included and put them into the archive. Each file
// is added in its own goroutine to speed things along. If an error is encountered at
// any step, the entire process is aborted.
for p, s := range a.Files.All() {
if (*s).IsDir() {
continue
}
pa := p
st := s
g.Go(func() error {
wg.Add()
defer wg.Done()
select {
case <-ctx.Done():
return ctx.Err()
default:
return a.addToArchive(pa, st, tw)
}
})
}
// Block until the entire routine is completed.
if err := g.Wait(); err != nil {
f.Close()
// Attempt to remove the archive if there was an error; log a warning if the
// removal itself fails.
if rerr := os.Remove(dest); rerr != nil && !os.IsNotExist(rerr) {
zap.S().Warnw("failed to delete corrupted backup archive", zap.String("location", dest))
}
return err
}
return nil
}
// Adds a single file to the existing tar archive writer.
func (a *Archive) addToArchive(p string, s *os.FileInfo, w *tar.Writer) error {
f, err := os.Open(p)
if err != nil {
return err
}
defer f.Close()
st := *s
header := &tar.Header{
// Trim the long server path from the name of the file so that the resulting
// archive is exactly how the user would see it in the panel file manager.
Name: strings.TrimPrefix(p, a.TrimPrefix),
Size: st.Size(),
Mode: int64(st.Mode()),
ModTime: st.ModTime(),
}
// These actions must occur sequentially, even if this function is called multiple
// times in parallel. You'll get some nasty panics otherwise.
a.Lock()
defer a.Unlock()
if err = w.WriteHeader(header); err != nil {
return err
}
if _, err := io.Copy(w, f); err != nil {
return err
}
return nil
}
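For context, this is roughly how the new type is meant to be driven; the exampleCreate wrapper and the paths below are placeholders, not code from this commit.

package backup

import (
	"context"
	"os"
)

// exampleCreate is an illustrative sketch of using Archive directly; the paths
// are placeholders and this function is not part of the commit.
func exampleCreate() error {
	files := &IncludedFiles{}

	p := "/srv/daemon-data/uuid/server.jar"
	if info, err := os.Lstat(p); err == nil {
		files.Push(&info, p)
	}

	a := &Archive{
		// Trim the server root so entries are stored relative to it, matching
		// what a user sees in the panel's file manager.
		TrimPrefix: "/srv/daemon-data/uuid/",
		Files:      files,
	}

	// Writes a gzip-compressed tarball to the destination; a cancelled context
	// aborts any files that have not yet been added.
	return a.Create("/srv/backups/uuid.tar.gz", context.Background())
}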


@@ -10,7 +10,7 @@ type Backup interface {
// Generates a backup using whatever source is configured for the
// specific implementation.
Backup(dir string) error
Backup(*IncludedFiles, string) error
// Returns a SHA256 checksum for the generated backup.
Checksum() ([]byte, error)


@@ -1,16 +1,15 @@
package backup
import (
"context"
"crypto/sha256"
"encoding/hex"
"github.com/mholt/archiver/v3"
"github.com/pkg/errors"
"github.com/pterodactyl/wings/config"
"go.uber.org/zap"
"io"
"os"
"path"
"strings"
"sync"
)
@@ -79,25 +78,15 @@ func (b *LocalBackup) Remove() error {
// Generates a backup of the selected files and pushes it to the defined location
// for this instance.
func (b *LocalBackup) Backup(dir string) error {
if err := archiver.Archive([]string{dir}, b.Path()); err != nil {
if strings.HasPrefix(err.Error(), "file already exists") {
if rerr := os.Remove(b.Path()); rerr != nil {
return errors.WithStack(rerr)
}
// Re-attempt this backup by calling it with the same information.
return b.Backup(dir)
}
// If there was some error with the archive, just go ahead and ensure the backup
// is completely destroyed at this point. Ignore any errors from this function.
os.Remove(b.Path())
return errors.WithStack(err)
func (b *LocalBackup) Backup(included *IncludedFiles, prefix string) error {
a := &Archive{
TrimPrefix: prefix,
Files: included,
}
return nil
err := a.Create(b.Path(), context.Background())
return err
}
// Return the size of the generated backup.
@@ -162,4 +151,4 @@ func (b *LocalBackup) ensureLocalBackupLocation() error {
}
return nil
}
}


@@ -16,7 +16,7 @@ func (s *S3Backup) Identifier() string {
return s.Uuid
}
func (s *S3Backup) Backup(dir string) error {
func (s *S3Backup) Backup(included *IncludedFiles, prefix string) error {
panic("implement me")
}

server/backup/included.go Normal file

@@ -0,0 +1,31 @@
package backup
import (
"os"
"sync"
)
type IncludedFiles struct {
sync.RWMutex
files map[string]*os.FileInfo
}
// Pushes an additional file or folder onto the struct.
func (i *IncludedFiles) Push(info *os.FileInfo, p string) {
i.Lock()
defer i.Unlock()
if i.files == nil {
i.files = make(map[string]*os.FileInfo)
}
i.files[p] = info
}
// Returns all of the files that were marked as being included.
func (i *IncludedFiles) All() map[string]*os.FileInfo {
i.RLock()
defer i.RUnlock()
return i.files
}
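Because the map is guarded by the embedded RWMutex, separate goroutines can safely push into the same set. A minimal sketch under that assumption (the helper name and paths are illustrative only, not part of this commit):

package backup

import (
	"os"
	"sync"
)

// examplePushConcurrently is illustrative only: it shows independent goroutines,
// e.g. one per top-level directory, pushing into a shared IncludedFiles set.
func examplePushConcurrently(paths []string) *IncludedFiles {
	inc := &IncludedFiles{}

	var wg sync.WaitGroup
	for _, p := range paths {
		wg.Add(1)
		go func(p string) {
			defer wg.Done()
			if info, err := os.Lstat(p); err == nil {
				inc.Push(&info, p)
			}
		}(p)
	}
	wg.Wait()

	return inc
}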