package main

import (
	"archive/tar"
	"encoding/csv"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"sort"
	"strconv"
	"time"

	"github.com/klauspost/compress/zstd"
)

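// FileInfo describes one stored version of a file: the backup archive that
// contains it and its modification time. The unexported filePath and fileSize
// fields are only populated on some code paths in this file (for example when
// the index is rebuilt from the archives on disk).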
type FileInfo struct {
	ArchiveFileName  string
	ModificationTime time.Time

	filePath string
	fileSize int64
}

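// Index maps a file path (as recorded inside the backup archives) to the
// history of its versions across archives.
//
// A minimal usage sketch (the archive name below is hypothetical):
//
//	idx := make(Index)
//	idx.AddFile("docs/report.txt", "backup-2023-04-01-f.tar.zst", time.Now())
//	_ = idx.ViewFileVersions(os.Stdout)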
type Index map[string]FileHistory

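// AddFile appends a version of fileName stored in archiveFileName with the
// given modification time to the file's history, creating the history if the
// file is not indexed yet.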
func (index Index) AddFile(fileName string, archiveFileName string, modTime time.Time) {
	fileInfo := FileInfo{ArchiveFileName: archiveFileName, ModificationTime: modTime}

	if eFileInfo, exists := index[fileName]; exists {
		index[fileName] = append(eFileInfo, fileInfo)
		return
	}

	index[fileName] = FileHistory{fileInfo}
}

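// ViewFileVersions writes a human-readable listing of every indexed file and
// its versions (modification time plus archive name) to w.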
func (index Index) ViewFileVersions(w io.Writer) error {
	for filePath, fileHistory := range index {
		_, err := fmt.Fprintf(w, "%s\n", filePath)
		if err != nil {
			return err
		}

		for _, v := range fileHistory {
			_, err := fmt.Fprintf(w, "\t%s %s\n", v.ModificationTime.Format(defaultTimeFormat), v.ArchiveFileName)
			if err != nil {
				return err
			}
		}
	}

	return nil
}

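// Save writes the index to fileName as a zstd-compressed, semicolon-separated
// CSV with one record per file version. On any error the partially written
// file is removed.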
func (index Index) Save(fileName string) error {
	f, err := os.Create(fileName)
	if err != nil {
		return err
	}

	enc, err := zstd.NewWriter(f, zstd.WithEncoderLevel(zstd.SpeedBestCompression))
	if err != nil {
		f.Close()
		os.Remove(fileName)
		return err
	}

	files := make([]string, 0, len(index))
	for filePath := range index {
		files = append(files, filePath)
	}

	// Sort file list for better compression
	sort.Strings(files)

	csvWriter := csv.NewWriter(enc)
	csvWriter.Comma = ';'

	// The loop variable deliberately does not shadow fileName, so the error
	// path below removes the partially written index file, not an indexed path.
	for _, filePath := range files {
		for _, historyItem := range index[filePath] {
			err := csvWriter.Write([]string{filePath, historyItem.ArchiveFileName, strconv.Itoa(int(historyItem.ModificationTime.Unix()))})
			if err != nil {
				enc.Close()
				f.Close()
				os.Remove(fileName)
				return err
			}
		}
	}

	csvWriter.Flush()
	if err := csvWriter.Error(); err != nil {
		enc.Close()
		f.Close()
		os.Remove(fileName)
		return err
	}

	err = enc.Close()
	if err != nil {
		f.Close()
		os.Remove(fileName)
		return err
	}

	err = f.Close()
	if err != nil {
		return err
	}

	return nil
}

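// index returns the backup index: it first tries the saved index file and
// falls back to rebuilding the index from the archives on disk when the file
// cannot be read.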
func (b *Config) index(fullIndex bool) (Index, error) {
	index, err := b.indexFromFile()
	if err == nil {
		b.logf(Debug, "Index file contains %d files.", len(index))
		return index, nil
	}
	b.logf(Error, "index file read error: %v", err)

	return b.indexFromDisk(fullIndex)
}

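// indexFromFile loads the index from the saved index file (a zstd-compressed,
// semicolon-separated CSV in the same directory as b.filePath).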
func (b *Config) indexFromFile() (Index, error) {
	index := make(Index)

	indexFilePath := filepath.Join(filepath.Dir(b.filePath), indexFileName)

	f, err := os.Open(indexFilePath)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	dec, err := zstd.NewReader(f)
	if err != nil {
		return nil, err
	}
	defer dec.Close()

	csvReader := csv.NewReader(dec)
	csvReader.Comma = ';'
	csvReader.FieldsPerRecord = 3
	for {
		data, err := csvReader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}

		unixTime, err := strconv.Atoi(data[2])
		if err != nil {
			return nil, err
		}

		index.AddFile(data[0], data[1], time.Unix(int64(unixTime), 0).Local())
	}

	return index, nil
}

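// indexFromDisk rebuilds the index by listing the contents of the backup
// archives in the backup directory. When fullIndex is false, only archives
// starting from the last full backup (by file name) are scanned.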
func (b *Config) indexFromDisk(fullIndex bool) (Index, error) {
	b.logf(Info, "Rebuilding index from %s...", filepath.Dir(b.filePath))
	allFileMask := filepath.Join(filepath.Dir(b.filePath), b.FileName+"*"+defaultExt)
	onlyFullBackupFileMask := filepath.Join(filepath.Dir(b.filePath), b.FileName+"*f"+defaultExt)

	// Get last full backup name
	lastFullBackupFileName := ""
	err := filepath.WalkDir(filepath.Dir(b.filePath), func(path string, info os.DirEntry, err error) error {
		matched, err := filepath.Match(onlyFullBackupFileMask, path)
		if err != nil {
			return fmt.Errorf("filepath.Match: %v", err)
		}
		if !matched {
			return nil
		}

		lastFullBackupFileName = path

		return nil
	})
	if err != nil {
		return nil, fmt.Errorf("filepath.WalkDir: %v", err)
	}

	if !fullIndex {
		b.logf(Debug, "Diff will be calculated from %s.", filepath.Base(lastFullBackupFileName))
	}

	var files []string
	err = filepath.WalkDir(filepath.Dir(b.filePath), func(path string, info os.DirEntry, err error) error {
		matched, err := filepath.Match(allFileMask, path)
		if err != nil {
			return fmt.Errorf("filepath.Match: %v", err)
		}

		if matched && (fullIndex || path >= lastFullBackupFileName) {
			files = append(files, path)
		}
		return nil
	})
	if err != nil {
		return nil, fmt.Errorf("filepath.WalkDir: %v", err)
	}

	index := make(Index)

	for i, file := range files {
		b.logf(Debug, "[%3d%%] Reading file %s...", (100 * i / len(files)), filepath.Base(file))
		f, err := os.Open(file)
		if err != nil {
			return nil, fmt.Errorf("os.Open: %v", err)
		}

		decoder, err := zstd.NewReader(f)
		if err != nil {
			f.Close()
			return nil, fmt.Errorf("zstd.NewReader: %v", err)
		}

		tarReader := tar.NewReader(decoder)

		for {
			tarHeader, err := tarReader.Next()
			if err != nil {
				if err == io.EOF {
					break
				}
				decoder.Close()
				f.Close()
				return nil, fmt.Errorf("error reading the file list from archive %s: %v", file, err)
			}

			index[tarHeader.Name] = append(index[tarHeader.Name], FileInfo{
				filePath:         tarHeader.Name,
				ModificationTime: tarHeader.FileInfo().ModTime(),
				fileSize:         tarHeader.FileInfo().Size(),
				ArchiveFileName:  filepath.Base(file)})
		}
		// Close each archive as soon as it has been read instead of deferring,
		// so file handles are not held open for the whole rebuild.
		decoder.Close()
		f.Close()
	}

	return index, nil
}

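// GetFilesLocation returns, for each indexed file matching mask, the version
// of that file considered current at time t: starting from the first recorded
// version, a later version replaces it only if it was modified before t and is
// closer to t.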
func (index Index) GetFilesLocation(mask string, t time.Time) ([]FileInfo, error) {
	var result []FileInfo

	for fileName := range index {
		if isFilePathMatchPatterns([]string{mask}, fileName) {
			files := index[fileName]

			file := files[0]
			for i := 1; i < len(files); i++ {
				if files[i].ModificationTime.Before(t) && files[i].ModificationTime.Sub(t) > file.ModificationTime.Sub(t) { // Greater-than comparison because the differences are negative
					file = files[i]
				}
			}

			file.filePath = fileName

			result = append(result, file)
		}
	}

	return result, nil
}