(Feat): Initial Commit

This commit is contained in:
2025-11-16 19:48:50 +00:00
commit a00f70a7fe
17 changed files with 1654 additions and 0 deletions

View File

@@ -0,0 +1,35 @@
package archiver
import (
	"fmt"

	"zipprine/internal/models"
)
// Compress creates an archive from config.SourcePath at config.OutputPath,
// dispatching on config.ArchiveType.
//
// It returns an error for an unsupported or unset archive type; previously
// that case returned nil, silently reporting success without writing anything.
func Compress(config *models.CompressConfig) error {
	switch config.ArchiveType {
	case models.ZIP:
		return createZip(config)
	case models.TARGZ:
		return createTarGz(config)
	case models.TAR:
		return createTar(config)
	case models.GZIP:
		return createGzip(config)
	default:
		return fmt.Errorf("unsupported archive type: %s", config.ArchiveType)
	}
}
// Extract unpacks the archive at config.ArchivePath into config.DestPath,
// dispatching on config.ArchiveType.
//
// It returns an error for an unsupported or unset archive type; previously
// that case returned nil, silently reporting success without extracting.
func Extract(config *models.ExtractConfig) error {
	switch config.ArchiveType {
	case models.ZIP:
		return extractZip(config)
	case models.TARGZ:
		return extractTarGz(config)
	case models.TAR:
		return extractTar(config)
	case models.GZIP:
		return extractGzip(config)
	default:
		return fmt.Errorf("unsupported archive type: %s", config.ArchiveType)
	}
}

107
internal/archiver/detect.go Normal file
View File

@@ -0,0 +1,107 @@
package archiver
import (
"bytes"
"compress/gzip"
"io"
"os"
"path/filepath"
"strings"
"zipprine/internal/models"
)
// DetectArchiveType determines the archive format of the file at path.
// It first decides by file extension, then falls back to sniffing magic
// bytes. models.AUTO is returned when the format cannot be determined.
func DetectArchiveType(path string) (models.ArchiveType, error) {
	// Fast path: decide by file extension.
	ext := strings.ToLower(filepath.Ext(path))
	switch ext {
	case ".zip":
		return models.ZIP, nil
	case ".gz":
		if strings.HasSuffix(strings.ToLower(path), ".tar.gz") {
			return models.TARGZ, nil
		}
		return models.GZIP, nil
	case ".tar":
		return models.TAR, nil
	case ".tgz":
		return models.TARGZ, nil
	}
	// Slow path: sniff magic bytes from the file header.
	file, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer file.Close()
	header := make([]byte, 512)
	n, err := file.Read(header)
	if err != nil && err != io.EOF {
		return "", err
	}
	header = header[:n]
	// ZIP magic: "PK" (0x50 0x4B).
	if len(header) >= 2 && header[0] == 0x50 && header[1] == 0x4B {
		return models.ZIP, nil
	}
	// GZIP magic: 0x1F 0x8B.
	if len(header) >= 2 && header[0] == 0x1F && header[1] == 0x8B {
		// Distinguish .tar.gz from plain gzip: decompress the first block
		// and look for the tar "ustar" magic at offset 257.
		if _, err := file.Seek(0, io.SeekStart); err != nil {
			return "", err
		}
		gzReader, err := gzip.NewReader(file)
		if err == nil {
			defer gzReader.Close()
			tarHeader := make([]byte, 512)
			// io.ReadFull: a single gzReader.Read may legally return fewer
			// bytes than requested, which previously misclassified valid
			// tar.gz files as plain GZIP. We need 262 bytes to cover
			// tarHeader[257:262] (the old check stopped at 257).
			if n, _ := io.ReadFull(gzReader, tarHeader); n >= 262 {
				if bytes.Equal(tarHeader[257:262], []byte("ustar")) {
					return models.TARGZ, nil
				}
			}
		}
		return models.GZIP, nil
	}
	// TAR magic: "ustar" at offset 257.
	if len(header) >= 262 && bytes.Equal(header[257:262], []byte("ustar")) {
		return models.TAR, nil
	}
	return models.AUTO, nil
}
// Analyze inspects the archive at path and returns summary information
// about its contents. For plain GZIP files only size information is
// reported, since the format holds a single compressed stream.
//
// NOTE(review): when the type cannot be detected (models.AUTO) this
// returns (nil, nil); callers must check for a nil result.
func Analyze(path string) (*models.ArchiveInfo, error) {
	archiveType, err := DetectArchiveType(path)
	if err != nil {
		return nil, err
	}
	switch archiveType {
	case models.ZIP:
		return analyzeZip(path)
	case models.TARGZ:
		return analyzeTar(path, true)
	case models.TAR:
		return analyzeTar(path, false)
	case models.GZIP:
		// For GZIP, provide basic file info only.
		file, err := os.Open(path)
		if err != nil {
			return nil, err
		}
		defer file.Close()
		fileStat, err := file.Stat()
		if err != nil {
			// Previously ignored; a failed Stat would have reported a
			// bogus zero size.
			return nil, err
		}
		return &models.ArchiveInfo{
			Type:           models.GZIP,
			CompressedSize: fileStat.Size(),
			FileCount:      1,
			Files:          []models.FileInfo{},
		}, nil
	default:
		return nil, nil
	}
}

281
internal/archiver/tar.go Normal file
View File

@@ -0,0 +1,281 @@
package archiver
import (
"archive/tar"
"compress/gzip"
"crypto/sha256"
"fmt"
"io"
"os"
"path/filepath"
"zipprine/internal/models"
"zipprine/pkg/fileutil"
)
// createTar writes an uncompressed tar archive to config.OutputPath.
//
// The tar writer is closed explicitly so that flush errors are returned;
// a deferred Close would discard them and report a truncated archive as
// success.
func createTar(config *models.CompressConfig) error {
	outFile, err := os.Create(config.OutputPath)
	if err != nil {
		return err
	}
	defer outFile.Close()
	tarWriter := tar.NewWriter(outFile)
	if err := addToTar(tarWriter, config); err != nil {
		tarWriter.Close()
		return err
	}
	return tarWriter.Close()
}
// createTarGz writes a gzip-compressed tar archive to config.OutputPath
// at the configured compression level.
//
// Both writers are closed explicitly, innermost first, so that their
// flush errors are surfaced instead of being discarded by defers.
func createTarGz(config *models.CompressConfig) error {
	outFile, err := os.Create(config.OutputPath)
	if err != nil {
		return err
	}
	defer outFile.Close()
	gzWriter, err := gzip.NewWriterLevel(outFile, config.CompressionLevel)
	if err != nil {
		return err
	}
	tarWriter := tar.NewWriter(gzWriter)
	if err := addToTar(tarWriter, config); err != nil {
		tarWriter.Close()
		gzWriter.Close()
		return err
	}
	if err := tarWriter.Close(); err != nil {
		gzWriter.Close()
		return err
	}
	return gzWriter.Close()
}
// createGzip compresses the single file at config.SourcePath into
// config.OutputPath at the configured compression level.
func createGzip(config *models.CompressConfig) error {
	inFile, err := os.Open(config.SourcePath)
	if err != nil {
		return err
	}
	defer inFile.Close()
	outFile, err := os.Create(config.OutputPath)
	if err != nil {
		return err
	}
	defer outFile.Close()
	gzWriter, err := gzip.NewWriterLevel(outFile, config.CompressionLevel)
	if err != nil {
		return err
	}
	if _, err := io.Copy(gzWriter, inFile); err != nil {
		gzWriter.Close()
		return err
	}
	// Close flushes buffered compressed data; its error must be returned
	// (a deferred Close would have silently dropped it).
	return gzWriter.Close()
}
// addToTar walks config.SourcePath and writes every included file and
// directory into tarWriter, with entry names relative to SourcePath.
func addToTar(tarWriter *tar.Writer, config *models.CompressConfig) error {
	return filepath.Walk(config.SourcePath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !fileutil.ShouldInclude(path, config.ExcludePaths, config.IncludePaths) {
			if info.IsDir() {
				return filepath.SkipDir
			}
			return nil
		}
		relPath, err := filepath.Rel(config.SourcePath, path)
		if err != nil {
			return err
		}
		// When SourcePath is a single file, Rel yields "." — use the
		// file's base name so the entry has a meaningful name instead
		// of a literal "." entry. The root directory itself needs no entry.
		if relPath == "." {
			if info.IsDir() {
				return nil
			}
			relPath = filepath.Base(path)
		}
		header, err := tar.FileInfoHeader(info, "")
		if err != nil {
			return err
		}
		// Tar entry names are slash-separated regardless of host OS.
		header.Name = filepath.ToSlash(relPath)
		if err := tarWriter.WriteHeader(header); err != nil {
			return err
		}
		if info.IsDir() {
			return nil
		}
		fmt.Printf(" → %s\n", relPath)
		file, err := os.Open(path)
		if err != nil {
			return err
		}
		// Deferred inside the walk callback, so it fires per entry.
		defer file.Close()
		_, err = io.Copy(tarWriter, file)
		return err
	})
}
// extractTar extracts a plain (uncompressed) tar archive described by
// config into its destination directory.
func extractTar(config *models.ExtractConfig) error {
	archive, err := os.Open(config.ArchivePath)
	if err != nil {
		return err
	}
	defer archive.Close()
	return extractFromTar(tar.NewReader(archive), config)
}
// extractTarGz extracts a gzip-compressed tar archive described by
// config into its destination directory.
func extractTarGz(config *models.ExtractConfig) error {
	archive, err := os.Open(config.ArchivePath)
	if err != nil {
		return err
	}
	defer archive.Close()
	gz, err := gzip.NewReader(archive)
	if err != nil {
		return err
	}
	defer gz.Close()
	return extractFromTar(tar.NewReader(gz), config)
}
// extractGzip decompresses a single gzip file into config.DestPath,
// naming the output after the archive with its .gz extension removed.
func extractGzip(config *models.ExtractConfig) error {
	inFile, err := os.Open(config.ArchivePath)
	if err != nil {
		return err
	}
	defer inFile.Close()
	gzReader, err := gzip.NewReader(inFile)
	if err != nil {
		return err
	}
	defer gzReader.Close()
	base := filepath.Base(config.ArchivePath)
	// Strip ".gz" only when it is actually present. The previous code
	// unconditionally chopped the last three characters of the joined
	// path, mangling the output name (or the destination path) for any
	// archive not ending in ".gz".
	if filepath.Ext(base) == ".gz" {
		base = base[:len(base)-len(".gz")]
	}
	outPath := filepath.Join(config.DestPath, base)
	outFile, err := os.Create(outPath)
	if err != nil {
		return err
	}
	defer outFile.Close()
	_, err = io.Copy(outFile, gzReader)
	return err
}
// extractFromTar reads entries from tarReader and writes them under
// config.DestPath, honoring the overwrite and permission options.
//
// Entry names are validated so that a malicious archive cannot write
// outside the destination directory (Zip Slip / path traversal).
func extractFromTar(tarReader *tar.Reader, config *models.ExtractConfig) error {
	for {
		header, err := tarReader.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return err
		}
		// Reject absolute paths and ".." traversal before joining.
		if !filepath.IsLocal(filepath.FromSlash(header.Name)) {
			return fmt.Errorf("tar entry has illegal path: %s", header.Name)
		}
		destPath := filepath.Join(config.DestPath, header.Name)
		switch header.Typeflag {
		case tar.TypeDir:
			if err := os.MkdirAll(destPath, os.ModePerm); err != nil {
				return err
			}
		case tar.TypeReg:
			if !config.OverwriteAll {
				// Skip existing files unless the user opted to overwrite.
				if _, err := os.Stat(destPath); err == nil {
					fmt.Printf(" ⚠️ Skipping: %s\n", header.Name)
					continue
				}
			}
			fmt.Printf(" → Extracting: %s\n", header.Name)
			if err := os.MkdirAll(filepath.Dir(destPath), os.ModePerm); err != nil {
				return err
			}
			outFile, err := os.Create(destPath)
			if err != nil {
				return err
			}
			if _, err := io.Copy(outFile, tarReader); err != nil {
				outFile.Close()
				return err
			}
			if err := outFile.Close(); err != nil {
				return err
			}
			if config.PreservePerms {
				if err := os.Chmod(destPath, os.FileMode(header.Mode)); err != nil {
					return err
				}
			}
		}
		// Other entry types (symlinks, devices, ...) are intentionally
		// ignored, matching the original behavior.
	}
	return nil
}
// analyzeTar builds an ArchiveInfo for a tar (or tar.gz when isGzipped)
// archive: SHA-256 checksum of the archive file, entry count, total
// uncompressed size, and up to 100 entry listings.
func analyzeTar(path string, isGzipped bool) (*models.ArchiveInfo, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	info := &models.ArchiveInfo{
		Type:  models.TAR,
		Files: []models.FileInfo{},
	}
	if isGzipped {
		info.Type = models.TARGZ
	}
	fileStat, err := file.Stat()
	if err != nil {
		return nil, err
	}
	info.CompressedSize = fileStat.Size()
	// Checksum pass over the raw archive bytes. Errors were previously
	// ignored, which could yield a checksum of a partial read.
	hash := sha256.New()
	if _, err := io.Copy(hash, file); err != nil {
		return nil, err
	}
	info.Checksum = fmt.Sprintf("%x", hash.Sum(nil))
	// Rewind for the tar pass; the checksum pass consumed the stream.
	if _, err := file.Seek(0, io.SeekStart); err != nil {
		return nil, err
	}
	var tarReader *tar.Reader
	if isGzipped {
		gzReader, err := gzip.NewReader(file)
		if err != nil {
			return nil, err
		}
		defer gzReader.Close()
		tarReader = tar.NewReader(gzReader)
	} else {
		tarReader = tar.NewReader(file)
	}
	for {
		header, err := tarReader.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}
		info.FileCount++
		info.TotalSize += header.Size
		// Cap the listing at 100 entries to bound memory on huge archives.
		if len(info.Files) < 100 {
			info.Files = append(info.Files, models.FileInfo{
				Name:    header.Name,
				Size:    header.Size,
				IsDir:   header.Typeflag == tar.TypeDir,
				ModTime: header.ModTime.Format("2006-01-02 15:04:05"),
			})
		}
	}
	if info.TotalSize > 0 {
		info.CompressionRatio = (1 - float64(info.CompressedSize)/float64(info.TotalSize)) * 100
	}
	return info, nil
}

174
internal/archiver/zip.go Normal file
View File

@@ -0,0 +1,174 @@
package archiver
import (
"archive/zip"
"compress/flate"
"crypto/sha256"
"fmt"
"io"
"os"
"path/filepath"
"zipprine/internal/models"
"zipprine/pkg/fileutil"
)
// createZip writes a ZIP archive of config.SourcePath to
// config.OutputPath, honoring the compression level and include/exclude
// patterns. Directory entries are not written; only files.
func createZip(config *models.CompressConfig) error {
	outFile, err := os.Create(config.OutputPath)
	if err != nil {
		return err
	}
	defer outFile.Close()
	zipWriter := zip.NewWriter(outFile)
	// Register a deflate compressor at the requested level; level 0
	// keeps the library default.
	if config.CompressionLevel > 0 {
		zipWriter.RegisterCompressor(zip.Deflate, func(out io.Writer) (io.WriteCloser, error) {
			return flate.NewWriter(out, config.CompressionLevel)
		})
	}
	walkErr := filepath.Walk(config.SourcePath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !fileutil.ShouldInclude(path, config.ExcludePaths, config.IncludePaths) {
			if info.IsDir() {
				return filepath.SkipDir
			}
			return nil
		}
		relPath, err := filepath.Rel(config.SourcePath, path)
		if err != nil {
			return err
		}
		if info.IsDir() {
			return nil
		}
		// When SourcePath is a single file, Rel yields "." — use the
		// file's base name so the entry is not literally named ".".
		if relPath == "." {
			relPath = filepath.Base(path)
		}
		fmt.Printf(" → %s\n", relPath)
		header, err := zip.FileInfoHeader(info)
		if err != nil {
			return err
		}
		// ZIP entry names are slash-separated regardless of host OS.
		header.Name = filepath.ToSlash(relPath)
		header.Method = zip.Deflate
		writer, err := zipWriter.CreateHeader(header)
		if err != nil {
			return err
		}
		file, err := os.Open(path)
		if err != nil {
			return err
		}
		defer file.Close()
		_, err = io.Copy(writer, file)
		return err
	})
	if walkErr != nil {
		zipWriter.Close()
		return walkErr
	}
	// Close writes the central directory; its error must be surfaced
	// (a deferred Close would have discarded it).
	return zipWriter.Close()
}
// extractZip extracts the ZIP archive at config.ArchivePath into
// config.DestPath, honoring the overwrite and permission options.
//
// Entry names are validated so that a malicious archive cannot write
// outside the destination directory (Zip Slip / path traversal).
func extractZip(config *models.ExtractConfig) error {
	r, err := zip.OpenReader(config.ArchivePath)
	if err != nil {
		return err
	}
	defer r.Close()
	for _, f := range r.File {
		// Reject absolute paths and ".." traversal before joining.
		if !filepath.IsLocal(filepath.FromSlash(f.Name)) {
			return fmt.Errorf("zip entry has illegal path: %s", f.Name)
		}
		destPath := filepath.Join(config.DestPath, f.Name)
		if f.FileInfo().IsDir() {
			if err := os.MkdirAll(destPath, os.ModePerm); err != nil {
				return err
			}
			continue
		}
		if !config.OverwriteAll {
			// Skip existing files unless the user opted to overwrite.
			if _, err := os.Stat(destPath); err == nil {
				fmt.Printf(" ⚠️ Skipping: %s (already exists)\n", f.Name)
				continue
			}
		}
		fmt.Printf(" → Extracting: %s\n", f.Name)
		if err := os.MkdirAll(filepath.Dir(destPath), os.ModePerm); err != nil {
			return err
		}
		outFile, err := os.Create(destPath)
		if err != nil {
			return err
		}
		rc, err := f.Open()
		if err != nil {
			outFile.Close()
			return err
		}
		_, err = io.Copy(outFile, rc)
		outFile.Close()
		rc.Close()
		if err != nil {
			return err
		}
		if config.PreservePerms {
			if err := os.Chmod(destPath, f.Mode()); err != nil {
				return err
			}
		}
	}
	return nil
}
// analyzeZip builds an ArchiveInfo for a ZIP archive: SHA-256 checksum of
// the archive file, entry count, total uncompressed size, and up to 100
// entry listings.
func analyzeZip(path string) (*models.ArchiveInfo, error) {
	r, err := zip.OpenReader(path)
	if err != nil {
		return nil, err
	}
	defer r.Close()
	info := &models.ArchiveInfo{
		Type:  models.ZIP,
		Files: []models.FileInfo{},
	}
	// The previous code ignored this error, which would have caused a nil
	// pointer dereference on file.Close/Stat when the open failed.
	file, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	fileStat, err := file.Stat()
	if err != nil {
		return nil, err
	}
	info.CompressedSize = fileStat.Size()
	hash := sha256.New()
	if _, err := io.Copy(hash, file); err != nil {
		return nil, err
	}
	info.Checksum = fmt.Sprintf("%x", hash.Sum(nil))
	for _, f := range r.File {
		info.FileCount++
		info.TotalSize += int64(f.UncompressedSize64)
		// Cap the listing at 100 entries to bound memory on huge archives.
		if len(info.Files) < 100 {
			info.Files = append(info.Files, models.FileInfo{
				Name:    f.Name,
				Size:    int64(f.UncompressedSize64),
				IsDir:   f.FileInfo().IsDir(),
				ModTime: f.Modified.Format("2006-01-02 15:04:05"),
			})
		}
	}
	if info.TotalSize > 0 {
		info.CompressionRatio = (1 - float64(info.CompressedSize)/float64(info.TotalSize)) * 100
	}
	return info, nil
}

46
internal/models/types.go Normal file
View File

@@ -0,0 +1,46 @@
package models
// ArchiveType identifies a supported archive/compression format.
type ArchiveType string

// Supported archive formats. AUTO means the format should be detected
// from the file itself (see archiver.DetectArchiveType).
const (
	ZIP   ArchiveType = "ZIP"
	TARGZ ArchiveType = "TAR.GZ"
	TAR   ArchiveType = "TAR"
	GZIP  ArchiveType = "GZIP"
	AUTO  ArchiveType = "AUTO"
)

// CompressConfig holds all options for creating an archive.
type CompressConfig struct {
	SourcePath       string      // file or directory to compress
	OutputPath       string      // path where the archive is written
	ArchiveType      ArchiveType // format to produce
	ExcludePaths     []string    // patterns to skip (evaluated by fileutil.ShouldInclude)
	IncludePaths     []string    // patterns to keep; empty means include everything
	VerifyIntegrity  bool        // re-analyze the archive after creation
	CompressionLevel int         // gzip/deflate level — presumably 1..9; confirm against fileutil callers
}

// ExtractConfig holds all options for extracting an archive.
type ExtractConfig struct {
	ArchivePath   string      // archive to extract
	DestPath      string      // destination directory
	ArchiveType   ArchiveType // format of the archive
	OverwriteAll  bool        // replace existing files instead of skipping them
	PreservePerms bool        // restore recorded file permissions after extraction
}

// ArchiveInfo summarizes the contents of an analyzed archive.
type ArchiveInfo struct {
	Type             ArchiveType
	FileCount        int        // number of entries in the archive
	TotalSize        int64      // total uncompressed size in bytes
	CompressedSize   int64      // on-disk size of the archive in bytes
	CompressionRatio float64    // percent saved: (1 - compressed/total) * 100
	Files            []FileInfo // entry listing (analyzers cap this at 100)
	Checksum         string     // hex-encoded SHA-256 of the archive file; may be empty for GZIP
}

// FileInfo describes a single entry inside an archive.
type FileInfo struct {
	Name    string
	Size    int64 // uncompressed size in bytes
	IsDir   bool
	ModTime string // formatted as "2006-01-02 15:04:05"
}

72
internal/ui/analyze.go Normal file
View File

@@ -0,0 +1,72 @@
package ui
import (
"fmt"
"os"
"zipprine/internal/archiver"
"zipprine/internal/models"
"github.com/charmbracelet/huh"
)
// RunAnalyzeFlow prompts the user for an archive path, analyzes the
// archive, and prints a summary report.
func RunAnalyzeFlow() error {
	var archivePath string
	// Path must be non-empty and point at an existing file.
	validatePath := func(s string) error {
		if s == "" {
			return fmt.Errorf("archive path cannot be empty")
		}
		if _, err := os.Stat(s); os.IsNotExist(err) {
			return fmt.Errorf("archive does not exist")
		}
		return nil
	}
	pathInput := huh.NewInput().
		Title("📦 Archive Path").
		Description("Path to the archive to analyze").
		Placeholder("/path/to/archive.zip").
		Value(&archivePath).
		Validate(validatePath)
	form := huh.NewForm(huh.NewGroup(pathInput)).WithTheme(huh.ThemeCatppuccin())
	if err := form.Run(); err != nil {
		return err
	}
	fmt.Println()
	fmt.Println(InfoStyle.Render("🔍 Analyzing archive..."))
	info, err := archiver.Analyze(archivePath)
	if err != nil {
		return err
	}
	displayArchiveInfo(info)
	return nil
}
// displayArchiveInfo prints a formatted summary of an analyzed archive,
// including a file listing when the archive holds 20 entries or fewer.
func displayArchiveInfo(info *models.ArchiveInfo) {
	// Analyze returns (nil, nil) for undetectable types; guard the deref.
	if info == nil {
		fmt.Println(WarningStyle.Render("⚠️ No information available for this archive"))
		return
	}
	fmt.Println()
	fmt.Println(HeaderStyle.Render("📊 Archive Information"))
	fmt.Println(InfoStyle.Render(fmt.Sprintf(" 🎨 Type: %s", info.Type)))
	fmt.Println(InfoStyle.Render(fmt.Sprintf(" 📁 Files: %d", info.FileCount)))
	fmt.Println(InfoStyle.Render(fmt.Sprintf(" 💾 Uncompressed: %.2f MB", float64(info.TotalSize)/(1024*1024))))
	fmt.Println(InfoStyle.Render(fmt.Sprintf(" 📦 Compressed: %.2f MB", float64(info.CompressedSize)/(1024*1024))))
	fmt.Println(InfoStyle.Render(fmt.Sprintf(" 🎯 Ratio: %.1f%%", info.CompressionRatio)))
	// GZIP analysis leaves Checksum empty; slicing [:16] would panic.
	if len(info.Checksum) >= 16 {
		fmt.Println(InfoStyle.Render(fmt.Sprintf(" 🔒 SHA256: %s...", info.Checksum[:16])))
	}
	if len(info.Files) > 0 && len(info.Files) <= 20 {
		fmt.Println()
		fmt.Println(HeaderStyle.Render("📝 File List"))
		for _, f := range info.Files {
			icon := "📄"
			if f.IsDir {
				icon = "📁"
			}
			fmt.Println(InfoStyle.Render(fmt.Sprintf(" %s %s (%.2f KB)", icon, f.Name, float64(f.Size)/1024)))
		}
	}
}

143
internal/ui/compress.go Normal file
View File

@@ -0,0 +1,143 @@
package ui
import (
"fmt"
"os"
"strings"
"zipprine/internal/archiver"
"zipprine/internal/models"
"github.com/charmbracelet/huh"
)
// RunCompressFlow walks the user through an interactive compression
// wizard: paths, format, level, include/exclude patterns, and optional
// post-creation verification.
func RunCompressFlow() error {
	config := &models.CompressConfig{}
	var sourcePath, outputPath string
	var archiveTypeStr string
	var excludeInput, includeInput string
	var verify bool
	var compressionLevel string
	form := huh.NewForm(
		huh.NewGroup(
			huh.NewInput().
				Title("📁 Source Path").
				Description("Enter the path to compress (file or directory)").
				Placeholder("/path/to/source").
				Value(&sourcePath).
				Validate(func(s string) error {
					if s == "" {
						return fmt.Errorf("source path cannot be empty")
					}
					if _, err := os.Stat(s); os.IsNotExist(err) {
						return fmt.Errorf("path does not exist")
					}
					return nil
				}),
			huh.NewInput().
				Title("💾 Output Path").
				Description("Where to save the archive").
				Placeholder("/path/to/output.zip").
				Value(&outputPath).
				Validate(func(s string) error {
					if s == "" {
						return fmt.Errorf("output path cannot be empty")
					}
					return nil
				}).Suggestions([]string{".zip", ".tar.gz", ".tar", ".gz"}),
		),
		huh.NewGroup(
			huh.NewSelect[string]().
				Title("🎨 Archive Type").
				Description("Choose your compression format").
				Options(
					huh.NewOption("ZIP - Universal & Compatible 📦", "ZIP"),
					huh.NewOption("TAR.GZ - Linux Classic (Best Compression) 🐧", "TARGZ"),
					huh.NewOption("TAR - No Compression 📄", "TAR"),
					huh.NewOption("GZIP - Single File Compression 🔧", "GZIP"),
				).
				Value(&archiveTypeStr),
			huh.NewSelect[string]().
				Title("⚡ Compression Level").
				Description("Higher = smaller but slower").
				Options(
					huh.NewOption("Fast (Level 1)", "1"),
					huh.NewOption("Balanced (Level 5)", "5"),
					huh.NewOption("Best (Level 9)", "9"),
				).
				Value(&compressionLevel),
		),
		huh.NewGroup(
			huh.NewText().
				Title("🚫 Exclude Patterns").
				Description("Comma-separated patterns to exclude (e.g., *.log,node_modules,*.tmp)").
				Placeholder("*.log,temp/*,.git,__pycache__").
				Value(&excludeInput),
			huh.NewText().
				Title("✅ Include Patterns").
				Description("Comma-separated patterns to include (leave empty for all)").
				Placeholder("*.go,*.md,src/*").
				Value(&includeInput),
		),
		huh.NewGroup(
			huh.NewConfirm().
				Title("🔐 Verify Archive Integrity").
				Description("Check the archive after creation?").
				Value(&verify).
				Affirmative("Yes please!").
				Negative("Skip it"),
		),
	).WithTheme(huh.ThemeCatppuccin())
	if err := form.Run(); err != nil {
		return err
	}
	config.SourcePath = sourcePath
	config.OutputPath = outputPath
	config.ArchiveType = models.ArchiveType(archiveTypeStr)
	config.VerifyIntegrity = verify
	// Previously the Sscanf error was ignored, leaving level 0 (format
	// default) on a parse failure; fall back to a balanced level instead.
	if _, err := fmt.Sscanf(compressionLevel, "%d", &config.CompressionLevel); err != nil {
		config.CompressionLevel = 5
	}
	config.ExcludePaths = splitPatterns(excludeInput)
	config.IncludePaths = splitPatterns(includeInput)
	fmt.Println()
	fmt.Println(InfoStyle.Render("🎯 Starting compression..."))
	if err := archiver.Compress(config); err != nil {
		return err
	}
	fmt.Println(SuccessStyle.Render("✅ Archive created successfully!"))
	if config.VerifyIntegrity {
		fmt.Println(InfoStyle.Render("🔍 Verifying archive integrity..."))
		info, err := archiver.Analyze(config.OutputPath)
		if err != nil {
			return err
		}
		displayArchiveInfo(info)
	}
	return nil
}

// splitPatterns turns a comma-separated pattern string into a trimmed
// slice, dropping empty entries (e.g. from trailing commas) that would
// otherwise become a pattern matching nothing meaningful. Returns nil
// for blank input, matching the previous behavior of leaving the slice unset.
func splitPatterns(input string) []string {
	var patterns []string
	for _, p := range strings.Split(input, ",") {
		if p = strings.TrimSpace(p); p != "" {
			patterns = append(patterns, p)
		}
	}
	return patterns
}

93
internal/ui/extract.go Normal file
View File

@@ -0,0 +1,93 @@
package ui
import (
"fmt"
"os"
"zipprine/internal/archiver"
"zipprine/internal/models"
"github.com/charmbracelet/huh"
)
// RunExtractFlow walks the user through an interactive extraction wizard:
// archive path, destination, overwrite and permission options, then
// auto-detects the archive type and extracts.
func RunExtractFlow() error {
	config := &models.ExtractConfig{}
	var archivePath, destPath string
	var overwrite, preservePerms bool
	form := huh.NewForm(
		huh.NewGroup(
			huh.NewInput().
				Title("📦 Archive Path").
				Description("Path to the archive file").
				Placeholder("/path/to/archive.zip").
				Value(&archivePath).
				Validate(func(s string) error {
					if s == "" {
						return fmt.Errorf("archive path cannot be empty")
					}
					if _, err := os.Stat(s); os.IsNotExist(err) {
						return fmt.Errorf("archive does not exist")
					}
					return nil
				}),
			huh.NewInput().
				Title("📂 Destination Path").
				Description("Where to extract files").
				Placeholder("/path/to/destination").
				Value(&destPath).
				Validate(func(s string) error {
					if s == "" {
						return fmt.Errorf("destination path cannot be empty")
					}
					return nil
				}),
		),
		huh.NewGroup(
			huh.NewConfirm().
				Title("⚠️ Overwrite Existing Files").
				Description("Replace files if they already exist?").
				Value(&overwrite).
				Affirmative("Yes, overwrite").
				Negative("No, skip"),
			huh.NewConfirm().
				Title("🔒 Preserve Permissions").
				Description("Keep original file permissions?").
				Value(&preservePerms).
				Affirmative("Yes").
				Negative("No"),
		),
	).WithTheme(huh.ThemeCatppuccin())
	if err := form.Run(); err != nil {
		return err
	}
	config.ArchivePath = archivePath
	config.DestPath = destPath
	config.OverwriteAll = overwrite
	config.PreservePerms = preservePerms
	fmt.Println()
	fmt.Println(InfoStyle.Render("🔍 Detecting archive type..."))
	detectedType, err := archiver.DetectArchiveType(archivePath)
	if err != nil {
		return err
	}
	// AUTO means detection failed; passing it on would make Extract hit
	// its default case and report success without extracting anything.
	if detectedType == models.AUTO {
		return fmt.Errorf("could not determine archive type of %s", archivePath)
	}
	config.ArchiveType = detectedType
	fmt.Println(SuccessStyle.Render(fmt.Sprintf("✅ Detected: %s", detectedType)))
	fmt.Println(InfoStyle.Render("📂 Extracting files..."))
	if err := archiver.Extract(config); err != nil {
		return err
	}
	fmt.Println(SuccessStyle.Render("✅ Extraction completed!"))
	return nil
}

32
internal/ui/styles.go Normal file
View File

@@ -0,0 +1,32 @@
package ui
import "github.com/charmbracelet/lipgloss"
// Shared lipgloss styles used by all UI flows for consistent output.
var (
	// TitleStyle renders the application banner: bold purple text inside
	// a rounded purple border.
	TitleStyle = lipgloss.NewStyle().
		Bold(true).
		Foreground(lipgloss.Color("#7D56F4")).
		BorderStyle(lipgloss.RoundedBorder()).
		BorderForeground(lipgloss.Color("#7D56F4")).
		Padding(0, 1)
	// SuccessStyle renders success messages in bold green.
	SuccessStyle = lipgloss.NewStyle().
		Foreground(lipgloss.Color("#04B575")).
		Bold(true)
	// ErrorStyle renders error messages in bold red.
	ErrorStyle = lipgloss.NewStyle().
		Foreground(lipgloss.Color("#FF0000")).
		Bold(true)
	// InfoStyle renders informational lines in light blue.
	InfoStyle = lipgloss.NewStyle().
		Foreground(lipgloss.Color("#00BFFF"))
	// WarningStyle renders warnings in bold orange.
	WarningStyle = lipgloss.NewStyle().
		Foreground(lipgloss.Color("#FFA500")).
		Bold(true)
	// HeaderStyle renders section headers in bold, underlined pink.
	HeaderStyle = lipgloss.NewStyle().
		Bold(true).
		Foreground(lipgloss.Color("#FF79C6")).
		Underline(true)
)