(Feat): Initial Commit
This commit is contained in:
38
.gitignore
vendored
Normal file
38
.gitignore
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
# Binaries
|
||||
build/
|
||||
releases/
|
||||
*.exe
|
||||
*.exe~
|
||||
*.dll
|
||||
*.so
|
||||
*.dylib
|
||||
zipprine
|
||||
zipprine-*
|
||||
|
||||
# Test binary
|
||||
*.test
|
||||
|
||||
# Output of go coverage tool
|
||||
*.out
|
||||
|
||||
# Dependency directories
|
||||
vendor/
|
||||
|
||||
# Go workspace file
|
||||
go.work
|
||||
|
||||
# IDE
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Logs
|
||||
*.log
|
||||
.claude
|
||||
.qodo
|
||||
33
Dockerfile
Normal file
33
Dockerfile
Normal file
@@ -0,0 +1,33 @@
|
||||
# Multi-stage build for Ziprine
|
||||
FROM golang:1.25-alpine AS builder
|
||||
|
||||
# Install build dependencies
|
||||
RUN apk add --no-cache git make
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Copy go mod files
|
||||
COPY go.mod go.sum ./
|
||||
|
||||
# Download dependencies
|
||||
RUN go mod download
|
||||
|
||||
# Copy source code
|
||||
COPY . .
|
||||
|
||||
# Build
|
||||
RUN make build
|
||||
|
||||
# Final stage
|
||||
FROM alpine:latest
|
||||
|
||||
RUN apk --no-cache add ca-certificates
|
||||
|
||||
WORKDIR /root/
|
||||
|
||||
# Copy binary from builder
|
||||
COPY --from=builder /app/build/zipprine .
|
||||
|
||||
# Run
|
||||
ENTRYPOINT ["./zipprine"]
|
||||
250
Makefile
Normal file
250
Makefile
Normal file
@@ -0,0 +1,250 @@
|
||||
# zipprine Makefile - Cross-platform build system
|
||||
# Build for all major architectures
|
||||
|
||||
.PHONY: all build clean install test help deps build-all release
|
||||
|
||||
# Application name
|
||||
BINARY_NAME=zipprine
|
||||
VERSION?=1.0.0
|
||||
BUILD_DIR=build
|
||||
RELEASE_DIR=releases
|
||||
|
||||
# Go parameters
|
||||
GOCMD=go
|
||||
GOBUILD=$(GOCMD) build
|
||||
GOCLEAN=$(GOCMD) clean
|
||||
GOTEST=$(GOCMD) test
|
||||
GOGET=$(GOCMD) get
|
||||
GOMOD=$(GOCMD) mod
|
||||
GOINSTALL=$(GOCMD) install
|
||||
|
||||
# Build flags
|
||||
LDFLAGS=-ldflags "-s -w -X main.Version=$(VERSION)"
|
||||
BUILD_FLAGS=-trimpath
|
||||
|
||||
# Source
|
||||
MAIN_PATH=./cmd/zipprine
|
||||
|
||||
# Color output
|
||||
RED=\033[0;31m
|
||||
GREEN=\033[0;32m
|
||||
YELLOW=\033[0;33m
|
||||
BLUE=\033[0;34m
|
||||
MAGENTA=\033[0;35m
|
||||
CYAN=\033[0;36m
|
||||
NC=\033[0m # No Color
|
||||
|
||||
##@ General
|
||||
|
||||
help: ## Display this help screen
|
||||
@echo "$(CYAN)╔═══════════════════════════════════════════════════╗$(NC)"
|
||||
@echo "$(CYAN)║ 🗜️ zipprine Build System 🚀 ║$(NC)"
|
||||
@echo "$(CYAN)╚═══════════════════════════════════════════════════╝$(NC)"
|
||||
@echo ""
|
||||
	@awk 'BEGIN {FS = ":.*##"; printf "Usage:\n  make $(CYAN)<target>$(NC)\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf "  $(CYAN)%-15s$(NC) %s\n", $$1, $$2 } /^##@/ { printf "\n$(MAGENTA)%s$(NC)\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
|
||||
|
||||
##@ Development
|
||||
|
||||
deps: ## Download dependencies
|
||||
@echo "$(BLUE)📦 Downloading dependencies...$(NC)"
|
||||
@$(GOMOD) download
|
||||
@$(GOMOD) tidy
|
||||
@echo "$(GREEN)✅ Dependencies installed$(NC)"
|
||||
|
||||
test: ## Run tests
|
||||
@echo "$(BLUE)🧪 Running tests...$(NC)"
|
||||
@$(GOTEST) -v ./...
|
||||
@echo "$(GREEN)✅ Tests passed$(NC)"
|
||||
|
||||
clean: ## Clean build artifacts
|
||||
@echo "$(YELLOW)🧹 Cleaning build artifacts...$(NC)"
|
||||
@$(GOCLEAN)
|
||||
@rm -rf $(BUILD_DIR)
|
||||
@rm -rf $(RELEASE_DIR)
|
||||
@echo "$(GREEN)✅ Clean complete$(NC)"
|
||||
|
||||
fmt: ## Format code
|
||||
@echo "$(BLUE)📝 Formatting code...$(NC)"
|
||||
@$(GOCMD) fmt ./...
|
||||
@echo "$(GREEN)✅ Code formatted$(NC)"
|
||||
|
||||
vet: ## Run go vet
|
||||
@echo "$(BLUE)🔍 Running go vet...$(NC)"
|
||||
@$(GOCMD) vet ./...
|
||||
@echo "$(GREEN)✅ Vet complete$(NC)"
|
||||
|
||||
lint: fmt vet ## Run linters
|
||||
@echo "$(GREEN)✅ Linting complete$(NC)"
|
||||
|
||||
##@ Building
|
||||
|
||||
build: deps ## Build for current platform
|
||||
@echo "$(BLUE)🔨 Building $(BINARY_NAME) for current platform...$(NC)"
|
||||
@mkdir -p $(BUILD_DIR)
|
||||
@$(GOBUILD) $(BUILD_FLAGS) $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME) $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Build complete: $(BUILD_DIR)/$(BINARY_NAME)$(NC)"
|
||||
|
||||
install: deps ## Install to $GOPATH/bin
|
||||
@echo "$(BLUE)📥 Installing $(BINARY_NAME)...$(NC)"
|
||||
@$(GOINSTALL) $(LDFLAGS) $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Installed to $(shell go env GOPATH)/bin/$(BINARY_NAME)$(NC)"
|
||||
|
||||
run: build ## Build and run
|
||||
@echo "$(CYAN)▶️ Running $(BINARY_NAME)...$(NC)"
|
||||
@./$(BUILD_DIR)/$(BINARY_NAME)
|
||||
|
||||
##@ Cross-Platform Builds
|
||||
|
||||
build-linux-amd64: ## Build for Linux AMD64
|
||||
@echo "$(BLUE)🐧 Building for Linux AMD64...$(NC)"
|
||||
@mkdir -p $(BUILD_DIR)
|
||||
@GOOS=linux GOARCH=amd64 $(GOBUILD) $(BUILD_FLAGS) $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-linux-amd64 $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Built: $(BUILD_DIR)/$(BINARY_NAME)-linux-amd64$(NC)"
|
||||
|
||||
build-linux-arm64: ## Build for Linux ARM64
|
||||
@echo "$(BLUE)🐧 Building for Linux ARM64...$(NC)"
|
||||
@mkdir -p $(BUILD_DIR)
|
||||
@GOOS=linux GOARCH=arm64 $(GOBUILD) $(BUILD_FLAGS) $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-linux-arm64 $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Built: $(BUILD_DIR)/$(BINARY_NAME)-linux-arm64$(NC)"
|
||||
|
||||
build-linux-arm: ## Build for Linux ARM
|
||||
@echo "$(BLUE)🐧 Building for Linux ARM...$(NC)"
|
||||
@mkdir -p $(BUILD_DIR)
|
||||
@GOOS=linux GOARCH=arm $(GOBUILD) $(BUILD_FLAGS) $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-linux-arm $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Built: $(BUILD_DIR)/$(BINARY_NAME)-linux-arm$(NC)"
|
||||
|
||||
build-darwin-amd64: ## Build for macOS AMD64 (Intel)
|
||||
@echo "$(BLUE)🍎 Building for macOS AMD64...$(NC)"
|
||||
@mkdir -p $(BUILD_DIR)
|
||||
@GOOS=darwin GOARCH=amd64 $(GOBUILD) $(BUILD_FLAGS) $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-darwin-amd64 $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Built: $(BUILD_DIR)/$(BINARY_NAME)-darwin-amd64$(NC)"
|
||||
|
||||
build-darwin-arm64: ## Build for macOS ARM64 (Apple Silicon)
|
||||
@echo "$(BLUE)🍎 Building for macOS ARM64...$(NC)"
|
||||
@mkdir -p $(BUILD_DIR)
|
||||
@GOOS=darwin GOARCH=arm64 $(GOBUILD) $(BUILD_FLAGS) $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-darwin-arm64 $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Built: $(BUILD_DIR)/$(BINARY_NAME)-darwin-arm64$(NC)"
|
||||
|
||||
build-windows-amd64: ## Build for Windows AMD64
|
||||
@echo "$(BLUE)🪟 Building for Windows AMD64...$(NC)"
|
||||
@mkdir -p $(BUILD_DIR)
|
||||
@GOOS=windows GOARCH=amd64 $(GOBUILD) $(BUILD_FLAGS) $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-windows-amd64.exe $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Built: $(BUILD_DIR)/$(BINARY_NAME)-windows-amd64.exe$(NC)"
|
||||
|
||||
build-windows-arm64: ## Build for Windows ARM64
|
||||
@echo "$(BLUE)🪟 Building for Windows ARM64...$(NC)"
|
||||
@mkdir -p $(BUILD_DIR)
|
||||
@GOOS=windows GOARCH=arm64 $(GOBUILD) $(BUILD_FLAGS) $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-windows-arm64.exe $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Built: $(BUILD_DIR)/$(BINARY_NAME)-windows-arm64.exe$(NC)"
|
||||
|
||||
build-freebsd-amd64: ## Build for FreeBSD AMD64
|
||||
@echo "$(BLUE)👹 Building for FreeBSD AMD64...$(NC)"
|
||||
@mkdir -p $(BUILD_DIR)
|
||||
@GOOS=freebsd GOARCH=amd64 $(GOBUILD) $(BUILD_FLAGS) $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-freebsd-amd64 $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Built: $(BUILD_DIR)/$(BINARY_NAME)-freebsd-amd64$(NC)"
|
||||
|
||||
build-openbsd-amd64: ## Build for OpenBSD AMD64
|
||||
@echo "$(BLUE)🐡 Building for OpenBSD AMD64...$(NC)"
|
||||
@mkdir -p $(BUILD_DIR)
|
||||
@GOOS=openbsd GOARCH=amd64 $(GOBUILD) $(BUILD_FLAGS) $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-openbsd-amd64 $(MAIN_PATH)
|
||||
@echo "$(GREEN)✅ Built: $(BUILD_DIR)/$(BINARY_NAME)-openbsd-amd64$(NC)"
|
||||
|
||||
build-all: deps ## Build for all platforms
|
||||
@echo "$(MAGENTA)╔═══════════════════════════════════════════════════╗$(NC)"
|
||||
@echo "$(MAGENTA)║ 🌍 Building for ALL architectures 🚀 ║$(NC)"
|
||||
@echo "$(MAGENTA)╚═══════════════════════════════════════════════════╝$(NC)"
|
||||
@echo ""
|
||||
@$(MAKE) build-linux-amd64
|
||||
@$(MAKE) build-linux-arm64
|
||||
@$(MAKE) build-linux-arm
|
||||
@$(MAKE) build-darwin-amd64
|
||||
@$(MAKE) build-darwin-arm64
|
||||
@$(MAKE) build-windows-amd64
|
||||
@$(MAKE) build-windows-arm64
|
||||
@$(MAKE) build-freebsd-amd64
|
||||
@$(MAKE) build-openbsd-amd64
|
||||
@echo ""
|
||||
@echo "$(GREEN)╔═══════════════════════════════════════════════════╗$(NC)"
|
||||
@echo "$(GREEN)║ ✨ All builds complete! ✨ ║$(NC)"
|
||||
@echo "$(GREEN)╚═══════════════════════════════════════════════════╝$(NC)"
|
||||
@ls -lh $(BUILD_DIR)/
|
||||
|
||||
##@ Release
|
||||
|
||||
release: clean build-all ## Create release packages
|
||||
@echo "$(MAGENTA)📦 Creating release packages...$(NC)"
|
||||
@mkdir -p $(RELEASE_DIR)
|
||||
|
||||
@echo "$(BLUE) → Packaging Linux AMD64...$(NC)"
|
||||
@tar -czf $(RELEASE_DIR)/$(BINARY_NAME)-$(VERSION)-linux-amd64.tar.gz -C $(BUILD_DIR) $(BINARY_NAME)-linux-amd64
|
||||
|
||||
@echo "$(BLUE) → Packaging Linux ARM64...$(NC)"
|
||||
@tar -czf $(RELEASE_DIR)/$(BINARY_NAME)-$(VERSION)-linux-arm64.tar.gz -C $(BUILD_DIR) $(BINARY_NAME)-linux-arm64
|
||||
|
||||
@echo "$(BLUE) → Packaging Linux ARM...$(NC)"
|
||||
@tar -czf $(RELEASE_DIR)/$(BINARY_NAME)-$(VERSION)-linux-arm.tar.gz -C $(BUILD_DIR) $(BINARY_NAME)-linux-arm
|
||||
|
||||
@echo "$(BLUE) → Packaging macOS AMD64...$(NC)"
|
||||
@tar -czf $(RELEASE_DIR)/$(BINARY_NAME)-$(VERSION)-darwin-amd64.tar.gz -C $(BUILD_DIR) $(BINARY_NAME)-darwin-amd64
|
||||
|
||||
@echo "$(BLUE) → Packaging macOS ARM64...$(NC)"
|
||||
@tar -czf $(RELEASE_DIR)/$(BINARY_NAME)-$(VERSION)-darwin-arm64.tar.gz -C $(BUILD_DIR) $(BINARY_NAME)-darwin-arm64
|
||||
|
||||
@echo "$(BLUE) → Packaging Windows AMD64...$(NC)"
|
||||
@cd $(BUILD_DIR) && zip -q ../$(RELEASE_DIR)/$(BINARY_NAME)-$(VERSION)-windows-amd64.zip $(BINARY_NAME)-windows-amd64.exe
|
||||
|
||||
@echo "$(BLUE) → Packaging Windows ARM64...$(NC)"
|
||||
@cd $(BUILD_DIR) && zip -q ../$(RELEASE_DIR)/$(BINARY_NAME)-$(VERSION)-windows-arm64.zip $(BINARY_NAME)-windows-arm64.exe
|
||||
|
||||
@echo "$(BLUE) → Packaging FreeBSD AMD64...$(NC)"
|
||||
@tar -czf $(RELEASE_DIR)/$(BINARY_NAME)-$(VERSION)-freebsd-amd64.tar.gz -C $(BUILD_DIR) $(BINARY_NAME)-freebsd-amd64
|
||||
|
||||
@echo "$(BLUE) → Packaging OpenBSD AMD64...$(NC)"
|
||||
@tar -czf $(RELEASE_DIR)/$(BINARY_NAME)-$(VERSION)-openbsd-amd64.tar.gz -C $(BUILD_DIR) $(BINARY_NAME)-openbsd-amd64
|
||||
|
||||
@echo ""
|
||||
@echo "$(GREEN)✅ Release packages created:$(NC)"
|
||||
@ls -lh $(RELEASE_DIR)/
|
||||
@echo ""
|
||||
@echo "$(CYAN)📊 Package sizes:$(NC)"
|
||||
@du -sh $(RELEASE_DIR)/*
|
||||
|
||||
checksums: ## Generate SHA256 checksums for releases
|
||||
@echo "$(BLUE)🔐 Generating checksums...$(NC)"
|
||||
@cd $(RELEASE_DIR) && shasum -a 256 * > SHA256SUMS
|
||||
@echo "$(GREEN)✅ Checksums generated: $(RELEASE_DIR)/SHA256SUMS$(NC)"
|
||||
@cat $(RELEASE_DIR)/SHA256SUMS
|
||||
|
||||
##@ Docker (Bonus)
|
||||
|
||||
docker-build: ## Build Docker image
|
||||
@echo "$(BLUE)🐳 Building Docker image...$(NC)"
|
||||
@docker build -t $(BINARY_NAME):$(VERSION) -t $(BINARY_NAME):latest .
|
||||
@echo "$(GREEN)✅ Docker image built$(NC)"
|
||||
|
||||
docker-run: ## Run in Docker
|
||||
@echo "$(CYAN)🐳 Running in Docker...$(NC)"
|
||||
@docker run -it --rm $(BINARY_NAME):latest
|
||||
|
||||
##@ Info
|
||||
|
||||
version: ## Show version
|
||||
@echo "$(CYAN)zipprine version: $(VERSION)$(NC)"
|
||||
|
||||
platforms: ## Show supported platforms
|
||||
@echo "$(CYAN)Supported platforms:$(NC)"
|
||||
@echo " 🐧 Linux: AMD64, ARM64, ARM"
|
||||
@echo " 🍎 macOS: AMD64 (Intel), ARM64 (Apple Silicon)"
|
||||
@echo " 🪟 Windows: AMD64, ARM64"
|
||||
@echo " 👹 FreeBSD: AMD64"
|
||||
@echo " 🐡 OpenBSD: AMD64"
|
||||
|
||||
size: ## Show binary sizes
|
||||
@echo "$(CYAN)Binary sizes:$(NC)"
|
||||
@if [ -d "$(BUILD_DIR)" ]; then \
|
||||
du -sh $(BUILD_DIR)/* | sort -h; \
|
||||
else \
|
||||
echo "$(RED)No builds found. Run 'make build-all' first.$(NC)"; \
|
||||
fi
|
||||
|
||||
.DEFAULT_GOAL := help
|
||||
115
README.md
Normal file
115
README.md
Normal file
@@ -0,0 +1,115 @@
|
||||
# 🗜️ Zipprine - Advanced Archive Manager
|
||||
|
||||
A beautiful, feature-rich TUI application for managing archives with style!
|
||||
|
||||
## ✨ Features
|
||||
|
||||
### 📦 Compression
|
||||
|
||||
- **Multiple formats**: ZIP, TAR, TAR.GZ, GZIP
|
||||
- **Compression levels**: Fast, Balanced, Best
|
||||
- **Smart filtering**: Include/exclude patterns with wildcards
|
||||
- **Integrity verification**: SHA256 checksums and validation
|
||||
|
||||
### 📂 Extraction
|
||||
|
||||
- **Auto-detection**: Automatically detects archive type by magic bytes
|
||||
- **Safe extraction**: Optional overwrite protection
|
||||
- **Permission preservation**: Keep original file permissions
|
||||
- **Progress tracking**: Real-time extraction feedback
|
||||
|
||||
### 🔍 Analysis
|
||||
|
||||
- **Detailed statistics**: File count, sizes, compression ratios
|
||||
- **File listing**: View contents without extraction
|
||||
- **Checksum verification**: SHA256 integrity checks
|
||||
- **Format detection**: Magic byte analysis
|
||||
|
||||
## 🚀 Installation
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone https://github.com/bereck-work/ziprine.git
|
||||
cd ziprine
|
||||
|
||||
# Install dependencies
|
||||
go mod download
|
||||
|
||||
# Build
|
||||
go build -o zipprine ./cmd/zipprine
|
||||
|
||||
# Run
|
||||
./ziprine
|
||||
```
|
||||
|
||||
## 📖 Usage
|
||||
|
||||
Simply run `ziprine` and follow the interactive prompts!
|
||||
|
||||
### Compress Files
|
||||
|
||||
```bash
|
||||
./ziprine
|
||||
# Select: Compress files/folders
|
||||
# Enter source path: /path/to/folder
|
||||
# Choose format: ZIP, TAR.GZ, TAR, or GZIP
|
||||
# Set compression level and filters
|
||||
```
|
||||
|
||||
### Extract Archives
|
||||
|
||||
```bash
|
||||
./ziprine
|
||||
# Select: Extract archive
|
||||
# Archive type is auto-detected!
|
||||
# Choose destination and options
|
||||
```
|
||||
|
||||
### Analyze Archives
|
||||
|
||||
```bash
|
||||
./ziprine
|
||||
# Select: Analyze archive
|
||||
# View detailed statistics and file listing
|
||||
```
|
||||
|
||||
## 🎨 Pattern Examples
|
||||
|
||||
**Exclude patterns**:
|
||||
|
||||
- `*.log` - Exclude all log files
|
||||
- `node_modules` - Exclude node_modules directory
|
||||
- `temp/*` - Exclude everything in temp folder
|
||||
- `.git,__pycache__,*.tmp` - Multiple patterns
|
||||
|
||||
**Include patterns**:
|
||||
|
||||
- `*.go` - Only Go files
|
||||
- `src/*,docs/*` - Only src and docs folders
|
||||
- `*.md,*.txt` - Only markdown and text files
|
||||
|
||||
## 🏗️ Project Structure
|
||||
|
||||
```
|
||||
ziprine/
|
||||
├── cmd/ziprine/ # Main application entry
|
||||
├── internal/
|
||||
│ ├── archiver/ # Archive operations
|
||||
│ ├── ui/ # TUI components
|
||||
│ └── models/ # Data structures
|
||||
└── pkg/fileutil/ # Utility functions
|
||||
```
|
||||
|
||||
## 🛠️ Technologies
|
||||
|
||||
- **[Charm Bracelet Huh](https://github.com/charmbracelet/huh)** - Beautiful TUI forms
|
||||
- **[Lipgloss](https://github.com/charmbracelet/lipgloss)** - Styling and colors
|
||||
- **Go standard library** - Archive formats
|
||||
|
||||
## 📝 License
|
||||
|
||||
MIT License - Feel free to use and modify!
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
Contributions are welcome! Feel free to open issues or submit PRs.
|
||||
60
cmd/zipprine/main.go
Normal file
60
cmd/zipprine/main.go
Normal file
@@ -0,0 +1,60 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"zipprine/internal/ui"
|
||||
|
||||
"github.com/charmbracelet/huh"
|
||||
)
|
||||
|
||||
func main() {
|
||||
fmt.Println(ui.TitleStyle.Render("🗜️ Zipprine - Archive Like a Pro! 🚀"))
|
||||
fmt.Println()
|
||||
|
||||
var operation string
|
||||
|
||||
mainMenu := huh.NewForm(
|
||||
huh.NewGroup(
|
||||
huh.NewSelect[string]().
|
||||
Title("🎯 What would you like to do?").
|
||||
Options(
|
||||
huh.NewOption("📦 Compress files/folders", "compress"),
|
||||
huh.NewOption("📂 Extract archive", "extract"),
|
||||
huh.NewOption("🔍 Analyze archive", "analyze"),
|
||||
huh.NewOption("🚪 Exit", "exit"),
|
||||
).
|
||||
Value(&operation),
|
||||
),
|
||||
).WithTheme(huh.ThemeCatppuccin())
|
||||
|
||||
if err := mainMenu.Run(); err != nil {
|
||||
fmt.Println(ui.ErrorStyle.Render("❌ Error: " + err.Error()))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
switch operation {
|
||||
case "compress":
|
||||
if err := ui.RunCompressFlow(); err != nil {
|
||||
fmt.Println(ui.ErrorStyle.Render("❌ Error: " + err.Error()))
|
||||
os.Exit(1)
|
||||
}
|
||||
case "extract":
|
||||
if err := ui.RunExtractFlow(); err != nil {
|
||||
fmt.Println(ui.ErrorStyle.Render("❌ Error: " + err.Error()))
|
||||
os.Exit(1)
|
||||
}
|
||||
case "analyze":
|
||||
if err := ui.RunAnalyzeFlow(); err != nil {
|
||||
fmt.Println(ui.ErrorStyle.Render("❌ Error: " + err.Error()))
|
||||
os.Exit(1)
|
||||
}
|
||||
case "exit":
|
||||
fmt.Println(ui.InfoStyle.Render("👋 Goodbye!"))
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Println()
|
||||
fmt.Println(ui.SuccessStyle.Render("✨ Operation completed successfully!"))
|
||||
}
|
||||
36
go.mod
Normal file
36
go.mod
Normal file
@@ -0,0 +1,36 @@
|
||||
module zipprine
|
||||
|
||||
go 1.25.4
|
||||
|
||||
require (
|
||||
github.com/charmbracelet/huh v0.8.0
|
||||
github.com/charmbracelet/lipgloss v1.1.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/atotto/clipboard v0.1.4 // indirect
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
||||
github.com/catppuccin/go v0.3.0 // indirect
|
||||
github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7 // indirect
|
||||
github.com/charmbracelet/bubbletea v1.3.6 // indirect
|
||||
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
|
||||
github.com/charmbracelet/x/ansi v0.9.3 // indirect
|
||||
github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
|
||||
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 // indirect
|
||||
github.com/charmbracelet/x/term v0.2.1 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mattn/go-localereader v0.0.1 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||
github.com/mitchellh/hashstructure/v2 v2.0.2 // indirect
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
|
||||
github.com/muesli/cancelreader v0.2.2 // indirect
|
||||
github.com/muesli/termenv v0.16.0 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
|
||||
golang.org/x/sync v0.15.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.23.0 // indirect
|
||||
)
|
||||
75
go.sum
Normal file
75
go.sum
Normal file
@@ -0,0 +1,75 @@
|
||||
github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ=
|
||||
github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE=
|
||||
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
|
||||
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
|
||||
github.com/aymanbagabas/go-udiff v0.3.1 h1:LV+qyBQ2pqe0u42ZsUEtPiCaUoqgA9gYRDs3vj1nolY=
|
||||
github.com/aymanbagabas/go-udiff v0.3.1/go.mod h1:G0fsKmG+P6ylD0r6N/KgQD/nWzgfnl8ZBcNLgcbrw8E=
|
||||
github.com/catppuccin/go v0.3.0 h1:d+0/YicIq+hSTo5oPuRi5kOpqkVA5tAsU6dNhvRu+aY=
|
||||
github.com/catppuccin/go v0.3.0/go.mod h1:8IHJuMGaUUjQM82qBrGNBv7LFq6JI3NnQCF6MOlZjpc=
|
||||
github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7 h1:JFgG/xnwFfbezlUnFMJy0nusZvytYysV4SCS2cYbvws=
|
||||
github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7/go.mod h1:ISC1gtLcVilLOf23wvTfoQuYbW2q0JevFxPfUzZ9Ybw=
|
||||
github.com/charmbracelet/bubbletea v1.3.6 h1:VkHIxPJQeDt0aFJIsVxw8BQdh/F/L2KKZGsK6et5taU=
|
||||
github.com/charmbracelet/bubbletea v1.3.6/go.mod h1:oQD9VCRQFF8KplacJLo28/jofOI2ToOfGYeFgBBxHOc=
|
||||
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
|
||||
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
|
||||
github.com/charmbracelet/huh v0.8.0 h1:Xz/Pm2h64cXQZn/Jvele4J3r7DDiqFCNIVteYukxDvY=
|
||||
github.com/charmbracelet/huh v0.8.0/go.mod h1:5YVc+SlZ1IhQALxRPpkGwwEKftN/+OlJlnJYlDRFqN4=
|
||||
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
|
||||
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
|
||||
github.com/charmbracelet/x/ansi v0.9.3 h1:BXt5DHS/MKF+LjuK4huWrC6NCvHtexww7dMayh6GXd0=
|
||||
github.com/charmbracelet/x/ansi v0.9.3/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
|
||||
github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k=
|
||||
github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
|
||||
github.com/charmbracelet/x/conpty v0.1.0 h1:4zc8KaIcbiL4mghEON8D72agYtSeIgq8FSThSPQIb+U=
|
||||
github.com/charmbracelet/x/conpty v0.1.0/go.mod h1:rMFsDJoDwVmiYM10aD4bH2XiRgwI7NYJtQgl5yskjEQ=
|
||||
github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86 h1:JSt3B+U9iqk37QUU2Rvb6DSBYRLtWqFqfxf8l5hOZUA=
|
||||
github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86/go.mod h1:2P0UgXMEa6TsToMSuFqKFQR+fZTO9CNGUNokkPatT/0=
|
||||
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91 h1:payRxjMjKgx2PaCWLZ4p3ro9y97+TVLZNaRZgJwSVDQ=
|
||||
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
|
||||
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 h1:qko3AQ4gK1MTS/de7F5hPGx6/k1u0w4TeYmBFwzYVP4=
|
||||
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0/go.mod h1:pBhA0ybfXv6hDjQUZ7hk1lVxBiUbupdw5R31yPUViVQ=
|
||||
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
|
||||
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
|
||||
github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=
|
||||
github.com/charmbracelet/x/termios v0.1.1/go.mod h1:rB7fnv1TgOPOyyKRJ9o+AsTU/vK5WHJ2ivHeut/Pcwo=
|
||||
github.com/charmbracelet/x/xpty v0.1.2 h1:Pqmu4TEJ8KeA9uSkISKMU3f+C1F6OGBn8ABuGlqCbtI=
|
||||
github.com/charmbracelet/x/xpty v0.1.2/go.mod h1:XK2Z0id5rtLWcpeNiMYBccNNBrP2IJnzHI0Lq13Xzq4=
|
||||
github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s=
|
||||
github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
|
||||
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
|
||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4=
|
||||
github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE=
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
|
||||
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
|
||||
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
|
||||
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
|
||||
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
|
||||
golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI=
|
||||
golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo=
|
||||
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
|
||||
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
|
||||
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
|
||||
35
internal/archiver/archiver.go
Normal file
35
internal/archiver/archiver.go
Normal file
@@ -0,0 +1,35 @@
|
||||
package archiver
|
||||
|
||||
import (
	"fmt"

	"zipprine/internal/models"
)
|
||||
|
||||
func Compress(config *models.CompressConfig) error {
|
||||
switch config.ArchiveType {
|
||||
case models.ZIP:
|
||||
return createZip(config)
|
||||
case models.TARGZ:
|
||||
return createTarGz(config)
|
||||
case models.TAR:
|
||||
return createTar(config)
|
||||
case models.GZIP:
|
||||
return createGzip(config)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func Extract(config *models.ExtractConfig) error {
|
||||
switch config.ArchiveType {
|
||||
case models.ZIP:
|
||||
return extractZip(config)
|
||||
case models.TARGZ:
|
||||
return extractTarGz(config)
|
||||
case models.TAR:
|
||||
return extractTar(config)
|
||||
case models.GZIP:
|
||||
return extractGzip(config)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
107
internal/archiver/detect.go
Normal file
107
internal/archiver/detect.go
Normal file
@@ -0,0 +1,107 @@
|
||||
package archiver
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"zipprine/internal/models"
|
||||
)
|
||||
|
||||
func DetectArchiveType(path string) (models.ArchiveType, error) {
|
||||
// First, try by extension
|
||||
ext := strings.ToLower(filepath.Ext(path))
|
||||
switch ext {
|
||||
case ".zip":
|
||||
return models.ZIP, nil
|
||||
case ".gz":
|
||||
if strings.HasSuffix(strings.ToLower(path), ".tar.gz") {
|
||||
return models.TARGZ, nil
|
||||
}
|
||||
return models.GZIP, nil
|
||||
case ".tar":
|
||||
return models.TAR, nil
|
||||
case ".tgz":
|
||||
return models.TARGZ, nil
|
||||
}
|
||||
|
||||
// Try by magic bytes
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
header := make([]byte, 512)
|
||||
n, err := file.Read(header)
|
||||
if err != nil && err != io.EOF {
|
||||
return "", err
|
||||
}
|
||||
header = header[:n]
|
||||
|
||||
// ZIP magic: PK (0x504B)
|
||||
if len(header) >= 2 && header[0] == 0x50 && header[1] == 0x4B {
|
||||
return models.ZIP, nil
|
||||
}
|
||||
|
||||
// GZIP magic: 0x1F 0x8B
|
||||
if len(header) >= 2 && header[0] == 0x1F && header[1] == 0x8B {
|
||||
// Check if it's a tar.gz by trying to decompress and check for tar header
|
||||
file.Seek(0, 0)
|
||||
gzReader, err := gzip.NewReader(file)
|
||||
if err == nil {
|
||||
defer gzReader.Close()
|
||||
tarHeader := make([]byte, 512)
|
||||
if n, _ := gzReader.Read(tarHeader); n >= 257 {
|
||||
// TAR magic: "ustar" at offset 257
|
||||
if bytes.Equal(tarHeader[257:262], []byte("ustar")) {
|
||||
return models.TARGZ, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
return models.GZIP, nil
|
||||
}
|
||||
|
||||
// TAR magic: "ustar" at offset 257
|
||||
if len(header) >= 262 && bytes.Equal(header[257:262], []byte("ustar")) {
|
||||
return models.TAR, nil
|
||||
}
|
||||
|
||||
return models.AUTO, nil
|
||||
}
|
||||
|
||||
func Analyze(path string) (*models.ArchiveInfo, error) {
|
||||
archiveType, err := DetectArchiveType(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch archiveType {
|
||||
case models.ZIP:
|
||||
return analyzeZip(path)
|
||||
case models.TARGZ:
|
||||
return analyzeTar(path, true)
|
||||
case models.TAR:
|
||||
return analyzeTar(path, false)
|
||||
case models.GZIP:
|
||||
// For GZIP, provide basic file info
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
fileStat, _ := file.Stat()
|
||||
return &models.ArchiveInfo{
|
||||
Type: models.GZIP,
|
||||
CompressedSize: fileStat.Size(),
|
||||
FileCount: 1,
|
||||
Files: []models.FileInfo{},
|
||||
}, nil
|
||||
default:
|
||||
return nil, nil
|
||||
}
|
||||
}
|
||||
281
internal/archiver/tar.go
Normal file
281
internal/archiver/tar.go
Normal file
@@ -0,0 +1,281 @@
|
||||
package archiver
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"compress/gzip"
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"zipprine/internal/models"
|
||||
"zipprine/pkg/fileutil"
|
||||
)
|
||||
|
||||
func createTar(config *models.CompressConfig) error {
|
||||
outFile, err := os.Create(config.OutputPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer outFile.Close()
|
||||
|
||||
tarWriter := tar.NewWriter(outFile)
|
||||
defer tarWriter.Close()
|
||||
|
||||
return addToTar(tarWriter, config)
|
||||
}
|
||||
|
||||
func createTarGz(config *models.CompressConfig) error {
|
||||
outFile, err := os.Create(config.OutputPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer outFile.Close()
|
||||
|
||||
gzWriter, err := gzip.NewWriterLevel(outFile, config.CompressionLevel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer gzWriter.Close()
|
||||
|
||||
tarWriter := tar.NewWriter(gzWriter)
|
||||
defer tarWriter.Close()
|
||||
|
||||
return addToTar(tarWriter, config)
|
||||
}
|
||||
|
||||
func createGzip(config *models.CompressConfig) error {
|
||||
inFile, err := os.Open(config.SourcePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer inFile.Close()
|
||||
|
||||
outFile, err := os.Create(config.OutputPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer outFile.Close()
|
||||
|
||||
gzWriter, err := gzip.NewWriterLevel(outFile, config.CompressionLevel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer gzWriter.Close()
|
||||
|
||||
_, err = io.Copy(gzWriter, inFile)
|
||||
return err
|
||||
}
|
||||
|
||||
func addToTar(tarWriter *tar.Writer, config *models.CompressConfig) error {
|
||||
return filepath.Walk(config.SourcePath, func(path string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !fileutil.ShouldInclude(path, config.ExcludePaths, config.IncludePaths) {
|
||||
if info.IsDir() {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
relPath, err := filepath.Rel(config.SourcePath, path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
header, err := tar.FileInfoHeader(info, "")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
header.Name = relPath
|
||||
|
||||
if err := tarWriter.WriteHeader(header); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
fmt.Printf(" → %s\n", relPath)
|
||||
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
_, err = io.Copy(tarWriter, file)
|
||||
return err
|
||||
})
|
||||
}
|
||||
|
||||
func extractTar(config *models.ExtractConfig) error {
|
||||
file, err := os.Open(config.ArchivePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
tarReader := tar.NewReader(file)
|
||||
return extractFromTar(tarReader, config)
|
||||
}
|
||||
|
||||
func extractTarGz(config *models.ExtractConfig) error {
|
||||
file, err := os.Open(config.ArchivePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
gzReader, err := gzip.NewReader(file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer gzReader.Close()
|
||||
|
||||
tarReader := tar.NewReader(gzReader)
|
||||
return extractFromTar(tarReader, config)
|
||||
}
|
||||
|
||||
func extractGzip(config *models.ExtractConfig) error {
|
||||
inFile, err := os.Open(config.ArchivePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer inFile.Close()
|
||||
|
||||
gzReader, err := gzip.NewReader(inFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer gzReader.Close()
|
||||
|
||||
outPath := filepath.Join(config.DestPath, filepath.Base(config.ArchivePath))
|
||||
outPath = outPath[:len(outPath)-3] // Remove .gz extension
|
||||
|
||||
outFile, err := os.Create(outPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer outFile.Close()
|
||||
|
||||
_, err = io.Copy(outFile, gzReader)
|
||||
return err
|
||||
}
|
||||
|
||||
func extractFromTar(tarReader *tar.Reader, config *models.ExtractConfig) error {
|
||||
for {
|
||||
header, err := tarReader.Next()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
destPath := filepath.Join(config.DestPath, header.Name)
|
||||
|
||||
switch header.Typeflag {
|
||||
case tar.TypeDir:
|
||||
os.MkdirAll(destPath, os.ModePerm)
|
||||
case tar.TypeReg:
|
||||
if !config.OverwriteAll {
|
||||
if _, err := os.Stat(destPath); err == nil {
|
||||
fmt.Printf(" ⚠️ Skipping: %s\n", header.Name)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Printf(" → Extracting: %s\n", header.Name)
|
||||
|
||||
os.MkdirAll(filepath.Dir(destPath), os.ModePerm)
|
||||
|
||||
outFile, err := os.Create(destPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if _, err := io.Copy(outFile, tarReader); err != nil {
|
||||
outFile.Close()
|
||||
return err
|
||||
}
|
||||
outFile.Close()
|
||||
|
||||
if config.PreservePerms {
|
||||
os.Chmod(destPath, os.FileMode(header.Mode))
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func analyzeTar(path string, isGzipped bool) (*models.ArchiveInfo, error) {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
info := &models.ArchiveInfo{
|
||||
Type: models.TAR,
|
||||
Files: []models.FileInfo{},
|
||||
}
|
||||
|
||||
if isGzipped {
|
||||
info.Type = models.TARGZ
|
||||
}
|
||||
|
||||
fileStat, _ := file.Stat()
|
||||
info.CompressedSize = fileStat.Size()
|
||||
|
||||
hash := sha256.New()
|
||||
io.Copy(hash, file)
|
||||
info.Checksum = fmt.Sprintf("%x", hash.Sum(nil))
|
||||
|
||||
// Reopen for tar reading
|
||||
file.Seek(0, 0)
|
||||
|
||||
var tarReader *tar.Reader
|
||||
if isGzipped {
|
||||
gzReader, err := gzip.NewReader(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer gzReader.Close()
|
||||
tarReader = tar.NewReader(gzReader)
|
||||
} else {
|
||||
tarReader = tar.NewReader(file)
|
||||
}
|
||||
|
||||
for {
|
||||
header, err := tarReader.Next()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info.FileCount++
|
||||
info.TotalSize += header.Size
|
||||
|
||||
if len(info.Files) < 100 {
|
||||
info.Files = append(info.Files, models.FileInfo{
|
||||
Name: header.Name,
|
||||
Size: header.Size,
|
||||
IsDir: header.Typeflag == tar.TypeDir,
|
||||
ModTime: header.ModTime.Format("2006-01-02 15:04:05"),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if info.TotalSize > 0 {
|
||||
info.CompressionRatio = (1 - float64(info.CompressedSize)/float64(info.TotalSize)) * 100
|
||||
}
|
||||
|
||||
return info, nil
|
||||
}
|
||||
174
internal/archiver/zip.go
Normal file
174
internal/archiver/zip.go
Normal file
@@ -0,0 +1,174 @@
|
||||
package archiver
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"compress/flate"
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"zipprine/internal/models"
|
||||
"zipprine/pkg/fileutil"
|
||||
)
|
||||
|
||||
func createZip(config *models.CompressConfig) error {
|
||||
outFile, err := os.Create(config.OutputPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer outFile.Close()
|
||||
|
||||
zipWriter := zip.NewWriter(outFile)
|
||||
defer zipWriter.Close()
|
||||
|
||||
// Set compression level
|
||||
if config.CompressionLevel > 0 {
|
||||
zipWriter.RegisterCompressor(zip.Deflate, func(out io.Writer) (io.WriteCloser, error) {
|
||||
return flate.NewWriter(out, config.CompressionLevel)
|
||||
})
|
||||
}
|
||||
|
||||
return filepath.Walk(config.SourcePath, func(path string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !fileutil.ShouldInclude(path, config.ExcludePaths, config.IncludePaths) {
|
||||
if info.IsDir() {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
relPath, err := filepath.Rel(config.SourcePath, path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
fmt.Printf(" → %s\n", relPath)
|
||||
|
||||
header, err := zip.FileInfoHeader(info)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
header.Name = relPath
|
||||
header.Method = zip.Deflate
|
||||
|
||||
writer, err := zipWriter.CreateHeader(header)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
_, err = io.Copy(writer, file)
|
||||
return err
|
||||
})
|
||||
}
|
||||
|
||||
func extractZip(config *models.ExtractConfig) error {
|
||||
r, err := zip.OpenReader(config.ArchivePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer r.Close()
|
||||
|
||||
for _, f := range r.File {
|
||||
destPath := filepath.Join(config.DestPath, f.Name)
|
||||
|
||||
if f.FileInfo().IsDir() {
|
||||
os.MkdirAll(destPath, os.ModePerm)
|
||||
continue
|
||||
}
|
||||
|
||||
if !config.OverwriteAll {
|
||||
if _, err := os.Stat(destPath); err == nil {
|
||||
fmt.Printf(" ⚠️ Skipping: %s (already exists)\n", f.Name)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Printf(" → Extracting: %s\n", f.Name)
|
||||
|
||||
if err := os.MkdirAll(filepath.Dir(destPath), os.ModePerm); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
outFile, err := os.Create(destPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
rc, err := f.Open()
|
||||
if err != nil {
|
||||
outFile.Close()
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = io.Copy(outFile, rc)
|
||||
outFile.Close()
|
||||
rc.Close()
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if config.PreservePerms {
|
||||
os.Chmod(destPath, f.Mode())
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func analyzeZip(path string) (*models.ArchiveInfo, error) {
|
||||
r, err := zip.OpenReader(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer r.Close()
|
||||
|
||||
info := &models.ArchiveInfo{
|
||||
Type: models.ZIP,
|
||||
Files: []models.FileInfo{},
|
||||
}
|
||||
|
||||
file, _ := os.Open(path)
|
||||
defer file.Close()
|
||||
fileStat, _ := file.Stat()
|
||||
info.CompressedSize = fileStat.Size()
|
||||
|
||||
hash := sha256.New()
|
||||
io.Copy(hash, file)
|
||||
info.Checksum = fmt.Sprintf("%x", hash.Sum(nil))
|
||||
|
||||
for _, f := range r.File {
|
||||
info.FileCount++
|
||||
info.TotalSize += int64(f.UncompressedSize64)
|
||||
|
||||
if len(info.Files) < 100 {
|
||||
info.Files = append(info.Files, models.FileInfo{
|
||||
Name: f.Name,
|
||||
Size: int64(f.UncompressedSize64),
|
||||
IsDir: f.FileInfo().IsDir(),
|
||||
ModTime: f.Modified.Format("2006-01-02 15:04:05"),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if info.TotalSize > 0 {
|
||||
info.CompressionRatio = (1 - float64(info.CompressedSize)/float64(info.TotalSize)) * 100
|
||||
}
|
||||
|
||||
return info, nil
|
||||
}
|
||||
46
internal/models/types.go
Normal file
46
internal/models/types.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package models
|
||||
|
||||
// ArchiveType identifies an on-disk archive/compression format.
type ArchiveType string

// Supported archive formats. AUTO is the sentinel used when the format is
// not (yet) determined by detection.
const (
	ZIP   ArchiveType = "ZIP"
	TARGZ ArchiveType = "TAR.GZ"
	TAR   ArchiveType = "TAR"
	GZIP  ArchiveType = "GZIP"
	AUTO  ArchiveType = "AUTO"
)

// CompressConfig carries every user-selected option for creating an archive.
type CompressConfig struct {
	SourcePath       string      // file or directory to compress
	OutputPath       string      // destination archive path
	ArchiveType      ArchiveType // target format (ZIP, TARGZ, TAR, GZIP)
	ExcludePaths     []string    // patterns to skip (glob on base name or path substring)
	IncludePaths     []string    // if non-empty, only matching paths are added
	VerifyIntegrity  bool        // re-analyze the archive after creation
	CompressionLevel int         // compression level; 1 (fast) .. 9 (best)
}

// ExtractConfig carries every user-selected option for unpacking an archive.
type ExtractConfig struct {
	ArchivePath   string      // archive file to extract
	DestPath      string      // directory to extract into
	ArchiveType   ArchiveType // detected or user-chosen format
	OverwriteAll  bool        // replace files that already exist at the destination
	PreservePerms bool        // restore recorded file permissions after extraction
}

// ArchiveInfo is the result of analyzing an archive.
type ArchiveInfo struct {
	Type             ArchiveType
	FileCount        int        // number of entries in the archive
	TotalSize        int64      // sum of uncompressed entry sizes, in bytes
	CompressedSize   int64      // on-disk archive size, in bytes
	CompressionRatio float64    // percent saved: (1 - compressed/total) * 100
	Files            []FileInfo // entry listing (analyzers cap this at 100)
	Checksum         string     // hex SHA-256 of the raw archive file (may be empty, e.g. GZIP)
}

// FileInfo describes a single entry inside an archive.
type FileInfo struct {
	Name    string
	Size    int64  // uncompressed size in bytes
	IsDir   bool   // true for directory entries
	ModTime string // formatted as "2006-01-02 15:04:05"
}
|
||||
72
internal/ui/analyze.go
Normal file
72
internal/ui/analyze.go
Normal file
@@ -0,0 +1,72 @@
|
||||
package ui
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"zipprine/internal/archiver"
|
||||
"zipprine/internal/models"
|
||||
|
||||
"github.com/charmbracelet/huh"
|
||||
)
|
||||
|
||||
func RunAnalyzeFlow() error {
|
||||
var archivePath string
|
||||
|
||||
form := huh.NewForm(
|
||||
huh.NewGroup(
|
||||
huh.NewInput().
|
||||
Title("📦 Archive Path").
|
||||
Description("Path to the archive to analyze").
|
||||
Placeholder("/path/to/archive.zip").
|
||||
Value(&archivePath).
|
||||
Validate(func(s string) error {
|
||||
if s == "" {
|
||||
return fmt.Errorf("archive path cannot be empty")
|
||||
}
|
||||
if _, err := os.Stat(s); os.IsNotExist(err) {
|
||||
return fmt.Errorf("archive does not exist")
|
||||
}
|
||||
return nil
|
||||
}),
|
||||
),
|
||||
).WithTheme(huh.ThemeCatppuccin())
|
||||
|
||||
if err := form.Run(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Println()
|
||||
fmt.Println(InfoStyle.Render("🔍 Analyzing archive..."))
|
||||
|
||||
info, err := archiver.Analyze(archivePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
displayArchiveInfo(info)
|
||||
return nil
|
||||
}
|
||||
|
||||
func displayArchiveInfo(info *models.ArchiveInfo) {
|
||||
fmt.Println()
|
||||
fmt.Println(HeaderStyle.Render("📊 Archive Information"))
|
||||
fmt.Println(InfoStyle.Render(fmt.Sprintf(" 🎨 Type: %s", info.Type)))
|
||||
fmt.Println(InfoStyle.Render(fmt.Sprintf(" 📁 Files: %d", info.FileCount)))
|
||||
fmt.Println(InfoStyle.Render(fmt.Sprintf(" 💾 Uncompressed: %.2f MB", float64(info.TotalSize)/(1024*1024))))
|
||||
fmt.Println(InfoStyle.Render(fmt.Sprintf(" 📦 Compressed: %.2f MB", float64(info.CompressedSize)/(1024*1024))))
|
||||
fmt.Println(InfoStyle.Render(fmt.Sprintf(" 🎯 Ratio: %.1f%%", info.CompressionRatio)))
|
||||
fmt.Println(InfoStyle.Render(fmt.Sprintf(" 🔒 SHA256: %s...", info.Checksum[:16])))
|
||||
|
||||
if len(info.Files) > 0 && len(info.Files) <= 20 {
|
||||
fmt.Println()
|
||||
fmt.Println(HeaderStyle.Render("📝 File List"))
|
||||
for _, f := range info.Files {
|
||||
icon := "📄"
|
||||
if f.IsDir {
|
||||
icon = "📁"
|
||||
}
|
||||
fmt.Println(InfoStyle.Render(fmt.Sprintf(" %s %s (%.2f KB)", icon, f.Name, float64(f.Size)/1024)))
|
||||
}
|
||||
}
|
||||
}
|
||||
143
internal/ui/compress.go
Normal file
143
internal/ui/compress.go
Normal file
@@ -0,0 +1,143 @@
|
||||
package ui
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"zipprine/internal/archiver"
|
||||
"zipprine/internal/models"
|
||||
|
||||
"github.com/charmbracelet/huh"
|
||||
)
|
||||
|
||||
func RunCompressFlow() error {
|
||||
config := &models.CompressConfig{}
|
||||
|
||||
var sourcePath, outputPath string
|
||||
var archiveTypeStr string
|
||||
var excludeInput, includeInput string
|
||||
var verify bool
|
||||
var compressionLevel string
|
||||
|
||||
form := huh.NewForm(
|
||||
huh.NewGroup(
|
||||
huh.NewInput().
|
||||
Title("📁 Source Path").
|
||||
Description("Enter the path to compress (file or directory)").
|
||||
Placeholder("/path/to/source").
|
||||
Value(&sourcePath).
|
||||
Validate(func(s string) error {
|
||||
if s == "" {
|
||||
return fmt.Errorf("source path cannot be empty")
|
||||
}
|
||||
if _, err := os.Stat(s); os.IsNotExist(err) {
|
||||
return fmt.Errorf("path does not exist")
|
||||
}
|
||||
return nil
|
||||
}),
|
||||
|
||||
huh.NewInput().
|
||||
Title("💾 Output Path").
|
||||
Description("Where to save the archive").
|
||||
Placeholder("/path/to/output.zip").
|
||||
Value(&outputPath).
|
||||
Validate(func(s string) error {
|
||||
if s == "" {
|
||||
return fmt.Errorf("output path cannot be empty")
|
||||
}
|
||||
return nil
|
||||
}).Suggestions([]string{".zip", ".tar.gz", ".tar", ".gz"}),
|
||||
),
|
||||
|
||||
huh.NewGroup(
|
||||
huh.NewSelect[string]().
|
||||
Title("🎨 Archive Type").
|
||||
Description("Choose your compression format").
|
||||
Options(
|
||||
huh.NewOption("ZIP - Universal & Compatible 📦", "ZIP"),
|
||||
huh.NewOption("TAR.GZ - Linux Classic (Best Compression) 🐧", "TARGZ"),
|
||||
huh.NewOption("TAR - No Compression 📄", "TAR"),
|
||||
huh.NewOption("GZIP - Single File Compression 🔧", "GZIP"),
|
||||
).
|
||||
Value(&archiveTypeStr),
|
||||
|
||||
huh.NewSelect[string]().
|
||||
Title("⚡ Compression Level").
|
||||
Description("Higher = smaller but slower").
|
||||
Options(
|
||||
huh.NewOption("Fast (Level 1)", "1"),
|
||||
huh.NewOption("Balanced (Level 5)", "5"),
|
||||
huh.NewOption("Best (Level 9)", "9"),
|
||||
).
|
||||
Value(&compressionLevel),
|
||||
),
|
||||
|
||||
huh.NewGroup(
|
||||
huh.NewText().
|
||||
Title("🚫 Exclude Patterns").
|
||||
Description("Comma-separated patterns to exclude (e.g., *.log,node_modules,*.tmp)").
|
||||
Placeholder("*.log,temp/*,.git,__pycache__").
|
||||
Value(&excludeInput),
|
||||
|
||||
huh.NewText().
|
||||
Title("✅ Include Patterns").
|
||||
Description("Comma-separated patterns to include (leave empty for all)").
|
||||
Placeholder("*.go,*.md,src/*").
|
||||
Value(&includeInput),
|
||||
),
|
||||
|
||||
huh.NewGroup(
|
||||
huh.NewConfirm().
|
||||
Title("🔐 Verify Archive Integrity").
|
||||
Description("Check the archive after creation?").
|
||||
Value(&verify).
|
||||
Affirmative("Yes please!").
|
||||
Negative("Skip it"),
|
||||
),
|
||||
).WithTheme(huh.ThemeCatppuccin())
|
||||
|
||||
if err := form.Run(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
config.SourcePath = sourcePath
|
||||
config.OutputPath = outputPath
|
||||
config.ArchiveType = models.ArchiveType(archiveTypeStr)
|
||||
config.VerifyIntegrity = verify
|
||||
fmt.Sscanf(compressionLevel, "%d", &config.CompressionLevel)
|
||||
|
||||
if excludeInput != "" {
|
||||
config.ExcludePaths = strings.Split(excludeInput, ",")
|
||||
for i := range config.ExcludePaths {
|
||||
config.ExcludePaths[i] = strings.TrimSpace(config.ExcludePaths[i])
|
||||
}
|
||||
}
|
||||
|
||||
if includeInput != "" {
|
||||
config.IncludePaths = strings.Split(includeInput, ",")
|
||||
for i := range config.IncludePaths {
|
||||
config.IncludePaths[i] = strings.TrimSpace(config.IncludePaths[i])
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println()
|
||||
fmt.Println(InfoStyle.Render("🎯 Starting compression..."))
|
||||
|
||||
if err := archiver.Compress(config); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Println(SuccessStyle.Render("✅ Archive created successfully!"))
|
||||
|
||||
if config.VerifyIntegrity {
|
||||
fmt.Println(InfoStyle.Render("🔍 Verifying archive integrity..."))
|
||||
info, err := archiver.Analyze(config.OutputPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
displayArchiveInfo(info)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
93
internal/ui/extract.go
Normal file
93
internal/ui/extract.go
Normal file
@@ -0,0 +1,93 @@
|
||||
package ui
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"zipprine/internal/archiver"
|
||||
"zipprine/internal/models"
|
||||
|
||||
"github.com/charmbracelet/huh"
|
||||
)
|
||||
|
||||
func RunExtractFlow() error {
|
||||
config := &models.ExtractConfig{}
|
||||
|
||||
var archivePath, destPath string
|
||||
var overwrite, preservePerms bool
|
||||
|
||||
form := huh.NewForm(
|
||||
huh.NewGroup(
|
||||
huh.NewInput().
|
||||
Title("📦 Archive Path").
|
||||
Description("Path to the archive file").
|
||||
Placeholder("/path/to/archive.zip").
|
||||
Value(&archivePath).
|
||||
Validate(func(s string) error {
|
||||
if s == "" {
|
||||
return fmt.Errorf("archive path cannot be empty")
|
||||
}
|
||||
if _, err := os.Stat(s); os.IsNotExist(err) {
|
||||
return fmt.Errorf("archive does not exist")
|
||||
}
|
||||
return nil
|
||||
}),
|
||||
|
||||
huh.NewInput().
|
||||
Title("📂 Destination Path").
|
||||
Description("Where to extract files").
|
||||
Placeholder("/path/to/destination").
|
||||
Value(&destPath).
|
||||
Validate(func(s string) error {
|
||||
if s == "" {
|
||||
return fmt.Errorf("destination path cannot be empty")
|
||||
}
|
||||
return nil
|
||||
}),
|
||||
),
|
||||
|
||||
huh.NewGroup(
|
||||
huh.NewConfirm().
|
||||
Title("⚠️ Overwrite Existing Files").
|
||||
Description("Replace files if they already exist?").
|
||||
Value(&overwrite).
|
||||
Affirmative("Yes, overwrite").
|
||||
Negative("No, skip"),
|
||||
|
||||
huh.NewConfirm().
|
||||
Title("🔒 Preserve Permissions").
|
||||
Description("Keep original file permissions?").
|
||||
Value(&preservePerms).
|
||||
Affirmative("Yes").
|
||||
Negative("No"),
|
||||
),
|
||||
).WithTheme(huh.ThemeCatppuccin())
|
||||
|
||||
if err := form.Run(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
config.ArchivePath = archivePath
|
||||
config.DestPath = destPath
|
||||
config.OverwriteAll = overwrite
|
||||
config.PreservePerms = preservePerms
|
||||
|
||||
fmt.Println()
|
||||
fmt.Println(InfoStyle.Render("🔍 Detecting archive type..."))
|
||||
|
||||
detectedType, err := archiver.DetectArchiveType(archivePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
config.ArchiveType = detectedType
|
||||
|
||||
fmt.Println(SuccessStyle.Render(fmt.Sprintf("✅ Detected: %s", detectedType)))
|
||||
fmt.Println(InfoStyle.Render("📂 Extracting files..."))
|
||||
|
||||
if err := archiver.Extract(config); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Println(SuccessStyle.Render("✅ Extraction completed!"))
|
||||
return nil
|
||||
}
|
||||
32
internal/ui/styles.go
Normal file
32
internal/ui/styles.go
Normal file
@@ -0,0 +1,32 @@
|
||||
package ui
|
||||
|
||||
import "github.com/charmbracelet/lipgloss"
|
||||
|
||||
// Shared lipgloss styles used by every UI flow so console output stays
// visually consistent across the application.
var (
	// TitleStyle frames the application title in a rounded purple border.
	TitleStyle = lipgloss.NewStyle().
			Bold(true).
			Foreground(lipgloss.Color("#7D56F4")).
			BorderStyle(lipgloss.RoundedBorder()).
			BorderForeground(lipgloss.Color("#7D56F4")).
			Padding(0, 1)

	// SuccessStyle renders confirmation messages in bold green.
	SuccessStyle = lipgloss.NewStyle().
			Foreground(lipgloss.Color("#04B575")).
			Bold(true)

	// ErrorStyle renders failure messages in bold red.
	ErrorStyle = lipgloss.NewStyle().
			Foreground(lipgloss.Color("#FF0000")).
			Bold(true)

	// InfoStyle renders neutral progress/status lines in light blue.
	InfoStyle = lipgloss.NewStyle().
			Foreground(lipgloss.Color("#00BFFF"))

	// WarningStyle renders cautionary messages in bold orange.
	WarningStyle = lipgloss.NewStyle().
			Foreground(lipgloss.Color("#FFA500")).
			Bold(true)

	// HeaderStyle renders section headers in bold, underlined pink.
	HeaderStyle = lipgloss.NewStyle().
			Bold(true).
			Foreground(lipgloss.Color("#FF79C6")).
			Underline(true)
)
|
||||
64
pkg/fileutil/fileutil.go
Normal file
64
pkg/fileutil/fileutil.go
Normal file
@@ -0,0 +1,64 @@
|
||||
package fileutil
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ShouldInclude determines if a file should be included based on exclude/include patterns
func ShouldInclude(path string, excludePaths, includePaths []string) bool {
	// Exclusions win: any matching exclude pattern rejects the path outright.
	for _, pattern := range excludePaths {
		if patternMatches(path, pattern) {
			return false
		}
	}

	// With no include patterns, everything not excluded is accepted.
	if len(includePaths) == 0 {
		return true
	}

	// Otherwise the path must match at least one include pattern.
	// NOTE(review): directories rarely match file globs like "*.go", so
	// callers that SkipDir on a false result may skip whole trees — verify
	// against the archive walkers.
	for _, pattern := range includePaths {
		if patternMatches(path, pattern) {
			return true
		}
	}
	return false
}

// patternMatches reports whether pattern applies to path: as a glob on the
// base name, as a plain substring of the full path, or — for "dir/*" style
// patterns — as a substring match on the directory prefix.
func patternMatches(path, pattern string) bool {
	if ok, _ := filepath.Match(pattern, filepath.Base(path)); ok {
		return true
	}
	if strings.Contains(path, pattern) {
		return true
	}
	if strings.HasSuffix(pattern, "/*") {
		return strings.Contains(path, strings.TrimSuffix(pattern, "/*"))
	}
	return false
}
|
||||
|
||||
// FormatBytes converts bytes to human-readable format
func FormatBytes(bytes int64) string {
	const unit = 1024
	// Values below 1 KiB are reported verbatim.
	if bytes < unit {
		return fmt.Sprintf("%d B", bytes)
	}
	// Scale down by 1024 until the value fits under one more unit, tracking
	// which suffix (K, M, G, ...) we have reached.
	value := float64(bytes) / unit
	idx := 0
	for value >= unit {
		value /= unit
		idx++
	}
	return fmt.Sprintf("%.1f %cB", value, "KMGTPE"[idx])
}
|
||||
Reference in New Issue
Block a user