Linux Disk Space Management Cheat Sheet
Quick Diagnosis
Check Disk Space
# Overview of all filesystems
df -h
# Human-readable with filesystem type
df -hT
# Show inodes usage (can be full even with space available)
df -i
# Specific filesystem
df -h /home
# Show all filesystems including pseudo filesystems
df -ha
Find What’s Using Space
# Disk usage of current directory
du -h --max-depth=1
# Summary of directory
du -sh /path/to/dir
# Top 10 largest directories
du -h --max-depth=1 | sort -hr | head -10
# Interactive disk usage analyzer (install with apt/yum)
ncdu /
ncdu /home
# GUI disk usage analyzer
baobab # GNOME Disk Usage Analyzer
Finding Large Files
Find Large Files
# Find files larger than 100MB in current directory
find . -type f -size +100M -exec ls -lh {} \;
# Find files larger than 1GB
find / -type f -size +1G -exec ls -lh {} \; 2>/dev/null
# Top 20 largest files in /var
find /var -type f -exec du -h {} \; | sort -rh | head -20
# Find large files modified more than 30 days ago
find /var -type f -size +100M -mtime +30 -exec ls -lh {} \;
# Using du for top files
du -ah /path | sort -rh | head -20
# Find large files with details
find / -type f -size +500M -exec ls -lh {} \; 2>/dev/null | \
awk '{print $5, $9}' | sort -rh
Find Large Directories
# Largest directories in /
du -h --max-depth=1 / 2>/dev/null | sort -rh | head -10
# Largest directories in current directory
du -h -d 1 . | sort -rh
# All subdirectories sorted by size
du -h | sort -rh | head -20
# Exclude certain directories
du -h --exclude=/proc --exclude=/sys / | sort -rh | head -20
System Cleanup
Package Manager Cleanup
# Debian/Ubuntu (APT)
sudo apt clean # Clear package cache
sudo apt autoclean # Remove obsolete packages from the cache
sudo apt autoremove # Remove unused packages
sudo apt autoremove --purge # Remove unused packages plus their config files
# Old kernels are pruned by autoremove --purge (the running and newest kernels are kept)
# List installed kernels
dpkg --list | grep linux-image
# Red Hat/CentOS (YUM)
sudo yum clean all # Clear all cache
sudo yum autoremove # Remove unused packages
# Fedora (DNF)
sudo dnf clean all
sudo dnf autoremove
# Remove old kernel versions (keep 2)
sudo dnf remove --oldinstallonly --setopt installonly_limit=2 kernel
# Arch Linux (Pacman)
sudo pacman -Sc # Remove cached packages that are no longer installed
sudo pacman -Scc # Remove all cached packages
sudo pacman -Rns $(pacman -Qtdq) # Remove orphaned packages
Clear System Caches
# Clear PageCache only (note: drop_caches frees RAM, not disk space)
sudo sync && sudo sysctl vm.drop_caches=1
# Clear dentries and inodes
sudo sync && sudo sysctl vm.drop_caches=2
# Clear PageCache, dentries and inodes
sudo sync && sudo sysctl vm.drop_caches=3
# Clear systemd journal logs
sudo journalctl --vacuum-time=7d # Keep last 7 days
sudo journalctl --vacuum-size=500M # Keep last 500MB
sudo journalctl --rotate # Rotate journals
# Check journal disk usage
journalctl --disk-usage
Temporary Files
# Clear /tmp (be careful! running programs may keep sockets and lock files here)
sudo rm -rf /tmp/*
sudo rm -rf /tmp/.[!.]* # hidden entries; this glob avoids matching . and ..
# Clear /var/tmp
sudo rm -rf /var/tmp/*
# Clear user cache
rm -rf ~/.cache/*
# Clear thumbnail cache
rm -rf ~/.cache/thumbnails/*
# Clear browser caches
rm -rf ~/.cache/mozilla/firefox/*/cache2/*
rm -rf ~/.cache/google-chrome/Default/Cache/*
rm -rf ~/.cache/chromium/Default/Cache/*
# List all .cache directories and their sizes (review before removing)
find ~ -type d -name ".cache" -exec du -sh {} \;
Log File Management
Find and Manage Logs
# Check log directory size
du -sh /var/log
# Find largest log files
find /var/log -type f -exec ls -lh {} \; | sort -k5 -hr | head -10
# Find old log files
find /var/log -type f -name "*.log" -mtime +30
# Check specific log sizes
ls -lh /var/log/*.log
# View log sizes sorted
du -h /var/log | sort -rh | head -20
Clear Log Files
# Truncate log file (preserve file)
sudo truncate -s 0 /var/log/syslog
# Clear log file content (a plain "sudo > file" won't work; the redirect needs a root shell)
sudo sh -c '> /var/log/syslog'
# Remove old rotated logs
sudo rm /var/log/*.log.*
sudo rm /var/log/*/*.log.gz
# Compress old logs
sudo gzip /var/log/*.log.1
# Remove logs older than 7 days
sudo find /var/log -type f -name "*.log.*" -mtime +7 -delete
# Clear apt logs
sudo rm /var/log/apt/*.log
Logrotate Configuration
# Force log rotation
sudo logrotate -f /etc/logrotate.conf
# Test logrotate configuration
sudo logrotate -d /etc/logrotate.conf
# Edit logrotate config for specific service
sudo nano /etc/logrotate.d/rsyslog
# Example config to rotate logs more aggressively
# /var/log/syslog {
# daily
# rotate 7
# compress
# delaycompress
# notifempty
# create 0640 syslog adm
# }
Application-Specific Cleanup
Docker Cleanup
# Remove unused containers
docker container prune
# Remove unused images
docker image prune
docker image prune -a # Remove all unused images
# Remove unused volumes
docker volume prune
# Remove unused networks
docker network prune
# Remove everything unused
docker system prune
docker system prune -a # More aggressive
# Remove old/dangling images
docker rmi $(docker images -f "dangling=true" -q)
# Check Docker disk usage
docker system df
# Remove specific stopped containers
docker rm $(docker ps -aq -f status=exited)
Snap Packages (Ubuntu)
# List installed snaps
snap list --all
# Keep only 2 revisions of each snap from now on
sudo snap set system refresh.retain=2
# Remove old revisions manually
#!/bin/bash
snap list --all | awk '/disabled/{print $1, $3}' |
  while read snapname revision; do
    sudo snap remove "$snapname" --revision="$revision"
  done
# Check snap disk usage
du -h /var/lib/snapd/snaps
Flatpak Cleanup
# Remove unused runtimes
flatpak uninstall --unused
# Remove leftover data from apps that are no longer installed
flatpak uninstall --delete-data
# Remove specific app data
flatpak uninstall --delete-data app-name
NPM/Node.js
# Clear npm cache
npm cache clean --force
# Check cache size
du -sh ~/.npm
# List node_modules directories and their sizes, then remove (this hits every project under ~)
find ~ -name "node_modules" -type d -prune -exec du -sh {} \;
find ~ -name "node_modules" -type d -prune -exec rm -rf {} \;
Python
# Clear pip cache
pip cache purge
# Check cache size
du -sh ~/.cache/pip
# Remove __pycache__ directories
find . -type d -name __pycache__ -prune -exec rm -rf {} +
# Remove .pyc files
find . -name "*.pyc" -delete
Ruby
# Clean gem cache
gem cleanup
# Remove old gems
gem cleanup -d
# Check gem directory size
du -sh ~/.gem
Composer (PHP)
# Clear composer cache
composer clear-cache
# Check cache size
du -sh ~/.composer/cache
User-Specific Cleanup
Home Directory Cleanup
# Find large files in home
find ~ -type f -size +100M -exec ls -lh {} \;
# Check home directory usage by subdirectory
du -h --max-depth=1 ~ | sort -rh
# Clear user cache
rm -rf ~/.cache/*
# Clear trash
rm -rf ~/.local/share/Trash/*
# Find and remove old downloads
find ~/Downloads -type f -mtime +90 -delete
# Find duplicate files (using fdupes)
sudo apt install fdupes
fdupes -r ~/Documents
fdupes -r -d ~/Documents # Interactive deletion
Desktop Environment Caches
# GNOME
rm -rf ~/.cache/gnome-software/*
rm -rf ~/.cache/thumbnails/*
# KDE
rm -rf ~/.cache/plasma*
rm -rf ~/.cache/krunner
# Clear recent files
rm ~/.local/share/recently-used.xbel
Database Cleanup
MySQL/MariaDB
# Remove binary logs
mysql -u root -p -e "PURGE BINARY LOGS BEFORE DATE_SUB(NOW(), INTERVAL 7 DAY);"
# Optimize tables
mysqlcheck -o --all-databases -u root -p
# Check database sizes
mysql -u root -p -e "SELECT table_schema AS 'Database',
ROUND(SUM(data_length + index_length) / 1024 / 1024, 2) AS 'Size (MB)'
FROM information_schema.TABLES
GROUP BY table_schema;"
PostgreSQL
# Reclaim space (VACUUM FULL rewrites tables and takes exclusive locks)
sudo -u postgres psql -c "VACUUM FULL;"
# Clean old WAL files from a WAL archive directory (the second argument is the
# oldest segment to keep; do not point this at a live cluster's pg_wal)
sudo -u postgres pg_archivecleanup /path/to/wal_archive OLDEST_KEPT_WALFILE
# Check database sizes
sudo -u postgres psql -c "\l+"
Redis
# Flush all data (careful!)
redis-cli FLUSHALL
# Get memory usage
redis-cli INFO memory
Advanced Techniques
Find and Remove Duplicates
# Using fdupes
sudo apt install fdupes
fdupes -r /path/to/directory
# Delete duplicates interactively
fdupes -r -d /path/to/directory
# Using rdfind
sudo apt install rdfind
rdfind -makehardlinks true /path/to/directory
Compress Old Files
# Find and compress old log files
find /var/log -name "*.log" -mtime +7 -exec gzip {} \;
# Compress files older than 30 days
find /path -type f -mtime +30 -exec gzip {} \;
# Batch compress with tar
tar -czf archive-$(date +%Y%m%d).tar.gz /path/to/old/files
Move to Different Partition
# Move large directories to another partition (stop services that write to them first)
sudo mv /var/log /mnt/newdisk/log
sudo ln -s /mnt/newdisk/log /var/log
# Move Docker data directory
sudo systemctl stop docker
sudo mv /var/lib/docker /mnt/newdisk/docker
sudo ln -s /mnt/newdisk/docker /var/lib/docker
sudo systemctl start docker
# Move home directory (advanced)
# Requires careful planning and backup
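A minimal sketch of the usual approach, assuming a new partition at /dev/sdb1 and a console session with no users logged in (device name and mount point are placeholders):
sudo mkdir -p /mnt/newhome
sudo mount /dev/sdb1 /mnt/newhome
# Copy data, preserving permissions, ownership, ACLs and hard links
sudo rsync -aAXH /home/ /mnt/newhome/
# Keep the old data as a fallback until the new setup is verified
sudo mv /home /home.old && sudo mkdir /home
# Mount the new partition at /home and make it permanent in /etc/fstab
sudo mount /dev/sdb1 /home
echo "UUID=$(sudo blkid -s UUID -o value /dev/sdb1) /home ext4 defaults 0 2" | sudo tee -a /etc/fstab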
Sparse Files
# Find sparse files
find / -type f -printf "%S\t%p\n" 2>/dev/null | \
awk '$1 < 1.0 {print}'
# Create sparse file from existing
cp --sparse=always original.file sparse.file
Inode Management
Check Inodes
# Check inode usage
df -i
# Find directories with most files
find / -xdev -type d -exec bash -c 'echo "{}: $(find "{}" -maxdepth 1 -type f | wc -l)"' \; 2>/dev/null | \
sort -t: -k2 -rn | head -20
# Find directories with many small files
for dir in /*; do
  echo "$dir: $(find "$dir" -maxdepth 1 -type f 2>/dev/null | wc -l)"
done | sort -t: -k2 -rn
Free Inodes
# Remove many small files in a directory
find /path -type f -delete
# Remove old session files
find /var/lib/php/sessions -type f -mtime +30 -delete
# Remove old mail
find /var/mail -type f -mtime +90 -delete
Monitoring and Prevention
Set Up Disk Usage Alerts
# Create monitoring script
mkdir -p ~/bin
cat > ~/bin/disk-alert.sh << 'EOF'
#!/bin/bash
THRESHOLD=90
df -H | grep -vE '^Filesystem|tmpfs|cdrom' | awk '{ print $5 " " $1 }' | while read output; do
  usage=$(echo "$output" | awk '{ print $1 }' | cut -d'%' -f1)
  partition=$(echo "$output" | awk '{ print $2 }')
  if [ "$usage" -ge "$THRESHOLD" ]; then
    echo "Warning: $partition is ${usage}% full" | mail -s "Disk Space Alert" [email protected]
  fi
done
EOF
chmod +x ~/bin/disk-alert.sh
# Add to crontab (run daily)
crontab -e
# 0 9 * * * ~/bin/disk-alert.sh
# Edit logrotate config
sudo nano /etc/logrotate.conf
# Set aggressive rotation
# weekly
# rotate 4
# create
# compress
# include /etc/logrotate.d
# Application-specific rotation
sudo nano /etc/logrotate.d/myapp
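A minimal example of what such a file might contain, assuming a hypothetical app that writes to /var/log/myapp/:
# /var/log/myapp/*.log {
#     daily
#     rotate 14
#     compress
#     delaycompress
#     missingok
#     notifempty
#     copytruncate
# }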
Quota Management
# Install quota tools
sudo apt install quota
# Check user quotas
quota -v
# Check all user quotas
sudo repquota -a
# Set quota for user
sudo setquota -u username 10G 12G 0 0 /home
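Quotas only take effect on filesystems mounted with quota options. A minimal setup sketch for an ext4 /home (the mount point and fstab line are assumptions for illustration):
# Add usrquota (and optionally grpquota) to the /home entry in /etc/fstab, e.g.:
# UUID=... /home ext4 defaults,usrquota,grpquota 0 2
sudo mount -o remount /home
# Build the quota files and turn quotas on
sudo quotacheck -cum /home
sudo quotaon -v /home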
Emergency Procedures
When Root Filesystem is Full
# Check what's using space (read-only operations)
df -h
du -sh /*
# Clear immediate space
sudo rm -rf /tmp/*
sudo journalctl --vacuum-size=100M
sudo apt clean
# Find and remove large files
sudo find / -xdev -type f -size +100M -exec ls -lh {} \;
# Check for deleted but open files
sudo lsof / | grep deleted
# Restart services holding deleted files
sudo systemctl restart rsyslog
sudo systemctl restart apache2
Recover from 100% Full
# Find processes writing to disk
sudo lsof +L1
sudo lsof | grep deleted
# Stop non-essential services
sudo systemctl stop apache2
sudo systemctl stop mysql
# Emergency cleanup
sudo find /tmp -type f -delete
sudo find /var/tmp -type f -delete
sudo rm -rf /var/log/*.log.*
sudo truncate -s 0 /var/log/syslog
# Clear journal
sudo journalctl --vacuum-size=50M
# Restart services
sudo systemctl start apache2
sudo systemctl start mysql
Handle Deleted But Open Files
# Find deleted files still held open
sudo lsof +L1
# Find which process
sudo lsof | grep -E "deleted|DEL"
# Kill or restart the process
sudo systemctl restart service-name
# Or kill process
sudo kill -9 PID
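If the owning process can't be restarted, the space can often be reclaimed by truncating the deleted file through /proc instead. A sketch, where PID and FD are placeholders taken from the lsof output (drop trailing letters like "w" from the FD column):
# Identify the process (PID) and file descriptor (FD) holding the deleted file
sudo lsof +L1 | grep deleted
# Truncate the still-open file to zero bytes without killing the process
sudo truncate -s 0 /proc/PID/fd/FD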
Best Practices
Regular Maintenance
# Create cleanup script
mkdir -p ~/bin
cat > ~/bin/cleanup.sh << 'EOF'
#!/bin/bash
echo "Starting system cleanup..."
# Package manager
sudo apt autoremove -y
sudo apt autoclean
# Logs
sudo journalctl --vacuum-time=7d
sudo find /var/log -name "*.log.*" -mtime +7 -delete
# Cache
rm -rf ~/.cache/*
sudo sync && sudo sysctl vm.drop_caches=3
# Docker (if installed)
if command -v docker &> /dev/null; then
  docker system prune -f
fi
echo "Cleanup complete!"
df -h
EOF
chmod +x ~/bin/cleanup.sh
# Schedule weekly cleanup
(crontab -l 2>/dev/null; echo "0 2 * * 0 ~/bin/cleanup.sh") | crontab -
Prevent Issues
- Set up monitoring - Alert before disk fills
- Configure log rotation - Prevent log accumulation
- Regular cleanup - Schedule weekly maintenance
- Use quotas - Limit per-user space
- Monitor growth - Track disk usage trends (see the sketch after this list)
- Separate partitions - Isolate system from data
- Archive old data - Move to cheaper storage
- Clean development - Remove old node_modules, etc.
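A minimal sketch for tracking disk usage trends, assuming a log file at $HOME/disk-usage.log (path and schedule are arbitrary):
# Append a timestamped snapshot of filesystem usage once a day
(crontab -l 2>/dev/null; echo '0 8 * * * (date; df -h; echo) >> $HOME/disk-usage.log') | crontab -
# Review the history later
less ~/disk-usage.log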
Quick Reference
# Check disk space
df -h
du -sh /path
# Find large files
find / -type f -size +1G -exec ls -lh {} \; 2>/dev/null
# Top 10 largest directories
du -h / --max-depth=1 2>/dev/null | sort -rh | head -10
# Package cleanup
sudo apt autoremove && sudo apt clean
# Clear logs
sudo journalctl --vacuum-time=3d
# Docker cleanup
docker system prune -a
# Clear cache
rm -rf ~/.cache/*
# Find deleted but open files
sudo lsof +L1
Disk Usage Analyzers
# CLI
ncdu # NCurses Disk Usage (interactive)
duc # Disk Usage Checker (with database)
duf # Modern df alternative
dust # Modern du alternative
# GUI
baobab # GNOME Disk Usage Analyzer
filelight # KDE Disk Usage
qdirstat # Qt Directory Statistics
Installation
# ncdu
sudo apt install ncdu
sudo yum install ncdu
# duf
wget https://github.com/muesli/duf/releases/download/v0.8.1/duf_0.8.1_linux_amd64.deb
sudo dpkg -i duf_0.8.1_linux_amd64.deb
# dust
cargo install du-dust
Tips
- Use ncdu - Interactive and easier than du
- Check inodes - Can be full even with free space
- Look for logs - Often the biggest culprit
- Check deleted files - lsof +L1 for held files
- Clear caches safely - Won’t harm system
- Be careful with rm - Always double-check paths
- Use -exec ... + or xargs with find - Far fewer process spawns than -exec ... \;
- Compress old files - Gzip can save 70-90% space
- Symlink large dirs - Move to bigger partition
- Regular maintenance - Prevent issues before they occur
- Test in safe directory first - Before running on /
- Keep backups - Before aggressive cleanup
- Monitor trends - Catch problems early
- Document paths - Know where space goes
- Automate cleanup - Scripts + cron = no surprises