30 Linux Commands Every Developer Should Know

File Operations

Disk & Memory

Network

Process Management

Text Processing

Archives

User & Permissions

System Info

My Daily Workflow Commands — these are the commands I use every single day on my VPS. What's your most-used Linux command? Did I miss any essentials?

The command reference below is grouped by the categories listed above.

## File Operations

```bash
# Find files by name
find . -name "*.js" -type f
find /var/log -name "*.log" -mtime -7        # Modified in last 7 days

# Search file contents
grep -r "TODO" src/                          # Recursive search
grep -rn "function" src/                     # With line numbers
grep -r "error" /var/log/ --include="*.log"  # In specific files only

# Quick find (faster than find for name searches)
locate config.json                           # Run updatedb first
fd pattern src/                              # Install: apt install fd-find

# Better grep (ripgrep)
rg "TODO" src/                               # Faster, ignores .gitignore
```

## Disk & Memory

```bash
# Disk usage
df -h                                        # Human-readable disk usage
du -sh *                                     # Directory sizes in current folder
du -sh /var/* | sort -hr | head -10          # Top 10 largest directories

# Find large files
find . -size +100M -type f                   # Files > 100MB
find . -size +1G -exec ls -lh {} \;          # Show details of huge files

# Memory
free -h                                      # RAM usage
top                                          # Process resource usage (or htop)
ps aux | sort -k4nr | head -10               # Top 10 memory-hungry processes
```

## Network

```bash
# Port checking
ss -tlnp | grep :3000                        # What's listening on port 3000?
netstat -tlnp                                # All listening ports

# Connection testing
curl -I https://example.com                  # Headers only
curl -s -o /dev/null -w "%{http_code}" URL   # Just status code
wget -qO- https://ifconfig.me                # Your public IP

# DNS
dig example.com                              # DNS lookup
nslookup example.com                         # Alternative DNS check
host example.com                             # Simple DNS query

# Debug connectivity
ping -c 4 google.com                         # 4 pings
traceroute google.com                        # Route to destination
telnet host 80                               # Test TCP connection
nc -zv localhost 3000                        # Check if port is open
```

## Process Management

```bash
# Find and kill
ps aux | grep node                           # Find node processes
kill 12345                                   # Graceful kill
kill -9 12345                                # Force kill
pkill -f "node server.js"                    # Kill by pattern

# Background processes
node server.js &                             # Run in background
jobs                                         # List background jobs
fg %1                                        # Bring job 1 to front
bg %1                                        # Resume suspended job in background
nohup node server.js &                       # Survives logout

# Monitor process
watch -n 2 'curl -s http://localhost:3000/health'  # Repeat every 2 seconds
tail -f /var/log/app.log                     # Follow log file
```

## Text Processing

```bash
# View files
less bigfile.txt                             # Scrollable viewer (q to quit)
head -20 file.txt                            # First 20 lines
tail -50 file.txt                            # Last 50 lines
tail -f logfile.txt                          # Live follow

# Count
wc -l file.txt                               # Line count
wc -w file.txt                               # Word count
grep -c "error" logfile.txt                  # Count matches

# Sort & unique
sort file.txt                                # Sort lines
sort -u file.txt                             # Unique lines
sort file.txt | uniq -c                      # Count occurrences
sort file.txt | uniq -c | sort -rn           # Sorted by frequency

# Extract columns
awk '{print $1, $3}' file.txt                # Print columns 1 and 3
cut -d',' -f2,5 csv.csv                      # CSV column extraction

# Replace
sed 's/old/new/g' file.txt                   # Replace all occurrences
sed -i 's/foo/bar/g' file.txt                # In-place replace

# Chain commands together
cat access.log | awk '{print $7}' | sort | uniq -c | sort -rn | head -20  # Top 20 most requested URLs
```

## Archives

```bash
# tar.gz
tar -czvf archive.tar.gz folder/             # Create
tar -xzvf archive.tar.gz                     # Extract
tar -tzvf archive.tar.gz                     # List contents

# zip
zip -r archive.zip folder/                   # Create
unzip archive.zip                            # Extract

# Quick extract anything
extract() {
  if [ -f $1 ]; then
    case $1 in
      *.tar.bz2) tar xjf $1 ;;
      *.tar.gz)  tar xzf $1 ;;
      *.bz2)     bunzip2 $1 ;;
      *.rar)     unrar x $1 ;;
      *.gz)      gunzip $1 ;;
      *.tar)     tar xf $1 ;;
      *.tbz2)    tar xjf $1 ;;
      *.tgz)     tar xzf $1 ;;
      *.zip)     unzip $1 ;;
      *)         echo "Cannot extract '$1'" ;;
    esac
  fi
}
```

## User & Permissions

```bash
# Who am I?
whoami                                       # Current user
id                                           # User ID and groups
who                                          # Logged-in users
w                                            # Who's doing what

# Permissions
ls -la                                       # Detailed listing with permissions
chmod +x script.sh                           # Make executable
chmod 644 file.txt                           # rw-r--r--
chmod 755 script.sh                          # rwxr-xr-x
chown user:group file.txt                    # Change owner

# Sudo without password for specific command (be careful!)
sudo visudo  # then add: username ALL=(ALL) NOPASSWD: /usr/bin/systemctl restart nginx
```

## System Info

```bash
uname -a                                     # Full system info
uname -m                                     # Architecture (x86_64/arm64)
cat /etc/os-release                          # OS version
uptime                                       # How long system has been running
lscpu                                        # CPU info
lsblk                                        # Block devices (disks)
free -h                                      # Memory
ip addr                                      # Network interfaces
systemctl status nginx                       # Service status
journalctl -u nginx -f                       # Follow service logs
```

## My Daily Workflow Commands

```bash
# Deploy new code
cd /app && git pull origin main && npm run build && pm2 restart app

# Check what's eating disk
du -sh /* 2>/dev/null | sort -hr | head -10

# Quick backup
tar -czvf backup-$(date +%Y%m%d).tar.gz important_folder/

# Monitor a deploy
pm2 logs app --lines 50

# Fix permissions after deploy
chown -R www-data:www-data /app/public
chmod -R 755 /app/public

# Quick port check
ss -tlnp | grep -E '(3000|8080|443)'

# Find and delete old logs (>30 days)
find /var/log/app -name "*.log" -mtime +30 -delete
```