The 30 Linux Commands I Use Every Day on My VPS
I manage a $5 VPS that runs 5 services. These are the commands I can't live without.

File Operations
# Find large files (when disk is full)
du -sh * | sort -rh | head -10
# Output:
# 4.2G  node_modules/
# 512M  logs/
# 128M  data.db

# Find files modified in the last N days
find /var/log -mtime -7 -name "*.log" -ls

# Quick file search
fzf                   # Fuzzy finder; install it, thank me later
locate "config.json"  # Instant file search (updatedb runs daily)

# Watch a file for changes (live log tailing)
tail -f /var/log/nginx/error.log
# With highlights:
tail -f /var/log/app.log | grep --color=always -E "ERROR|WARN"

# Compare two files
diff -u file1.json file2.json
sdiff -s file1 file2  # Side-by-side comparison
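tail -f watches a single file; to watch a whole directory for changes I reach for inotifywait from the inotify-tools package. A minimal sketch, with the watched path as a placeholder:

# Requires inotify-tools; prints an event line for each create/modify/delete
inotifywait -m -e create,modify,delete /var/www/uploads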
Process Management

# What's eating my CPU?
htop  # Better top (install it!)
# Or: ps aux --sort=-%cpu | head -10

# What's eating my RAM?
ps aux --sort=-%mem | head -10
free -h  # Memory overview

# Find process by port
ss -tlnp | grep :3000
# Output: LISTEN 0 128 *:3000 users:(("node",pid=12345))

# Kill processes by name
pkill -f "node server.js"
kill $(lsof -ti:3000)  # Kill whatever is on port 3000

# Process tree
pstree -p      # Shows parent-child relationships
pgrep -a node  # All Node.js processes with args

# Background & foreground
npm start &
jobs    # List background jobs
fg %1   # Bring job 1 to foreground
Ctrl+Z  # Suspend current job
bg %1   # Resume in background

# nohup = survives terminal close
nohup npm start > app.log 2>&1 &
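pkill and kill send SIGTERM by default, which a hung process can ignore. Here's a minimal sketch of a graceful-then-forceful helper; the function name stop_port and the 5-second grace period are my own choices:

# Hypothetical helper: SIGTERM first, escalate to SIGKILL after a grace period
stop_port() {
  local pids
  pids=$(lsof -ti:"$1")
  [ -z "$pids" ] && return 0  # nothing listening on that port
  kill -TERM $pids            # polite shutdown request
  sleep 5
  for p in $pids; do
    kill -0 "$p" 2>/dev/null && kill -KILL "$p"  # still alive? force it
  done
}
# Usage: stop_port 3000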
Network Debugging

# Is the port open?
curl -I http://localhost:3000
ss -tlnp | grep :3000

# DNS check
dig example.com +short
nslookup example.com

# Connection test (is this host reachable?)
ping -c 3 google.com
nc -zv github.com 443      # Scan a single port
nc -zv example.com 80-100  # Port range

# HTTP debugging (see headers, redirects)
curl -v https://api.example.com/data
curl -I https://example.com  # Headers only
curl -sI -o /dev/null -w "%{http_code} %{time_total}s\n" URL  # Status + time

# Follow redirects
curl -L https://bit.ly/something

# POST request
curl -X POST https://api.example.com/users \
  -H "Content-Type: application/json" \
  -d '{"name": "Alex"}'

# Download file
wget -q URL -O output.txt
curl -sL URL -o output.txt

# Network interfaces + IPs
ip addr show
hostname -I  # Just IP addresses
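After a deploy I don't want one-off checks, I want curl in a loop until the service answers. A hedged sketch; the /health endpoint and the 30-attempt budget are assumptions about your app:

# Hypothetical readiness poll: retry until the endpoint returns 200 or give up
url="http://localhost:3000/health"
for i in $(seq 1 30); do
  code=$(curl -s -o /dev/null -w "%{http_code}" "$url")
  [ "$code" = "200" ] && echo "healthy after ${i} tries" && break
  sleep 1
done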
Disk & Storage

# Am I running out of space?
df -h  # Human-readable disk usage
du -sh /* | sort -rh | head -10  # Biggest directories

# Find large files (>100MB)
find / -type f -size +100M -exec ls -lh {} \; 2>/dev/null | sort -k5 -rh

# Clean up
rm -rf node_modules/     # Obvious
npm cache clean --force  # Free up npm cache
docker system prune -af  # Docker cleanup (careful!)

# Interactive disk usage explorer
ncdu  # Amazing tool

# Inode check (lots of small files?)
df -i  # If IUse% is 100%, you're out of inodes, not space
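Most of my disk emergencies trace back to old logs. A hedged cleanup sketch; the path and 30-day cutoff are assumptions, so run the -print line first to see what would go:

# Dry run: list rotated logs older than 30 days
find /var/log -name "*.gz" -mtime +30 -print
# Same match, actually delete
find /var/log -name "*.gz" -mtime +30 -delete

# Empty a huge live log without breaking the process writing to it
truncate -s 0 /var/log/app.log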
Text Processing (The Unix Way)

# Grep power user
grep -r "TODO" src/ --include="*.ts"  # Recursive, filtered
grep -n "error" app.log | tail -20    # Line numbers
grep -c "" file.txt                   # Count lines
grep -E "(error|warn|fail)" log.txt   # Regex OR
grep -v "^#" config.ini               # Exclude comments
grep -A 5 -B 2 "exception" error.log  # Context lines

# Sed quick edits
sed -i 's/old/new/g' file.txt  # Replace all occurrences
sed -i '42d' file.txt          # Delete line 42
sed -n '10,20p' file.txt       # Print lines 10-20
sed -i '1i\# Header' file.txt  # Insert at line 1

# Awk for column processing
awk '{print $1, $NF}' access.log           # First + last column
awk '{sum+=$NF} END {print sum}' nums.txt  # Sum last column
awk -F',' '{print $2}' csv.csv             # CSV parsing
awk 'length > 80' longlines.txt            # Filter long lines

# Sort & unique
sort file.txt | uniq     # Deduplicate
sort -rn numbers.txt     # Reverse numeric
sort -k2 -t',' file.csv  # Sort by column 2

# Quick JSON manipulation (with jq)
jq '.users[].name' data.json
jq '.[] | select(.age > 30)' data.json
echo '{"key":"value"}' | jq '.key'  # Extract value
System Monitoring

# Real-time monitoring
htop     # Processes (interactive)
iotop    # Disk I/O
iftop    # Network bandwidth
nethogs  # Network per-process

# One-liner stats
echo "=== CPU ===" && nproc && echo "=== RAM ===" && free -h | grep Mem && echo "=== Disk ===" && df -h / && echo "=== Uptime ===" && uptime

# Last reboot
who -b
last reboot | head -5

# Running systemd services
systemctl list-units --state=running --type=service

# Journalctl (systemd logs)
journalctl -u nginx -f           # Follow nginx logs
journalctl --since "1 hour ago"  # Recent logs
journalctl -p err                # Only errors
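For a poor man's dashboard, wrap any of these in watch. A minimal sketch; the 2-second interval is arbitrary:

# Refresh memory and root-disk stats every 2 seconds (Ctrl+C to quit)
watch -n 2 'free -h | grep Mem; df -h /'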
Git Quick Commands

# Status super-view
git status -sb  # Short branch + status

# What changed?
git diff --stat       # File summary
git diff --name-only  # Just filenames

# Who changed this?
git blame -L 10,20 file.ts         # Lines 10-20 only
git log --oneline --author="Alex"  # My commits

# Undo mistakes
git checkout -- file.ts  # Discard changes to file
git reset HEAD~1         # Undo last commit (keep changes)
git commit --amend       # Edit last commit message

# Stash management
git stash push -m "WIP feature X"
git stash list
git stash pop

# Branch cleanup
git branch -vv  # Show tracking info
git branch --merged | grep -v '\*' | xargs git branch -d  # Delete merged branches
SSH & Remote

# Quick connection
ssh user@host "command"  # Run command remotely

# Copy files
scp file.txt user@host:/path/
rsync -avz local/ user@host:/remote/  # Sync directories

# SSH tunnel (access remote port locally)
ssh -L 8080:localhost:3000 user@host
# Now localhost:8080 → remote:3000

# Key-based auth (no passwords!)
ssh-copy-id user@host  # Copy your public key
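Typing user@host gets old fast. A minimal ~/.ssh/config sketch; the alias vps, the address, and the user are placeholders for your own server:

# ~/.ssh/config (hypothetical entry)
Host vps
    HostName 203.0.113.10
    User deploy
    ServerAliveInterval 60

# Now `ssh vps` just works, and so do scp and rsync with the alias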
My .bashrc Aliases

# Add these to ~/.bashrc
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
alias ..='cd ..'
alias ...='cd ../..'
alias gs='git status -sb'
alias gl='git log --oneline --graph -15'
alias gp='git push'
alias gd='git diff'
alias gc='git checkout'
alias nb='npm run build'
alias ns='npm run start'
alias nt='npm test'
alias dc='docker compose'
alias ports='ss -tlnp'
alias myip='curl -s ifconfig.me'
alias weather='curl -s wttr.in/?format=3'

# Quick functions
mkcd() { mkdir -p "$1" && cd "$1"; }

extract() {
  if [ -f "$1" ]; then
    case "$1" in
      *.tar.gz) tar xzf "$1" ;;
      *.tar.xz) tar xJf "$1" ;;
      *.zip)    unzip "$1" ;;
      *)        echo "Unknown format" ;;
    esac
  fi
}
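Reload with source ~/.bashrc after editing. One more function I'd add; the name serve is my own invention and it assumes python3 is installed:

# Hypothetical helper: serve the current directory over HTTP
serve() { python3 -m http.server "${1:-8000}"; }
# Usage: serve        -> http://localhost:8000
#        serve 9090   -> custom port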
# My "server health check" one-liner
echo "=== $(date) ===" && \
echo "Uptime: $(uptime -p)" && \
echo "CPU: $(top -bn1 | grep "Cpu(s)" | awk '{print $2}')" && \
echo "RAM: $(free -h | awk '/Mem:/{print $3"/"$2}') " && \
echo "Disk: $(df -h / | awk 'NR==2{print $3"/"$2" ("$5")"}')" && \
echo "Processes: $(ps aux | wc -l)" && \
echo "Node procs: $(pgrep -c node)" && \
echo "Ports: $(ss -tlnp | wc -l) listening"
# My "server health check" one-liner
echo "=== $(date) ===" && \
echo "Uptime: $(uptime -p)" && \
echo "CPU: $(top -bn1 | grep "Cpu(s)" | awk '{print $2}')" && \
echo "RAM: $(free -h | awk '/Mem:/{print $3"/"$2}') " && \
echo "Disk: $(df -h / | awk 'NR==2{print $3"/"$2" ("$5")"}')" && \
echo "Processes: $(ps aux | wc -l)" && \
echo "Node procs: $(pgrep -c node)" && \
echo "Ports: $(ss -tlnp | wc -l) listening"
# My "server health check" one-liner
echo "=== $(date) ===" && \
echo "Uptime: $(uptime -p)" && \
echo "CPU: $(top -bn1 | grep "Cpu(s)" | awk '{print $2}')" && \
echo "RAM: $(free -h | awk '/Mem:/{print $3"/"$2}') " && \
echo "Disk: $(df -h / | awk 'NR==2{print $3"/"$2" ("$5")"}')" && \
echo "Processes: $(ps aux | wc -l)" && \
echo "Node procs: $(pgrep -c node)" && \
echo "Ports: $(ss -tlnp | wc -l) listening"
# Sample output:
=== Sat May 16 02:49:00 CST 2026 ===
Uptime: up 3 weeks, 2 days, 14 hours
CPU: 2.3%
RAM: 1.8G/3.7G
Disk: 37G/59G (65%)
Processes: 187
Node procs: 4
Ports: 12 listening
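To keep history, I'd drop the one-liner into a script and let cron run it. A hedged sketch; the path ~/bin/health.sh and the hourly schedule are my choices:

# Save the one-liner as ~/bin/health.sh, then:
chmod +x ~/bin/health.sh

# crontab -e: append a health snapshot to a log every hour
0 * * * * $HOME/bin/health.sh >> $HOME/health.log 2>&1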
One glance tells me everything I need to know about my server's health.

What are your must-have Linux commands? Share your favorites! Follow @armorbreak for more sysadmin tips.