Published in: Bash

# bash4 - Use associative arrays to find duplicate files.
# Generates a file "rmdups" which contains rm commands to remove the duplicates.
# Check that file, then source it from the current (!) directory
# to actually remove the duplicates. Works equally well with the (deprecated)
# md5sum program instead of sha*sum.

unset flist; declare -A flist

while read -r sum fname; do
    if [[ ${flist[$sum]} ]]; then
        # Hash already seen: this file is a duplicate of the first one recorded.
        printf 'rm -- "%s" # Same as >%s<\n' "$fname" "${flist[$sum]}"
    else
        # First occurrence of this hash: remember the file name.
        flist[$sum]="$fname"
    fi
done < <(find . -type f -exec sha256sum {} +) >rmdups
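
A minimal usage sketch, assuming the snippet above is saved as finddups.sh (the file name is illustrative). Because find is run with a relative path ("."), the generated rmdups must be sourced from the same directory it was created in:

cd /path/to/dir/to/deduplicate   # illustrative path: the directory you want to scan
bash finddups.sh                 # writes "rm" commands for duplicate files to ./rmdups
less rmdups                      # review what would be deleted before doing anything
. ./rmdups                       # source it from this same directory to remove the duplicates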