GoHash.go

package main

import (
    "bufio"
    "crypto/sha256"
    "encoding/hex"
    "encoding/json"
    "fmt"
    "io"
    "io/ioutil"
    "log"
    "os"
    "path/filepath"
    "runtime"
    "sync"
    "time"

    "code.cloudfoundry.org/bytefmt"
    flag "github.com/spf13/pflag"
)
// Fdhashes holds all hash information about one folder.
type Fdhashes struct {
    Path   string
    Hashes map[string]string
    Times  map[string]time.Time
    Dirty  bool
}
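// For illustration only: serialized with encoding/json (as saveAllHashFiles
// does), a .fdhashes3 file for this struct looks roughly like the following.
// The file name, hash, and timestamp are made-up example values.
//
//   {"Path":"photos/","Hashes":{"img001.jpg":"9f86d081..."},
//    "Times":{"img001.jpg":"2021-03-01T12:00:00Z"},"Dirty":false}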
var hashes map[string]Fdhashes
var wg sync.WaitGroup
var mu sync.RWMutex
var driveLetter string
var rewrite bool
var compare bool
func init() {
    flag.BoolVarP(&rewrite, "rewrite", "r", false, "rewrite all .fdhashes3 files.")
    flag.BoolVarP(&compare, "compare", "c", false, "compare all file hashes and write a comparison report.")
}
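// main hashes a single file, or walks a folder tree and maintains the
// .fdhashes3 files, or (with -c) writes a duplicate report.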
func main() {
    log.Println("starting GoHash")
    runtime.GOMAXPROCS(2)
    hashes = make(map[string]Fdhashes)
    flag.Parse()
    myFile := flag.Arg(0)
    file, err := os.Stat(myFile)
    if os.IsNotExist(err) {
        log.Fatalln("file does not exist:", myFile)
    }
    if err != nil {
        log.Fatalln(err)
    }
    if file.IsDir() {
        log.Println("start with folder:", myFile)
        driveLetter = ""
        if runtime.GOOS == "windows" {
            driveLetter = filepath.VolumeName(myFile) + "/"
        }
        if compare {
            compareFolder(myFile)
        } else {
            processFolder(myFile)
            fmt.Println("waiting")
            wg.Wait()
            saveAllHashFiles()
        }
    } else {
        log.Printf("file %s has hash %s\n", myFile, getSha256Hash(myFile))
    }
    log.Println("done")
}
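// getSha256Hash streams the file through SHA-256 and returns the digest as a
// hex string. Any I/O error is fatal.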
func getSha256Hash(fileStr string) string {
    f, err := os.Open(fileStr)
    if err != nil {
        log.Fatal(err)
    }
    defer f.Close()
    h := sha256.New()
    if _, err := io.Copy(h, f); err != nil {
        log.Fatal(err)
    }
    return hex.EncodeToString(h.Sum(nil))
}
var lock1 = sync.RWMutex{}
var lock2 = sync.RWMutex{}
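// outputHash computes (or reuses) the hash of a single file and records it in
// the per-folder Fdhashes entry. The hash is recomputed when the file name is
// not known yet or its modification time has changed.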
func outputHash(fileStr string) {
    var hashFile Fdhashes
    doHash := true
    defer wg.Done()
    dir, fileName := filepath.Split(fileStr)
    if fileName == ".fdhashes3" {
        return
    }
    // check whether a hash for this file is already present
    mu.Lock()
    hashFile, ok := hashes[dir]
    if !ok {
        _, err := os.Stat(dir + ".fdhashes3")
        if os.IsNotExist(err) {
            hashFile = Fdhashes{Path: dir, Hashes: make(map[string]string), Times: make(map[string]time.Time), Dirty: true}
        } else {
            hashFile = loadHashfile(dir + ".fdhashes3")
        }
        hashes[dir] = hashFile
    }
    lock1.RLock()
    _, ok = hashFile.Hashes[fileName]
    lock1.RUnlock()
    mu.Unlock()
    doHash = !ok
    // check whether the modification time is unchanged
    file, err := os.Stat(fileStr)
    if err != nil {
        log.Printf("can't stat file %s: %v\n", fileStr, err)
        return
    }
    modTime := file.ModTime()
    lock2.RLock()
    savedTime, ok := hashFile.Times[fileName]
    lock2.RUnlock()
    if !modTime.Equal(savedTime) || !ok {
        doHash = true
    }
    if doHash {
        log.Printf("starting %s\n", fileStr)
        hash := getSha256Hash(fileStr)
        log.Printf("ready %s\n", fileStr)
        mu.Lock()
        lock1.Lock()
        hashFile.Hashes[fileName] = hash
        lock1.Unlock()
        lock2.Lock()
        hashFile.Times[fileName] = modTime
        lock2.Unlock()
        saveHashfile(&hashFile)
        hashes[dir] = hashFile
        mu.Unlock()
        log.Printf("file \"%s\" has hash \"%s\"\n", fileStr, hash)
    }
}
var count int
var addWork int
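// processFolder walks the folder tree and starts a hashing goroutine for every
// regular file. After roughly 1000 scheduled files it waits for the running
// goroutines and flushes the dirty hash files.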
func processFolder(folder string) {
    count = 0
    addWork = 0
    err := filepath.Walk(folder, func(path string, info os.FileInfo, err error) error {
        if err != nil {
            return err
        }
        count++
        if (count % 100) == 0 {
            fmt.Print(".")
        }
        if (count % 10000) == 0 {
            fmt.Println()
        }
        filename := info.Name()
        if filename[0:1] != "." {
            if info.IsDir() {
                fmt.Println(path)
            }
            if !info.IsDir() {
                addWork++
                wg.Add(1)
                go outputHash(path)
                if addWork > 1000 {
                    fmt.Println("x")
                    wg.Wait()
                    saveAllHashFiles()
                    addWork = 0
                }
            }
        }
        return nil
    })
    if err != nil {
        panic(err)
    }
}
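// saveHashfile only marks the folder entry as dirty; the actual write to disk
// happens later in saveAllHashFiles.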
func saveHashfile(hashFile *Fdhashes) {
    hashFile.Dirty = true
}
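// saveAllHashFiles writes every dirty Fdhashes entry as JSON to the .fdhashes3
// file of its folder and rebuilds the in-memory map with the now-clean entries.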
func saveAllHashFiles() {
    hashList := make([]Fdhashes, 0)
    for _, hashFile := range hashes {
        if hashFile.Dirty {
            hashFile.Dirty = false
            b, err := json.Marshal(hashFile)
            if err != nil {
                fmt.Println(err)
                return
            }
            err = ioutil.WriteFile(hashFile.Path+".fdhashes3", b, 0644)
            if err != nil {
                panic(err)
            }
            hashList = append(hashList, hashFile)
        }
    }
    hashes = make(map[string]Fdhashes)
    for _, hashFile := range hashList {
        hashes[hashFile.Path] = hashFile
    }
}
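// loadHashfile reads a .fdhashes3 file from disk. With --rewrite the stored
// content is ignored and a fresh, empty entry is returned, which causes all
// hashes of that folder to be recomputed.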
func loadHashfile(fileStr string) Fdhashes {
    dir, _ := filepath.Split(fileStr)
    data := Fdhashes{Path: dir, Hashes: make(map[string]string), Times: make(map[string]time.Time), Dirty: false}
    if !rewrite {
        file, err := ioutil.ReadFile(fileStr)
        if err != nil {
            panic(err)
        }
        err = json.Unmarshal(file, &data)
        if err != nil {
            log.Printf("can't parse hash file %s", fileStr)
        }
    }
    if data.Path != dir {
        data.Path = dir
        data.Dirty = true
    }
    return data
}
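// compareFolder loads all hash files below the folder, builds an index from
// hash to file paths, and writes every group of files with an identical hash
// to report.txt, together with the disk space that could be saved.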
func compareFolder(folder string) {
    loadAllHashFiles(folder)
    index := make(map[string][]string)
    for _, hashFile := range hashes {
        for filename, hash := range hashFile.Hashes {
            values := index[hash]
            if values == nil {
                values = make([]string, 0)
            }
            values = append(values, fmt.Sprintf("%s/%s", hashFile.Path, filename))
            index[hash] = values
        }
    }
    size := len(index)
    f, err := os.Create("report.txt")
    check(err)
    defer f.Close()
    w := bufio.NewWriter(f)
    count := 0
    var filesize int64
    fileCount := 0
    for _, values := range index {
        count++
        if count%100 == 0 {
            fmt.Printf("%d (%d) checking\n", count, size)
        }
        if len(values) > 1 {
            info, err := os.Stat(values[0])
            if err == nil {
                w.WriteString(fmt.Sprintf("found identical hash: size: %d\n", info.Size()))
                filesize += int64(len(values)-1) * info.Size()
            }
            fileCount += len(values) - 1
            for _, filename := range values {
                w.WriteString(fmt.Sprintf(" %s\n", filename))
            }
            w.Flush()
        }
    }
    w.WriteString(fmt.Sprintf("can save up to %s across %d files\n", bytefmt.ByteSize(uint64(filesize)), fileCount))
    w.Flush()
}
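// compareFolder2 is an alternative implementation of compareFolder that
// searches all other folders for each file's hash. It is not called anywhere
// in this file and is kept for reference.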
func compareFolder2(folder string) {
    loadAllHashFiles(folder)
    size := len(hashes)
    f, err := os.Create("report.txt")
    check(err)
    w := bufio.NewWriter(f)
    count := 0
    for _, hashFile := range hashes {
        count++
        fmt.Printf("%d (%d) checking: %s\n", count, size, hashFile.Path)
        for filename, hash := range hashFile.Hashes {
            if value, found := search(hash, filename, hashFile.Path); found {
                w.WriteString("found identical hash\n")
                w.WriteString(fmt.Sprintf(" src: %s/%s\n", hashFile.Path, filename))
                w.WriteString(fmt.Sprintf(" dest: %s\n", value))
                w.Flush()
            }
        }
    }
}
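// search looks for srcHash in all loaded folders and returns the matching
// paths separated by semicolons. Entries that share the file name or the
// folder with the source file are skipped.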
func search(srcHash string, exFilename string, exFilepath string) (value string, found bool) {
    for _, hashFile := range hashes {
        for filename, hash := range hashFile.Hashes {
            if (filename != exFilename) && (hashFile.Path != exFilepath) {
                if hash == srcHash {
                    value += fmt.Sprintf("%s/%s;", hashFile.Path, filename)
                    found = true
                }
            }
        }
    }
    return
}
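// loadAllHashFiles walks the folder tree and loads the .fdhashes3 file of
// every directory into the global hashes map, creating a fresh entry for
// directories that do not have one yet.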
func loadAllHashFiles(folder string) {
    count = 0
    addWork = 0
    err := filepath.Walk(folder, func(path string, info os.FileInfo, err error) error {
        if err != nil {
            return err
        }
        if info.IsDir() {
            fmt.Print(".")
            hashFile, ok := hashes[path]
            if !ok {
                _, err := os.Stat(path + "/.fdhashes3")
                if os.IsNotExist(err) {
                    hashFile = Fdhashes{Path: path, Hashes: make(map[string]string), Times: make(map[string]time.Time), Dirty: true}
                } else {
                    hashFile = loadHashfile(path + "/.fdhashes3")
                }
                hashes[path] = hashFile
            }
        }
        return nil
    })
    if err != nil {
        panic(err)
    }
    fmt.Printf("\nfound %d hash files.\n", len(hashes))
}
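// check panics on any non-nil error; used for errors that should abort the run
// immediately.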
func check(e error) {
    if e != nil {
        panic(e)
    }
}