// classifier.go
package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"net/http/httputil"
	"regexp"
	"strings"
)
  12. func passAndLearn(resp *http.Response) error {
  13. ProxyFlow.response = resp
  14. ProxyFlow.seniority++
  15. req := ProxyFlow.request
  16. switch {
  17. case resp.StatusCode == 401:
  18. log.Println("401: We don't want to store credentials")
  19. case resp.StatusCode > 399:
  20. buf := bytes.NewBufferString(BlockMessage)
  21. resp.Body = ioutil.NopCloser(buf)
  22. resp.Status = "403 Forbidden"
  23. resp.StatusCode = 403
  24. resp.Header["Content-Length"] = []string{fmt.Sprint(buf.Len())}
  25. resp.Header.Set("Content-Encoding", "none")
  26. log.Println("Filing inside bad class")
  27. feedRequest(req, "BAD")
  28. ControPlane.StatsTokens <- "LEARN-BAD"
  29. default:
  30. log.Println("Filing inside Good Class: ", resp.StatusCode)
  31. feedRequest(req, "GOOD")
  32. }
  33. return nil
  34. }
  35. func blockAndlearn(resp *http.Response) error {
  36. ProxyFlow.response = resp
  37. ProxyFlow.seniority++
  38. req := ProxyFlow.request
  39. buf := bytes.NewBufferString(BlockMessage)
  40. resp.Body = ioutil.NopCloser(buf)
  41. resp.Status = "403 Forbidden"
  42. resp.StatusCode = 403
  43. resp.Header["Content-Length"] = []string{fmt.Sprint(buf.Len())}
  44. resp.Header.Set("Content-Encoding", "none")
  45. switch {
  46. case resp.StatusCode == 401:
  47. log.Println("401: We don't want to store credentials")
  48. case resp.StatusCode > 399:
  49. log.Println("Filing inside bad class")
  50. feedRequest(req, "BAD")
  51. default:
  52. log.Println("Filing inside Good Class: ", resp.StatusCode)
  53. ControPlane.StatsTokens <- "LEARN-GOOD"
  54. feedRequest(req, "GOOD")
  55. }
  56. return nil
  57. }
  58. func sanitizeHeaders(s string) string {
  59. re := regexp.MustCompile(`[[:alnum:]]{4,32}|([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})|([0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12})`)
  60. matched := re.FindAllString(s, -1)
  61. uMatched := Unique(matched)
  62. tmpSt := strings.ToLower(strings.Join(uMatched, " "))
  63. log.Println("Matched: " + tmpSt)
  64. return tmpSt
  65. }
  66. func feedRequest(req *http.Request, class string) {
  67. feed := formatRequest(req)
  68. if class == "BAD" {
  69. log.Println("Feeding BAD token: ", feed)
  70. ControPlane.BadTokens <- sanitizeHeaders(feed)
  71. }
  72. if class == "GOOD" {
  73. log.Println("Feeding GOOD Token:", feed)
  74. ControPlane.GoodTokens <- sanitizeHeaders(feed)
  75. }
  76. }
  77. func formatRequest(req *http.Request) string {
  78. ingestBody := req.ContentLength < 2048 && req.ContentLength > 1
  79. log.Println("Ingesting the body: ", ingestBody)
  80. requestDump, err := httputil.DumpRequest(req, ingestBody)
  81. if err != nil {
  82. fmt.Println(err)
  83. }
  84. return fmt.Sprintf("%s\n", requestDump)
  85. }
  86. //Unique returns unique elements in the string
  87. func Unique(slice []string) []string {
  88. // create a map with all the values as key
  89. uniqMap := make(map[string]struct{})
  90. for _, v := range slice {
  91. uniqMap[v] = struct{}{}
  92. }
  93. // turn the map keys into a slice
  94. uniqSlice := make([]string, 0, len(uniqMap))
  95. for v := range uniqMap {
  96. uniqSlice = append(uniqSlice, v)
  97. }
  98. return uniqSlice
  99. }