classifier.go 2.6 KB

package main

import (
    "bytes"
    "fmt"
    "io/ioutil"
    "log"
    "net/http"
    "net/http/httputil"
    "regexp"
    "strings"
)
// passAndLearn lets the upstream response pass through and files the
// originating request into the GOOD or BAD class based on the status code.
func passAndLearn(resp *http.Response) error {
    ProxyFlow.response = resp
    ProxyFlow.seniority++
    req := ProxyFlow.request
    switch {
    case resp.StatusCode == 401:
        log.Println("401: We don't want to store credentials")
    case resp.StatusCode > 399:
        // Error response: replace the body with the block page and learn the request as BAD.
        buf := bytes.NewBufferString(BlockMessage)
        resp.Body = ioutil.NopCloser(buf)
        resp.Status = "403 Forbidden"
        resp.StatusCode = 403
        resp.Header["Content-Length"] = []string{fmt.Sprint(buf.Len())}
        resp.Header.Set("Content-Encoding", "none")
        log.Println("Filing inside bad class")
        feedRequest(req, "BAD")
        ControPlane.StatsTokens <- "LEARN-BAD"
    default:
        log.Println("Filing inside Good Class: ", resp.StatusCode)
        feedRequest(req, "GOOD")
    }
    return nil
}
// blockAndlearn always replaces the upstream response with the block page,
// but still classifies the originating request from the upstream status code.
func blockAndlearn(resp *http.Response) error {
    ProxyFlow.response = resp
    ProxyFlow.seniority++
    req := ProxyFlow.request
    // Keep the upstream status code before overwriting it with 403, so the
    // classification below still sees the real outcome.
    upstreamStatus := resp.StatusCode
    buf := bytes.NewBufferString(BlockMessage)
    resp.Body = ioutil.NopCloser(buf)
    resp.Status = "403 Forbidden"
    resp.StatusCode = 403
    resp.Header["Content-Length"] = []string{fmt.Sprint(buf.Len())}
    resp.Header.Set("Content-Encoding", "none")
    switch {
    case upstreamStatus == 401:
        log.Println("401: We don't want to store credentials")
    case upstreamStatus > 399:
        log.Println("Filing inside bad class")
        feedRequest(req, "BAD")
    default:
        log.Println("Filing inside Good Class: ", upstreamStatus)
        ControPlane.StatsTokens <- "LEARN-GOOD"
        feedRequest(req, "GOOD")
    }
    return nil
}
// sanitizeHeaders extracts the tokens worth learning from a dumped request:
// plain words of 4-32 letters, IPv4 addresses, the "{/path}" marker produced
// by formatRequest, and UUIDs. The result is lowercased and the braces stripped.
func sanitizeHeaders(s string) string {
    re := regexp.MustCompile(`[a-zA-Z]{4,32}|([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})|([{][/].*[}])|([0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12})`)
    matched := re.FindAllString(s, -1)
    tmpSt := strings.ToLower(strings.Join(matched, " "))
    tmpSt = strings.ReplaceAll(tmpSt, "{", "")
    tmpSt = strings.ReplaceAll(tmpSt, "}", "")
    log.Println("Matched: " + tmpSt)
    return tmpSt
}
// feedRequest dumps and sanitizes the request, then pushes the resulting
// token string onto the BAD or GOOD learning channel of the control plane.
func feedRequest(req *http.Request, class string) {
    feed := formatRequest(req)
    feed = sanitizeHeaders(feed)
    if class == "BAD" {
        log.Println("Feeding BAD token: ", feed)
        ControPlane.BadTokens <- feed
    }
    if class == "GOOD" {
        log.Println("Feeding GOOD Token:", feed)
        ControPlane.GoodTokens <- feed
    }
}
// formatRequest dumps the request, including the body only when it is small
// enough to ingest, and prefixes the dump with the URL path wrapped in braces
// so that sanitizeHeaders can pick the path up as a token.
func formatRequest(req *http.Request) string {
    ingestBody := req.ContentLength < 2048 && req.ContentLength > 1
    log.Println("Ingesting the body: ", ingestBody)
    requestDump, err := httputil.DumpRequest(req, ingestBody)
    if err != nil {
        log.Println(err)
    }
    return fmt.Sprintf("{%s} %s\n", req.URL.Path, requestDump)
}
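
Both passAndLearn and blockAndlearn have the func(*http.Response) error signature that httputil.ReverseProxy expects for its ModifyResponse hook, so they are presumably swapped in depending on whether the proxy is learning (pass-through) or enforcing (blocking). The sketch below is illustrative only, assuming a plain single-host reverse proxy; the runProxy name, the target and blocking parameters, and the :8080 listen address are hypothetical, and the globals used above (ProxyFlow, ControPlane, BlockMessage) are still expected to be defined elsewhere in the project.

// Illustrative wiring sketch, not part of classifier.go.
func runProxy(target string, blocking bool) error {
    upstream, err := url.Parse(target) // requires "net/url"
    if err != nil {
        return err
    }
    proxy := httputil.NewSingleHostReverseProxy(upstream)
    if blocking {
        proxy.ModifyResponse = blockAndlearn // enforce: always serve the block page
    } else {
        proxy.ModifyResponse = passAndLearn // learn-only: let responses through
    }
    log.Println("proxying to", target, "blocking:", blocking)
    return http.ListenAndServe(":8080", proxy)
}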