Skip to content

Commit

Permalink
Add httpdump
Browse files Browse the repository at this point in the history
  • Loading branch information
ac0d3r committed May 7, 2022
1 parent 7c2e6f3 commit f781275
Show file tree
Hide file tree
Showing 11 changed files with 243 additions and 108 deletions.
9 changes: 5 additions & 4 deletions internal/runner/worker.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ import (
"github.com/Buzz2d0/xssfinder/pkg/chrome/cookies"
"github.com/Buzz2d0/xssfinder/pkg/chrome/xss/checker"
"github.com/Buzz2d0/xssfinder/pkg/chrome/xss/dom"
"github.com/Buzz2d0/xssfinder/pkg/httpdump"
"github.com/Buzz2d0/xssfinder/pkg/notify"
"github.com/Buzz2d0/xssfinder/pkg/proxy"
"github.com/chromedp/chromedp"
"github.com/gokitx/pkgs/limiter"
"github.com/sirupsen/logrus"
Expand All @@ -38,7 +38,7 @@ func NewWorker(limitNum int64,
}
}

func (w *Worker) Start(ctx context.Context, C <-chan proxy.Request) error {
func (w *Worker) Start(ctx context.Context, C <-chan httpdump.Request) error {
ctx, cancel := context.WithCancel(ctx)
defer cancel()

Expand All @@ -54,7 +54,7 @@ func (w *Worker) Start(ctx context.Context, C <-chan proxy.Request) error {
case task := <-C:
w.Allow()
logrus.Infoln("[worker] received task:", task.URL, task.Response.Status)
go func(ctx context.Context, req proxy.Request) {
go func(ctx context.Context, req httpdump.Request) {
defer w.Done()
if err := w.scan(ctx, req); err != nil {
logrus.Errorln("[worker] scan task error:", err)
Expand All @@ -64,7 +64,7 @@ func (w *Worker) Start(ctx context.Context, C <-chan proxy.Request) error {
}
}

func (w *Worker) scan(ctx context.Context, req proxy.Request) error {
func (w *Worker) scan(ctx context.Context, req httpdump.Request) error {
var preTasks chromedp.Tasks
preTasks = w.preActions[:]
if len(req.Cookies) != 0 {
Expand Down Expand Up @@ -114,6 +114,7 @@ func (w *Worker) checkDomPoc(ctx context.Context, points []dom.VulPoint, preActi
}

func (w *Worker) reportDom(url string, point dom.VulPoint) {
// TODO report
p, _ := json.MarshalIndent(point, "", " ")
logrus.Infof("[report] url: %s\n\ttype: dom-based\n\tdesc: %s\n", url, string(p))
if w.notifier != nil {
Expand Down
4 changes: 2 additions & 2 deletions pkg/chrome/cookies/cookies.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,14 @@ package cookies

import (
"context"
"net/http"

"github.com/Buzz2d0/xssfinder/pkg/httpdump"
"github.com/chromedp/cdproto/cdp"
"github.com/chromedp/cdproto/network"
"github.com/chromedp/chromedp"
)

func SetWithHttpCookie(c []*http.Cookie) chromedp.Action {
func SetWithHttpCookie(c []httpdump.Cookie) chromedp.Action {
cookies := make([]*network.CookieParam, len(c))
for i := range c {

Expand Down
66 changes: 66 additions & 0 deletions pkg/httpdump/http.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
package httpdump

import (
"bytes"
"context"
"crypto/tls"
"io"
"net/http"
"time"

"github.com/gokitx/pkgs/urlx"
"github.com/sirupsen/logrus"
)

var (
	// defaultClient is the shared HTTP client used by Do.
	// TLS certificate verification is deliberately disabled so that
	// dumped requests against hosts with self-signed or mismatched
	// certificates can still be replayed.
	// NOTE(review): no Client.Timeout is set here; cancellation relies
	// entirely on the per-request context passed in Do — confirm that is
	// intended for all call sites.
	defaultClient *http.Client = &http.Client{
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{
				InsecureSkipVerify: true,
			},
		},
	}
)

// Do replays req as a plain HTTP request, bounded by timeout, and returns
// a Response carrying the reply's status code, headers, and body.
//
// TLS certificate errors are ignored (see defaultClient). On a transport
// error the zero Response and the error are returned; a body-read error is
// only logged and the Response is returned without a body.
func Do(req Request, timeout time.Duration) (Response, error) {
	ctx, cancel := context.WithTimeout(context.Background(), timeout)
	defer cancel()

	resp := Response{}
	// Bug fix: the original called hreq.WithContext(ctx) and discarded the
	// returned request, so the timeout was never attached. Bind the context
	// at construction time instead.
	hreq, err := http.NewRequestWithContext(ctx, req.Method, req.URL, nil)
	if err != nil {
		return resp, err
	}
	hreq.Header = req.Header.Clone()
	if req.Method != http.MethodGet {
		// NOTE(review): PostForm is only copied onto the outgoing request,
		// not encoded into a request body, so non-GET replays currently send
		// no payload — confirm whether that is intended.
		hreq.PostForm = urlx.CloneUrlValues(req.PostForm)
	}

	hresp, err := defaultClient.Do(hreq)
	if err != nil {
		return resp, err
	}
	// Bug fix: always close the body. The original only closed it inside a
	// branch guarded by `resp.Body != nil` — which is always nil at that
	// point — so the body was never read and the connection leaked.
	defer hresp.Body.Close()

	resp.Status = hresp.StatusCode
	resp.Header = hresp.Header.Clone()

	// Read the body through a pooled scratch buffer to limit allocations.
	buf := bufferPool.Get().(*bytes.Buffer)
	buf.Reset()
	defer func() {
		buf.Reset()
		bufferPool.Put(buf)
	}()

	if _, err := io.Copy(buf, hresp.Body); err != nil {
		logrus.Errorln("[httpdump] copy resp.body error:", err)
		return resp, nil
	}
	// Bug fix: copy the bytes out. buf.Bytes() aliases the pooled buffer,
	// which is reset and reused above, so returning it directly would let
	// later Do calls clobber this Response's body.
	resp.Body = make([]byte, buf.Len())
	copy(resp.Body, buf.Bytes())
	return resp, nil
}
18 changes: 18 additions & 0 deletions pkg/httpdump/http_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
package httpdump

import (
"net/http"
"testing"
"time"
)

// TestDo exercises Do end to end against a live host and logs the outcome;
// it asserts nothing, serving only as a smoke test (requires network access).
func TestDo(t *testing.T) {
	request := Request{
		Method: http.MethodGet,
		URL:    "https://www.baidu.com",
	}

	response, err := Do(request, time.Second)
	t.Log(response, err)

	t.Log(request)
}
130 changes: 35 additions & 95 deletions pkg/proxy/httputil.go → pkg/httpdump/httpdump.go
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package proxy
package httpdump

import (
"bytes"
Expand All @@ -7,10 +7,9 @@ import (
"net/http"
"net/textproto"
"net/url"
"path/filepath"
"sync"
"time"

"github.com/gokitx/pkgs/slicex"
"github.com/gokitx/pkgs/urlx"
"github.com/sirupsen/logrus"
)
Expand All @@ -24,15 +23,23 @@ var (
)

type Request struct {
Method string
URL string
Header http.Header
Host string
Form url.Values
PostForm url.Values
MultipartForm *multipart.Form
Cookies []*http.Cookie
Response Response
Method string
URL string
Header http.Header
Host string
PostForm url.Values
Cookies []Cookie
Response Response
}

// Cookie is a plain, serializable snapshot of an HTTP cookie, mirroring
// the subset of net/http.Cookie fields used by this package.
type Cookie struct {
	Name     string
	Value    string
	Domain   string
	Path     string
	HttpOnly bool
	Secure   bool
	Expires  time.Time
}

type Response struct {
Expand All @@ -53,9 +60,8 @@ func MakeRequest(req *http.Request) Request {
URL: req.URL.String(),
}
r2.Header = req.Header.Clone()
r2.Form = urlx.CloneUrlValues(req.Form)
req.ParseForm()
r2.PostForm = urlx.CloneUrlValues(req.PostForm)
r2.MultipartForm = cloneMultipartForm(req.MultipartForm)
return r2
}

Expand All @@ -65,13 +71,14 @@ func MakeResponse(req Request, resp *http.Response) Response {
Header: resp.Header.Clone(),
Body: nil,
}
// reponse setcookies
cookies := resp.Cookies()
for i := range cookies {
if cookies[i].Domain == "" {
cookies[i].Domain = resp.Request.Host
}
}
req.Cookies = cookies
req.Cookies = append(req.Cookies, dumpCookies(cookies)...)

if resp.Body != nil {
buf := bufferPool.Get().(*bytes.Buffer)
Expand Down Expand Up @@ -141,85 +148,18 @@ func cloneMultipartFileHeader(fh *multipart.FileHeader) *multipart.FileHeader {
return fh2
}

var (
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types
exts = []string{
".aac",
".abw",
".arc",
".avif",
".avi",
".azw",
".bin",
".bmp",
".bz",
".bz2",
".cda",
".csh",
".css",
".csv",
".doc",
".docx",
".eot",
".epub",
".gz",
".gif",
".ico",
".ics",
".jpeg",
".jpg",
".js",
".json",
".jsonld",
".mid",
".midi",
".mjs",
".mp3",
".mp4",
".mpeg",
".mpkg",
".odp",
".ods",
".odt",
".oga",
".ogv",
".ogx",
".opus",
".otf",
".png",
".pdf",
".ppt",
".pptx",
".rar",
".rtf",
".sh",
".svg",
".swf",
".tar",
".tif ",
".tiff",
".ts",
".ttf",
".txt",
".vsd",
".wav",
".weba",
".webm",
".webp",
".woff",
".woff2",
".xls",
".xlsx",
".xml",
".xul",
".zip",
".3gp",
".3g2",
".7z",
func dumpCookies(c []*http.Cookie) []Cookie {
r := make([]Cookie, len(c))
for i := range c {
r[i] = Cookie{
Name: c[i].Name,
Value: c[i].Value,
Domain: c[i].Domain,
Path: c[i].Path,
HttpOnly: c[i].HttpOnly,
Secure: c[i].Secure,
Expires: c[i].Expires,
}
}
)

func ignoreRequestWithPath(path string) bool {
return slicex.ContainsIn(exts,
filepath.Ext(path))
return r
}
4 changes: 3 additions & 1 deletion pkg/parser/html/html.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import (
const (
inputTag = "input"
scriptTag = "script"
styleTag = "style"
)

func GetParams(r io.Reader) ([]string, error) {
Expand All @@ -33,7 +34,8 @@ func GetParams(r io.Reader) ([]string, error) {
}
}
case scriptTag:
if n.FirstChild != nil {
if n.FirstChild != nil &&
n.FirstChild.Type == html.TextNode {
if vars, err := javascript.GetAllVariable(n.FirstChild.Data); err == nil {
params = append(params, vars...)
}
Expand Down
13 changes: 7 additions & 6 deletions pkg/proxy/mitm.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import (
"strings"
"sync"

"github.com/Buzz2d0/xssfinder/pkg/httpdump"
"github.com/elazarl/goproxy"
"github.com/gokitx/pkgs/slicex"
"github.com/sirupsen/logrus"
Expand Down Expand Up @@ -44,7 +45,7 @@ type MitmServer struct {
goProxy *goproxy.ProxyHttpServer
srv *http.Server

C <-chan Request
C <-chan httpdump.Request
}

func NewMitmServer(conf Config) *MitmServer {
Expand All @@ -58,7 +59,7 @@ func NewMitmServer(conf Config) *MitmServer {
proxy.OnRequest(goproxy.DstHostIs(conf.CaHost)).
DoFunc(DownloadCaHandlerFunc)
proxy.OnRequest().HandleConnect(goproxy.AlwaysMitm)
c := make(chan Request, 5e1)
c := make(chan httpdump.Request, 5e1)
mitm := &MitmServer{
addr: conf.Addr,
cahost: conf.CaHost,
Expand Down Expand Up @@ -98,11 +99,11 @@ func (m *MitmServer) OnRequest(req *http.Request, ctx *goproxy.ProxyCtx) (*http.
if ignoreRequestWithPath(req.URL.Path) {
return req, nil
}
m.reqs.Store(ctx.Session, MakeRequest(req))
m.reqs.Store(ctx.Session, httpdump.MakeRequest(req))
return req, nil
}

func (m *MitmServer) MakeOnResponse(c chan Request) func(resp *http.Response, ctx *goproxy.ProxyCtx) *http.Response {
func (m *MitmServer) MakeOnResponse(c chan httpdump.Request) func(resp *http.Response, ctx *goproxy.ProxyCtx) *http.Response {
return func(resp *http.Response, ctx *goproxy.ProxyCtx) *http.Response {
if resp == nil {
m.reqs.Delete(ctx.Session)
Expand All @@ -113,9 +114,9 @@ func (m *MitmServer) MakeOnResponse(c chan Request) func(resp *http.Response, ct
if strings.Contains(contentType, "text/html") ||
strings.Contains(contentType, "text/htm") {
if req, ok := m.reqs.LoadAndDelete(ctx.Session); ok {
if request, ok := req.(Request); ok {
if request, ok := req.(httpdump.Request); ok {
logrus.Debugln("[mitm] received:", request.URL)
request.Response = MakeResponse(request, resp)
request.Response = httpdump.MakeResponse(request, resp)
c <- request
}
}
Expand Down
Loading

0 comments on commit f781275

Please sign in to comment.