// nuclei/v2/pkg/executer/executer_http.go
package executer
import (
	"bufio"
	"crypto/tls"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/http/cookiejar"
	"net/http/httputil"
	"net/url"
	"os"
	"strings"
	"sync"
	"time"

	"github.com/pkg/errors"
	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/nuclei/v2/pkg/matchers"
	"github.com/projectdiscovery/nuclei/v2/pkg/requests"
	"github.com/projectdiscovery/nuclei/v2/pkg/templates"
	"github.com/projectdiscovery/retryablehttp-go"
	"golang.org/x/net/proxy"
)
// HTTPExecuter is client for performing HTTP requests
// for a template.
type HTTPExecuter struct {
	// debug enables dumping of requests and responses to stderr.
	debug bool
	// Results records whether at least one match or extraction was produced.
	Results bool
	// jsonOutput toggles JSON-formatted output (consumed by the output
	// writer, defined elsewhere in this package).
	jsonOutput bool
	// httpClient is the retrying HTTP client used for all requests.
	httpClient *retryablehttp.Client
	// template is the template this executer runs requests for.
	template *templates.Template
	// bulkHttpRequest holds the request sequence, matchers and extractors.
	bulkHttpRequest *requests.BulkHTTPRequest
	// writer receives rendered output lines; flushed in Close.
	writer *bufio.Writer
	// outputMutex serializes access to writer.
	outputMutex *sync.Mutex
	// customHeaders are "Name: value" strings applied to every request.
	customHeaders requests.CustomHeaders
	// CookieJar, when non-nil, is shared with the underlying HTTP client.
	CookieJar *cookiejar.Jar
}
// HTTPOptions contains configuration options for the HTTP executer.
type HTTPOptions struct {
	// Template is the template the executer will run.
	Template *templates.Template
	// BulkHttpRequest is the request sequence with matchers/extractors.
	BulkHttpRequest *requests.BulkHTTPRequest
	// Writer receives rendered output lines.
	Writer *bufio.Writer
	// Timeout is the per-request timeout in seconds.
	Timeout int
	// Retries is the maximum number of retries per request.
	Retries int
	// ProxyURL is an optional HTTP proxy URL.
	ProxyURL string
	// ProxySocksURL is an optional SOCKS5 proxy URL.
	ProxySocksURL string
	// Debug enables request/response dumping to stderr.
	Debug bool
	// JSON toggles JSON-formatted output.
	JSON bool
	// CustomHeaders are "Name: value" strings applied to every request.
	CustomHeaders requests.CustomHeaders
	// CookieReuse creates a fresh cookie jar when CookieJar is nil.
	CookieReuse bool
	// CookieJar, when non-nil, is used as the client's cookie jar.
	CookieJar *cookiejar.Jar
}
2020-07-16 10:57:28 +02:00
// NewHTTPExecuter creates a new HTTP executer from a template
2020-04-26 05:50:33 +05:30
// and a HTTP request query.
2020-07-16 10:57:28 +02:00
func NewHTTPExecuter(options *HTTPOptions) (*HTTPExecuter, error) {
2020-04-28 00:29:57 +05:30
var proxyURL *url.URL
var err error
2020-04-26 05:50:33 +05:30
2020-04-28 00:29:57 +05:30
if options.ProxyURL != "" {
proxyURL, err = url.Parse(options.ProxyURL)
}
2020-04-27 23:49:53 +05:30
if err != nil {
return nil, err
}
2020-04-26 05:50:33 +05:30
// Create the HTTP Client
2020-04-28 00:29:57 +05:30
client := makeHTTPClient(proxyURL, options)
2020-04-26 05:50:33 +05:30
client.CheckRetry = retryablehttp.HostSprayRetryPolicy()
2020-07-16 16:15:24 +02:00
if options.CookieJar != nil {
client.HTTPClient.Jar = options.CookieJar
} else if options.CookieReuse {
jar, err := cookiejar.New(nil)
if err != nil {
return nil, err
}
client.HTTPClient.Jar = jar
}
2020-04-26 05:50:33 +05:30
2020-07-16 10:57:28 +02:00
executer := &HTTPExecuter{
2020-07-18 21:42:23 +02:00
debug: options.Debug,
jsonOutput: options.JSON,
httpClient: client,
template: options.Template,
bulkHttpRequest: options.BulkHttpRequest,
outputMutex: &sync.Mutex{},
writer: options.Writer,
customHeaders: options.CustomHeaders,
CookieJar: options.CookieJar,
2020-04-26 05:50:33 +05:30
}
2020-04-27 23:49:53 +05:30
return executer, nil
2020-04-26 05:50:33 +05:30
}
// ExecuteHTTP executes the HTTP request on a URL
2020-07-16 10:57:28 +02:00
func (e *HTTPExecuter) ExecuteHTTP(URL string) (result Result) {
2020-07-10 09:04:38 +02:00
result.Matches = make(map[string]interface{})
result.Extractions = make(map[string]interface{})
2020-07-19 03:14:19 +02:00
dynamicvalues := make(map[string]interface{})
2020-04-26 05:50:33 +05:30
2020-07-21 20:51:58 +02:00
e.bulkHttpRequest.Reset()
2020-07-19 03:14:19 +02:00
for e.bulkHttpRequest.Next() && !result.Done {
httpRequest, err := e.bulkHttpRequest.MakeHTTPRequest(URL, dynamicvalues, e.bulkHttpRequest.Current())
2020-07-18 21:42:23 +02:00
if err != nil {
2020-07-10 09:04:38 +02:00
result.Error = errors.Wrap(err, "could not make http request")
return
}
2020-06-22 19:30:01 +05:30
2020-07-20 21:23:04 +02:00
err = e.handleHTTP(URL, httpRequest, dynamicvalues, &result)
2020-07-19 03:14:19 +02:00
if err != nil {
result.Error = errors.Wrap(err, "could not make http request")
return
}
2020-07-18 21:42:23 +02:00
e.bulkHttpRequest.Increment()
}
gologger.Verbosef("Sent HTTP request to %s\n", "http-request", URL)
return
}
2020-07-19 03:14:19 +02:00
func (e *HTTPExecuter) handleHTTP(URL string, request *requests.HttpRequest, dynamicvalues map[string]interface{}, result *Result) error {
2020-07-18 21:42:23 +02:00
e.setCustomHeaders(request)
req := request.Request
if e.debug {
gologger.Infof("Dumped HTTP request for %s (%s)\n\n", URL, e.template.ID)
dumpedRequest, err := httputil.DumpRequest(req.Request, true)
2020-04-26 05:50:33 +05:30
if err != nil {
2020-07-18 21:42:23 +02:00
return errors.Wrap(err, "could not make http request")
2020-04-26 05:50:33 +05:30
}
2020-07-18 21:42:23 +02:00
fmt.Fprintf(os.Stderr, "%s", string(dumpedRequest))
}
resp, err := e.httpClient.Do(req)
if err != nil {
if resp != nil {
resp.Body.Close()
2020-06-22 19:30:01 +05:30
}
2020-07-20 21:23:04 +02:00
return errors.Wrap(err, "Could not do request")
2020-07-18 21:42:23 +02:00
}
2020-06-22 19:30:01 +05:30
2020-07-18 21:42:23 +02:00
if e.debug {
gologger.Infof("Dumped HTTP response for %s (%s)\n\n", URL, e.template.ID)
dumpedResponse, err := httputil.DumpResponse(resp, true)
2020-04-26 05:50:33 +05:30
if err != nil {
2020-07-18 21:42:23 +02:00
return errors.Wrap(err, "could not dump http response")
2020-04-26 05:50:33 +05:30
}
2020-07-20 21:23:04 +02:00
fmt.Fprintf(os.Stderr, "%s\n", string(dumpedResponse))
2020-07-18 21:42:23 +02:00
}
data, err := ioutil.ReadAll(resp.Body)
if err != nil {
io.Copy(ioutil.Discard, resp.Body)
2020-04-26 05:50:33 +05:30
resp.Body.Close()
2020-07-18 21:42:23 +02:00
return errors.Wrap(err, "could not read http body")
}
resp.Body.Close()
2020-04-26 05:50:33 +05:30
2020-07-18 21:42:23 +02:00
// net/http doesn't automatically decompress the response body if an encoding has been specified by the user in the request
// so in case we have to manually do it
data, err = requests.HandleDecompression(req, data)
if err != nil {
return errors.Wrap(err, "could not decompress http body")
}
2020-07-18 21:42:23 +02:00
// Convert response body from []byte to string with zero copy
body := unsafeToString(data)
headers := headersToString(resp.Header)
matcherCondition := e.bulkHttpRequest.GetMatchersCondition()
for _, matcher := range e.bulkHttpRequest.Matchers {
// Check if the matcher matched
if !matcher.Match(resp, body, headers) {
// If the condition is AND we haven't matched, try next request.
if matcherCondition == matchers.ANDCondition {
return nil
2020-04-26 05:50:33 +05:30
}
2020-07-18 21:42:23 +02:00
} else {
// If the matcher has matched, and its an OR
// write the first output then move to next matcher.
if matcherCondition == matchers.ORCondition && len(e.bulkHttpRequest.Extractors) == 0 {
result.Matches[matcher.Name] = nil
// probably redundant but ensures we snapshot current payload values when matchers are valid
result.Meta = request.Meta
e.writeOutputHTTP(request, matcher, nil)
e.Results = true
2020-04-27 23:34:08 +05:30
}
}
2020-07-18 21:42:23 +02:00
}
2020-07-18 21:42:23 +02:00
// All matchers have successfully completed so now start with the
// next task which is extraction of input from matchers.
var extractorResults []string
for _, extractor := range e.bulkHttpRequest.Extractors {
for match := range extractor.Extract(resp, body, headers) {
if _, ok := dynamicvalues[extractor.Name]; !ok {
dynamicvalues[extractor.Name] = match
}
extractorResults = append(extractorResults, match)
}
2020-07-18 21:42:23 +02:00
// probably redundant but ensures we snapshot current payload values when extractors are valid
result.Meta = request.Meta
result.Extractions[extractor.Name] = extractorResults
2020-04-26 05:50:33 +05:30
}
2020-06-22 19:30:01 +05:30
2020-07-18 21:42:23 +02:00
// Write a final string of output if matcher type is
// AND or if we have extractors for the mechanism too.
if len(e.bulkHttpRequest.Extractors) > 0 || matcherCondition == matchers.ANDCondition {
e.writeOutputHTTP(request, nil, extractorResults)
e.Results = true
}
2020-06-22 19:30:01 +05:30
2020-07-18 21:42:23 +02:00
return nil
}
2020-07-16 10:57:28 +02:00
// Close closes the http executer for a template.
func (e *HTTPExecuter) Close() {
e.outputMutex.Lock()
2020-07-18 21:42:23 +02:00
defer e.outputMutex.Unlock()
e.writer.Flush()
2020-04-26 05:50:33 +05:30
}
2020-04-28 00:29:57 +05:30
// makeHTTPClient creates a http client
func makeHTTPClient(proxyURL *url.URL, options *HTTPOptions) *retryablehttp.Client {
retryablehttpOptions := retryablehttp.DefaultOptionsSpraying
retryablehttpOptions.RetryWaitMax = 10 * time.Second
retryablehttpOptions.RetryMax = options.Retries
2020-07-18 21:42:23 +02:00
followRedirects := options.BulkHttpRequest.Redirects
maxRedirects := options.BulkHttpRequest.MaxRedirects
2020-04-28 00:29:57 +05:30
transport := &http.Transport{
MaxIdleConnsPerHost: -1,
TLSClientConfig: &tls.Config{
Renegotiation: tls.RenegotiateOnceAsClient,
InsecureSkipVerify: true,
},
DisableKeepAlives: true,
}
2020-04-28 04:01:25 +02:00
// Attempts to overwrite the dial function with the socks proxied version
if options.ProxySocksURL != "" {
2020-04-28 22:15:26 +02:00
var proxyAuth *proxy.Auth
socksURL, err := url.Parse(options.ProxySocksURL)
if err == nil {
proxyAuth = &proxy.Auth{}
proxyAuth.User = socksURL.User.Username()
proxyAuth.Password, _ = socksURL.User.Password()
}
dialer, err := proxy.SOCKS5("tcp", fmt.Sprintf("%s:%s", socksURL.Hostname(), socksURL.Port()), proxyAuth, proxy.Direct)
2020-04-28 04:01:25 +02:00
if err == nil {
transport.Dial = dialer.Dial
}
}
2020-04-28 00:29:57 +05:30
if proxyURL != nil {
transport.Proxy = http.ProxyURL(proxyURL)
}
return retryablehttp.NewWithHTTPClient(&http.Client{
Transport: transport,
Timeout: time.Duration(options.Timeout) * time.Second,
CheckRedirect: makeCheckRedirectFunc(followRedirects, maxRedirects),
}, retryablehttpOptions)
}
// checkRedirectFunc is the signature expected by http.Client.CheckRedirect.
type checkRedirectFunc func(_ *http.Request, requests []*http.Request) error

// makeCheckRedirectFunc builds a redirect policy: redirects are not
// followed at all when followRedirects is false; otherwise they are
// followed up to maxRedirects hops, or up to a default of 10 hops
// when maxRedirects is zero.
func makeCheckRedirectFunc(followRedirects bool, maxRedirects int) checkRedirectFunc {
	return func(_ *http.Request, previous []*http.Request) error {
		switch {
		case !followRedirects:
			return http.ErrUseLastResponse
		case maxRedirects == 0:
			// No explicit cap configured: stop after 10 redirects.
			if len(previous) > 10 {
				return http.ErrUseLastResponse
			}
		case len(previous) > maxRedirects:
			return http.ErrUseLastResponse
		}
		return nil
	}
}
2020-05-22 00:23:38 +02:00
2020-07-18 21:42:23 +02:00
func (e *HTTPExecuter) setCustomHeaders(r *requests.HttpRequest) {
2020-05-22 00:23:38 +02:00
for _, customHeader := range e.customHeaders {
// This should be pre-computed somewhere and done only once
tokens := strings.Split(customHeader, ":")
// if it's an invalid header skip it
if len(tokens) < 2 {
continue
}
headerName, headerValue := tokens[0], strings.Join(tokens[1:], "")
headerName = strings.TrimSpace(headerName)
headerValue = strings.TrimSpace(headerValue)
2020-07-17 16:04:13 +02:00
r.Request.Header[headerName] = []string{headerValue}
2020-05-22 00:23:38 +02:00
}
}
// Result holds the outcome of executing a template's requests
// against a single URL.
type Result struct {
	// Meta carries the payload values in effect when a matcher or
	// extractor fired.
	Meta map[string]interface{}
	// Matches maps the names of matchers that matched (values are nil).
	Matches map[string]interface{}
	// Extractions maps extractor names to their extracted values.
	Extractions map[string]interface{}
	// GotResults — NOTE(review): never written in this file; confirm
	// it is set by callers elsewhere or superseded by HTTPExecuter.Results.
	GotResults bool
	// Error holds the first error encountered during execution.
	Error error
	// Done stops the bulk request loop early when set.
	// NOTE(review): nothing in this file sets it — presumably set by
	// matcher/extractor handling elsewhere; verify.
	Done bool
}