package executor

import (
	"bufio"
	"crypto/tls"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/http/httputil"
	"net/url"
	"os"
	"strings"
	"sync"
	"sync/atomic"
	"time"

	"github.com/pkg/errors"
	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/nuclei/v2/pkg/matchers"
	"github.com/projectdiscovery/nuclei/v2/pkg/requests"
	"github.com/projectdiscovery/nuclei/v2/pkg/templates"
	"github.com/projectdiscovery/retryablehttp-go"
	"golang.org/x/net/proxy"
)

// HTTPExecutor is a client for performing HTTP requests
// for a template.
type HTTPExecutor struct {
	debug         bool
	results       uint32
	jsonOutput    bool
	httpClient    *retryablehttp.Client
	template      *templates.Template
	httpRequest   *requests.HTTPRequest
	writer        *bufio.Writer
	outputMutex   *sync.Mutex
	customHeaders requests.CustomHeaders
}

// HTTPOptions contains configuration options for the HTTP executor.
type HTTPOptions struct {
	Template      *templates.Template
	HTTPRequest   *requests.HTTPRequest
	Writer        *bufio.Writer
	Timeout       int
	Retries       int
	ProxyURL      string
	ProxySocksURL string
	Debug         bool
	JSON          bool
	CustomHeaders requests.CustomHeaders
}
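
// Note: Timeout is interpreted in seconds when the underlying http.Client is
// built (see makeHTTPClient), and Retries is passed through to retryablehttp
// as the maximum retry count.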

// NewHTTPExecutor creates a new HTTP executor from a template
// and an HTTP request query.
func NewHTTPExecutor(options *HTTPOptions) (*HTTPExecutor, error) {
	var proxyURL *url.URL
	var err error

	if options.ProxyURL != "" {
		proxyURL, err = url.Parse(options.ProxyURL)
	}
	if err != nil {
		return nil, err
	}

	// Create the HTTP client
	client := makeHTTPClient(proxyURL, options)
	client.CheckRetry = retryablehttp.HostSprayRetryPolicy()

	executer := &HTTPExecutor{
		debug:         options.Debug,
		jsonOutput:    options.JSON,
		results:       0,
		httpClient:    client,
		template:      options.Template,
		httpRequest:   options.HTTPRequest,
		outputMutex:   &sync.Mutex{},
		writer:        options.Writer,
		customHeaders: options.CustomHeaders,
	}
	return executer, nil
}
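
// A minimal usage sketch, assuming the template and its HTTP request have
// already been parsed elsewhere; tpl, req and the writer below are
// illustrative placeholders, not part of this package:
//
//	executor, err := NewHTTPExecutor(&HTTPOptions{
//		Template:    tpl,
//		HTTPRequest: req,
//		Writer:      bufio.NewWriter(os.Stdout),
//		Timeout:     5,
//		Retries:     1,
//	})
//	if err != nil {
//		// handle the error
//	}
//	if err := executor.ExecuteHTTP("https://example.com"); err != nil {
//		// handle the error
//	}
//	executor.Close()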

// GotResults returns true if there were any results for the executor
func (e *HTTPExecutor) GotResults() bool {
	if atomic.LoadUint32(&e.results) == 0 {
		return false
	}
	return true
}

// ExecuteHTTP executes the HTTP request on a URL
func (e *HTTPExecutor) ExecuteHTTP(URL string) error {
	// Compile each request for the template based on the URL
	compiledRequest, err := e.httpRequest.MakeHTTPRequest(URL)
	if err != nil {
		return errors.Wrap(err, "could not make http request")
	}

	// Send the request to the target servers
mainLoop:
	for compiledRequest := range compiledRequest {
		if compiledRequest.Error != nil {
			return errors.Wrap(compiledRequest.Error, "could not make http request")
		}
		e.setCustomHeaders(compiledRequest)
		req := compiledRequest.Request

		if e.debug {
			gologger.Infof("Dumped HTTP request for %s (%s)\n\n", URL, e.template.ID)
			dumpedRequest, err := httputil.DumpRequest(req.Request, true)
			if err != nil {
				return errors.Wrap(err, "could not dump http request")
			}
			fmt.Fprintf(os.Stderr, "%s", string(dumpedRequest))
		}

		resp, err := e.httpClient.Do(req)
		if err != nil {
			if resp != nil {
				resp.Body.Close()
			}
			gologger.Warningf("Could not do request: %s\n", err)
			continue
		}

		if e.debug {
			gologger.Infof("Dumped HTTP response for %s (%s)\n\n", URL, e.template.ID)
			dumpedResponse, err := httputil.DumpResponse(resp, true)
			if err != nil {
				return errors.Wrap(err, "could not dump http response")
			}
			fmt.Fprintf(os.Stderr, "%s\n", string(dumpedResponse))
		}

		data, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			io.Copy(ioutil.Discard, resp.Body)
			resp.Body.Close()
			return errors.Wrap(err, "could not read http body")
		}
		resp.Body.Close()

		// net/http doesn't automatically decompress the response body if an
		// encoding has been specified by the user in the request, so we may
		// have to do it manually here
		data, err = requests.HandleDecompression(compiledRequest.Request, data)
		if err != nil {
			return errors.Wrap(err, "could not decompress http body")
		}

		// Convert response body from []byte to string with zero copy
		body := unsafeToString(data)

		var headers string
		matcherCondition := e.httpRequest.GetMatchersCondition()
		for _, matcher := range e.httpRequest.Matchers {
			headers = headersToString(resp.Header)
			// Check if the matcher matched
			if !matcher.Match(resp, body, headers) {
				// If the condition is AND and we haven't matched, try the next request.
				if matcherCondition == matchers.ANDCondition {
					continue mainLoop
				}
			} else {
				// If the matcher has matched and it's an OR condition,
				// write the first output then move to the next matcher.
				if matcherCondition == matchers.ORCondition && len(e.httpRequest.Extractors) == 0 {
					e.writeOutputHTTP(compiledRequest, matcher, nil)
					atomic.CompareAndSwapUint32(&e.results, 0, 1)
				}
			}
		}

		// All the matchers have matched, so move on to the next task:
		// extracting data from the response with the extractors.
		var extractorResults []string
		for _, extractor := range e.httpRequest.Extractors {
			headers = headersToString(resp.Header)
			for match := range extractor.Extract(body, headers) {
				extractorResults = append(extractorResults, match)
			}
		}

		// Write a final string of output if the matcher condition is
		// AND or if we have extractors for the request too.
		if len(e.httpRequest.Extractors) > 0 || matcherCondition == matchers.ANDCondition {
			e.writeOutputHTTP(compiledRequest, nil, extractorResults)
			atomic.CompareAndSwapUint32(&e.results, 0, 1)
		}
	}

	gologger.Verbosef("Sent HTTP request to %s\n", "http-request", URL)

	return nil
}
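
// Matcher condition semantics, as implemented in ExecuteHTTP above; the
// template snippet is illustrative only (field names follow the nuclei
// template YAML format):
//
//	matchers-condition: and  # all matchers must match before output is written
//	matchers-condition: or   # any matching matcher writes output immediately,
//	                         # unless extractors are also defined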

// Close closes the http executor for a template.
func (e *HTTPExecutor) Close() {
	e.outputMutex.Lock()
	e.writer.Flush()
	e.outputMutex.Unlock()
}

// makeHTTPClient creates an HTTP client for the executor based on the provided options
func makeHTTPClient(proxyURL *url.URL, options *HTTPOptions) *retryablehttp.Client {
	retryablehttpOptions := retryablehttp.DefaultOptionsSpraying
	retryablehttpOptions.RetryWaitMax = 10 * time.Second
	retryablehttpOptions.RetryMax = options.Retries
	followRedirects := options.HTTPRequest.Redirects
	maxRedirects := options.HTTPRequest.MaxRedirects

	transport := &http.Transport{
		MaxIdleConnsPerHost: -1,
		TLSClientConfig: &tls.Config{
			Renegotiation:      tls.RenegotiateOnceAsClient,
			InsecureSkipVerify: true,
		},
		DisableKeepAlives: true,
	}

	// Attempt to overwrite the dial function with the SOCKS proxied version
	if options.ProxySocksURL != "" {
		socksURL, err := url.Parse(options.ProxySocksURL)
		// Only set up the dialer if the SOCKS URL parsed cleanly; otherwise
		// socksURL would be nil and dereferencing it below would panic.
		if err == nil {
			proxyAuth := &proxy.Auth{}
			proxyAuth.User = socksURL.User.Username()
			proxyAuth.Password, _ = socksURL.User.Password()
			dialer, err := proxy.SOCKS5("tcp", fmt.Sprintf("%s:%s", socksURL.Hostname(), socksURL.Port()), proxyAuth, proxy.Direct)
			if err == nil {
				transport.Dial = dialer.Dial
			}
		}
	}

	if proxyURL != nil {
		transport.Proxy = http.ProxyURL(proxyURL)
	}
	return retryablehttp.NewWithHTTPClient(&http.Client{
		Transport:     transport,
		Timeout:       time.Duration(options.Timeout) * time.Second,
		CheckRedirect: makeCheckRedirectFunc(followRedirects, maxRedirects),
	}, retryablehttpOptions)
}
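
// A hypothetical proxy configuration (the URLs below are placeholders):
// ProxyURL routes requests through an HTTP proxy via transport.Proxy, while
// ProxySocksURL replaces the transport dialer with a SOCKS5 connection.
//
//	options.ProxyURL = "http://127.0.0.1:8080"
//	// or
//	options.ProxySocksURL = "socks5://user:pass@127.0.0.1:1080"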

// checkRedirectFunc mirrors the signature of http.Client.CheckRedirect.
type checkRedirectFunc func(_ *http.Request, requests []*http.Request) error

// makeCheckRedirectFunc builds the redirect policy used by the HTTP client.
func makeCheckRedirectFunc(followRedirects bool, maxRedirects int) checkRedirectFunc {
	return func(_ *http.Request, requests []*http.Request) error {
		if !followRedirects {
			return http.ErrUseLastResponse
		}
		if maxRedirects == 0 {
			if len(requests) > 10 {
				return http.ErrUseLastResponse
			}
			return nil
		}
		if len(requests) > maxRedirects {
			return http.ErrUseLastResponse
		}
		return nil
	}
}
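
// A minimal sketch of the policy above with hypothetical arguments: with
// followRedirects=true and maxRedirects=3, redirects are followed until the
// request chain grows past three entries.
//
//	check := makeCheckRedirectFunc(true, 3)
//	_ = check(nil, make([]*http.Request, 3)) // nil: still within the limit
//	_ = check(nil, make([]*http.Request, 4)) // http.ErrUseLastResponse: limit exceeded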

// setCustomHeaders sets the provided custom headers on a compiled request.
func (e *HTTPExecutor) setCustomHeaders(r *requests.CompiledHTTP) {
	for _, customHeader := range e.customHeaders {
		// This should be pre-computed somewhere and done only once
		// Split only on the first colon so header values may themselves contain colons
		tokens := strings.SplitN(customHeader, ":", 2)
		// if it's an invalid header skip it
		if len(tokens) < 2 {
			continue
		}

		headerName, headerValue := tokens[0], tokens[1]
		headerName = strings.TrimSpace(headerName)
		headerValue = strings.TrimSpace(headerValue)
		r.Request.Header.Set(headerName, headerValue)
	}
}
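
// For example, a custom header supplied as "X-Api-Key: secret" is applied to
// every request as the header X-Api-Key with value "secret"; entries without
// a colon are skipped. (The header name and value here are placeholders.)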