nuclei/v2/pkg/requests/bulk-http-request.go


package requests

import (
	"bufio"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"regexp"
	"strings"

	"github.com/Knetic/govaluate"

	"github.com/projectdiscovery/nuclei/v2/pkg/extractors"
	"github.com/projectdiscovery/nuclei/v2/pkg/generators"
	"github.com/projectdiscovery/nuclei/v2/pkg/matchers"
	retryablehttp "github.com/projectdiscovery/retryablehttp-go"
)

// BulkHTTPRequest contains a request to be made from a template
type BulkHTTPRequest struct {
	Name string `yaml:"Name,omitempty"`
	// AttackType is the attack type
	// Sniper, PitchFork and ClusterBomb. Default is Sniper
	AttackType string `yaml:"attack,omitempty"`
	// attackType is the internal attack type
	attackType generators.Type
	// Payloads contains any payload variables for the request
	Payloads map[string]interface{} `yaml:"payloads,omitempty"`
	// Method is the request method, whether GET, POST, PUT, etc.
	Method string `yaml:"method"`
	// Path contains the path/s for the request
	Path []string `yaml:"path"`
	// Headers contains headers to send with the request
	Headers map[string]string `yaml:"headers,omitempty"`
	// Body is an optional parameter which contains the request body for POST methods, etc.
	Body string `yaml:"body,omitempty"`
	// CookieReuse is an optional setting that enables cookie sharing across requests
	CookieReuse bool `yaml:"cookie-reuse,omitempty"`
	// Matchers contains the detection mechanism for the request to identify
	// whether the request was successful
	Matchers []*matchers.Matcher `yaml:"matchers,omitempty"`
	// MatchersCondition is the condition of the matchers
	// whether to use AND or OR. Default is OR.
	MatchersCondition string `yaml:"matchers-condition,omitempty"`
	// matchersCondition is the internal condition for the matchers.
	matchersCondition matchers.ConditionType
	// Extractors contains the extraction mechanism for the request to identify
	// and extract parts of the response.
	Extractors []*extractors.Extractor `yaml:"extractors,omitempty"`
	// Redirects specifies whether redirects should be followed.
	Redirects bool `yaml:"redirects,omitempty"`
	// MaxRedirects is the maximum number of redirects that should be followed.
	MaxRedirects int `yaml:"max-redirects,omitempty"`
	// Raw contains raw requests
	Raw []string `yaml:"raw,omitempty"`

	gsfm *GeneratorFSM
}

// GetMatchersCondition returns the condition for the matchers
func (r *BulkHTTPRequest) GetMatchersCondition() matchers.ConditionType {
	return r.matchersCondition
}

// SetMatchersCondition sets the condition for the matchers
func (r *BulkHTTPRequest) SetMatchersCondition(condition matchers.ConditionType) {
	r.matchersCondition = condition
}

// GetAttackType returns the attack type
func (r *BulkHTTPRequest) GetAttackType() generators.Type {
	return r.attackType
}

// SetAttackType sets the attack type
func (r *BulkHTTPRequest) SetAttackType(attack generators.Type) {
	r.attackType = attack
}
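
// MakeHTTPRequest builds the HttpRequest for a base URL, merging the supplied
// dynamic values with BaseURL/Hostname; data containing a newline is treated
// as a raw request, anything else as a model (path) request.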
func (r *BulkHTTPRequest) MakeHTTPRequest(baseURL string, dynamicValues map[string]interface{}, data string) (*HttpRequest, error) {
	parsed, err := url.Parse(baseURL)
	if err != nil {
		return nil, err
	}

	hostname := parsed.Host

	values := generators.MergeMaps(dynamicValues, map[string]interface{}{
		"BaseURL":  baseURL,
		"Hostname": hostname,
	})

	// if data contains \n it's a raw request
	if strings.Contains(data, "\n") {
		return r.makeHTTPRequestFromRaw(baseURL, data, values)
	}

	return r.makeHTTPRequestFromModel(baseURL, data, values)
}

// makeHTTPRequestFromModel creates a *HttpRequest from a request template
func (r *BulkHTTPRequest) makeHTTPRequestFromModel(baseURL string, data string, values map[string]interface{}) (*HttpRequest, error) {
	replacer := newReplacer(values)
	URL := replacer.Replace(data)

	// Build a request on the specified URL
	req, err := http.NewRequest(r.Method, URL, nil)
	if err != nil {
		return nil, err
	}

	request, err := r.fillRequest(req, values)
	if err != nil {
		return nil, err
	}

	return &HttpRequest{Request: request}, nil
}
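
// InitGenerator initializes the payload generator state machine from the
// template's attack type, payloads, paths and raw requests.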
func (r *BulkHTTPRequest) InitGenerator() {
	r.gsfm = NewGeneratorFSM(r.attackType, r.Payloads, r.Path, r.Raw)
}
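
// CreateGenerator registers the given URL with the generator state machine.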
func (r *BulkHTTPRequest) CreateGenerator(URL string) {
	r.gsfm.Add(URL)
}
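
// HasGenerator returns true if a generator exists for the given URL.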
func (r *BulkHTTPRequest) HasGenerator(URL string) bool {
	return r.gsfm.Has(URL)
}
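
// ReadOne reads one value from the generator state machine for the given URL.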
func (r *BulkHTTPRequest) ReadOne(URL string) {
	r.gsfm.ReadOne(URL)
}

// makeHTTPRequestFromRaw creates a *HttpRequest from a raw request
func (r *BulkHTTPRequest) makeHTTPRequestFromRaw(baseURL string, data string, values map[string]interface{}) (*HttpRequest, error) {
	// Add a trailing newline
	data += "\n"

	if len(r.Payloads) > 0 {
		r.gsfm.InitOrSkip(baseURL)
		r.ReadOne(baseURL)

		return r.handleRawWithPaylods(data, baseURL, values, r.gsfm.Value(baseURL))
	}

	// otherwise continue with the normal flow
	return r.handleRawWithPaylods(data, baseURL, values, nil)
}
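
// handleRawWithPaylods replaces payload and dynamic values in a raw request,
// evaluates any {{expression}} tokens with the helper functions, and builds
// the resulting retryable request.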
func (r *BulkHTTPRequest) handleRawWithPaylods(raw string, baseURL string, values, genValues map[string]interface{}) (*HttpRequest, error) {
	baseValues := generators.CopyMap(values)
	finValues := generators.MergeMaps(baseValues, genValues)

	replacer := newReplacer(finValues)
	// Replace the dynamic variables in the URL if any
	raw = replacer.Replace(raw)

	dynamicValues := make(map[string]interface{})
	// find all potential tokens between {{}}
	var re = regexp.MustCompile(`(?m)\{\{.+}}`)
	for _, match := range re.FindAllString(raw, -1) {
		// check if the match contains a dynamic variable
		expr := generators.TrimDelimiters(match)

		compiled, err := govaluate.NewEvaluableExpressionWithFunctions(expr, generators.HelperFunctions())
		if err != nil {
			return nil, err
		}

		result, err := compiled.Evaluate(finValues)
		if err != nil {
			return nil, err
		}

		dynamicValues[expr] = result
	}

	// replace dynamic values
	dynamicReplacer := newReplacer(dynamicValues)
	raw = dynamicReplacer.Replace(raw)

	compiledRequest, err := r.parseRawRequest(raw, baseURL)
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest(compiledRequest.Method, compiledRequest.FullURL, strings.NewReader(compiledRequest.Data))
	if err != nil {
		return nil, err
	}

	// copy headers
	for key, value := range compiledRequest.Headers {
		req.Header[key] = []string{value}
	}

	request, err := r.fillRequest(req, values)
	if err != nil {
		return nil, err
	}

	return &HttpRequest{Request: request, Meta: genValues}, nil
}
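
// fillRequest applies the template body and headers to the request, fills in
// default headers where the user did not supply them and converts it into a
// retryablehttp request.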
func (r *BulkHTTPRequest) fillRequest(req *http.Request, values map[string]interface{}) (*retryablehttp.Request, error) {
	req.Header.Set("Connection", "close")
	req.Close = true

	replacer := newReplacer(values)

	// Check if the user requested a request body
	if r.Body != "" {
		req.Body = ioutil.NopCloser(strings.NewReader(r.Body))
	}

	// Set the header values requested
	for header, value := range r.Headers {
		req.Header[header] = []string{replacer.Replace(value)}
	}

	// Set some headers only if the header wasn't supplied by the user
	if _, ok := req.Header["User-Agent"]; !ok {
		req.Header.Set("User-Agent", "Nuclei - Open-source project (github.com/projectdiscovery/nuclei)")
	}

	// raw requests are left untouched
	if len(r.Raw) > 0 {
		return retryablehttp.FromRequest(req)
	}

	if _, ok := req.Header["Accept"]; !ok {
		req.Header.Set("Accept", "*/*")
	}

	if _, ok := req.Header["Accept-Language"]; !ok {
		req.Header.Set("Accept-Language", "en")
	}

	return retryablehttp.FromRequest(req)
}
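
// HttpRequest is a built retryable request along with the generator metadata
// used to create it.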
type HttpRequest struct {
	Request *retryablehttp.Request
	Meta    map[string]interface{}
}

// CustomHeaders is a list of custom headers valid for all requests
type CustomHeaders []string

// String returns just a label
func (c *CustomHeaders) String() string {
	return "Custom Global Headers"
}

// Set appends a new global header
func (c *CustomHeaders) Set(value string) error {
	*c = append(*c, value)
	return nil
}
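
// RawRequest holds a raw request parsed from a template: full URL, method,
// path, headers and body data.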
type RawRequest struct {
	FullURL string
	Method  string
	Path    string
	Data    string
	Headers map[string]string
}

// parseRawRequest parses the raw request as supplied by the user
func (r *BulkHTTPRequest) parseRawRequest(request string, baseURL string) (*RawRequest, error) {
	reader := bufio.NewReader(strings.NewReader(request))

	rawRequest := RawRequest{
		Headers: make(map[string]string),
	}

	s, err := reader.ReadString('\n')
	if err != nil {
		return nil, fmt.Errorf("could not read request: %s", err)
	}

	parts := strings.Split(s, " ")
	if len(parts) < 3 {
		return nil, fmt.Errorf("malformed request supplied")
	}
	// Set the request Method
	rawRequest.Method = parts[0]

	for {
		line, err := reader.ReadString('\n')
		line = strings.TrimSpace(line)

		if err != nil || line == "" {
			break
		}

		p := strings.SplitN(line, ":", 2)
		if len(p) != 2 {
			continue
		}
		if strings.EqualFold(p[0], "content-length") {
			continue
		}

		rawRequest.Headers[strings.TrimSpace(p[0])] = strings.TrimSpace(p[1])
	}

	// Handle the case with a full http url in the path. In that case,
	// ignore any host header that we encounter and use the path as the request URL
	if strings.HasPrefix(parts[1], "http") {
		parsed, err := url.Parse(parts[1])
		if err != nil {
			return nil, fmt.Errorf("could not parse request URL: %s", err)
		}

		rawRequest.Path = parts[1]
		rawRequest.Headers["Host"] = parsed.Host
	} else {
		rawRequest.Path = parts[1]
	}

	// If the raw request doesn't have a Host header and/or a path,
	// they are generated from the parsed baseURL
	parsedURL, err := url.Parse(baseURL)
	if err != nil {
		return nil, fmt.Errorf("could not parse request URL: %s", err)
	}

	var hostURL string
	if len(rawRequest.Headers["Host"]) == 0 {
		hostURL = parsedURL.Host
	} else {
		hostURL = rawRequest.Headers["Host"]
	}

	if len(rawRequest.Path) == 0 {
		rawRequest.Path = parsedURL.Path
	} else {
		// requests generated from http.ReadRequest have an incorrect RequestURI, so they
		// cannot be used to perform another request directly; we need to generate a new one
		// with the new target url
		if strings.HasPrefix(rawRequest.Path, "?") {
			rawRequest.Path = fmt.Sprintf("%s%s", parsedURL.Path, rawRequest.Path)
		}
	}

	rawRequest.FullURL = fmt.Sprintf("%s://%s%s", parsedURL.Scheme, hostURL, rawRequest.Path)

	// Set the request body
	b, err := ioutil.ReadAll(reader)
	if err != nil {
		return nil, fmt.Errorf("could not read request body: %s", err)
	}
	rawRequest.Data = string(b)

	return &rawRequest, nil
}
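
// Next reports whether the generator state machine has another request for the given URL.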
func (r *BulkHTTPRequest) Next(URL string) bool {
	return r.gsfm.Next(URL)
}
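
// Position returns the current position of the generator for the given URL.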
func (r *BulkHTTPRequest) Position(URL string) int {
	return r.gsfm.Position(URL)
}
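
// Reset resets the generator state for the given URL.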
func (r *BulkHTTPRequest) Reset(URL string) {
	r.gsfm.Reset(URL)
}
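
// Current returns the request currently tracked by the generator for the given URL.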
func (r *BulkHTTPRequest) Current(URL string) string {
	return r.gsfm.Current(URL)
}
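
// Total returns the total number of requests (paths plus raw requests) in the template.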
func (r *BulkHTTPRequest) Total() int {
	return len(r.Path) + len(r.Raw)
}
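
// Increment increments the request counter for the given URL.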
func (r *BulkHTTPRequest) Increment(URL string) {
	r.gsfm.Increment(URL)
}