2020-12-21 16:46:25 +05:30
|
|
|
package raw
|
|
|
|
|
|
|
|
|
|
import (
|
|
|
|
|
"bufio"
|
2021-11-02 14:12:59 +05:30
|
|
|
"bytes"
|
2021-11-08 19:33:54 +01:00
|
|
|
"errors"
|
2020-12-21 16:46:25 +05:30
|
|
|
"fmt"
|
2021-02-08 16:07:16 +05:30
|
|
|
"io"
|
2020-12-21 16:46:25 +05:30
|
|
|
"strings"
|
2021-02-20 00:35:39 +01:00
|
|
|
|
|
|
|
|
"github.com/projectdiscovery/rawhttp/client"
|
2023-01-24 22:04:52 +05:30
|
|
|
errorutil "github.com/projectdiscovery/utils/errors"
|
2022-11-06 21:24:23 +01:00
|
|
|
stringsutil "github.com/projectdiscovery/utils/strings"
|
2023-01-05 16:41:59 +05:30
|
|
|
urlutil "github.com/projectdiscovery/utils/url"
|
2020-12-21 16:46:25 +05:30
|
|
|
)
|
|
|
|
|
|
2020-12-26 14:55:15 +05:30
|
|
|
// Request defines a basic HTTP raw request
type Request struct {
	// FullURL is the reconstructed absolute URL (scheme://host/path);
	// it is only populated when parsing in safe (non-unsafe) mode.
	FullURL string
	// Method is the HTTP verb taken from the request line (e.g. GET, POST).
	Method string
	// Path is the relative path (and query) from the request line, after
	// merging with the input URL.
	Path string
	// Data is the request body that follows the header block.
	Data string
	// Headers holds the parsed header key/value pairs.
	Headers map[string]string
	// UnsafeHeaders preserves the raw header lines (including duplicates
	// and malformed ones) when parsing in unsafe mode.
	UnsafeHeaders client.Headers
	// UnsafeRawBytes holds the raw request bytes used verbatim in unsafe
	// mode (the path portion may be rewritten during parsing).
	UnsafeRawBytes []byte
}
|
|
|
|
|
|
|
|
|
|
// Parse parses the raw request as supplied by the user.
//
// inputURL supplies the scheme, host and base path the raw request targets;
// unsafe selects raw-bytes mode, in which the request is kept verbatim and
// only the path inside UnsafeRawBytes is rewritten.
func Parse(request string, inputURL *urlutil.URL, unsafe bool) (*Request, error) {
	rawrequest, err := readRawRequest(request, unsafe)
	if err != nil {
		return nil, err
	}

	switch {
	// If path is empty do not tamper input url (see doc)
	// can be omitted but makes things clear
	case rawrequest.Path == "":
		rawrequest.Path = inputURL.GetRelativePath()

	// full url provided instead of rel path
	case strings.HasPrefix(rawrequest.Path, "http") && !unsafe:
		urlx, err := urlutil.ParseURL(rawrequest.Path, true)
		if err != nil {
			return nil, errorutil.NewWithErr(err).WithTag("raw").Msgf("failed to parse url %v from template", rawrequest.Path)
		}
		cloned := inputURL.Clone()
		parseErr := cloned.MergePath(urlx.GetRelativePath(), true)
		if parseErr != nil {
			return nil, errorutil.NewWithTag("raw", "could not automergepath for template path %v", urlx.GetRelativePath()).Wrap(parseErr)
		}
		rawrequest.Path = cloned.GetRelativePath()

	// If unsafe, changes must be made in the raw request string itself
	case unsafe:
		prevPath := rawrequest.Path
		cloned := inputURL.Clone()
		err := cloned.MergePath(rawrequest.Path, true)
		unsafeRelativePath := cloned.GetRelativePath()
		if err != nil {
			return nil, errorutil.NewWithErr(err).WithTag("raw").Msgf("failed to automerge %v from unsafe template", rawrequest.Path)
		}
		// replace itself (first occurrence of the old path in the raw bytes)
		rawrequest.UnsafeRawBytes = bytes.Replace(rawrequest.UnsafeRawBytes, []byte(prevPath), []byte(unsafeRelativePath), 1)

	default:
		cloned := inputURL.Clone()
		parseErr := cloned.MergePath(rawrequest.Path, true)
		if parseErr != nil {
			return nil, errorutil.NewWithTag("raw", "could not automergepath for template path %v", rawrequest.Path).Wrap(parseErr)
		}
		rawrequest.Path = cloned.GetRelativePath()
	}

	if !unsafe {
		// default the Host header from the input URL when the template
		// did not supply one
		if _, ok := rawrequest.Headers["Host"]; !ok {
			rawrequest.Headers["Host"] = inputURL.Host
		}
		rawrequest.FullURL = fmt.Sprintf("%s://%s%s", inputURL.Scheme, strings.TrimSpace(inputURL.Host), rawrequest.Path)
	}

	return rawrequest, nil
}
|
|
|
|
|
|
|
|
|
|
// reads raw request line by line following convention
|
|
|
|
|
func readRawRequest(request string, unsafe bool) (*Request, error) {
|
|
|
|
|
rawRequest := &Request{
|
|
|
|
|
Headers: make(map[string]string),
|
2021-11-02 14:12:59 +05:30
|
|
|
}
|
|
|
|
|
|
2022-12-13 12:09:31 +05:30
|
|
|
// store body if it is unsafe request
|
2021-02-20 02:02:57 +01:00
|
|
|
if unsafe {
|
|
|
|
|
rawRequest.UnsafeRawBytes = []byte(request)
|
|
|
|
|
}
|
2022-12-13 12:09:31 +05:30
|
|
|
|
|
|
|
|
// parse raw request
|
2021-02-20 02:02:57 +01:00
|
|
|
reader := bufio.NewReader(strings.NewReader(request))
|
2022-04-04 09:32:41 +02:00
|
|
|
read_line:
|
2020-12-21 16:46:25 +05:30
|
|
|
s, err := reader.ReadString('\n')
|
|
|
|
|
if err != nil {
|
2021-11-25 15:18:46 +02:00
|
|
|
return nil, fmt.Errorf("could not read request: %w", err)
|
2020-12-21 16:46:25 +05:30
|
|
|
}
|
2022-04-04 09:32:41 +02:00
|
|
|
// ignore all annotations
|
|
|
|
|
if stringsutil.HasPrefixAny(s, "@") {
|
|
|
|
|
goto read_line
|
|
|
|
|
}
|
2020-12-21 16:46:25 +05:30
|
|
|
|
|
|
|
|
parts := strings.Split(s, " ")
|
2022-12-13 12:09:31 +05:30
|
|
|
if len(parts) > 0 {
|
|
|
|
|
rawRequest.Method = parts[0]
|
|
|
|
|
if len(parts) == 2 && strings.Contains(parts[1], "HTTP") {
|
|
|
|
|
// When relative path is missing/ not specified it is considered that
|
|
|
|
|
// request is meant to be untampered at path
|
|
|
|
|
// Ex: GET HTTP/1.1
|
|
|
|
|
parts = []string{parts[0], "", parts[1]}
|
|
|
|
|
}
|
|
|
|
|
if len(parts) < 3 && !unsafe {
|
|
|
|
|
// missing a field
|
|
|
|
|
return nil, fmt.Errorf("malformed request specified: %v", s)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// relative path
|
|
|
|
|
rawRequest.Path = parts[1]
|
|
|
|
|
// Note: raw request does not URL Encode if needed `+` should be used
|
|
|
|
|
// this can be also be implemented
|
2021-11-02 14:12:59 +05:30
|
|
|
}
|
2020-12-21 16:46:25 +05:30
|
|
|
|
2021-11-25 18:54:16 +02:00
|
|
|
var multiPartRequest bool
|
2020-12-21 16:46:25 +05:30
|
|
|
// Accepts all malformed headers
|
|
|
|
|
var key, value string
|
|
|
|
|
for {
|
|
|
|
|
line, readErr := reader.ReadString('\n')
|
|
|
|
|
line = strings.TrimSpace(line)
|
|
|
|
|
|
|
|
|
|
if readErr != nil || line == "" {
|
2021-02-08 16:07:16 +05:30
|
|
|
if readErr != io.EOF {
|
|
|
|
|
break
|
|
|
|
|
}
|
2020-12-21 16:46:25 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
p := strings.SplitN(line, ":", 2)
|
|
|
|
|
key = p[0]
|
|
|
|
|
if len(p) > 1 {
|
|
|
|
|
value = p[1]
|
|
|
|
|
}
|
2021-02-22 18:59:03 +05:30
|
|
|
if strings.Contains(key, "Content-Type") && strings.Contains(value, "multipart/") {
|
2021-11-25 18:54:16 +02:00
|
|
|
multiPartRequest = true
|
2021-02-22 18:59:03 +05:30
|
|
|
}
|
2020-12-21 16:46:25 +05:30
|
|
|
|
|
|
|
|
// in case of unsafe requests multiple headers should be accepted
|
|
|
|
|
// therefore use the full line as key
|
|
|
|
|
_, found := rawRequest.Headers[key]
|
2021-02-20 02:02:57 +01:00
|
|
|
if unsafe {
|
|
|
|
|
rawRequest.UnsafeHeaders = append(rawRequest.UnsafeHeaders, client.Header{Key: line})
|
|
|
|
|
}
|
|
|
|
|
|
2020-12-21 16:46:25 +05:30
|
|
|
if unsafe && found {
|
|
|
|
|
rawRequest.Headers[line] = ""
|
|
|
|
|
} else {
|
2021-02-20 00:35:39 +01:00
|
|
|
rawRequest.Headers[key] = strings.TrimSpace(value)
|
2020-12-21 16:46:25 +05:30
|
|
|
}
|
2021-02-08 16:07:16 +05:30
|
|
|
if readErr == io.EOF {
|
|
|
|
|
break
|
|
|
|
|
}
|
2020-12-21 16:46:25 +05:30
|
|
|
}
|
|
|
|
|
|
2020-12-26 14:55:15 +05:30
|
|
|
// Set the request body
|
2022-02-23 13:54:46 +01:00
|
|
|
b, err := io.ReadAll(reader)
|
2020-12-26 14:55:15 +05:30
|
|
|
if err != nil {
|
2021-11-25 15:18:46 +02:00
|
|
|
return nil, fmt.Errorf("could not read request body: %w", err)
|
2020-12-26 14:55:15 +05:30
|
|
|
}
|
|
|
|
|
rawRequest.Data = string(b)
|
2021-11-25 18:54:16 +02:00
|
|
|
if !multiPartRequest {
|
2021-02-22 18:59:03 +05:30
|
|
|
rawRequest.Data = strings.TrimSuffix(rawRequest.Data, "\r\n")
|
|
|
|
|
}
|
2020-12-26 14:55:15 +05:30
|
|
|
return rawRequest, nil
|
2022-10-27 20:09:38 +02:00
|
|
|
|
2021-11-02 14:12:59 +05:30
|
|
|
}
|
2021-11-08 19:33:54 +01:00
|
|
|
|
|
|
|
|
// TryFillCustomHeaders after the Host header
|
|
|
|
|
func (r *Request) TryFillCustomHeaders(headers []string) error {
|
|
|
|
|
unsafeBytes := bytes.ToLower(r.UnsafeRawBytes)
|
|
|
|
|
// locate first host header
|
|
|
|
|
hostHeaderIndex := bytes.Index(unsafeBytes, []byte("host:"))
|
|
|
|
|
if hostHeaderIndex > 0 {
|
|
|
|
|
// attempt to locate next newline
|
|
|
|
|
newLineIndex := bytes.Index(unsafeBytes[hostHeaderIndex:], []byte("\r\n"))
|
|
|
|
|
if newLineIndex > 0 {
|
|
|
|
|
newLineIndex += hostHeaderIndex + 2
|
|
|
|
|
// insert custom headers
|
|
|
|
|
var buf bytes.Buffer
|
|
|
|
|
buf.Write(r.UnsafeRawBytes[:newLineIndex])
|
|
|
|
|
for _, header := range headers {
|
|
|
|
|
buf.WriteString(fmt.Sprintf("%s\r\n", header))
|
|
|
|
|
}
|
|
|
|
|
buf.Write(r.UnsafeRawBytes[newLineIndex:])
|
|
|
|
|
r.UnsafeRawBytes = buf.Bytes()
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
return errors.New("no new line found at the end of host header")
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return errors.New("no host header found")
|
|
|
|
|
}
|