package http

import (
	"bytes"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/http/httputil"
	"net/url"
	"strings"
	"sync"
	"time"

	"github.com/pkg/errors"
	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/nuclei/v2/pkg/output"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/generators"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/interactsh"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/tostring"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/http/httpclientpool"
	"github.com/projectdiscovery/rawhttp"
	"github.com/remeh/sizedwaitgroup"
	"go.uber.org/multierr"
)
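
// defaultMaxWorkers is the default number of workers used to keep the
// pipelining (turbo) request queue saturated when no larger value is required.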
const defaultMaxWorkers = 150

// executeRaceRequest executes a race condition request for a URL
func (r *Request) executeRaceRequest(reqURL string, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
	var requests []*generatedRequest
	// Requests within a race condition are dumped only once and the output is prefilled so that
	// DSL expressions keep working. This introduces a delay and pre-populates the "request"
	// field of the output event.
	generator := r.newGenerator()
	requestForDump, err := generator.Make(reqURL, nil, "")
	if err != nil {
		return err
	}
	r.setCustomHeaders(requestForDump)
	dumpedRequest, err := dump(requestForDump, reqURL)
	if err != nil {
		return err
	}
	if r.options.Options.Debug || r.options.Options.DebugRequests {
		gologger.Info().Msgf("[%s] Dumped HTTP request for %s\n\n", r.options.TemplateID, reqURL)
		gologger.Print().Msgf("%s", string(dumpedRequest))
	}
	previous["request"] = string(dumpedRequest)

	// Pre-generate all requests for the race
	for i := 0; i < r.RaceNumberRequests; i++ {
		generator := r.newGenerator()
		request, err := generator.Make(reqURL, nil, "")
		if err != nil {
			return err
		}
		requests = append(requests, request)
	}
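
	// Launch one goroutine per pre-generated request; each goroutine appends its error (if any)
	// to the shared requestErr under the mutex so failures can be aggregated with multierr.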
	wg := sync.WaitGroup{}
	var requestErr error
	mutex := &sync.Mutex{}
	for i := 0; i < r.RaceNumberRequests; i++ {
		wg.Add(1)
		go func(httpRequest *generatedRequest) {
			defer wg.Done()
			err := r.executeRequest(reqURL, httpRequest, previous, callback, 0)
			mutex.Lock()
			if err != nil {
				requestErr = multierr.Append(requestErr, err)
			}
			mutex.Unlock()
		}(requests[i])
		r.options.Progress.IncrementRequests()
	}
	wg.Wait()
	return requestErr
}

// executeParallelHTTP executes parallel requests for a template
func (r *Request) executeParallelHTTP(reqURL string, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
	generator := r.newGenerator()

	// Workers that keep enqueuing new requests
	maxWorkers := r.Threads
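	// The sized wait group caps the number of concurrently running request goroutines at maxWorkers.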
	swg := sizedwaitgroup.New(maxWorkers)

	var requestErr error
	mutex := &sync.Mutex{}
	for {
		request, err := generator.Make(reqURL, dynamicValues, "")
		if err == io.EOF {
			break
		}
		if err != nil {
			r.options.Progress.IncrementFailedRequestsBy(int64(generator.Total()))
			return err
		}
		swg.Add()
		go func(httpRequest *generatedRequest) {
			defer swg.Done()

			r.options.RateLimiter.Take()
			err := r.executeRequest(reqURL, httpRequest, previous, callback, 0)
			mutex.Lock()
			if err != nil {
				requestErr = multierr.Append(requestErr, err)
			}
			mutex.Unlock()
		}(request)
		r.options.Progress.IncrementRequests()
	}
	swg.Wait()
	return requestErr
}

// executeTurboHTTP executes a turbo (pipelined) HTTP request for a URL
func (r *Request) executeTurboHTTP(reqURL string, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
	generator := r.newGenerator()

	// need to extract the target from the url
	URL, err := url.Parse(reqURL)
	if err != nil {
		return err
	}
	pipeOptions := rawhttp.DefaultPipelineOptions
	pipeOptions.Host = URL.Host
	pipeOptions.MaxConnections = 1
	if r.PipelineConcurrentConnections > 0 {
		pipeOptions.MaxConnections = r.PipelineConcurrentConnections
	}
	if r.PipelineRequestsPerConnection > 0 {
		pipeOptions.MaxPendingRequests = r.PipelineRequestsPerConnection
	}
	pipeclient := rawhttp.NewPipelineClient(pipeOptions)
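	// The pipelined client reuses a small pool of persistent connections to the target and
	// queues multiple requests on each of them, bounded by MaxConnections and MaxPendingRequests.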
	// defaultMaxWorkers should be a sufficient value to keep the queues always full
	maxWorkers := defaultMaxWorkers
	// if the pending-request queue is bigger, increase the number of workers
	if pipeOptions.MaxPendingRequests > maxWorkers {
		maxWorkers = pipeOptions.MaxPendingRequests
	}
	swg := sizedwaitgroup.New(maxWorkers)

	var requestErr error
	mutex := &sync.Mutex{}
	for {
		request, err := generator.Make(reqURL, dynamicValues, "")
		if err == io.EOF {
			break
		}
		if err != nil {
			r.options.Progress.IncrementFailedRequestsBy(int64(generator.Total()))
			return err
		}
		request.pipelinedClient = pipeclient

		swg.Add()
		go func(httpRequest *generatedRequest) {
			defer swg.Done()

			err := r.executeRequest(reqURL, httpRequest, previous, callback, 0)
			mutex.Lock()
			if err != nil {
				requestErr = multierr.Append(requestErr, err)
			}
			mutex.Unlock()
		}(request)
		r.options.Progress.IncrementRequests()
	}
	swg.Wait()
	return requestErr
}

// ExecuteWithResults executes the final request on a URL
func (r *Request) ExecuteWithResults(reqURL string, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
	// verify if pipelining was requested
	if r.Pipeline {
		return r.executeTurboHTTP(reqURL, dynamicValues, previous, callback)
	}
	// verify if a basic race condition was requested
	if r.Race && r.RaceNumberRequests > 0 {
		return r.executeRaceRequest(reqURL, previous, callback)
	}
	// verify if parallel execution was requested
	if r.Threads > 0 {
		return r.executeParallelHTTP(reqURL, dynamicValues, previous, callback)
	}
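
	// Default path: generate payload permutations one at a time and send them sequentially,
	// honoring the global rate limiter; results are delivered through the callback.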
	generator := r.newGenerator()

	requestCount := 1
	var requestErr error
	for {
		hasInteractMarkers := interactsh.HasMatchers(r.CompiledOperators)

		var interactURL string
		if r.options.Interactsh != nil && hasInteractMarkers {
			interactURL = r.options.Interactsh.URL()
		}
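		// The interactsh URL (if any) is handed to the generator so it can be substituted
		// into the request wherever the template references an out-of-band interaction.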
		request, err := generator.Make(reqURL, dynamicValues, interactURL)
		if err == io.EOF {
			break
		}
		if err != nil {
			r.options.Progress.IncrementFailedRequestsBy(int64(generator.Total()))
			return err
		}

		var gotOutput bool
		r.options.RateLimiter.Take()
		err = r.executeRequest(reqURL, request, previous, func(event *output.InternalWrappedEvent) {
			// Add the extracts to the dynamic values if any.
			if event.OperatorsResult != nil {
				gotOutput = true
				dynamicValues = generators.MergeMaps(dynamicValues, event.OperatorsResult.DynamicValues)
			}
			if hasInteractMarkers && r.options.Interactsh != nil {
				r.options.Interactsh.RequestEvent(interactURL, &interactsh.RequestData{
					MakeResultFunc: r.MakeResultEvent,
					Event:          event,
					Operators:      r.CompiledOperators,
					MatchFunc:      r.Match,
					ExtractFunc:    r.Extract,
				})
			} else {
				callback(event)
			}
		}, requestCount)
		if err != nil {
			requestErr = multierr.Append(requestErr, err)
		}
		requestCount++
		r.options.Progress.IncrementRequests()

		if request.original.options.Options.StopAtFirstMatch && gotOutput {
			r.options.Progress.IncrementErrorsBy(int64(generator.Total()))
			break
		}
	}
	return requestErr
}
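
// drainReqSize is the maximum number of response-body bytes drained before closing a
// response, which helps the underlying keep-alive connections get reused.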
const drainReqSize = int64(8 * 1024)

// executeRequest executes the actual generated request and returns an error if one occurred
func (r *Request) executeRequest(reqURL string, request *generatedRequest, previous output.InternalEvent, callback protocols.OutputEventCallback, requestCount int) error {
	r.setCustomHeaders(request)

	var (
		resp          *http.Response
		fromcache     bool
		dumpedRequest []byte
		err           error
	)

	// For race conditions we can't dump the request body at this point, as it is already waiting
	// for the open-gate event; dumping is handled with similar code inside the race function.
	if !request.original.Race {
		dumpedRequest, err = dump(request, reqURL)
		if err != nil {
			return err
		}
		if r.options.Options.Debug || r.options.Options.DebugRequests {
			gologger.Info().Msgf("[%s] Dumped HTTP request for %s\n\n", r.options.TemplateID, reqURL)
			gologger.Print().Msgf("%s", string(dumpedRequest))
		}
	}
	var formedURL string
	var hostname string
	timeStart := time.Now()
	if request.original.Pipeline {
		if request.rawRequest != nil {
			formedURL = request.rawRequest.FullURL
			if parsed, parseErr := url.Parse(formedURL); parseErr == nil {
				hostname = parsed.Host
			}
			resp, err = request.pipelinedClient.DoRaw(request.rawRequest.Method, reqURL, request.rawRequest.Path, generators.ExpandMapValues(request.rawRequest.Headers), ioutil.NopCloser(strings.NewReader(request.rawRequest.Data)))
		} else if request.request != nil {
			resp, err = request.pipelinedClient.Dor(request.request)
		}
	} else if request.original.Unsafe && request.rawRequest != nil {
		formedURL = request.rawRequest.FullURL
		if parsed, parseErr := url.Parse(formedURL); parseErr == nil {
			hostname = parsed.Host
		}
		options := request.original.rawhttpClient.Options
		options.FollowRedirects = r.Redirects
		options.CustomRawBytes = request.rawRequest.UnsafeRawBytes
		resp, err = request.original.rawhttpClient.DoRawWithOptions(request.rawRequest.Method, reqURL, request.rawRequest.Path, generators.ExpandMapValues(request.rawRequest.Headers), ioutil.NopCloser(strings.NewReader(request.rawRequest.Data)), options)
	} else {
		hostname = request.request.URL.Host
		formedURL = request.request.URL.String()
		// if nuclei-project is available check if the request was already sent previously
		if r.options.ProjectFile != nil {
			// if unavailable fail silently
			fromcache = true
			resp, err = r.options.ProjectFile.Get(dumpedRequest)
			if err != nil {
				fromcache = false
			}
		}
		if resp == nil {
			resp, err = r.httpClient.Do(request.request)
		}
	}
	if resp == nil {
		err = errors.New("no response received for request")
	}
	if err != nil {
		// rawhttp doesn't support draining response bodies.
		if resp != nil && resp.Body != nil && request.rawRequest == nil {
			_, _ = io.CopyN(ioutil.Discard, resp.Body, drainReqSize)
			resp.Body.Close()
		}
		r.options.Output.Request(r.options.TemplateID, formedURL, "http", err)
		r.options.Progress.IncrementErrorsBy(1)
		return err
	}
	defer func() {
		_, _ = io.CopyN(ioutil.Discard, resp.Body, drainReqSize)
		resp.Body.Close()
	}()

	gologger.Verbose().Msgf("[%s] Sent HTTP request to %s", r.options.TemplateID, formedURL)
	r.options.Output.Request(r.options.TemplateID, formedURL, "http", err)

	duration := time.Since(timeStart)

	dumpedResponseHeaders, err := httputil.DumpResponse(resp, false)
	if err != nil {
		return errors.Wrap(err, "could not dump http response")
	}
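
	// If MaxSize is configured for the request, cap how many response-body bytes are read
	// into memory; otherwise read the whole body.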
	var bodyReader io.Reader
	if r.MaxSize != 0 {
		bodyReader = io.LimitReader(resp.Body, int64(r.MaxSize))
	} else {
		bodyReader = resp.Body
	}
	data, err := ioutil.ReadAll(bodyReader)
	if err != nil {
		return errors.Wrap(err, "could not read http body")
	}
	resp.Body.Close()
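
	// dumpResponseWithRedirectChain also captures the intermediate redirect responses, which are
	// later exposed to templates via the "redirect-chain" output field.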
	redirectedResponse, err := dumpResponseWithRedirectChain(resp, data)
	if err != nil {
		return errors.Wrap(err, "could not read http response with redirect chain")
	}

	// net/http doesn't automatically decompress the response body if an encoding has been
	// specified by the user in the request, so we may have to do it manually.
	dataOrig := data
	data, _ = handleDecompression(resp, data)
	// Dump response - step 2 - replace gzip body with deflated one or with itself (NOP operation)
	dumpedResponseBuilder := &bytes.Buffer{}
	dumpedResponseBuilder.Write(dumpedResponseHeaders)
	dumpedResponseBuilder.Write(data)
	dumpedResponse := dumpedResponseBuilder.Bytes()
	redirectedResponse = bytes.ReplaceAll(redirectedResponse, dataOrig, data)

	if r.options.Options.Debug || r.options.Options.DebugResponse {
		gologger.Info().Msgf("[%s] Dumped HTTP response for %s\n\n", r.options.TemplateID, formedURL)
		gologger.Print().Msgf("%s", string(redirectedResponse))
	}

	// if nuclei-project is enabled store the response if not previously done
	if r.options.ProjectFile != nil && !fromcache {
		err := r.options.ProjectFile.Set(dumpedRequest, resp, data)
		if err != nil {
			return errors.Wrap(err, "could not store in project file")
		}
	}

	matchedURL := reqURL
	if request.rawRequest != nil && request.rawRequest.FullURL != "" {
		matchedURL = request.rawRequest.FullURL
	}
	if request.request != nil {
		matchedURL = request.request.URL.String()
	}
	finalEvent := make(output.InternalEvent)
	outputEvent := r.responseToDSLMap(resp, reqURL, matchedURL, tostring.UnsafeToString(dumpedRequest), tostring.UnsafeToString(dumpedResponse), tostring.UnsafeToString(data), headersToString(resp.Header), duration, request.meta)
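	// Strip the port (if any) from the hostname so the dialed-IP lookup below is keyed on the bare host.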
	if i := strings.LastIndex(hostname, ":"); i != -1 {
		hostname = hostname[:i]
	}
	outputEvent["ip"] = httpclientpool.Dialer.GetDialedIP(hostname)
	outputEvent["redirect-chain"] = tostring.UnsafeToString(redirectedResponse)
	for k, v := range previous {
		finalEvent[k] = v
	}
	for k, v := range outputEvent {
		finalEvent[k] = v
	}
	// Add the current request number metadata to the history if requested by the user, so that
	// multi-request DSL conditions can reference earlier responses (e.g. "status_code_1").
	if r.ReqCondition {
		for k, v := range outputEvent {
			key := fmt.Sprintf("%s_%d", k, requestCount)
			previous[key] = v
			finalEvent[key] = v
		}
	}

	event := &output.InternalWrappedEvent{InternalEvent: outputEvent}
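	// When the operators contain interactsh markers, matching is deferred: the event is registered
	// with the interactsh client (see ExecuteWithResults) and evaluated once an interaction arrives.
	// Otherwise the compiled operators are executed immediately against the final event.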
	if !interactsh.HasMatchers(r.CompiledOperators) {
		if r.CompiledOperators != nil {
			var ok bool
			event.OperatorsResult, ok = r.CompiledOperators.Execute(finalEvent, r.Match, r.Extract)
			if ok && event.OperatorsResult != nil {
				event.OperatorsResult.PayloadValues = request.meta
				event.Results = r.MakeResultEvent(event)
			}
			event.InternalEvent = outputEvent
		}
	}
	callback(event)
	return nil
}

// setCustomHeaders sets the custom headers for the generated request
func (r *Request) setCustomHeaders(req *generatedRequest) {
	for k, v := range r.customHeaders {
		if req.rawRequest != nil {
			req.rawRequest.Headers[k] = v
		} else {
			kk, vv := strings.TrimSpace(k), strings.TrimSpace(v)
			req.request.Header.Set(kk, vv)
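			// net/http takes the Host for client requests from req.Host rather than from the
			// header map, so a custom Host header must also be set on the request itself.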
if kk == "Host" {
req . request . Host = vv
}
2020-12-29 01:30:07 +05:30
}
2020-12-26 14:55:15 +05:30
}
}