Mirror of https://github.com/projectdiscovery/nuclei.git (synced 2025-12-19 08:26:15 +00:00)

Commit ec86542b87 (parent b485c9407f): Added offline http response processing feature
@@ -73,6 +73,7 @@ based on templates offering massive extensibility and ease of use.`)
 	set.StringVar(&options.ProjectPath, "project-path", "", "Use a user defined project folder, temporary folder is used if not specified but enabled")
 	set.BoolVarP(&options.NoMeta, "no-meta", "nm", false, "Don't display metadata for the matches")
 	set.BoolVarP(&options.TemplatesVersion, "templates-version", "tv", false, "Shows the installed nuclei-templates version")
+	set.BoolVar(&options.OfflineHTTP, "offline-http", false, "Enable Offline HTTP response processing mode")
 	set.StringVarP(&options.BurpCollaboratorBiid, "burp-collaborator-biid", "biid", "", "Burp Collaborator BIID")
 	set.StringSliceVar(&options.Tags, "tags", []string{}, "Tags to execute templates for")
 	_ = set.Parse()
@@ -43,7 +43,7 @@ func (r *Runner) getParsedTemplatesFor(templatePaths []string, severities []stri

 // parseTemplateFile returns the parsed template file
 func (r *Runner) parseTemplateFile(file string) (*templates.Template, error) {
-	executerOpts := &protocols.ExecuterOptions{
+	executerOpts := protocols.ExecuterOptions{
 		Output:   r.output,
 		Options:  r.options,
 		Progress: r.progress,
@@ -23,7 +23,7 @@ func TestHTTPRequestsCluster(t *testing.T) {
 	protocolinit.Init(&types.Options{})
 	list := make(map[string]*templates.Template)
 	for _, template := range templatesList {
-		executerOpts := &protocols.ExecuterOptions{
+		executerOpts := protocols.ExecuterOptions{
 			Output:   &mockOutput{},
 			Options:  &types.Options{},
 			Progress: nil,
@@ -1 +0,0 @@
-package http
v2/pkg/protocols/offlinehttp/find.go (new file)
@@ -0,0 +1,106 @@
package offlinehttp

import (
	"os"
	"path"
	"path/filepath"
	"strings"

	"github.com/karrick/godirwalk"
	"github.com/pkg/errors"
)

// getInputPaths parses the specified input paths and returns a compiled
// list of finished absolute paths to the files evaluating any allowlist, denylist,
// glob, file or folders, etc.
func (r *Request) getInputPaths(target string, callback func(string)) error {
	processed := make(map[string]struct{})

	// Template input includes a wildcard
	if strings.Contains(target, "*") {
		err := r.findGlobPathMatches(target, processed, callback)
		if err != nil {
			return errors.Wrap(err, "could not find glob matches")
		}
		return nil
	}

	// Template input is either a file or a directory
	file, err := r.findFileMatches(target, processed, callback)
	if err != nil {
		return errors.Wrap(err, "could not find file")
	}
	if file {
		return nil
	}

	// Recursively walk down the Templates directory and run all
	// the template file checks
	err = r.findDirectoryMatches(target, processed, callback)
	if err != nil {
		return errors.Wrap(err, "could not find directory matches")
	}
	return nil
}

// findGlobPathMatches returns the matched files from a glob path
func (r *Request) findGlobPathMatches(absPath string, processed map[string]struct{}, callback func(string)) error {
	matches, err := filepath.Glob(absPath)
	if err != nil {
		return errors.Errorf("wildcard found, but unable to glob: %s\n", err)
	}
	for _, match := range matches {
		if path.Ext(match) != ".txt" {
			continue // only process .txt files
		}
		if _, ok := processed[match]; !ok {
			processed[match] = struct{}{}
			callback(match)
		}
	}
	return nil
}

// findFileMatches finds if a path is an absolute file. If the path
// is a file, it returns true otherwise false with no errors.
func (r *Request) findFileMatches(absPath string, processed map[string]struct{}, callback func(string)) (bool, error) {
	info, err := os.Stat(absPath)
	if err != nil {
		return false, err
	}
	if !info.Mode().IsRegular() {
		return false, nil
	}
	if path.Ext(absPath) != ".txt" {
		return false, nil // only process .txt files
	}
	if _, ok := processed[absPath]; !ok {
		processed[absPath] = struct{}{}
		callback(absPath)
	}
	return true, nil
}

// findDirectoryMatches finds matches for templates from a directory
func (r *Request) findDirectoryMatches(absPath string, processed map[string]struct{}, callback func(string)) error {
	err := godirwalk.Walk(absPath, &godirwalk.Options{
		Unsorted: true,
		ErrorCallback: func(fsPath string, err error) godirwalk.ErrorAction {
			return godirwalk.SkipNode
		},
		Callback: func(p string, d *godirwalk.Dirent) error {
			if d.IsDir() {
				return nil
			}
			if path.Ext(p) != ".txt" {
				return nil // only process .txt files
			}
			if _, ok := processed[p]; !ok {
				callback(p)
				processed[p] = struct{}{}
			}
			return nil
		},
	})
	return err
}
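Not part of the diff: the input discovery above relies on github.com/karrick/godirwalk for fast traversal. As a minimal standalone sketch, the same glob / single-file / directory selection with de-duplication and the ".txt"-only filter can be expressed with nothing but the standard library (filepath.Glob and filepath.WalkDir); the names below are illustrative, not nuclei's.

package main

import (
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"strings"
)

// collectTxtInputs mirrors the shape of getInputPaths: globs, single files and
// directories all funnel into one callback, deduplicated, .txt files only.
func collectTxtInputs(target string, callback func(string)) error {
	seen := make(map[string]struct{})
	emit := func(p string) {
		if filepath.Ext(p) != ".txt" {
			return
		}
		if _, ok := seen[p]; !ok {
			seen[p] = struct{}{}
			callback(p)
		}
	}

	// wildcard input
	if strings.Contains(target, "*") {
		matches, err := filepath.Glob(target)
		if err != nil {
			return err
		}
		for _, m := range matches {
			emit(m)
		}
		return nil
	}

	// single file input
	info, err := os.Stat(target)
	if err != nil {
		return err
	}
	if info.Mode().IsRegular() {
		emit(target)
		return nil
	}

	// directory input: walk recursively, skipping unreadable entries
	return filepath.WalkDir(target, func(p string, d fs.DirEntry, err error) error {
		if err != nil || d.IsDir() {
			return nil
		}
		emit(p)
		return nil
	})
}

func main() {
	_ = collectTxtInputs("./responses", func(p string) { fmt.Println("processing", p) })
}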
v2/pkg/protocols/offlinehttp/find_test.go (new file)
@@ -0,0 +1,59 @@
package offlinehttp

import (
	"io/ioutil"
	"os"
	"path"
	"testing"

	"github.com/projectdiscovery/nuclei/v2/internal/testutils"
	"github.com/projectdiscovery/nuclei/v2/pkg/operators"
	"github.com/stretchr/testify/require"
)

func TestFindResponses(t *testing.T) {
	options := testutils.DefaultOptions

	testutils.Init(options)
	templateID := "testing-offline"
	request := &Request{}
	executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
		ID:   templateID,
		Info: map[string]interface{}{"severity": "low", "name": "test"},
	})
	executerOpts.Operators = []*operators.Operators{&operators.Operators{}}
	err := request.Compile(executerOpts)
	require.Nil(t, err, "could not compile file request")

	tempDir, err := ioutil.TempDir("", "test-*")
	require.Nil(t, err, "could not create temporary directory")
	defer os.RemoveAll(tempDir)

	files := map[string]string{
		"test.go":           "TEST",
		"config.txt":        "TEST",
		"final.txt":         "TEST",
		"image_ignored.png": "TEST",
		"test.txt":          "TEST",
	}
	for k, v := range files {
		err = ioutil.WriteFile(path.Join(tempDir, k), []byte(v), 0777)
		require.Nil(t, err, "could not write temporary file")
	}
	expected := []string{"config.txt", "final.txt", "test.txt"}
	got := []string{}
	err = request.getInputPaths(tempDir+"/*", func(item string) {
		base := path.Base(item)
		got = append(got, base)
	})
	require.Nil(t, err, "could not get input paths for glob")
	require.ElementsMatch(t, expected, got, "could not get correct file matches for glob")

	got = []string{}
	err = request.getInputPaths(tempDir, func(item string) {
		base := path.Base(item)
		got = append(got, base)
	})
	require.Nil(t, err, "could not get input paths for directory")
	require.ElementsMatch(t, expected, got, "could not get correct file matches for directory")
}
v2/pkg/protocols/offlinehttp/offlinehttp.go (new file)
@@ -0,0 +1,35 @@
package offlinehttp

import (
	"github.com/pkg/errors"
	"github.com/projectdiscovery/nuclei/v2/pkg/operators"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
)

// Request is a offline http response processing request
type Request struct {
	options           *protocols.ExecuterOptions
	compiledOperators []*operators.Operators
}

// GetID returns the unique ID of the request if any.
func (r *Request) GetID() string {
	return ""
}

// Compile compiles the protocol request for further execution.
func (r *Request) Compile(options *protocols.ExecuterOptions) error {
	for _, operator := range options.Operators {
		if err := operator.Compile(); err != nil {
			return errors.Wrap(err, "could not compile operators")
		}
		r.compiledOperators = append(r.compiledOperators, operator)
	}
	r.options = options
	return nil
}

// Requests returns the total number of requests the YAML rule will perform
func (r *Request) Requests() int {
	return 1
}
v2/pkg/protocols/offlinehttp/operators.go (new file)
@@ -0,0 +1,151 @@
package offlinehttp

import (
	"net/http"
	"strings"
	"time"

	"github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
	"github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
	"github.com/projectdiscovery/nuclei/v2/pkg/output"
	"github.com/projectdiscovery/nuclei/v2/pkg/types"
)

// Match matches a generic data response again a given matcher
func (r *Request) Match(data map[string]interface{}, matcher *matchers.Matcher) bool {
	item, ok := getMatchPart(matcher.Part, data)
	if !ok {
		return false
	}

	switch matcher.GetType() {
	case matchers.StatusMatcher:
		statusCode, ok := data["status_code"]
		if !ok {
			return false
		}
		return matcher.Result(matcher.MatchStatusCode(statusCode.(int)))
	case matchers.SizeMatcher:
		return matcher.Result(matcher.MatchSize(len(item)))
	case matchers.WordsMatcher:
		return matcher.Result(matcher.MatchWords(item))
	case matchers.RegexMatcher:
		return matcher.Result(matcher.MatchRegex(item))
	case matchers.BinaryMatcher:
		return matcher.Result(matcher.MatchBinary(item))
	case matchers.DSLMatcher:
		return matcher.Result(matcher.MatchDSL(data))
	}
	return false
}

// Extract performs extracting operation for a extractor on model and returns true or false.
func (r *Request) Extract(data map[string]interface{}, extractor *extractors.Extractor) map[string]struct{} {
	item, ok := getMatchPart(extractor.Part, data)
	if !ok {
		return nil
	}
	switch extractor.GetType() {
	case extractors.RegexExtractor:
		return extractor.ExtractRegex(item)
	case extractors.KValExtractor:
		return extractor.ExtractKval(data)
	}
	return nil
}

// getMatchPart returns the match part honoring "all" matchers + others.
func getMatchPart(part string, data output.InternalEvent) (string, bool) {
	if part == "header" {
		part = "all_headers"
	}
	var itemStr string

	if part == "all" {
		builder := &strings.Builder{}
		builder.WriteString(types.ToString(data["body"]))
		builder.WriteString(types.ToString(data["all_headers"]))
		itemStr = builder.String()
	} else {
		item, ok := data[part]
		if !ok {
			return "", false
		}
		itemStr = types.ToString(item)
	}
	return itemStr, true
}

// responseToDSLMap converts a HTTP response to a map for use in DSL matching
func (r *Request) responseToDSLMap(resp *http.Response, host, matched, rawReq, rawResp, body, headers string, duration time.Duration, extra map[string]interface{}) map[string]interface{} {
	data := make(map[string]interface{}, len(extra)+8+len(resp.Header)+len(resp.Cookies()))
	for k, v := range extra {
		data[k] = v
	}

	data["host"] = host
	data["matched"] = matched
	data["request"] = rawReq
	data["response"] = rawResp
	data["content_length"] = resp.ContentLength
	data["status_code"] = resp.StatusCode
	data["body"] = body
	for _, cookie := range resp.Cookies() {
		data[strings.ToLower(cookie.Name)] = cookie.Value
	}
	for k, v := range resp.Header {
		k = strings.ToLower(strings.TrimSpace(k))
		data[k] = strings.Join(v, " ")
	}
	data["all_headers"] = headers
	data["duration"] = duration.Seconds()
	data["template-id"] = r.options.TemplateID
	data["template-info"] = r.options.TemplateInfo
	return data
}

// MakeResultEvent creates a result event from internal wrapped event
func (r *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []*output.ResultEvent {
	if len(wrapped.OperatorsResult.DynamicValues) > 0 {
		return nil
	}
	results := make([]*output.ResultEvent, 0, len(wrapped.OperatorsResult.Matches)+1)

	// If we have multiple matchers with names, write each of them separately.
	if len(wrapped.OperatorsResult.Matches) > 0 {
		for k := range wrapped.OperatorsResult.Matches {
			data := r.makeResultEventItem(wrapped)
			data.MatcherName = k
			results = append(results, data)
		}
	} else if len(wrapped.OperatorsResult.Extracts) > 0 {
		for k, v := range wrapped.OperatorsResult.Extracts {
			data := r.makeResultEventItem(wrapped)
			data.ExtractedResults = v
			data.ExtractorName = k
			results = append(results, data)
		}
	} else {
		data := r.makeResultEventItem(wrapped)
		results = append(results, data)
	}
	return results
}

func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *output.ResultEvent {
	data := &output.ResultEvent{
		TemplateID:       types.ToString(wrapped.InternalEvent["template-id"]),
		Info:             wrapped.InternalEvent["template-info"].(map[string]interface{}),
		Type:             "http",
		Host:             types.ToString(wrapped.InternalEvent["host"]),
		Matched:          types.ToString(wrapped.InternalEvent["matched"]),
		Metadata:         wrapped.OperatorsResult.PayloadValues,
		ExtractedResults: wrapped.OperatorsResult.OutputExtracts,
		IP:               types.ToString(wrapped.InternalEvent["ip"]),
	}
	if r.options.Options.JSONRequests {
		data.Request = types.ToString(wrapped.InternalEvent["request"])
		data.Response = types.ToString(wrapped.InternalEvent["raw"])
	}
	return data
}
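Not part of the diff: the detail worth noting in getMatchPart above is the part-resolution rule, where "header" is an alias for the flattened "all_headers" block and "all" matches against body plus headers concatenated. A toy, dependency-free illustration of that rule (plain string maps stand in for the real output.InternalEvent):

package main

import "fmt"

// matchPart is a simplified stand-in for getMatchPart.
func matchPart(part string, data map[string]string) (string, bool) {
	if part == "header" {
		part = "all_headers" // "header" is an alias for the flattened header block
	}
	if part == "all" {
		// "all" concatenates body and headers before matching
		return data["body"] + data["all_headers"], true
	}
	v, ok := data[part]
	return v, ok
}

func main() {
	data := map[string]string{
		"body":        "<a>1.1.1.1</a>",
		"all_headers": "Server: ECS\n",
	}
	fmt.Println(matchPart("header", data)) // header text, true
	fmt.Println(matchPart("all", data))    // body + headers, true
	fmt.Println(matchPart("cookie", data)) // empty string, false
}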
v2/pkg/protocols/offlinehttp/operators_test.go (new file)
@@ -0,0 +1,290 @@
package offlinehttp

import (
	"net/http"
	"testing"
	"time"

	"github.com/projectdiscovery/nuclei/v2/internal/testutils"
	"github.com/projectdiscovery/nuclei/v2/pkg/operators"
	"github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
	"github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
	"github.com/projectdiscovery/nuclei/v2/pkg/output"
	"github.com/stretchr/testify/require"
)

func TestResponseToDSLMap(t *testing.T) {
	options := testutils.DefaultOptions

	testutils.Init(options)
	templateID := "testing-http"
	request := &Request{}
	executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
		ID:   templateID,
		Info: map[string]interface{}{"severity": "low", "name": "test"},
	})
	executerOpts.Operators = []*operators.Operators{&operators.Operators{}}
	err := request.Compile(executerOpts)
	require.Nil(t, err, "could not compile file request")

	resp := &http.Response{}
	resp.Header = make(http.Header)
	resp.Header.Set("Test", "Test-Response")
	host := "http://example.com/test/"
	matched := "http://example.com/test/?test=1"

	event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{})
	require.Len(t, event, 12, "could not get correct number of items in dsl map")
	require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp")
	require.Equal(t, "Test-Response", event["test"], "could not get correct resp for header")
}

func TestHTTPOperatorMatch(t *testing.T) {
	options := testutils.DefaultOptions

	testutils.Init(options)
	templateID := "testing-http"
	request := &Request{}
	executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
		ID:   templateID,
		Info: map[string]interface{}{"severity": "low", "name": "test"},
	})
	executerOpts.Operators = []*operators.Operators{&operators.Operators{}}
	err := request.Compile(executerOpts)
	require.Nil(t, err, "could not compile file request")

	resp := &http.Response{}
	resp.Header = make(http.Header)
	resp.Header.Set("Test", "Test-Response")
	host := "http://example.com/test/"
	matched := "http://example.com/test/?test=1"

	event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{})
	require.Len(t, event, 12, "could not get correct number of items in dsl map")
	require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp")
	require.Equal(t, "Test-Response", event["test"], "could not get correct resp for header")

	t.Run("valid", func(t *testing.T) {
		matcher := &matchers.Matcher{
			Part:  "body",
			Type:  "word",
			Words: []string{"1.1.1.1"},
		}
		err = matcher.CompileMatchers()
		require.Nil(t, err, "could not compile matcher")

		matched := request.Match(event, matcher)
		require.True(t, matched, "could not match valid response")
	})

	t.Run("negative", func(t *testing.T) {
		matcher := &matchers.Matcher{
			Part:     "body",
			Type:     "word",
			Negative: true,
			Words:    []string{"random"},
		}
		err := matcher.CompileMatchers()
		require.Nil(t, err, "could not compile negative matcher")

		matched := request.Match(event, matcher)
		require.True(t, matched, "could not match valid negative response matcher")
	})

	t.Run("invalid", func(t *testing.T) {
		matcher := &matchers.Matcher{
			Part:  "body",
			Type:  "word",
			Words: []string{"random"},
		}
		err := matcher.CompileMatchers()
		require.Nil(t, err, "could not compile matcher")

		matched := request.Match(event, matcher)
		require.False(t, matched, "could match invalid response matcher")
	})
}

func TestHTTPOperatorExtract(t *testing.T) {
	options := testutils.DefaultOptions

	testutils.Init(options)
	templateID := "testing-http"
	request := &Request{}
	executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
		ID:   templateID,
		Info: map[string]interface{}{"severity": "low", "name": "test"},
	})
	executerOpts.Operators = []*operators.Operators{&operators.Operators{}}
	err := request.Compile(executerOpts)
	require.Nil(t, err, "could not compile file request")

	resp := &http.Response{}
	resp.Header = make(http.Header)
	resp.Header.Set("Test-Header", "Test-Response")
	host := "http://example.com/test/"
	matched := "http://example.com/test/?test=1"

	event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{})
	require.Len(t, event, 12, "could not get correct number of items in dsl map")
	require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp")
	require.Equal(t, "Test-Response", event["test-header"], "could not get correct resp for header")

	t.Run("extract", func(t *testing.T) {
		extractor := &extractors.Extractor{
			Part:  "body",
			Type:  "regex",
			Regex: []string{"[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+"},
		}
		err = extractor.CompileExtractors()
		require.Nil(t, err, "could not compile extractor")

		data := request.Extract(event, extractor)
		require.Greater(t, len(data), 0, "could not extractor valid response")
		require.Equal(t, map[string]struct{}{"1.1.1.1": {}}, data, "could not extract correct data")
	})

	t.Run("kval", func(t *testing.T) {
		extractor := &extractors.Extractor{
			Type: "kval",
			KVal: []string{"test-header"},
		}
		err = extractor.CompileExtractors()
		require.Nil(t, err, "could not compile kval extractor")

		data := request.Extract(event, extractor)
		require.Greater(t, len(data), 0, "could not extractor kval valid response")
		require.Equal(t, map[string]struct{}{"Test-Response": {}}, data, "could not extract correct kval data")
	})
}

func TestHTTPMakeResult(t *testing.T) {
	options := testutils.DefaultOptions

	testutils.Init(options)
	templateID := "testing-http"
	request := &Request{}
	executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
		ID:   templateID,
		Info: map[string]interface{}{"severity": "low", "name": "test"},
	})
	executerOpts.Operators = []*operators.Operators{&operators.Operators{
		Matchers: []*matchers.Matcher{{
			Name:  "test",
			Part:  "body",
			Type:  "word",
			Words: []string{"1.1.1.1"},
		}},
		Extractors: []*extractors.Extractor{{
			Part:  "body",
			Type:  "regex",
			Regex: []string{"[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+"},
		}},
	}}
	err := request.Compile(executerOpts)
	require.Nil(t, err, "could not compile file request")

	resp := &http.Response{}
	resp.Header = make(http.Header)
	resp.Header.Set("Test", "Test-Response")
	host := "http://example.com/test/"
	matched := "http://example.com/test/?test=1"

	event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{})
	require.Len(t, event, 12, "could not get correct number of items in dsl map")
	require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp")
	require.Equal(t, "Test-Response", event["test"], "could not get correct resp for header")

	event["ip"] = "192.169.1.1"
	finalEvent := &output.InternalWrappedEvent{InternalEvent: event}
	for _, operator := range request.compiledOperators {
		result, ok := operator.Execute(event, request.Match, request.Extract)
		if ok && result != nil {
			finalEvent.OperatorsResult = result
			finalEvent.Results = request.MakeResultEvent(finalEvent)
		}
	}
	require.Equal(t, 1, len(finalEvent.Results), "could not get correct number of results")
	require.Equal(t, "test", finalEvent.Results[0].MatcherName, "could not get correct matcher name of results")
	require.Equal(t, "1.1.1.1", finalEvent.Results[0].ExtractedResults[0], "could not get correct extracted results")
}

const exampleRawRequest = `GET / HTTP/1.1
Host: example.com
Upgrade-Insecure-Requests: 1
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9
Accept-Encoding: gzip, deflate
Accept-Language: en-US,en;q=0.9,hi;q=0.8
If-None-Match: "3147526947+gzip"
If-Modified-Since: Thu, 17 Oct 2019 07:18:26 GMT
Connection: close

`

const exampleRawResponse = exampleResponseHeader + exampleResponseBody
const exampleResponseHeader = `
HTTP/1.1 200 OK
Accept-Ranges: bytes
Age: 493322
Cache-Control: max-age=604800
Content-Type: text/html; charset=UTF-8
Date: Thu, 04 Feb 2021 12:15:51 GMT
Etag: "3147526947+ident"
Expires: Thu, 11 Feb 2021 12:15:51 GMT
Last-Modified: Thu, 17 Oct 2019 07:18:26 GMT
Server: ECS (nyb/1D1C)
Vary: Accept-Encoding
X-Cache: HIT
Content-Length: 1256
Connection: close
`

const exampleResponseBody = `
<!doctype html>
<html>
<head>
<title>Example Domain</title>

<meta charset="utf-8" />
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<style type="text/css">
body {
background-color: #f0f0f2;
margin: 0;
padding: 0;
font-family: -apple-system, system-ui, BlinkMacSystemFont, "Segoe UI", "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif;

}
div {
width: 600px;
margin: 5em auto;
padding: 2em;
background-color: #fdfdff;
border-radius: 0.5em;
box-shadow: 2px 3px 7px 2px rgba(0,0,0,0.02);
}
a:link, a:visited {
color: #38488f;
text-decoration: none;
}
@media (max-width: 700px) {
div {
margin: 0 auto;
width: auto;
}
}
</style>
</head>
<a>1.1.1.1</a>
<body>
<div>
<h1>Example Domain</h1>
<p>This domain is for use in illustrative examples in documents. You may use this
domain in literature without prior coordination or asking for permission.</p>
<p><a href="https://www.iana.org/domains/example">More information...</a></p>
</div>
</body>
</html>
`
v2/pkg/protocols/offlinehttp/read_response.go (new file)
@@ -0,0 +1,18 @@
package offlinehttp

import (
	"bufio"
	"net/http"
	"strings"
)

// readResponseFromString reads a raw http response from a string.
func readResponseFromString(data string) (*http.Response, error) {
	var final string
	if strings.HasPrefix(data, "HTTP/") {
		final = data
	} else {
		final = data[strings.LastIndex(data, "HTTP/"):] // choose last http/ in case of it being later.
	}
	return http.ReadResponse(bufio.NewReader(strings.NewReader(final)), nil)
}
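Not part of the diff: readResponseFromString delegates the actual parsing to net/http's ReadResponse. A self-contained sketch of that call, outside the package and with a made-up raw response string, shows the idea:

package main

import (
	"bufio"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"
)

func main() {
	// a minimal raw HTTP/1.1 response; in offline mode the input comes from files on disk
	raw := "HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Length: 2\r\n\r\nok"

	resp, err := http.ReadResponse(bufio.NewReader(strings.NewReader(raw)), nil)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, resp.Header.Get("Content-Type"), string(body))
	// Output: 200 text/plain ok
}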
v2/pkg/protocols/offlinehttp/read_response_test.go (new file)
@@ -0,0 +1,85 @@
package offlinehttp

import (
	"io/ioutil"
	"testing"

	"github.com/stretchr/testify/require"
)

func TestReadResponseFromString(t *testing.T) {
	expectedBody := `<!DOCTYPE html>
<html>
<head>
<title>Firing Range</title>
</head>
<body>
<h1>Version 0.48</h1>
<h1>What is the Firing Range?</h1>
<p>
</body>
</body>
</html>`

	t.Run("response", func(t *testing.T) {
		data := `HTTP/1.1 200 OK
Age: 0
Cache-Control: public, max-age=600
Content-Type: text/html
Server: Google Frontend

<!DOCTYPE html>
<html>
<head>
<title>Firing Range</title>
</head>
<body>
<h1>Version 0.48</h1>
<h1>What is the Firing Range?</h1>
<p>
</body>
</body>
</html>`
		resp, err := readResponseFromString(data)
		require.Nil(t, err, "could not read response from string")

		respData, err := ioutil.ReadAll(resp.Body)
		require.Nil(t, err, "could not read response body")
		require.Equal(t, expectedBody, string(respData), "could not get correct parsed body")
		require.Equal(t, "Google Frontend", resp.Header.Get("Server"), "could not get correct headers")
	})

	t.Run("request-response", func(t *testing.T) {
		data := `GET http://public-firing-range.appspot.com/ HTTP/1.1
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9
Accept-Encoding: gzip, deflate
Upgrade-Insecure-Requests: 1
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36

HTTP/1.1 200 OK
Age: 0
Cache-Control: public, max-age=600
Content-Type: text/html
Server: Google Frontend

<!DOCTYPE html>
<html>
<head>
<title>Firing Range</title>
</head>
<body>
<h1>Version 0.48</h1>
<h1>What is the Firing Range?</h1>
<p>
</body>
</body>
</html>`
		resp, err := readResponseFromString(data)
		require.Nil(t, err, "could not read response from string")

		respData, err := ioutil.ReadAll(resp.Body)
		require.Nil(t, err, "could not read response body")
		require.Equal(t, expectedBody, string(respData), "could not get correct parsed body")
		require.Equal(t, "Google Frontend", resp.Header.Get("Server"), "could not get correct headers")
	})
}
v2/pkg/protocols/offlinehttp/request.go (new file)
@@ -0,0 +1,128 @@
package offlinehttp

import (
	"io/ioutil"
	"net/http"
	"net/http/httputil"
	"os"
	"strings"

	"github.com/pkg/errors"
	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/nuclei/v2/pkg/output"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/tostring"
	"github.com/remeh/sizedwaitgroup"
)

var _ protocols.Request = &Request{}

const maxSize = 5 * 1024 * 1024

// ExecuteWithResults executes the protocol requests and returns results instead of writing them.
func (r *Request) ExecuteWithResults(input string, metadata, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
	wg := sizedwaitgroup.New(r.options.Options.RateLimit)

	err := r.getInputPaths(input, func(data string) {
		wg.Add()

		go func(data string) {
			defer wg.Done()

			file, err := os.Open(data)
			if err != nil {
				gologger.Error().Msgf("Could not open file path %s: %s\n", data, err)
				return
			}
			defer file.Close()

			stat, err := file.Stat()
			if err != nil {
				gologger.Error().Msgf("Could not stat file path %s: %s\n", data, err)
				return
			}
			if stat.Size() >= int64(maxSize) {
				gologger.Verbose().Msgf("Could not process path %s: exceeded max size\n", data)
				return
			}

			buffer, err := ioutil.ReadAll(file)
			if err != nil {
				gologger.Error().Msgf("Could not read file path %s: %s\n", data, err)
				return
			}
			dataStr := tostring.UnsafeToString(buffer)

			resp, err := readResponseFromString(dataStr)
			if err != nil {
				gologger.Error().Msgf("Could not read raw response %s: %s\n", data, err)
				return
			}

			if r.options.Options.Debug || r.options.Options.DebugRequests {
				gologger.Info().Msgf("[%s] Dumped offline-http request for %s", r.options.TemplateID, data)
				gologger.Print().Msgf("%s", dataStr)
			}
			gologger.Verbose().Msgf("[%s] Sent OFFLINE-HTTP request to %s", r.options.TemplateID, data)

			dumpedResponse, err := httputil.DumpResponse(resp, true)
			if err != nil {
				gologger.Error().Msgf("Could not dump raw http response %s: %s\n", data, err)
				return
			}

			body, err := ioutil.ReadAll(resp.Body)
			if err != nil {
				gologger.Error().Msgf("Could not read raw http response body %s: %s\n", data, err)
				return
			}

			outputEvent := r.responseToDSLMap(resp, data, data, data, tostring.UnsafeToString(dumpedResponse), tostring.UnsafeToString(body), headersToString(resp.Header), 0, nil)
			outputEvent["ip"] = ""
			for k, v := range previous {
				outputEvent[k] = v
			}

			for _, operator := range r.compiledOperators {
				event := &output.InternalWrappedEvent{InternalEvent: outputEvent}
				var ok bool

				event.OperatorsResult, ok = operator.Execute(outputEvent, r.Match, r.Extract)
				if ok && event.OperatorsResult != nil {
					event.Results = r.MakeResultEvent(event)
				}
				callback(event)
			}
		}(data)
	})
	wg.Wait()
	if err != nil {
		r.options.Output.Request(r.options.TemplateID, input, "file", err)
		r.options.Progress.DecrementRequests(1)
		return errors.Wrap(err, "could not send file request")
	}
	r.options.Progress.IncrementRequests()
	return nil
}

// headersToString converts http headers to string
func headersToString(headers http.Header) string {
	builder := &strings.Builder{}

	for header, values := range headers {
		builder.WriteString(header)
		builder.WriteString(": ")

		for i, value := range values {
			builder.WriteString(value)

			if i != len(values)-1 {
				builder.WriteRune('\n')
				builder.WriteString(header)
				builder.WriteString(": ")
			}
		}
		builder.WriteRune('\n')
	}
	return builder.String()
}
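Not part of the diff: headersToString above flattens http.Header into the repeated "Name: value" lines that the matchers later see as all_headers. A functionally equivalent standalone illustration, with the helper renamed to keep it separate from the real code:

package main

import (
	"fmt"
	"net/http"
	"strings"
)

// flattenHeaders is a simplified, functionally equivalent form of headersToString:
// every value of a repeated header gets its own "Name: value" line.
func flattenHeaders(headers http.Header) string {
	builder := &strings.Builder{}
	for header, values := range headers {
		for _, value := range values {
			builder.WriteString(header)
			builder.WriteString(": ")
			builder.WriteString(value)
			builder.WriteRune('\n')
		}
	}
	return builder.String()
}

func main() {
	h := http.Header{}
	h.Add("Set-Cookie", "a=1")
	h.Add("Set-Cookie", "b=2")
	fmt.Print(flattenHeaders(h))
	// Set-Cookie: a=1
	// Set-Cookie: b=2
}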
@@ -3,6 +3,7 @@ package protocols
 import (
 	"github.com/projectdiscovery/nuclei/v2/internal/progress"
 	"github.com/projectdiscovery/nuclei/v2/pkg/catalogue"
+	"github.com/projectdiscovery/nuclei/v2/pkg/operators"
 	"github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
 	"github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
 	"github.com/projectdiscovery/nuclei/v2/pkg/output"
@@ -43,6 +44,8 @@ type ExecuterOptions struct {
 	Catalogue *catalogue.Catalogue
 	// ProjectFile is the project file for nuclei
 	ProjectFile *projectfile.ProjectFile
+
+	Operators []*operators.Operators // only used by offlinehttp module
 }

 // Request is an interface implemented any protocol based request generator.
@@ -6,15 +6,17 @@ import (
 	"strings"

 	"github.com/pkg/errors"
+	"github.com/projectdiscovery/nuclei/v2/pkg/operators"
 	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
 	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/executer"
+	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/offlinehttp"
 	"github.com/projectdiscovery/nuclei/v2/pkg/types"
 	"github.com/projectdiscovery/nuclei/v2/pkg/workflows"
 	"gopkg.in/yaml.v2"
 )

 // Parse parses a yaml request template file
-func Parse(filePath string, options *protocols.ExecuterOptions) (*Template, error) {
+func Parse(filePath string, options protocols.ExecuterOptions) (*Template, error) {
 	template := &Template{}

 	f, err := os.Open(filePath)
@@ -57,7 +59,7 @@ func Parse(filePath string, options *protocols.ExecuterOptions) (*Template, erro
 	// Compile the workflow request
 	if len(template.Workflows) > 0 {
 		compiled := &template.Workflow
-		if err := template.compileWorkflow(options, compiled); err != nil {
+		if err := template.compileWorkflow(&options, compiled); err != nil {
 			return nil, errors.Wrap(err, "could not compile workflow")
 		}
 		template.CompiledWorkflow = compiled
@@ -69,25 +71,35 @@
 		for _, req := range template.RequestsDNS {
 			requests = append(requests, req)
 		}
-		template.Executer = executer.NewExecuter(requests, options)
+		template.Executer = executer.NewExecuter(requests, &options)
 	}
 	if len(template.RequestsHTTP) > 0 {
-		for _, req := range template.RequestsHTTP {
-			requests = append(requests, req)
+		if options.Options.OfflineHTTP {
+			operators := []*operators.Operators{}
+
+			for _, req := range template.RequestsHTTP {
+				operators = append(operators, &req.Operators)
+			}
+			options.Operators = operators
+			template.Executer = executer.NewExecuter([]protocols.Request{&offlinehttp.Request{}}, &options)
+		} else {
+			for _, req := range template.RequestsHTTP {
+				requests = append(requests, req)
+			}
+			template.Executer = executer.NewExecuter(requests, &options)
 		}
-		template.Executer = executer.NewExecuter(requests, options)
 	}
 	if len(template.RequestsFile) > 0 {
 		for _, req := range template.RequestsFile {
 			requests = append(requests, req)
 		}
-		template.Executer = executer.NewExecuter(requests, options)
+		template.Executer = executer.NewExecuter(requests, &options)
 	}
 	if len(template.RequestsNetwork) > 0 {
 		for _, req := range template.RequestsNetwork {
 			requests = append(requests, req)
 		}
-		template.Executer = executer.NewExecuter(requests, options)
+		template.Executer = executer.NewExecuter(requests, &options)
 	}
 	if template.Executer != nil {
 		err := template.Executer.Compile()
@@ -136,7 +148,7 @@ func (t *Template) parseWorkflowTemplate(workflow *workflows.WorkflowTemplate, o
 		return errors.Wrap(err, "could not get workflow template")
 	}
 	for _, path := range paths {
-		opts := &protocols.ExecuterOptions{
+		opts := protocols.ExecuterOptions{
 			Output:   options.Output,
 			Options:  options.Options,
 			Progress: options.Progress,
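Not part of the diff: the Parse change above is the heart of the feature. When the new -offline-http flag is set, the matchers and extractors of every http request in a template are lifted onto options.Operators and a single offlinehttp.Request replaces the whole list of network-bound HTTP requests. A toy sketch of that swap with stand-in types (not the real nuclei interfaces):

package main

import "fmt"

// stand-in types; the real ones are operators.Operators, the http request type
// and offlinehttp.Request.
type operatorSet struct{ name string }

type httpRequest struct{ ops operatorSet }

type protocolRequest interface{ kind() string }

type networkHTTP struct{ req *httpRequest }

func (n *networkHTTP) kind() string { return "http" }

type offlineHTTP struct{ ops []operatorSet }

func (o *offlineHTTP) kind() string { return "offline-http" }

// buildRequests mirrors the branch added to Parse: offline mode collapses every
// http request into one offline processor carrying all of their operators.
func buildRequests(reqs []*httpRequest, offline bool) []protocolRequest {
	if offline {
		collected := make([]operatorSet, 0, len(reqs))
		for _, r := range reqs {
			collected = append(collected, r.ops)
		}
		return []protocolRequest{&offlineHTTP{ops: collected}}
	}
	out := make([]protocolRequest, 0, len(reqs))
	for _, r := range reqs {
		out = append(out, &networkHTTP{req: r})
	}
	return out
}

func main() {
	reqs := []*httpRequest{{ops: operatorSet{"status-check"}}, {ops: operatorSet{"body-word"}}}
	for _, r := range buildRequests(reqs, true) {
		fmt.Println(r.kind()) // offline-http
	}
}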
@@ -12,7 +12,7 @@ import (
 func ToString(data interface{}) string {
 	switch s := data.(type) {
 	case nil:
-		return "<nil>"
+		return ""
 	case string:
 		return s
 	case bool:
@@ -84,4 +84,8 @@ type Options struct {
 	// can be specified with -l flag and -tags can be used in combination with
 	// the -l flag.
 	Tags goflags.StringSlice
+	// OfflineHTTP is a flag that specific offline processing of http response
+	// using same matchers/extractors from http protocol without the need
+	// to send a new request, reading responses from a file.
+	OfflineHTTP bool
 }