Mirror of https://github.com/projectdiscovery/nuclei.git, synced 2025-12-17 19:25:26 +00:00

commit 4cc770f102: Merged dev
.gitignore (vendored): 15 changes
@@ -1,14 +1,13 @@
-cmd/nuclei/nuclei*
-v2/cmd/nuclei/nuclei
 .idea
-integration_tests/integration-test
 integration_tests/nuclei
-v2/cmd/integration-test/integration-test
-bin
-v2/pkg/protocols/common/helpers/deserialization/testdata/Deserialize.class
-v2/pkg/protocols/common/helpers/deserialization/testdata/ValueObject.class
-v2/pkg/protocols/common/helpers/deserialization/testdata/ValueObject2.ser
+integration_tests/integration-test
+v2/cmd/nuclei/main
+v2/cmd/nuclei/nuclei
+v2/cmd/integration-test/nuclei
 v2/cmd/functional-test/nuclei_dev
 v2/cmd/functional-test/nuclei_main
 v2/cmd/functional-test/functional-test
 v2/cmd/docgen/docgen
+v2/pkg/protocols/common/helpers/deserialization/testdata/Deserialize.class
+v2/pkg/protocols/common/helpers/deserialization/testdata/ValueObject.class
+v2/pkg/protocols/common/helpers/deserialization/testdata/ValueObject2.ser
.golangci.yml: 108 changes (file deleted)
@@ -1,108 +0,0 @@
-linters-settings:
-  dupl:
-    threshold: 100
-  exhaustive:
-    default-signifies-exhaustive: false
-  # funlen:
-  #   lines: 100
-  #   statements: 50
-  #goconst:
-  #  min-len: 2
-  #  min-occurrences: 2
-  gocritic:
-    enabled-tags:
-      - diagnostic
-      - experimental
-      - opinionated
-      - performance
-      - style
-    disabled-checks:
-      - dupImport # https://github.com/go-critic/go-critic/issues/845
-      - ifElseChain
-  # gocyclo:
-  #   min-complexity: 15
-  goimports:
-    local-prefixes: github.com/golangci/golangci-lint
-  golint:
-    min-confidence: 0
-  gomnd:
-    settings:
-      mnd:
-        # don't include the "operation" and "assign"
-        checks: argument,case,condition,return
-  govet:
-    check-shadowing: true
-    settings:
-      printf:
-        funcs:
-          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof
-          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf
-          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf
-          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf
-  # lll:
-  #   line-length: 140
-  maligned:
-    suggest-new: true
-  misspell:
-    locale: US
-  nolintlint:
-    allow-leading-space: true # don't require machine-readable nolint directives (i.e. with no leading space)
-    allow-unused: false # report any unused nolint directives
-    require-explanation: false # don't require an explanation for nolint directives
-    require-specific: false # don't require nolint directives to be specific about which linter is being skipped
-
-linters:
-  # please, do not use `enable-all`: it's deprecated and will be removed soon.
-  # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint
-  disable-all: true
-  enable:
-    #- bodyclose
-    - deadcode
-    - dogsled
-    - errcheck
-    - exhaustive
-    - gochecknoinits
-    #- goconst
-    - gocritic
-    - gofmt
-    - goimports
-    #- gomnd
-    - goprintffuncname
-    - gosimple
-    - govet
-    - ineffassign
-    - interfacer
-    - maligned
-    - misspell
-    - nakedret
-    - noctx
-    - nolintlint
-    - rowserrcheck
-    - scopelint
-    - staticcheck
-    - structcheck
-    - stylecheck
-    - typecheck
-    - unconvert
-    - unparam
-    - unused
-    - varcheck
-    - whitespace
-    - revive
-
-  # don't enable:
-  # - depguard
-  # - asciicheck
-  # - funlen
-  # - gochecknoglobals
-  # - gocognit
-  # - gocyclo
-  # - godot
-  # - godox
-  # - goerr113
-  # - gosec
-  # - lll
-  # - nestif
-  # - prealloc
-  # - testpackage
-  # - wsl
@@ -1,4 +1,4 @@
-FROM golang:1.16.6-alpine as build-env
+FROM golang:1.16.7-alpine as build-env
 RUN GO111MODULE=on go get -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei

 FROM alpine:latest
@@ -82,7 +82,7 @@ Usage:

 Flags:
 TARGET:
-   -u, -target string                  target URL/host to scan
+   -u, -target string[]                target URLs/hosts to scan
    -l, -list string                    path to file containing a list of target URLs/hosts to scan (one per line)

 TEMPLATES:
@@ -118,9 +118,11 @@ CONFIGURATIONS:
    -config string                      path to the nuclei configuration file
    -rc, -report-config string          nuclei reporting module configuration file
    -H, -header string[]                custom headers in header:value format
+   -V, -var value                      custom vars in var=value format
    -r, -resolvers string               file containing resolver list for nuclei
    -system-resolvers                   use system DNS resolving as error fallback
    -passive                            enable passive HTTP response processing mode
+   -env-vars                           Enable environment variables support

 INTERACTSH:
    -no-interactsh                      do not use interactsh server for blind interaction polling
@@ -132,6 +134,7 @@ INTERACTSH:

 RATE-LIMIT:
    -rl, -rate-limit int                maximum number of requests to send per second (default 150)
+   -rlm, -rate-limit-minute int        maximum number of requests to send per minute
    -bs, -bulk-size int                 maximum number of hosts to be analyzed in parallel per template (default 25)
    -c, -concurrency int                maximum number of templates to be executed in parallel (default 10)

@@ -160,6 +163,7 @@ DEBUG:
 UPDATE:
    -update                             update nuclei to the latest released version
    -ut, -update-templates              update the community templates to latest released version
+   -nut, -no-update-templates          Do not check for nuclei-templates updates
    -ud, -update-directory string       overwrite the default nuclei-templates directory (default "$HOME/nuclei-templates")

 STATISTICS:
@@ -97,7 +97,7 @@ nuclei -h
 |burp-collaborator-biid|Use the burp-collaborator plugin|nuclei -burp-collaborator-biid XXXX|
 |c|Maximum number of templates to run in parallel (default 10)|nuclei -c 10|
 |l|Run against a list of URLs|nuclei -l urls.txt|
-|target|Run against a target|nuclei -target hxxps://example.com|
+|target|Run against one or more targets|nuclei -target hxxps://example.com -target hxxps://example2.com|
 |t|Template types to run|nuclei -t git-core.yaml -t cves/|
 |no-color|Disable colored output|nuclei -no-color|
 |no-meta|Do not display metadata for matched results|nuclei -no-meta|
integration_tests/run.sh: 1 change (mode changed from normal file to executable)

@@ -1,5 +1,6 @@
 #!/bin/bash

+rm integration-test nuclei 2>/dev/null
 cd ../v2/cmd/nuclei
 go build
 mv nuclei ../../../integration_tests/nuclei
@@ -1,12 +1,14 @@
 package main

 import (
+    "fmt"
     "os"
     "path"

     "github.com/projectdiscovery/goflags"
     "github.com/projectdiscovery/gologger"
     "github.com/projectdiscovery/nuclei/v2/internal/runner"
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/pkg/types"
 )

@@ -41,9 +43,13 @@ func readConfig() {
     flagSet.SetDescription(`Nuclei is a fast, template based vulnerability scanner focusing
 on extensive configurability, massive extensibility and ease of use.`)

+    /* TODO Important: The defined default values, especially for slice/array types are NOT DEFAULT VALUES, but rather implicit values to which the user input is appended.
+    This can be very confusing and should be addressed
+    */
+
     createGroup(flagSet, "input", "Target",
-        flagSet.StringVarP(&options.Target, "target", "u", "", "target URL/host to scan"),
-        flagSet.StringVarP(&options.Targets, "list", "l", "", "path to file containing a list of target URLs/hosts to scan (one per line)"),
+        flagSet.StringSliceVarP(&options.Targets, "target", "u", []string{}, "target URLs/hosts to scan"),
+        flagSet.StringVarP(&options.TargetsFilePath, "list", "l", "", "path to file containing a list of target URLs/hosts to scan (one per line)"),
     )

     createGroup(flagSet, "templates", "Templates",
@@ -64,7 +70,7 @@ on extensive configurability, massive extensibility and ease of use.`)
         flagSet.StringSliceVar(&options.IncludeTemplates, "include-templates", []string{}, "templates to be executed even if they are excluded either by default or configuration"),
         flagSet.StringSliceVarP(&options.ExcludedTemplates, "exclude", "exclude-templates", []string{}, "template or template directory paths to exclude"),

-        flagSet.NormalizedStringSliceVarP(&options.Severity, "impact", "severity", []string{}, "execute templates that match the provided severities only"),
+        flagSet.VarP(&options.Severities, "impact", "severity", fmt.Sprintf("Templates to run based on severity. Possible values: %s", severity.GetSupportedSeverities().String())),
         flagSet.NormalizedStringSliceVar(&options.Author, "author", []string{}, "execute templates that are (co-)created by the specified authors"),
     )

@@ -92,9 +98,12 @@ on extensive configurability, massive extensibility and ease of use.`)

         flagSet.StringSliceVarP(&options.CustomHeaders, "header", "H", []string{}, "custom headers in header:value format"),

+        flagSet.RuntimeMapVarP(&options.Vars, "var", "V", []string{}, "custom vars in var=value format"),
+
         flagSet.StringVarP(&options.ResolversFile, "resolvers", "r", "", "file containing resolver list for nuclei"),
         flagSet.BoolVar(&options.SystemResolvers, "system-resolvers", false, "use system DNS resolving as error fallback"),
         flagSet.BoolVar(&options.OfflineHTTP, "passive", false, "enable passive HTTP response processing mode"),
+        flagSet.BoolVar(&options.EnvironmentVariables, "env-vars", false, "Enable environment variables support"),
     )

     createGroup(flagSet, "interactsh", "interactsh",
@@ -117,6 +126,7 @@ on extensive configurability, massive extensibility and ease of use.`)
     createGroup(flagSet, "optimization", "Optimizations",
         flagSet.IntVar(&options.Timeout, "timeout", 5, "time to wait in seconds before timeout"),
         flagSet.IntVar(&options.Retries, "retries", 1, "number of times to retry a failed request"),
+        flagSet.IntVar(&options.HostMaxErrors, "host-max-error", 30, "max errors for a host before skipping from scan"),

         flagSet.BoolVar(&options.Project, "project", false, "use a project folder to avoid sending same request multiple times"),
         flagSet.StringVar(&options.ProjectPath, "project-path", os.TempDir(), "set a specific project path"),
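The hunks above switch the -target/-u flag from a single string to a string slice (StringSliceVarP) and add a -V/-var runtime map, so the same flag can now be passed several times on the command line. The following standalone sketch, built only on the Go standard library rather than on projectdiscovery/goflags, illustrates how a repeatable flag of that kind behaves; the stringSlice type below is an illustrative stand-in, not nuclei code.

// repeatable_flag.go: minimal sketch of a repeatable -target flag.
package main

import (
    "flag"
    "fmt"
    "strings"
)

// stringSlice collects every occurrence of a flag into a slice.
type stringSlice []string

func (s *stringSlice) String() string { return strings.Join(*s, ",") }

func (s *stringSlice) Set(value string) error {
    *s = append(*s, strings.TrimSpace(value))
    return nil
}

func main() {
    var targets stringSlice
    flag.Var(&targets, "target", "target URL/host to scan (can be given multiple times)")
    flag.Parse()

    for _, t := range targets {
        fmt.Println("would scan:", t)
    }
}

Running it as, for example, go run . -target https://example.com -target https://example2.com appends each occurrence to the slice, which mirrors the new "-u, -target string[]" behaviour documented in the README hunk earlier in this commit.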
v2/go.mod: 21 changes

@@ -5,8 +5,10 @@ go 1.15
 require (
     github.com/Knetic/govaluate v3.0.0+incompatible
     github.com/andygrunwald/go-jira v1.13.0
+    github.com/antchfx/htmlquery v1.2.3
     github.com/apex/log v1.9.0
     github.com/blang/semver v3.5.1+incompatible
+    github.com/bluele/gcache v0.0.2 // indirect
     github.com/c4milo/unpackit v0.1.0 // indirect
     github.com/corpix/uarand v0.1.1
     github.com/fatih/structs v1.1.0 // indirect
@@ -17,31 +19,36 @@ require (
     github.com/gosuri/uiprogress v0.0.1 // indirect
     github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
     github.com/hashicorp/go-retryablehttp v0.6.8 // indirect
-    github.com/itchyny/gojq v0.12.4
     github.com/json-iterator/go v1.1.10
+    github.com/itchyny/gojq v0.12.4
+    github.com/json-iterator/go v1.1.11
     github.com/julienschmidt/httprouter v1.3.0
     github.com/karlseguin/ccache v2.0.3+incompatible
     github.com/karrick/godirwalk v1.16.1
     github.com/logrusorgru/aurora v2.0.3+incompatible
     github.com/mattn/go-runewidth v0.0.10 // indirect
-    github.com/miekg/dns v1.1.38
+    github.com/miekg/dns v1.1.43
     github.com/olekukonko/tablewriter v0.0.5
     github.com/owenrumney/go-sarif v1.0.4
     github.com/pkg/errors v0.9.1
     github.com/projectdiscovery/clistats v0.0.8
     github.com/projectdiscovery/fastdialer v0.0.8
-    github.com/projectdiscovery/goflags v0.0.6
+    github.com/projectdiscovery/goflags v0.0.7
     github.com/projectdiscovery/gologger v1.1.4
     github.com/projectdiscovery/hmap v0.0.1
-    github.com/projectdiscovery/interactsh v0.0.3
+    github.com/projectdiscovery/interactsh v0.0.4
     github.com/projectdiscovery/rawhttp v0.0.7
     github.com/projectdiscovery/retryabledns v1.0.10
     github.com/projectdiscovery/retryablehttp-go v1.0.2-0.20210524224054-9fbe1f2b0727
+<<<<<<< HEAD
     github.com/projectdiscovery/stringsutil v0.0.0-20210617141317-00728870f68d
     github.com/projectdiscovery/yamldoc-go v1.0.1 // indirect
+=======
+    github.com/projectdiscovery/stringsutil v0.0.0-20210804142656-fd3c28dbaafe
+>>>>>>> 37eaadefeaec7c24eb38a0c70888f832f41334ef
     github.com/remeh/sizedwaitgroup v1.0.0
     github.com/rivo/uniseg v0.2.0 // indirect
-    github.com/rs/xid v1.2.1
+    github.com/rs/xid v1.3.0
     github.com/segmentio/ksuid v1.0.3
     github.com/shirou/gopsutil/v3 v3.21.5
     github.com/spaolacci/murmur3 v1.1.0
@@ -55,10 +62,8 @@ require (
     go.uber.org/atomic v1.7.0
     go.uber.org/multierr v1.6.0
     go.uber.org/ratelimit v0.2.0
-    golang.org/x/crypto v0.0.0-20210218145215-b8e89b74b9df // indirect
-    golang.org/x/net v0.0.0-20210521195947-fe42d452be8f
+    golang.org/x/net v0.0.0-20210614182718-04defd469f4e
     golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99
-    golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 // indirect
     golang.org/x/time v0.0.0-20201208040808-7e3f01d25324 // indirect
     google.golang.org/appengine v1.6.7 // indirect
     gopkg.in/yaml.v2 v2.4.0
v2/go.sum: 48 changes

@@ -31,6 +31,8 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl
 cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
 cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
+git.mills.io/prologic/smtpd v0.0.0-20210710122116-a525b76c287a h1:3i+FJ7IpSZHL+VAjtpQeZCRhrpP0odl5XfoLBY4fxJ8=
+git.mills.io/prologic/smtpd v0.0.0-20210710122116-a525b76c287a/go.mod h1:C7hXLmFmPYPjIDGfQl1clsmQ5TMEQfmzWTrJk475bUs=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
 github.com/Knetic/govaluate v3.0.0+incompatible h1:7o6+MAPhYTCF0+fdvoz1xDedhRb4f6s9Tn1Tt7/WTEg=
@@ -44,6 +46,10 @@ github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 h1:MzBOUgng9or
 github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129/go.mod h1:rFgpPQZYZ8vdbc+48xibu8ALc3yeyd64IhHS+PU6Yyg=
 github.com/andygrunwald/go-jira v1.13.0 h1:vvIImGgX32bHfoiyUwkNo+/YrPnRczNarvhLOncP6dE=
 github.com/andygrunwald/go-jira v1.13.0/go.mod h1:jYi4kFDbRPZTJdJOVJO4mpMMIwdB+rcZwSO58DzPd2I=
+github.com/antchfx/htmlquery v1.2.3 h1:sP3NFDneHx2stfNXCKbhHFo8XgNjCACnU/4AO5gWz6M=
+github.com/antchfx/htmlquery v1.2.3/go.mod h1:B0ABL+F5irhhMWg54ymEZinzMSi0Kt3I2if0BLYa3V0=
+github.com/antchfx/xpath v1.1.6 h1:6sVh6hB5T6phw1pFpHRQ+C4bd8sNI+O58flqtg7h0R0=
+github.com/antchfx/xpath v1.1.6/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
 github.com/apex/log v1.9.0 h1:FHtw/xuaM8AgmvDDTI9fiwoAL25Sq2cxojnZICUU8l0=
 github.com/apex/log v1.9.0/go.mod h1:m82fZlWIuiWzWP04XCTXmnX0xRkYYbCdYn8jbJeLBEA=
 github.com/apex/logs v1.0.0/go.mod h1:XzxuLZ5myVHDy9SAmYpamKKRNApGj54PfYLcFrXqDwo=
@@ -54,6 +60,8 @@ github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN
 github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I=
 github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ=
 github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
+github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
+github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
 github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 h1:GKTyiRCL6zVf5wWaqKnf+7Qs6GbEPfd4iMOitWzXJx8=
 github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8/go.mod h1:spo1JLcs67NmW1aVLEgtA8Yy1elc+X8y5SRW1sFW4Og=
 github.com/c4milo/unpackit v0.1.0 h1:91pWJ6B3svZ4LOE+p3rnyucRK5fZwBdF/yQ/pcZO31I=
@@ -109,6 +117,7 @@ github.com/go-rod/rod v0.91.1/go.mod h1:/W4lcZiCALPD603MnJGIvhtywP3R6yRB9EDfFfsH
 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
 github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
 github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
 github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
@@ -182,6 +191,8 @@ github.com/hashicorp/go-retryablehttp v0.6.8 h1:92lWxgpa+fF3FozM4B3UZtHZMJX8T5XT
 github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
 github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
 github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
+github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
 github.com/hooklift/assert v0.1.0 h1:UZzFxx5dSb9aBtvMHTtnPuvFnBvcEhHTPb9+0+jpEjs=
 github.com/hooklift/assert v0.1.0/go.mod h1:pfexfvIHnKCdjh6CkkIZv5ic6dQ6aU2jhKghBlXuwwY=
 github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
@@ -196,16 +207,17 @@ github.com/jasonlvhit/gocron v0.0.1 h1:qTt5qF3b3srDjeOIR4Le1LfeyvoYzJlYpqvG7tJX5
 github.com/jasonlvhit/gocron v0.0.1/go.mod h1:k9a3TV8VcU73XZxfVHCHWMWF9SOqgoku0/QlY2yvlA4=
 github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
 github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0=
-github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
 github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ=
+github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
 github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
 github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
 github.com/julienschmidt/httprouter v1.3.0 h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U=
 github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
 github.com/karlseguin/ccache v2.0.3+incompatible h1:j68C9tWOROiOLWTS/kCGg9IcJG+ACqn5+0+t8Oh83UU=
 github.com/karlseguin/ccache v2.0.3+incompatible/go.mod h1:CM9tNPzT6EdRh14+jiW8mEF9mkNZuuE51qmgGYUB93w=
-github.com/karlseguin/ccache/v2 v2.0.7 h1:y5Pfi4eiyYCOD6LS/Kj+o6Nb4M5Ngpw9qFQs+v44ZYM=
-github.com/karlseguin/ccache/v2 v2.0.7/go.mod h1:2BDThcfQMf/c0jnZowt16eW405XIqZPavt+HoYEtcxQ=
+github.com/karlseguin/ccache/v2 v2.0.8 h1:lT38cE//uyf6KcFok0rlgXtGFBWxkI6h/qg4tbFyDnA=
+github.com/karlseguin/ccache/v2 v2.0.8/go.mod h1:2BDThcfQMf/c0jnZowt16eW405XIqZPavt+HoYEtcxQ=
 github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003 h1:vJ0Snvo+SLMY72r5J4sEfkuE7AFbixEP2qRbEcum/wA=
 github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003/go.mod h1:zNBxMY8P21owkeogJELCLeHIt+voOSduHYTFUbwRAV8=
 github.com/karrick/godirwalk v1.16.1 h1:DynhcF+bztK8gooS0+NDJFrdNZjJ3gzVzC545UNA9iw=
@@ -226,7 +238,6 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
 github.com/logrusorgru/aurora v2.0.3+incompatible h1:tOpm7WcpBTn4fjmVfgpQq0EfczGlG91VSDkswnjF5A8=
 github.com/logrusorgru/aurora v2.0.3+incompatible/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
 github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
@@ -240,9 +251,9 @@ github.com/mattn/go-runewidth v0.0.10 h1:CoZ3S2P7pvtP45xOtBw+/mDL2z0RKI576gSkzRR
 github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
 github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
 github.com/miekg/dns v1.1.29/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
-github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
-github.com/miekg/dns v1.1.38 h1:MtIY+fmHUVVgv1AXzmKMWcwdCYxTRPG1EDjpqF4RCEw=
 github.com/miekg/dns v1.1.38/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
+github.com/miekg/dns v1.1.43 h1:JKfpVSCB84vrAmHzyrsxB5NAr5kLoMXZArPSw7Qlgyg=
+github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4=
 github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
@@ -273,15 +284,14 @@ github.com/projectdiscovery/clistats v0.0.8 h1:tjmWb15mqsPf/yrQXVHLe2ThZX/5+mgKS
 github.com/projectdiscovery/clistats v0.0.8/go.mod h1:lV6jUHAv2bYWqrQstqW8iVIydKJhWlVaLl3Xo9ioVGg=
 github.com/projectdiscovery/fastdialer v0.0.8 h1:mEMc8bfXV5hc1PUEkJiUnR5imYQe6+839Zezd5jLkc0=
 github.com/projectdiscovery/fastdialer v0.0.8/go.mod h1:AuaV0dzrNeBLHqjNnzpFSnTXnHGIZAlGQE+WUMmSIW4=
-github.com/projectdiscovery/goflags v0.0.6 h1:4ErduTfSC55cRR3TmUg+TQirBlCuBdBadrluAsy1pew=
-github.com/projectdiscovery/goflags v0.0.6/go.mod h1:Ae1mJ5MIIqjys0lFe3GiMZ10Z8VLaxkYJ1ySA4Zv8HA=
-github.com/projectdiscovery/gologger v1.1.3/go.mod h1:jdXflz3TLB8bcVNzb0v26TztI9KPz8Lr4BVdUhNUs6E=
+github.com/projectdiscovery/goflags v0.0.7 h1:aykmRkrOgDyRwcvGrK3qp+9aqcjGfAMs/+LtRmtyxwk=
+github.com/projectdiscovery/goflags v0.0.7/go.mod h1:Jjwsf4eEBPXDSQI2Y+6fd3dBumJv/J1U0nmpM+hy2YY=
 github.com/projectdiscovery/gologger v1.1.4 h1:qWxGUq7ukHWT849uGPkagPKF3yBPYAsTtMKunQ8O2VI=
 github.com/projectdiscovery/gologger v1.1.4/go.mod h1:Bhb6Bdx2PV1nMaFLoXNBmHIU85iROS9y1tBuv7T5pMY=
 github.com/projectdiscovery/hmap v0.0.1 h1:VAONbJw5jP+syI5smhsfkrq9XPGn4aiYy5pR6KR1wog=
 github.com/projectdiscovery/hmap v0.0.1/go.mod h1:VDEfgzkKQdq7iGTKz8Ooul0NuYHQ8qiDs6r8bPD1Sb0=
-github.com/projectdiscovery/interactsh v0.0.3 h1:PUkWk+NzSyd5glMqfORmuqizhsd7c3WdTYBOto/MQIU=
-github.com/projectdiscovery/interactsh v0.0.3/go.mod h1:dWnKO14d2FLP3kLhI9DecEsiAC/aZiJoUBGFjGhDskY=
+github.com/projectdiscovery/interactsh v0.0.4 h1:3BtCZrrTovGYiqdFktXJ4NxKAQFvUvzcEI5pJIuShM8=
+github.com/projectdiscovery/interactsh v0.0.4/go.mod h1:PtJrddeBW1/LeOVgTvvnjUl3Hu/17jTkoIi8rXeEODE=
 github.com/projectdiscovery/rawhttp v0.0.7 h1:5m4peVgjbl7gqDcRYMTVEuX+Xs/nh76ohTkkvufucLg=
 github.com/projectdiscovery/rawhttp v0.0.7/go.mod h1:PQERZAhAv7yxI/hR6hdDPgK1WTU56l204BweXrBec+0=
 github.com/projectdiscovery/retryabledns v1.0.7/go.mod h1:/UzJn4I+cPdQl6pKiiQfvVAT636YZvJQYZhYhGB0dUQ=
@@ -327,7 +337,6 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
-github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
@@ -387,8 +396,6 @@ go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=
 go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
 go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=
 go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
-go.uber.org/ratelimit v0.1.0 h1:U2AruXqeTb4Eh9sYQSTrMhH8Cb7M0Ian2ibBOnBcnAw=
-go.uber.org/ratelimit v0.1.0/go.mod h1:2X8KaoNd1J0lZV+PxJk/5+DGbO/tpwLR1m++a7FnB/Y=
 go.uber.org/ratelimit v0.2.0 h1:UQE2Bgi7p2B85uP5dC2bbRtig0C+OeNRnNEafLjsLPA=
 go.uber.org/ratelimit v0.2.0/go.mod h1:YYBV4e4naJvhpitQrWJu1vCpgB7CboMe0qhltKt6mUg=
 golang.org/x/arch v0.0.0-20180920145803-b19384d3c130/go.mod h1:cYlCBUl1MsqxdiKgmc4uh7TxZfWSFLOGSRR090WDxt8=
@@ -398,11 +405,8 @@ golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8U
 golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
-golang.org/x/crypto v0.0.0-20210218145215-b8e89b74b9df h1:y7QZzfUiTwWam+xBn29Ulb8CBwVN5UdzmMDavl9Whlw=
-golang.org/x/crypto v0.0.0-20210218145215-b8e89b74b9df/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -456,6 +460,7 @@ golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLL
 golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
 golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
 golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
 golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
 golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
 golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
@@ -465,8 +470,10 @@ golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81R
 golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
 golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
 golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
-golang.org/x/net v0.0.0-20210521195947-fe42d452be8f h1:Si4U+UcgJzya9kpiEUJKQvjr512OLli+gL4poHrz93U=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20210521195947-fe42d452be8f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q=
+golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -483,8 +490,8 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 h1:SQFwaSi55rU7vdNs9Yr0Z324VNlrF+0wMqRXT4St8ck=
-golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180903190138-2b024373dcd9/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -523,6 +530,7 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210217105451-b926d437f341/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210601080250-7ecdf8ef093b h1:qh4f65QIVFjq9eBURLEYWqaEXmOyqdUyiBSgaXWccWk=
 golang.org/x/sys v0.0.0-20210601080250-7ecdf8ef093b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -1,24 +1,40 @@
 package colorizer

-import "github.com/logrusorgru/aurora"
-
-// Colorizer returns a colorized severity printer
-type Colorizer struct {
-    Data map[string]string
-}
+import (
+    "fmt"
+
+    "github.com/logrusorgru/aurora"
+    "github.com/projectdiscovery/gologger"
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
+)

 const (
     fgOrange uint8 = 208
 )

-// New returns a new severity based colorizer
-func New(colorizer aurora.Aurora) *Colorizer {
-    severityMap := map[string]string{
-        "info": colorizer.Blue("info").String(),
-        "low": colorizer.Green("low").String(),
-        "medium": colorizer.Yellow("medium").String(),
-        "high": colorizer.Index(fgOrange, "high").String(),
-        "critical": colorizer.Red("critical").String(),
-    }
-    return &Colorizer{Data: severityMap}
+func GetColor(colorizer aurora.Aurora, templateSeverity fmt.Stringer) string {
+    var method func(arg interface{}) aurora.Value
+    switch templateSeverity {
+    case severity.Info:
+        method = colorizer.Blue
+    case severity.Low:
+        method = colorizer.Green
+    case severity.Medium:
+        method = colorizer.Yellow
+    case severity.High:
+        method = func(stringValue interface{}) aurora.Value { return colorizer.Index(fgOrange, stringValue) }
+    case severity.Critical:
+        method = colorizer.Red
+    default:
+        gologger.Warning().Msgf("The '%s' severity does not have an color associated!", templateSeverity)
+        method = colorizer.White
+    }
+
+    return method(templateSeverity.String()).String()
+}
+
+func New(colorizer aurora.Aurora) func(severity.Severity) string {
+    return func(severity severity.Severity) string {
+        return GetColor(colorizer, severity)
+    }
 }
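The rewritten colorizer above replaces the severity-string map with a switch that picks an aurora colouring method and hands it back through the closure returned by New. A self-contained sketch of the same pattern follows; the Severity type, its constants, and NewColorFunc are local stand-ins for nuclei's internal severity package so the example compiles on its own, not the project's actual API.

// colorfunc_sketch.go: severity-to-colour closure in the style of the diff above.
package main

import (
    "fmt"

    "github.com/logrusorgru/aurora"
)

// Severity is a local stand-in for nuclei's internal severity type.
type Severity string

func (s Severity) String() string { return string(s) }

const (
    Info     Severity = "info"
    Low      Severity = "low"
    Medium   Severity = "medium"
    High     Severity = "high"
    Critical Severity = "critical"
)

const fgOrange uint8 = 208 // 256-colour palette index used for "high"

// NewColorFunc mirrors the closure shape returned by colorizer.New in the diff.
func NewColorFunc(au aurora.Aurora) func(Severity) string {
    return func(s Severity) string {
        var method func(arg interface{}) aurora.Value
        switch s {
        case Info:
            method = au.Blue
        case Low:
            method = au.Green
        case Medium:
            method = au.Yellow
        case High:
            method = func(v interface{}) aurora.Value { return au.Index(fgOrange, v) }
        case Critical:
            method = au.Red
        default:
            method = au.White
        }
        return method(s.String()).String()
    }
}

func main() {
    addColor := NewColorFunc(aurora.NewAurora(true))
    fmt.Println(addColor(High), addColor(Info))
}

Calling addColor(High) returns the severity name wrapped in the orange 256-colour escape sequence, matching the colorizer.Index(fgOrange, ...) branch above, while unknown values fall through to white.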
@@ -1 +0,0 @@
-package runner
@@ -14,6 +14,10 @@ func (r *Runner) processTemplateWithList(template *templates.Template) bool {
     r.hostMap.Scan(func(k, _ []byte) error {
         URL := string(k)

+        // Skip if the host has had errors
+        if r.hostErrors != nil && r.hostErrors.Check(URL) {
+            return nil
+        }
         wg.Add()
         go func(URL string) {
             defer wg.Done()
@@ -37,6 +41,11 @@ func (r *Runner) processWorkflowWithList(template *templates.Template) bool {

     r.hostMap.Scan(func(k, _ []byte) error {
         URL := string(k)
+
+        // Skip if the host has had errors
+        if r.hostErrors != nil && r.hostErrors.Check(URL) {
+            return nil
+        }
         wg.Add()
         go func(URL string) {
             defer wg.Done()
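Both processor hunks above consult r.hostErrors.Check(URL) before scheduling work, so a host that has already produced too many errors is skipped; the threshold comes from the new -host-max-error flag (default 30). The sketch below is a hypothetical minimal version of such a cache. Only Check appears in the diff, so the struct layout, the MarkFailed method, and the threshold handling here are assumptions for illustration, not the hosterrorscache package API.

// hosterrors_sketch.go: illustrative host-error counter with a skip threshold.
package main

import (
    "fmt"
    "sync"
)

type HostErrorCache struct {
    maxErrors int
    mu        sync.Mutex
    counts    map[string]int
}

func NewHostErrorCache(maxErrors int) *HostErrorCache {
    return &HostErrorCache{maxErrors: maxErrors, counts: make(map[string]int)}
}

// MarkFailed records one failure for the host.
func (c *HostErrorCache) MarkFailed(host string) {
    c.mu.Lock()
    defer c.mu.Unlock()
    c.counts[host]++
}

// Check reports whether the host crossed the error threshold and
// should be skipped by further templates.
func (c *HostErrorCache) Check(host string) bool {
    c.mu.Lock()
    defer c.mu.Unlock()
    return c.counts[host] >= c.maxErrors
}

func main() {
    cache := NewHostErrorCache(3) // e.g. a low -host-max-error setting
    for i := 0; i < 3; i++ {
        cache.MarkFailed("https://example.com")
    }
    fmt.Println(cache.Check("https://example.com")) // true: skip this host
}

A mutex-guarded map keeps the example small; a production cache would presumably also need eviction and normalisation of host keys, which this sketch does not attempt.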
|||||||
@@ -10,17 +10,26 @@ import (

	"github.com/logrusorgru/aurora"
	"github.com/pkg/errors"
+	"github.com/remeh/sizedwaitgroup"
+	"github.com/rs/xid"
+	"go.uber.org/atomic"
+	"go.uber.org/ratelimit"
+	"gopkg.in/yaml.v2"

	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/hmap/store/hybrid"
	"github.com/projectdiscovery/nuclei/v2/internal/colorizer"
+	"github.com/projectdiscovery/nuclei/v2/internal/severity"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog/config"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader"
	"github.com/projectdiscovery/nuclei/v2/pkg/output"
+	"github.com/projectdiscovery/nuclei/v2/pkg/parsers"
	"github.com/projectdiscovery/nuclei/v2/pkg/progress"
	"github.com/projectdiscovery/nuclei/v2/pkg/projectfile"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/clusterer"
+	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/hosterrorscache"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/interactsh"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/protocolinit"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/headless/engine"
@@ -29,11 +38,7 @@ import (
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/sarif"
	"github.com/projectdiscovery/nuclei/v2/pkg/templates"
	"github.com/projectdiscovery/nuclei/v2/pkg/types"
-	"github.com/remeh/sizedwaitgroup"
-	"github.com/rs/xid"
-	"go.uber.org/atomic"
-	"go.uber.org/ratelimit"
-	"gopkg.in/yaml.v2"
+	"github.com/projectdiscovery/nuclei/v2/pkg/utils"
)

// Runner is a client for running the enumeration process.
@@ -49,9 +54,10 @@ type Runner struct {
	progress       progress.Progress
	colorizer      aurora.Aurora
	issuesClient   *reporting.Client
-	severityColors *colorizer.Colorizer
+	addColor       func(severity.Severity) string
	browser        *engine.Browser
	ratelimiter    ratelimit.Limiter
+	hostErrors     *hosterrorscache.Cache
}

// New creates a new client for running enumeration process.
@@ -118,14 +124,14 @@ func New(options *types.Options) (*Runner, error) {
	// output coloring
	useColor := !options.NoColor
	runner.colorizer = aurora.NewAurora(useColor)
-	runner.severityColors = colorizer.New(runner.colorizer)
+	runner.addColor = colorizer.New(runner.colorizer)

	if options.TemplateList {
		runner.listAvailableTemplates()
		os.Exit(0)
	}

-	if (len(options.Templates) == 0 || !options.NewTemplates || (options.Targets == "" && !options.Stdin && options.Target == "")) && options.UpdateTemplates {
+	if (len(options.Templates) == 0 || !options.NewTemplates || (options.TargetsFilePath == "" && !options.Stdin && len(options.Targets) == 0)) && options.UpdateTemplates {
		os.Exit(0)
	}
	hm, err := hybrid.New(hybrid.DefaultDiskOptions)
@@ -137,11 +143,23 @@ func New(options *types.Options) (*Runner, error) {
	runner.inputCount = 0
	dupeCount := 0

-	// Handle single target
-	if options.Target != "" {
+	// Handle multiple targets
+	if len(options.Targets) != 0 {
+		for _, target := range options.Targets {
+			url := strings.TrimSpace(target)
+			if url == "" {
+				continue
+			}
+
+			if _, ok := runner.hostMap.Get(url); ok {
+				dupeCount++
+				continue
+			}
+
			runner.inputCount++
			// nolint:errcheck // ignoring error
-		runner.hostMap.Set(options.Target, nil)
+			runner.hostMap.Set(url, nil)
+		}
	}

	// Handle stdin
@@ -152,19 +170,21 @@ func New(options *types.Options) (*Runner, error) {
			if url == "" {
				continue
			}

			if _, ok := runner.hostMap.Get(url); ok {
				dupeCount++
				continue
			}

			runner.inputCount++
			// nolint:errcheck // ignoring error
			runner.hostMap.Set(url, nil)
		}
	}

-	// Handle taget file
-	if options.Targets != "" {
-		input, inputErr := os.Open(options.Targets)
+	// Handle target file
+	if options.TargetsFilePath != "" {
+		input, inputErr := os.Open(options.TargetsFilePath)
		if inputErr != nil {
			return nil, errors.Wrap(inputErr, "could not open targets file")
		}
@@ -212,7 +232,7 @@ func New(options *types.Options) (*Runner, error) {
	// create project file if requested or load existing one
	if options.Project {
		var projectFileErr error
-		runner.projectFile, projectFileErr = projectfile.New(&projectfile.Options{Path: options.ProjectPath, Cleanup: options.ProjectPath == ""})
+		runner.projectFile, projectFileErr = projectfile.New(&projectfile.Options{Path: options.ProjectPath, Cleanup: utils.IsBlank(options.ProjectPath)})
		if projectFileErr != nil {
			return nil, projectFileErr
		}
@@ -275,6 +295,11 @@ func (r *Runner) RunEnumeration() error {
	r.options.ExcludeTags = append(r.options.ExcludeTags, ignoreFile.Tags...)
	r.options.ExcludedTemplates = append(r.options.ExcludedTemplates, ignoreFile.Files...)

+	var cache *hosterrorscache.Cache
+	if r.options.HostMaxErrors > 0 {
+		cache = hosterrorscache.New(r.options.HostMaxErrors, hosterrorscache.DefaultMaxHostsCount).SetVerbose(r.options.Verbose)
+	}
+	r.hostErrors = cache
	executerOpts := protocols.ExecuterOptions{
		Output:      r.output,
		Options:     r.options,
@@ -285,7 +310,16 @@ func (r *Runner) RunEnumeration() error {
		Interactsh:  r.interactsh,
		ProjectFile: r.projectFile,
		Browser:     r.browser,
+		HostErrorsCache: cache,
	}
+
+	workflowLoader, err := parsers.NewLoader(&executerOpts)
+	if err != nil {
+		return errors.Wrap(err, "Could not create loader.")
+	}
+
+	executerOpts.WorkflowLoader = workflowLoader
+
	loaderConfig := loader.Config{
		Templates:          r.options.Templates,
		Workflows:          r.options.Workflows,
@@ -294,7 +328,7 @@ func (r *Runner) RunEnumeration() error {
		ExcludeTags:        r.options.ExcludeTags,
		IncludeTemplates:   r.options.IncludeTemplates,
		Authors:            r.options.Author,
-		Severities:         r.options.Severity,
+		Severities:         r.options.Severities,
		IncludeTags:        r.options.IncludeTags,
		TemplatesDirectory: r.options.TemplatesDirectory,
		Catalog:            r.catalog,
@@ -331,7 +365,7 @@ func (r *Runner) RunEnumeration() error {

	gologger.Info().Msgf("Using Nuclei Engine %s%s", config.Version, messageStr)

-	if r.templatesConfig != nil && r.templatesConfig.NucleiTemplatesLatestVersion != "" {
+	if r.templatesConfig != nil && r.templatesConfig.NucleiTemplatesLatestVersion != "" { // TODO extract duplicated logic
		builder.WriteString(" (")

		if r.templatesConfig.CurrentVersion == r.templatesConfig.NucleiTemplatesLatestVersion {
@@ -362,7 +396,7 @@ func (r *Runner) RunEnumeration() error {
	var unclusteredRequests int64
	for _, template := range store.Templates() {
		// workflows will dynamically adjust the totals while running, as
-		// it can't be know in advance which requests will be called
+		// it can't be known in advance which requests will be called
		if len(template.Workflows) > 0 {
			continue
		}
@@ -396,6 +430,7 @@ func (r *Runner) RunEnumeration() error {
		Browser:     r.browser,
		ProjectFile: r.projectFile,
		Interactsh:  r.interactsh,
+		HostErrorsCache: cache,
	}
	clusterID := fmt.Sprintf("cluster-%s", xid.New().String())

@@ -430,6 +465,11 @@ func (r *Runner) RunEnumeration() error {
		return errors.New("no templates were found")
	}

+	/*
+		TODO does it make sense to run the logic below if there are no targets specified?
+		Can we safely assume the user is just experimenting with the template/workflow filters before running them?
+	*/
+
	results := &atomic.Bool{}
	wgtemplates := sizedwaitgroup.New(r.options.TemplateThreads)

@@ -8,10 +8,12 @@ import (
	"strings"

	"github.com/karrick/godirwalk"
+	"gopkg.in/yaml.v2"
+
	"github.com/projectdiscovery/gologger"
+	"github.com/projectdiscovery/nuclei/v2/internal/severity"
	"github.com/projectdiscovery/nuclei/v2/pkg/templates"
	"github.com/projectdiscovery/nuclei/v2/pkg/types"
-	"gopkg.in/yaml.v2"
)

// parseTemplateFile returns the parsed template file
@@ -35,16 +37,13 @@ func (r *Runner) parseTemplateFile(file string) (*templates.Template, error) {
	return template, nil
}

-func (r *Runner) templateLogMsg(id, name, author, severity string) string {
+func (r *Runner) templateLogMsg(id, name, author string, templateSeverity severity.Severity) string {
	// Display the message for the template
-	message := fmt.Sprintf("[%s] %s (%s)",
+	return fmt.Sprintf("[%s] %s (%s) [%s]",
		r.colorizer.BrightBlue(id).String(),
		r.colorizer.Bold(name).String(),
-		r.colorizer.BrightYellow(appendAtSignToAuthors(author)).String())
-	if severity != "" {
-		message += " [" + r.severityColors.Data[severity] + "]"
-	}
-	return message
+		r.colorizer.BrightYellow(appendAtSignToAuthors(author)).String(),
+		r.addColor(templateSeverity))
}

// appendAtSignToAuthors appends @ before each author and returns final string
@@ -75,7 +74,10 @@ func (r *Runner) logAvailableTemplate(tplPath string) {
	if err != nil {
		gologger.Error().Msgf("Could not parse file '%s': %s\n", tplPath, err)
	} else {
-		gologger.Info().Msgf("%s\n", r.templateLogMsg(t.ID, types.ToString(t.Info["name"]), types.ToString(t.Info["author"]), types.ToString(t.Info["severity"])))
+		gologger.Print().Msgf("%s\n", r.templateLogMsg(t.ID,
+			types.ToString(t.Info.Name),
+			types.ToString(t.Info.Authors),
+			t.Info.SeverityHolder.Severity))
	}
}

v2/internal/severity/misc.go (new file, 54 lines)
@@ -0,0 +1,54 @@
+package severity
+
+import (
+	"fmt"
+	"strings"
+)
+
+type Severities []Severity
+
+func (severities Severities) String() string {
+	return strings.Join(severities.ToStringArray(), ", ")
+}
+
+func (severities *Severities) Set(value string) error {
+	inputSeverities := toStringSlice(value)
+
+	for _, inputSeverity := range inputSeverities {
+		if err := setSeverity(severities, inputSeverity); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func setSeverity(severities *Severities, value string) error {
+	computedSeverity, err := toSeverity(value)
+	if err != nil {
+		return fmt.Errorf("'%s' is not a valid severity", value)
+	}
+
+	// TODO change the Severities type to map[Severity]interface{}, where the values are struct{}{}, to "simulates" a "set" data structure
+	*severities = append(*severities, computedSeverity)
+	return nil
+}
+
+func (severities *Severities) ToStringArray() []string {
+	var result []string
+	for _, severity := range *severities {
+		result = append(result, severity.String())
+	}
+	return result
+}
+
+func toStringSlice(value string) []string {
+	var result []string
+	if strings.Contains(value, ",") {
+		slices := strings.Split(value, ",")
+		result = append(result, slices...)
+	} else {
+		result = []string{value}
+	}
+	return result
+}
v2/internal/severity/severity.go (new file, 78 lines)
@@ -0,0 +1,78 @@
+package severity
+
+import (
+	"encoding/json"
+	"strings"
+
+	"github.com/pkg/errors"
+)
+
+type Severity int
+
+const (
+	Info Severity = iota
+	Low
+	Medium
+	High
+	Critical
+	limit
+)
+
+var severityMappings = map[Severity]string{
+	Info:     "info",
+	Low:      "low",
+	Medium:   "medium",
+	High:     "high",
+	Critical: "critical",
+}
+
+func toSeverity(valueToMap string) (Severity, error) {
+	normalizedValue := normalizeValue(valueToMap)
+	for key, currentValue := range severityMappings {
+		if normalizedValue == currentValue {
+			return key, nil
+		}
+	}
+	return -1, errors.New("Invalid severity: " + valueToMap)
+}
+
+func GetSupportedSeverities() Severities {
+	var result []Severity
+	for index := Severity(0); index < limit; index++ {
+		result = append(result, index)
+	}
+	return result
+}
+
+func normalizeValue(value string) string {
+	return strings.TrimSpace(strings.ToLower(value))
+}
+
+func (severity Severity) String() string {
+	return severityMappings[severity]
+}
+
+//nolint:exported,revive //prefer to be explicit about the name, and make it refactor-safe
+//goland:noinspection GoNameStartsWithPackageName
+type SeverityHolder struct {
+	Severity Severity
+}
+
+func (severityHolder *SeverityHolder) UnmarshalYAML(unmarshal func(interface{}) error) error {
+	var marshalledSeverity string
+	if err := unmarshal(&marshalledSeverity); err != nil {
+		return err
+	}
+
+	computedSeverity, err := toSeverity(marshalledSeverity)
+	if err != nil {
+		return err
+	}
+
+	severityHolder.Severity = computedSeverity
+	return nil
+}
+
+func (severityHolder *SeverityHolder) MarshalJSON() ([]byte, error) {
+	return json.Marshal(severityHolder.Severity.String())
+}
v2/internal/severity/severity_test.go (new file, 52 lines)
@@ -0,0 +1,52 @@
+package severity
+
+import (
+	"testing"
+
+	"gopkg.in/yaml.v2"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestYamlUnmarshal(t *testing.T) {
+	testUnmarshal(t, yaml.Unmarshal, func(value string) string { return value })
+}
+
+func TestYamlUnmarshalFail(t *testing.T) {
+	testUnmarshalFail(t, yaml.Unmarshal, createYAML)
+}
+
+func testUnmarshal(t *testing.T, unmarshaller func(data []byte, v interface{}) error, payloadCreator func(value string) string) {
+	payloads := [...]string{
+		payloadCreator("Info"),
+		payloadCreator("info"),
+		payloadCreator("inFo "),
+		payloadCreator("infO "),
+		payloadCreator(" INFO "),
+	}
+
+	for _, payload := range payloads { // nolint:scopelint // false-positive
+		t.Run(payload, func(t *testing.T) {
+			result := unmarshal(payload, unmarshaller)
+			assert.Equal(t, result.Severity, Info)
+			assert.Equal(t, result.Severity.String(), "info")
+		})
+	}
+}
+
+func testUnmarshalFail(t *testing.T, unmarshaller func(data []byte, v interface{}) error, payloadCreator func(value string) string) {
+	assert.Panics(t, func() { unmarshal(payloadCreator("invalid"), unmarshaller) })
+}
+
+func unmarshal(value string, unmarshaller func(data []byte, v interface{}) error) SeverityHolder {
+	severityStruct := SeverityHolder{}
+	var err = unmarshaller([]byte(value), &severityStruct)
+	if err != nil {
+		panic(err)
+	}
+	return severityStruct
+}
+
+func createYAML(value string) string {
+	return "severity: " + value + "\n"
+}
@@ -3,7 +3,9 @@ package testutils
import (
	"github.com/logrusorgru/aurora"
	"github.com/projectdiscovery/gologger/levels"
+	"github.com/projectdiscovery/nuclei/v2/internal/severity"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog"
+	"github.com/projectdiscovery/nuclei/v2/pkg/model"
	"github.com/projectdiscovery/nuclei/v2/pkg/output"
	"github.com/projectdiscovery/nuclei/v2/pkg/progress"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
@@ -44,9 +46,9 @@ var DefaultOptions = &types.Options{
	Retries:         1,
	RateLimit:       150,
	ProjectPath:     "",
-	Severity:        []string{},
-	Target:          "",
-	Targets:         "",
+	Severities:      severity.Severities{},
+	Targets:         []string{},
+	TargetsFilePath: "",
	Output:          "",
	ProxyURL:        "",
	ProxySocksURL:   "",
@@ -95,7 +97,7 @@ func (m *MockOutputWriter) Request(templateID, url, requestType string, err erro
// TemplateInfo contains info for a mock executed template.
type TemplateInfo struct {
	ID   string
-	Info map[string]interface{}
+	Info model.Info
	Path string
}

@@ -28,7 +28,7 @@ type Config struct {
const nucleiConfigFilename = ".templates-config.json"

// Version is the current version of nuclei
-const Version = `2.4.3-dev`
+const Version = `2.4.4-dev`

func getConfigDetails() (string, error) {
	homeDir, err := os.UserHomeDir()
@@ -88,7 +88,7 @@ func (c *Catalog) convertPathToAbsolute(t string) (string, error) {
		if err != nil {
			return "", err
		}
-		return path.Join(absPath, file), nil
+		return path.Join(absPath, file), nil // TODO this might rather be filepath.Join to make it OS agnostic. Search for other occurrences
	}
	return c.ResolvePath(t, "")
}

@@ -3,18 +3,20 @@ package filter
import (
	"errors"
	"strings"
+
+	"github.com/projectdiscovery/nuclei/v2/internal/severity"
)

// TagFilter is used to filter nuclei templates for tag based execution
type TagFilter struct {
	allowedTags map[string]struct{}
-	severities  map[string]struct{}
+	severities  map[severity.Severity]struct{}
	authors     map[string]struct{}
	block       map[string]struct{}
	matchAllows map[string]struct{}
}

-// ErrExcluded is returned for execluded templates
+// ErrExcluded is returned for excluded templates
var ErrExcluded = errors.New("the template was excluded")

// Match takes a tag and whether the template was matched from user
@@ -25,92 +27,102 @@ var ErrExcluded = errors.New("the template was excluded")
// matchAllows section.
//
// It returns true if the tag is specified, or false.
-func (t *TagFilter) Match(tag, author, severity string) (bool, error) {
-	matchedAny := false
-	if len(t.allowedTags) > 0 {
-		_, ok := t.allowedTags[tag]
-		if ok {
-			matchedAny = true
-		}
-	}
-	_, ok := t.block[tag]
-	if ok {
-		if _, allowOk := t.matchAllows[tag]; allowOk {
-			return true, nil
-		}
-		return false, ErrExcluded
-	}
-	if len(t.authors) > 0 {
-		_, ok = t.authors[author]
-		if !ok {
-			return false, nil
-		}
-		matchedAny = true
-	}
-	if len(t.severities) > 0 {
-		_, ok = t.severities[severity]
-		if !ok {
-			return false, nil
-		}
-		matchedAny = true
-	}
-	if len(t.allowedTags) == 0 && len(t.authors) == 0 && len(t.severities) == 0 {
-		return true, nil
-	}
-	return matchedAny, nil
-}
-
-// MatchWithAllowedTags takes an addition list of allowed tags
-// and returns true if the match was successful.
-func (t *TagFilter) MatchWithAllowedTags(allowed []string, tag, author, severity string) (bool, error) {
-	matchedAny := false
-	allowedMap := make(map[string]struct{})
-	for _, tag := range allowed {
-		for _, val := range splitCommaTrim(tag) {
-			if _, ok := allowedMap[val]; !ok {
-				allowedMap[val] = struct{}{}
-			}
-		}
-	}
-	if len(allowedMap) > 0 {
-		_, ok := allowedMap[tag]
-		if ok {
-			matchedAny = true
-		}
-	}
-	_, ok := t.block[tag]
-	if ok && !matchedAny {
-		if _, allowOk := t.matchAllows[tag]; allowOk {
-			return true, nil
-		}
-		return false, ErrExcluded
-	}
-	if len(t.authors) > 0 {
-		_, ok = t.authors[author]
-		if !ok {
-			return false, nil
-		}
-		matchedAny = true
-	}
-	if len(t.severities) > 0 {
-		_, ok = t.severities[severity]
-		if !ok {
-			return false, nil
-		}
-		matchedAny = true
-	}
-	if len(allowedMap) == 0 && len(t.authors) == 0 && len(t.severities) == 0 {
-		return true, nil
-	}
-	return matchedAny, nil
-}
+func (tagFilter *TagFilter) Match(templateTags, templateAuthors []string, templateSeverity severity.Severity) (bool, error) {
+	for _, templateTag := range templateTags {
+		_, blocked := tagFilter.block[templateTag]
+		_, allowed := tagFilter.matchAllows[templateTag]
+
+		if blocked && !allowed { // the whitelist has precedence over the blacklist
+			return false, ErrExcluded
+		}
+	}
+
+	if !isTagMatch(templateTags, tagFilter) {
+		return false, nil
+	}
+
+	if !isAuthorMatch(templateAuthors, tagFilter) {
+		return false, nil
+	}
+
+	if len(tagFilter.severities) > 0 {
+		if _, ok := tagFilter.severities[templateSeverity]; !ok {
+			return false, nil
+		}
+	}
+
+	return true, nil
+}
+
+func isAuthorMatch(templateAuthors []string, tagFilter *TagFilter) bool {
+	if len(tagFilter.authors) == 0 {
+		return true
+	}
+
+	for _, templateAuthor := range templateAuthors {
+		if _, ok := tagFilter.authors[templateAuthor]; ok {
+			return true
+		}
+	}
+
+	return false
+}
+
+func isTagMatch(templateTags []string, tagFilter *TagFilter) bool {
+	if len(tagFilter.allowedTags) == 0 {
+		return true
+	}
+
+	for _, templateTag := range templateTags {
+		if _, ok := tagFilter.allowedTags[templateTag]; ok {
+			return true
+		}
+	}
+
+	return false
+}
+
+// MatchWithWorkflowTags takes an addition list of allowed tags and returns true if the match was successful.
+func (tagFilter *TagFilter) MatchWithWorkflowTags(templateTags, templateAuthors []string, templateSeverity severity.Severity, workflowTags []string) (bool, error) {
+	for _, templateTag := range templateTags {
+		_, blocked := tagFilter.block[templateTag]
+		_, allowed := tagFilter.matchAllows[templateTag]
+
+		if blocked && !allowed { // the whitelist has precedence over the blacklist
+			return false, ErrExcluded
+		}
+	}
+
+	templatesTagMap := toMap(templateTags)
+	for _, workflowTag := range workflowTags {
+		if _, ok := templatesTagMap[workflowTag]; !ok {
+			return false, nil
+		}
+	}
+
+	if len(tagFilter.authors) > 0 {
+		templateAuthorTagMap := toMap(templateAuthors)
+		for requiredAuthor := range tagFilter.authors {
+			if _, ok := templateAuthorTagMap[requiredAuthor]; !ok {
+				return false, nil
+			}
+		}
+	}
+
+	if len(tagFilter.severities) > 0 {
+		if _, ok := tagFilter.severities[templateSeverity]; !ok {
+			return false, nil
+		}
+	}
+
+	return true, nil
+}

type Config struct {
	Tags        []string
	ExcludeTags []string
	Authors     []string
-	Severities  []string
+	Severities  severity.Severities
	IncludeTags []string
}

@@ -121,7 +133,7 @@ func New(config *Config) *TagFilter {
	filter := &TagFilter{
		allowedTags: make(map[string]struct{}),
		authors:     make(map[string]struct{}),
-		severities:  make(map[string]struct{}),
+		severities:  make(map[severity.Severity]struct{}),
		block:       make(map[string]struct{}),
		matchAllows: make(map[string]struct{}),
	}
@@ -133,10 +145,8 @@ func New(config *Config) *TagFilter {
		}
	}
	for _, tag := range config.Severities {
-		for _, val := range splitCommaTrim(tag) {
-			if _, ok := filter.severities[val]; !ok {
-				filter.severities[val] = struct{}{}
-			}
+		if _, ok := filter.severities[tag]; !ok {
+			filter.severities[tag] = struct{}{}
		}
	}
	for _, tag := range config.Authors {
@@ -165,6 +175,11 @@ func New(config *Config) *TagFilter {
	return filter
}

+/*
+	TODO similar logic is used over and over again. It should be extracted and reused
+	Changing []string and string data types that hold string slices to StringSlice would be the preferred solution,
+	which implicitly does the normalization before any other calls starting to use it.
+*/
func splitCommaTrim(value string) []string {
	if !strings.Contains(value, ",") {
		return []string{strings.ToLower(value)}
@@ -176,3 +191,13 @@ func splitCommaTrim(value string) []string {
		}
	}
	return final
}
+
+func toMap(slice []string) map[string]struct{} {
+	result := make(map[string]struct{})
+	for _, value := range slice {
+		if _, ok := result[value]; !ok {
+			result[value] = struct{}{}
+		}
+	}
+	return result
+}

@@ -4,6 +4,8 @@ import (
	"testing"

	"github.com/stretchr/testify/require"
+
+	"github.com/projectdiscovery/nuclei/v2/internal/severity"
)

func TestTagBasedFilter(t *testing.T) {
@@ -13,11 +15,11 @@ func TestTagBasedFilter(t *testing.T) {
	filter := New(config)

	t.Run("true", func(t *testing.T) {
-		matched, _ := filter.Match("jira", "pdteam", "low")
+		matched, _ := filter.Match([]string{"jira"}, []string{"pdteam"}, severity.Low)
		require.True(t, matched, "could not get correct match")
	})
	t.Run("false", func(t *testing.T) {
-		matched, _ := filter.Match("consul", "pdteam", "low")
+		matched, _ := filter.Match([]string{"consul"}, []string{"pdteam"}, severity.Low)
		require.False(t, matched, "could not get correct match")
	})
	t.Run("not-match-excludes", func(t *testing.T) {
@@ -25,7 +27,7 @@ func TestTagBasedFilter(t *testing.T) {
			ExcludeTags: []string{"dos"},
		}
		filter := New(config)
-		matched, err := filter.Match("dos", "pdteam", "low")
+		matched, err := filter.Match([]string{"dos"}, []string{"pdteam"}, severity.Low)
		require.False(t, matched, "could not get correct match")
		require.Equal(t, ErrExcluded, err, "could not get correct error")
	})
@@ -36,7 +38,17 @@ func TestTagBasedFilter(t *testing.T) {
			IncludeTags: []string{"fuzz"},
		}
		filter := New(config)
-		matched, err := filter.Match("fuzz", "pdteam", "low")
+		matched, err := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low)
+		require.Nil(t, err, "could not get match")
+		require.True(t, matched, "could not get correct match")
+	})
+	t.Run("match-includes", func(t *testing.T) {
+		config := &Config{
+			Tags:        []string{"fuzz"},
+			ExcludeTags: []string{"fuzz"},
+		}
+		filter := New(config)
+		matched, err := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low)
		require.Nil(t, err, "could not get match")
		require.True(t, matched, "could not get correct match")
	})
@@ -45,15 +57,15 @@ func TestTagBasedFilter(t *testing.T) {
			Authors: []string{"pdteam"},
		}
		filter := New(config)
-		matched, _ := filter.Match("fuzz", "pdteam", "low")
+		matched, _ := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low)
		require.True(t, matched, "could not get correct match")
	})
	t.Run("match-severity", func(t *testing.T) {
		config := &Config{
-			Severities: []string{"high"},
+			Severities: severity.Severities{severity.High},
		}
		filter := New(config)
-		matched, _ := filter.Match("fuzz", "pdteam", "high")
+		matched, _ := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.High)
		require.True(t, matched, "could not get correct match")
	})
	t.Run("match-exclude-with-tags", func(t *testing.T) {
@@ -62,23 +74,23 @@ func TestTagBasedFilter(t *testing.T) {
			ExcludeTags: []string{"another"},
		}
		filter := New(config)
-		matched, _ := filter.Match("another", "pdteam", "high")
+		matched, _ := filter.Match([]string{"another"}, []string{"pdteam"}, severity.High)
		require.False(t, matched, "could not get correct match")
	})
	t.Run("match-conditions", func(t *testing.T) {
		config := &Config{
			Authors:    []string{"pdteam"},
			Tags:       []string{"jira"},
-			Severities: []string{"high"},
+			Severities: severity.Severities{severity.High},
		}
		filter := New(config)
-		matched, _ := filter.Match("jira", "pdteam", "high")
+		matched, _ := filter.Match([]string{"jira"}, []string{"pdteam"}, severity.High)
		require.True(t, matched, "could not get correct match")
-		matched, _ = filter.Match("jira", "pdteam", "low")
+		matched, _ = filter.Match([]string{"jira"}, []string{"pdteam"}, severity.Low)
		require.False(t, matched, "could not get correct match")
-		matched, _ = filter.Match("jira", "random", "low")
+		matched, _ = filter.Match([]string{"jira"}, []string{"random"}, severity.Low)
		require.False(t, matched, "could not get correct match")
-		matched, _ = filter.Match("consul", "random", "low")
+		matched, _ = filter.Match([]string{"consul"}, []string{"random"}, severity.Low)
		require.False(t, matched, "could not get correct match")
	})
}

@@ -1,100 +0,0 @@
-package load
-
-import (
-	"bytes"
-	"errors"
-	"io/ioutil"
-	"os"
-	"strings"
-
-	"github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/filter"
-	"github.com/projectdiscovery/nuclei/v2/pkg/types"
-	"gopkg.in/yaml.v2"
-)
-
-// Load loads a template by parsing metadata and running
-// all tag and path based filters on the template.
-func Load(templatePath string, workflow bool, customTags []string, tagFilter *filter.TagFilter) (bool, error) {
-	f, err := os.Open(templatePath)
-	if err != nil {
-		return false, err
-	}
-	defer f.Close()
-
-	data, err := ioutil.ReadAll(f)
-	if err != nil {
-		return false, err
-	}
-
-	template := make(map[string]interface{})
-	err = yaml.NewDecoder(bytes.NewReader(data)).Decode(template)
-	if err != nil {
-		return false, err
-	}
-
-	info, ok := template["info"]
-	if !ok {
-		return false, errors.New("no template info field provided")
-	}
-	infoMap, ok := info.(map[interface{}]interface{})
-	if !ok {
-		return false, errors.New("could not get info")
-	}
-
-	if _, nameOk := infoMap["name"]; !nameOk {
-		return false, errors.New("no template name field provided")
-	}
-	author, ok := infoMap["author"]
-	if !ok {
-		return false, errors.New("no template author field provided")
-	}
-	severity, ok := infoMap["severity"]
-	if !ok {
-		severity = ""
-	}
-
-	templateTags, ok := infoMap["tags"]
-	if !ok {
-		templateTags = ""
-	}
-	tagStr := types.ToString(templateTags)
-
-	tags := strings.Split(tagStr, ",")
-	severityStr := strings.ToLower(types.ToString(severity))
-	authors := strings.Split(types.ToString(author), ",")
-
-	matched := false
-
-	_, workflowsFound := template["workflows"]
-	if !workflowsFound && workflow {
-		return false, nil
-	}
-	if workflow {
-		return true, nil
-	}
-	for _, tag := range tags {
-		for _, author := range authors {
-			var match bool
-			var err error
-
-			if len(customTags) == 0 {
-				match, err = tagFilter.Match(strings.ToLower(strings.TrimSpace(tag)), strings.ToLower(strings.TrimSpace(author)), severityStr)
-			} else {
-				match, err = tagFilter.MatchWithAllowedTags(customTags, strings.ToLower(strings.TrimSpace(tag)), strings.ToLower(strings.TrimSpace(author)), severityStr)
-			}
-			if err == filter.ErrExcluded {
-				return false, filter.ErrExcluded
-			}
-			if !matched && match {
-				matched = true
-			}
-		}
-	}
-	if !matched {
-		return false, nil
-	}
-	if workflowsFound && !workflow {
-		return false, nil
-	}
-	return true, nil
-}
@@ -4,9 +4,10 @@ import (
	"strings"

	"github.com/projectdiscovery/gologger"
+	"github.com/projectdiscovery/nuclei/v2/internal/severity"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/filter"
-	"github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/load"
+	"github.com/projectdiscovery/nuclei/v2/pkg/parsers"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v2/pkg/templates"
)
@@ -21,7 +22,7 @@ type Config struct {
	Tags        []string
	ExcludeTags []string
	Authors     []string
-	Severities  []string
+	Severities  severity.Severities
	IncludeTags []string

	Catalog *catalog.Catalog
@@ -176,7 +177,7 @@ func (s *Store) LoadTemplates(templatesList []string) []*templates.Template {

// LoadWorkflows takes a list of workflows and returns paths for them
func (s *Store) LoadWorkflows(workflowsList []string) []*templates.Template {
-	includedWorkflows := s.config.Catalog.GetTemplatesPath(s.config.Workflows)
+	includedWorkflows := s.config.Catalog.GetTemplatesPath(workflowsList)
	workflowsMap := s.pathFilter.Match(includedWorkflows)

	loadedWorkflows := make([]*templates.Template, 0, len(workflowsMap))
@@ -197,6 +198,6 @@ func (s *Store) LoadWorkflows(workflowsList []string) []*templates.Template {
	return loadedWorkflows
}

-func (s *Store) loadTemplate(templatePath string, workflow bool) (bool, error) {
-	return load.Load(templatePath, workflow, nil, s.tagFilter)
+func (s *Store) loadTemplate(templatePath string, isWorkflow bool) (bool, error) {
+	return parsers.Load(templatePath, isWorkflow, nil, s.tagFilter) // TODO consider separating template and workflow loading logic
}

v2/pkg/model/model.go (new file, 90 lines)
@@ -0,0 +1,90 @@
+package model
+
+import (
+	"encoding/json"
+	"fmt"
+	"gopkg.in/yaml.v2"
+	"strings"
+
+	"github.com/projectdiscovery/nuclei/v2/internal/severity"
+	"github.com/projectdiscovery/nuclei/v2/pkg/utils"
+)
+
+type Info struct {
+	Name           string                  `json:"name" yaml:"name"`
+	Authors        StringSlice             `json:"author" yaml:"author"`
+	Tags           StringSlice             `json:"tags" yaml:"tags"`
+	Description    string                  `json:"description" yaml:"description"`
+	Reference      StringSlice             `json:"reference" yaml:"reference"`
+	SeverityHolder severity.SeverityHolder `json:"severity" yaml:"severity"`
+}
+
+// StringSlice represents a single (in-lined) or multiple string value(s).
+// The unmarshaller does not automatically convert in-lined strings to []string, hence the interface{} type is required.
+type StringSlice struct {
+	Value interface{}
+}
+
+func (stringSlice *StringSlice) IsEmpty() bool {
+	return len(stringSlice.ToSlice()) == 0
+}
+
+func (stringSlice StringSlice) ToSlice() []string {
+	switch value := stringSlice.Value.(type) {
+	case string:
+		return []string{value}
+	case []string:
+		return value
+	case nil:
+		return []string{}
+	default:
+		panic(fmt.Sprintf("Unexpected StringSlice type: '%T'", value))
+	}
+}
+
+func (stringSlice *StringSlice) UnmarshalYAML(unmarshal func(interface{}) error) error {
+	marshalledSlice, err := marshalStringToSlice(unmarshal)
+	if err != nil {
+		return err
+	}
+
+	result := make([]string, len(marshalledSlice))
+	//nolint:gosimple,nolintlint //cannot be replaced with result = append(result, slices...) because the values are being normalized
+	for _, value := range marshalledSlice {
+		result = append(result, strings.ToLower(strings.TrimSpace(value))) // TODO do we need to introduce RawStringSlice and/or NormalizedStringSlices?
+	}
+	stringSlice.Value = result
+	return nil
+}
+
+func marshalStringToSlice(unmarshal func(interface{}) error) ([]string, error) {
+	var marshalledValueAsString string
+	var marshalledValuesAsSlice []string
+
+	sliceMarshalError := unmarshal(&marshalledValuesAsSlice)
+	if sliceMarshalError != nil {
+		stringMarshalError := unmarshal(&marshalledValueAsString)
+		if stringMarshalError != nil {
+			return nil, stringMarshalError
+		}
+	}
+
+	var result []string
+	if len(marshalledValuesAsSlice) > 0 {
+		result = marshalledValuesAsSlice
+	} else if utils.IsNotBlank(marshalledValueAsString) {
+		result = strings.Split(marshalledValueAsString, ",")
+	} else {
+		result = []string{}
+	}
+
+	return result, nil
+}
+
+func (stringSlice StringSlice) MarshalYAML() (interface{}, error) {
+	return yaml.Marshal(stringSlice.Value)
+}
+
+func (stringSlice StringSlice) MarshalJSON() ([]byte, error) {
+	return json.Marshal(stringSlice.Value)
+}
v2/pkg/model/model_test.go (new file, 26 lines)
@@ -0,0 +1,26 @@
+package model
+
+import (
+	"encoding/json"
+	"testing"
+
+	"github.com/projectdiscovery/nuclei/v2/internal/severity"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestInfoJsonMarshal(t *testing.T) {
+	info := Info{
+		Name:           "Test Template Name",
+		Authors:        StringSlice{[]string{"forgedhallpass", "ice3man"}},
+		Description:    "Test description",
+		SeverityHolder: severity.SeverityHolder{Severity: severity.High},
+		Tags:           StringSlice{[]string{"cve", "misc"}},
+		Reference:      StringSlice{"reference1"},
+	}
+
+	result, err := json.Marshal(&info)
+	assert.Nil(t, err)
+
+	expected := `{"name":"Test Template Name","author":["forgedhallpass","ice3man"],"tags":["cve","misc"],"description":"Test description","reference":"reference1","severity":"high"}`
+	assert.Equal(t, expected, string(result))
+}
v2/pkg/model/worflow_loader.go (new file, 10 lines)
@@ -0,0 +1,10 @@
+package model
+
+// WorkflowLoader is a loader interface required for workflow initialization.
+type WorkflowLoader interface {
+	// ListTags lists a list of templates for tags from the provided templates directory
+	ListTags(workflowTags []string) []string
+
+	// ListTemplates takes a list of templates and returns paths for them
+	ListTemplates(templatesList []string, noValidate bool) []string
+}
@@ -52,7 +52,7 @@ var functions = map[string]govaluate.ExpressionFunction{
		return compiled.ReplaceAllString(types.ToString(args[0]), types.ToString(args[2])), nil
	},
	"trim": func(args ...interface{}) (interface{}, error) {
-		return strings.Trim(types.ToString(args[0]), types.ToString(args[2])), nil
+		return strings.Trim(types.ToString(args[0]), types.ToString(args[1])), nil
	},
	"trimleft": func(args ...interface{}) (interface{}, error) {
		return strings.TrimLeft(types.ToString(args[0]), types.ToString(args[1])), nil
@@ -162,7 +162,7 @@ var functions = map[string]govaluate.ExpressionFunction{
		base := letters + numbers

		if len(args) >= 1 {
-			l = args[0].(int)
+			l = int(args[0].(float64))
		}
		if len(args) >= withCutSetArgsSize {
			bad = types.ToString(args[1])
@@ -179,7 +179,7 @@ var functions = map[string]govaluate.ExpressionFunction{
		chars := letters + numbers

		if len(args) >= 1 {
-			l = args[0].(int)
+			l = int(args[0].(float64))
		}
		if len(args) >= withCutSetArgsSize {
			bad = types.ToString(args[1])
@@ -193,7 +193,7 @@ var functions = map[string]govaluate.ExpressionFunction{
		chars := letters

		if len(args) >= 1 {
-			l = args[0].(int)
+			l = int(args[0].(float64))
		}
		if len(args) >= withCutSetArgsSize {
			bad = types.ToString(args[1])
@@ -207,7 +207,7 @@ var functions = map[string]govaluate.ExpressionFunction{
		chars := numbers

		if len(args) >= 1 {
-			l = args[0].(int)
+			l = int(args[0].(float64))
		}
		if len(args) >= withCutSetArgsSize {
			bad = types.ToString(args[1])
@@ -220,10 +220,10 @@ var functions = map[string]govaluate.ExpressionFunction{
		max := math.MaxInt32

		if len(args) >= 1 {
-			min = args[0].(int)
+			min = int(args[0].(float64))
		}
		if len(args) >= withMaxRandArgsSize {
-			max = args[1].(int)
+			max = int(args[1].(float64))
		}
		return rand.Intn(max-min) + min, nil
	},

@@ -1,8 +1,12 @@
package extractors

import (
+	"strings"
+
	"encoding/json"

+	"github.com/antchfx/htmlquery"
+
	"github.com/projectdiscovery/nuclei/v2/pkg/types"
)

@@ -45,6 +49,35 @@ func (e *Extractor) ExtractKval(data map[string]interface{}) map[string]struct{}
	return results
}

+// ExtractHTML extracts items from text using XPath selectors
+func (e *Extractor) ExtractHTML(corpus string) map[string]struct{} {
+	results := make(map[string]struct{})
+
+	doc, err := htmlquery.Parse(strings.NewReader(corpus))
+	if err != nil {
+		return results
+	}
+	for _, k := range e.XPath {
+		nodes, err := htmlquery.QueryAll(doc, k)
+		if err != nil {
+			continue
+		}
+		for _, node := range nodes {
+			var value string
+
+			if e.Attribute != "" {
+				value = htmlquery.SelectAttr(node, e.Attribute)
+			} else {
+				value = htmlquery.InnerText(node)
+			}
+			if _, ok := results[value]; !ok {
+				results[value] = struct{}{}
+			}
+		}
+	}
+	return results
+}
+
// ExtractJSON extracts text from a corpus using JQ queries and returns it
func (e *Extractor) ExtractJSON(corpus string) map[string]struct{} {
	results := make(map[string]struct{})

@@ -79,6 +79,22 @@ type Extractor struct {
	// - value: >
	//      []string{".batters | .batter | .[] | .id"}
	JSON []string `yaml:"json,omitempty"`
+	// description: |
+	//   XPath allows using xpath expressions to extract items from html response
+	//
+	// examples:
+	//   - value: >
+	//       []string{"/html/body/div/p[2]/a"}
+	//   - value: >
+	//       []string{".batters | .batter | .[] | .id"}
+	XPath []string `yaml:"xpath,omitempty"`
+	// description: |
+	//   Attribute is an optional attribute to extract from response XPath.
+	//
+	// examples:
+	//   - value: "\"href\""
+	Attribute string `yaml:"attribute,omitempty"`

	// jsonCompiled is the compiled variant
	jsonCompiled []*gojq.Code

@@ -96,6 +112,8 @@ const (
	RegexExtractor ExtractorType = iota + 1
	// KValExtractor extracts responses with key:value
	KValExtractor
+	// XPathExtractor extracts responses with Xpath selectors
+	XPathExtractor
	// JSONExtractor extracts responses with json
	JSONExtractor
)
@@ -104,6 +122,7 @@ const (
var ExtractorTypes = map[string]ExtractorType{
	"regex": RegexExtractor,
	"kval":  KValExtractor,
+	"xpath": XPathExtractor,
	"json":  JSONExtractor,
}

@@ -2,6 +2,7 @@ package operators

import (
	"github.com/pkg/errors"
+
	"github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
	"github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
)

@@ -31,7 +31,7 @@ func (w *StandardWriter) formatScreen(output *ResultEvent) []byte {
		builder.WriteString("] ")

		builder.WriteString("[")
-		builder.WriteString(w.severityColors.Data[types.ToString(output.Info["severity"])])
+		builder.WriteString(w.severityColors(output.Info.SeverityHolder.Severity))
		builder.WriteString("] ")
	}
	builder.WriteString(output.Matched)

@@ -6,11 +6,15 @@ import (
	"sync"
	"time"

+	"github.com/pkg/errors"
+
	jsoniter "github.com/json-iterator/go"
	"github.com/logrusorgru/aurora"
-	"github.com/pkg/errors"
	"github.com/projectdiscovery/interactsh/pkg/server"
	"github.com/projectdiscovery/nuclei/v2/internal/colorizer"
+	"github.com/projectdiscovery/nuclei/v2/internal/severity"
+	"github.com/projectdiscovery/nuclei/v2/pkg/model"
	"github.com/projectdiscovery/nuclei/v2/pkg/operators"
)

@@ -35,7 +39,7 @@ type StandardWriter struct {
	outputMutex    *sync.Mutex
	traceFile      *fileWriter
	traceMutex     *sync.Mutex
-	severityColors *colorizer.Colorizer
+	severityColors func(severity.Severity) string
}

var decolorizerRegex = regexp.MustCompile(`\x1B\[[0-9;]*[a-zA-Z]`)
@@ -57,7 +61,7 @@ type ResultEvent struct {
	// TemplatePath is the path of template
	TemplatePath string `json:"-"`
	// Info contains information block of the template for the result.
-	Info map[string]interface{} `json:"info,inline"`
+	Info model.Info `json:"info,inline"`
	// MatcherName is the name of the matcher matched if any.
	MatcherName string `json:"matcher_name,omitempty"`
	// ExtractorName is the name of the extractor matched if any.

97 v2/pkg/parsers/parser.go Normal file
@@ -0,0 +1,97 @@
+package parsers
+
+import (
+    "bytes"
+    "fmt"
+    "io/ioutil"
+    "os"
+
+    "gopkg.in/yaml.v2"
+
+    "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/filter"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
+    "github.com/projectdiscovery/nuclei/v2/pkg/templates"
+    "github.com/projectdiscovery/nuclei/v2/pkg/utils"
+)
+
+const mandatoryFieldMissingTemplate = "mandatory '%s' field is missing"
+
+// Load loads a template by parsing metadata and running all tag and path based filters on the template.
+func Load(templatePath string, isWorkflow bool, workflowTags []string, tagFilter *filter.TagFilter) (bool, error) {
+    template, templateParseError := parseTemplate(templatePath)
+    if templateParseError != nil {
+        return false, templateParseError
+    }
+
+    templateInfo := template.Info
+    if validationError := validateMandatoryInfoFields(&templateInfo); validationError != nil {
+        return false, validationError
+    }
+
+    if len(template.Workflows) > 0 {
+        if isWorkflow {
+            return true, nil // if a workflow is declared and this template is a workflow, then load
+        } else { //nolint:indent-error-flow,revive // preferred: readability and extensibility
+            return false, nil // if a workflow is declared and this template is not a workflow then do not load
+        }
+    } else if isWorkflow {
+        return false, nil // if no workflows are declared and this template is a workflow then do not load
+    } else { // if workflows are not declared and the template is not a workflow then parse it
+        return isInfoMetadataMatch(tagFilter, &templateInfo, workflowTags)
+    }
+}
+
+func isInfoMetadataMatch(tagFilter *filter.TagFilter, templateInfo *model.Info, workflowTags []string) (bool, error) {
+    templateTags := templateInfo.Tags.ToSlice()
+    templateAuthors := templateInfo.Authors.ToSlice()
+    templateSeverity := templateInfo.SeverityHolder.Severity
+
+    var match bool
+    var err error
+    if len(workflowTags) == 0 {
+        match, err = tagFilter.Match(templateTags, templateAuthors, templateSeverity)
+    } else {
+        match, err = tagFilter.MatchWithWorkflowTags(templateTags, templateAuthors, templateSeverity, workflowTags)
+    }
+
+    if err == filter.ErrExcluded {
+        return false, filter.ErrExcluded
+    }
+
+    return match, nil
+}
+
+func validateMandatoryInfoFields(info *model.Info) error {
+    if info == nil {
+        return fmt.Errorf(mandatoryFieldMissingTemplate, "info")
+    }
+
+    if utils.IsBlank(info.Name) {
+        return fmt.Errorf(mandatoryFieldMissingTemplate, "name")
+    }
+
+    if info.Authors.IsEmpty() {
+        return fmt.Errorf(mandatoryFieldMissingTemplate, "author")
+    }
+    return nil
+}
+
+func parseTemplate(templatePath string) (*templates.Template, error) {
+    f, err := os.Open(templatePath)
+    if err != nil {
+        return nil, err
+    }
+    defer f.Close()
+
+    data, err := ioutil.ReadAll(f)
+    if err != nil {
+        return nil, err
+    }
+
+    template := &templates.Template{}
+    err = yaml.NewDecoder(bytes.NewReader(data)).Decode(template)
+    if err != nil {
+        return nil, err
+    }
+    return template, nil
+}
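Reviewer note: a minimal usage sketch of the new parsers.Load API. The template path, tag values, and the exact filter.Config field types are assumptions for illustration and are not part of this diff.

package main

import (
    "log"

    "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/filter"
    "github.com/projectdiscovery/nuclei/v2/pkg/parsers"
)

func main() {
    // The filter mirrors the fields used by NewLoader later in this diff; the tag value is invented.
    tagFilter := filter.New(&filter.Config{Tags: []string{"cve"}})

    // Load a single, non-workflow template, validating mandatory info fields and applying the tag filter.
    loaded, err := parsers.Load("templates/example.yaml", false, nil, tagFilter)
    if err != nil {
        log.Printf("could not load template: %s", err)
    } else if loaded {
        log.Println("template passed validation and filters")
    }
}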
@@ -1,21 +1,12 @@
-package compile
+package parsers
 
 import (
     "github.com/projectdiscovery/gologger"
     "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/filter"
-    "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/load"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/protocols"
 )
 
-// WorkflowLoader is a loader interface required for workflow
-// initialization.
-type WorkflowLoader interface {
-    // ListTags lists a list of templates for tags from the provided templates directory
-    ListTags(tags []string) []string
-    // ListTemplates takes a list of templates and returns paths for them
-    ListTemplates(templatesList []string, noValidate bool) []string
-}
-
 type workflowLoader struct {
     pathFilter *filter.PathFilter
     tagFilter  *filter.TagFilter
@@ -23,12 +14,12 @@ type workflowLoader struct {
 }
 
 // NewLoader returns a new workflow loader structure
-func NewLoader(options *protocols.ExecuterOptions) (WorkflowLoader, error) {
+func NewLoader(options *protocols.ExecuterOptions) (model.WorkflowLoader, error) {
     tagFilter := filter.New(&filter.Config{
         Tags:        options.Options.Tags,
         ExcludeTags: options.Options.ExcludeTags,
         Authors:     options.Options.Author,
-        Severities:  options.Options.Severity,
+        Severities:  options.Options.Severities,
         IncludeTags: options.Options.IncludeTags,
     })
     pathFilter := filter.NewPathFilter(&filter.PathFilterConfig{
@@ -39,13 +30,13 @@ func NewLoader(options *protocols.ExecuterOptions) (WorkflowLoader, error) {
 }
 
 // ListTags lists a list of templates for tags from the provided templates directory
-func (w *workflowLoader) ListTags(tags []string) []string {
+func (w *workflowLoader) ListTags(workflowTags []string) []string {
     includedTemplates := w.options.Catalog.GetTemplatesPath([]string{w.options.Options.TemplatesDirectory})
     templatesMap := w.pathFilter.Match(includedTemplates)
 
     loadedTemplates := make([]string, 0, len(templatesMap))
     for k := range templatesMap {
-        loaded, err := load.Load(k, false, tags, w.tagFilter)
+        loaded, err := Load(k, false, workflowTags, w.tagFilter)
         if err != nil {
             gologger.Warning().Msgf("Could not load template %s: %s\n", k, err)
         } else if loaded {
@@ -62,7 +53,7 @@ func (w *workflowLoader) ListTemplates(templatesList []string, noValidate bool)
 
     loadedTemplates := make([]string, 0, len(templatesMap))
     for k := range templatesMap {
-        matched, err := load.Load(k, false, nil, w.tagFilter)
+        matched, err := Load(k, false, nil, w.tagFilter)
         if err != nil {
             gologger.Warning().Msgf("Could not load template %s: %s\n", k, err)
         } else if matched || noValidate {
@@ -2,6 +2,7 @@ package clusterer
 
 import (
     "github.com/projectdiscovery/gologger"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
     "github.com/projectdiscovery/nuclei/v2/pkg/protocols"
@@ -23,7 +24,7 @@ type Executer struct {
 type clusteredOperator struct {
     templateID   string
     templatePath string
-    templateInfo map[string]interface{}
+    templateInfo model.Info
     operator     *operators.Operators
 }
 
@@ -86,6 +87,9 @@ func (e *Executer) Execute(input string) (bool, error) {
             }
         }
     })
+    if err != nil && e.options.HostErrorsCache != nil && e.options.HostErrorsCache.CheckError(err) {
+        e.options.HostErrorsCache.MarkFailed(input)
+    }
     return results, err
 }
 
@@ -105,5 +109,8 @@ func (e *Executer) ExecuteWithResults(input string, callback protocols.OutputEve
             }
         }
     })
+    if err != nil && e.options.HostErrorsCache != nil && e.options.HostErrorsCache.CheckError(err) {
+        e.options.HostErrorsCache.MarkFailed(input)
+    }
     return err
 }
@@ -77,6 +77,11 @@ func (e *Executer) Execute(input string) (bool, error) {
         }
     })
     if err != nil {
+        if e.options.HostErrorsCache != nil {
+            if e.options.HostErrorsCache.CheckError(err) {
+                e.options.HostErrorsCache.MarkFailed(input)
+            }
+        }
         gologger.Warning().Msgf("[%s] Could not execute request for %s: %s\n", e.options.TemplateID, input, err)
     }
 }
@@ -109,6 +114,11 @@ func (e *Executer) ExecuteWithResults(input string, callback protocols.OutputEve
         callback(event)
     })
     if err != nil {
+        if e.options.HostErrorsCache != nil {
+            if e.options.HostErrorsCache.CheckError(err) {
+                e.options.HostErrorsCache.MarkFailed(input)
+            }
+        }
         gologger.Warning().Msgf("[%s] Could not execute request for %s: %s\n", e.options.TemplateID, input, err)
     }
 }
28 v2/pkg/protocols/common/generators/env.go Normal file
@@ -0,0 +1,28 @@
+package generators
+
+import (
+    "os"
+
+    "github.com/projectdiscovery/stringsutil"
+)
+
+var envVars map[string]interface{}
+
+func parseEnvVars() map[string]interface{} {
+    sliceEnvVars := os.Environ()
+    parsedEnvVars := make(map[string]interface{}, len(sliceEnvVars))
+    for _, envVar := range sliceEnvVars {
+        key, val := stringsutil.Before(envVar, "="), stringsutil.After(envVar, "=")
+        parsedEnvVars[key] = val
+    }
+    return parsedEnvVars
+}
+
+// EnvVars returns a map with all environment variables into a map
+func EnvVars() map[string]interface{} {
+    if envVars == nil {
+        envVars = parseEnvVars()
+    }
+
+    return envVars
+}
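Reviewer note: a small sketch of how EnvVars() can be combined with other template values through MergeMaps, mirroring the build.go change later in this diff. The environment variable name and the Hostname value are hypothetical.

package main

import (
    "fmt"

    "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/generators"
)

func main() {
    // With e.g. NUCLEI_TOKEN=abc123 exported in the shell, the parsed environment
    // map exposes it as values["NUCLEI_TOKEN"] = "abc123".
    values := generators.MergeMaps(generators.EnvVars(), map[string]interface{}{
        "Hostname": "example.com",
    })
    fmt.Println(values["NUCLEI_TOKEN"], values["Hostname"])
}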
@@ -49,10 +49,10 @@ func New(payloads map[string]interface{}, payloadType Type, templatePath string)
     if payloadType == PitchFork {
         var totalLength int
         for v := range compiled {
-            if totalLength != 0 && totalLength != len(v) {
+            if totalLength != 0 && totalLength != len(compiled[v]) {
                 return nil, errors.New("pitchfork payloads must be of equal number")
             }
-            totalLength = len(v)
+            totalLength = len(compiled[v])
         }
     }
     return generator, nil
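Reviewer note: the fix above matters because ranging over a map yields its keys, so len(v) measured the payload name string rather than the number of payload values. A tiny sketch; the payload names and values are invented, and []string stands in for the generator's actual value type.

package main

import "fmt"

func main() {
    // compiled maps payload names to their value lists, as in the generator above.
    compiled := map[string][]string{"username": {"a", "b"}, "password": {"x", "y"}}

    for name := range compiled {
        fmt.Println(len(name))           // length of the key string ("username" is 8): the old, buggy comparison
        fmt.Println(len(compiled[name])) // number of payload values (2): what pitchfork mode must compare
    }
}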
@@ -1,6 +1,8 @@
 package generators
 
-import "strings"
+import (
+    "strings"
+)
 
 // MergeMaps merges two maps into a new map
 func MergeMaps(m1, m2 map[string]interface{}) map[string]interface{} {
16 v2/pkg/protocols/common/generators/slice.go Normal file
@@ -0,0 +1,16 @@
+package generators
+
+import "github.com/projectdiscovery/stringsutil"
+
+// SliceToMap converts a slice of strings to map of string splitting each item at sep as "key sep value"
+func SliceToMap(s []string, sep string) map[string]interface{} {
+    m := make(map[string]interface{})
+    for _, sliceItem := range s {
+        key := stringsutil.Before(sliceItem, sep)
+        value := stringsutil.After(sliceItem, sep)
+        if key != "" {
+            m[key] = value
+        }
+    }
+    return m
+}
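Reviewer note: a quick illustration of SliceToMap's splitting behaviour with invented input values.

package main

import (
    "fmt"

    "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/generators"
)

func main() {
    // Each item is split once at the separator into key and value; items with an empty key are skipped.
    vars := generators.SliceToMap([]string{"token=abc", "debug=true"}, "=")
    fmt.Println(vars) // map[debug:true token:abc]
}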
126 v2/pkg/protocols/common/hosterrorscache/hosterrorscache.go Normal file
@@ -0,0 +1,126 @@
+package hosterrorscache
+
+import (
+    "net"
+    "net/url"
+    "regexp"
+    "strings"
+
+    "github.com/bluele/gcache"
+    "github.com/projectdiscovery/gologger"
+)
+
+// Cache is a cache for host based errors. It allows skipping
+// certain hosts based on an error threshold.
+//
+// It uses an LRU cache internally for skipping unresponsive hosts
+// that remain so for a duration.
+type Cache struct {
+    hostMaxErrors int
+    verbose       bool
+    failedTargets gcache.Cache
+}
+
+const DefaultMaxHostsCount = 10000
+
+// New returns a new host max errors cache
+func New(hostMaxErrors, maxHostsCount int) *Cache {
+    gc := gcache.New(maxHostsCount).
+        ARC().
+        Build()
+    return &Cache{failedTargets: gc, hostMaxErrors: hostMaxErrors}
+}
+
+// SetVerbose sets the cache to log at verbose level
+func (c *Cache) SetVerbose(verbose bool) *Cache {
+    c.verbose = verbose
+    return c
+}
+
+// Close closes the host errors cache
+func (c *Cache) Close() {
+    c.failedTargets.Purge()
+}
+
+func (c *Cache) normalizeCacheValue(value string) string {
+    finalValue := value
+    if strings.HasPrefix(value, "http") {
+        if parsed, err := url.Parse(value); err == nil {
+
+            hostname := parsed.Host
+            finalPort := parsed.Port()
+            if finalPort == "" {
+                if parsed.Scheme == "https" {
+                    finalPort = "443"
+                } else {
+                    finalPort = "80"
+                }
+                hostname = net.JoinHostPort(parsed.Host, finalPort)
+            }
+            finalValue = hostname
+        }
+    }
+    return finalValue
+}
+
+// ErrUnresponsiveHost is returned when a host is unresponsive
+//var ErrUnresponsiveHost = errors.New("skipping as host is unresponsive")
+
+// Check returns true if a host should be skipped as it has been
+// unresponsive for a certain number of times.
+//
+// The value can be many formats -
+//  - URL: https?:// type
+//  - Host:port type
+//  - host type
+func (c *Cache) Check(value string) bool {
+    finalValue := c.normalizeCacheValue(value)
+    if !c.failedTargets.Has(finalValue) {
+        return false
+    }
+
+    numberOfErrors, err := c.failedTargets.GetIFPresent(finalValue)
+    if err != nil {
+        return false
+    }
+    numberOfErrorsValue := numberOfErrors.(int)
+
+    if numberOfErrors == -1 {
+        return true
+    }
+    if numberOfErrorsValue >= c.hostMaxErrors {
+        _ = c.failedTargets.Set(finalValue, -1)
+        if c.verbose {
+            gologger.Verbose().Msgf("Skipping %s as previously unresponsive %d times", finalValue, numberOfErrorsValue)
+        }
+        return true
+    }
+    return false
+}
+
+// MarkFailed marks a host as failed previously
+func (c *Cache) MarkFailed(value string) {
+    finalValue := c.normalizeCacheValue(value)
+    if !c.failedTargets.Has(finalValue) {
+        _ = c.failedTargets.Set(finalValue, 1)
+        return
+    }
+
+    numberOfErrors, err := c.failedTargets.GetIFPresent(finalValue)
+    if err != nil || numberOfErrors == nil {
+        _ = c.failedTargets.Set(finalValue, 1)
+        return
+    }
+    numberOfErrorsValue := numberOfErrors.(int)
+
+    _ = c.failedTargets.Set(finalValue, numberOfErrorsValue+1)
+}
+
+var checkErrorRegexp = regexp.MustCompile(`(no address found for host|Client\.Timeout exceeded while awaiting headers|could not resolve host)`)
+
+// CheckError checks if an error represents a type that should be
+// added to the host skipping table.
+func (c *Cache) CheckError(err error) bool {
+    errString := err.Error()
+    return checkErrorRegexp.MatchString(errString)
+}
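Reviewer note: a minimal usage sketch of the new host errors cache; the threshold, target URL, and error value are illustrative only.

package main

import (
    "errors"

    "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/hosterrorscache"
)

func main() {
    cache := hosterrorscache.New(3, hosterrorscache.DefaultMaxHostsCount).SetVerbose(true)
    defer cache.Close()

    // err stands in for a real network failure returned by a request attempt.
    err := errors.New("could not resolve host")
    target := "https://unreachable.example"

    if cache.CheckError(err) {
        cache.MarkFailed(target) // record one more failure for this host
    }
    if cache.Check(target) {
        // the host exceeded the configured error threshold and further requests would be skipped
    }
}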
@@ -0,0 +1,30 @@
+package hosterrorscache
+
+import (
+    "testing"
+
+    "github.com/stretchr/testify/require"
+)
+
+func TestCacheCheckMarkFailed(t *testing.T) {
+    cache := New(3, DefaultMaxHostsCount)
+
+    cache.MarkFailed("http://example.com:80")
+    if value, err := cache.failedTargets.Get("http://example.com:80"); err == nil && value != nil {
+        require.Equal(t, 1, value, "could not get correct markfailed")
+    }
+    cache.MarkFailed("example.com:80")
+    if value, err := cache.failedTargets.Get("example.com:80"); err == nil && value != nil {
+        require.Equal(t, 2, value, "could not get correct markfailed")
+    }
+    cache.MarkFailed("example.com")
+    if value, err := cache.failedTargets.Get("example.com"); err == nil && value != nil {
+        require.Equal(t, 1, value, "could not get correct markfailed")
+    }
+    for i := 0; i < 3; i++ {
+        cache.MarkFailed("test")
+    }
+
+    value := cache.Check("test")
+    require.Equal(t, true, value, "could not get checked value")
+}
@@ -3,8 +3,11 @@ package dns
 import (
     "testing"
 
-    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
     "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
+    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
 )
 
 func TestDNSCompileMake(t *testing.T) {
@@ -22,7 +25,7 @@ func TestDNSCompileMake(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile dns request")
@@ -5,6 +5,8 @@ import (
     "time"
 
     "github.com/miekg/dns"
+
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
@@ -139,7 +141,7 @@ func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *out
     data := &output.ResultEvent{
         TemplateID:   types.ToString(wrapped.InternalEvent["template-id"]),
         TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]),
-        Info:         wrapped.InternalEvent["template-info"].(map[string]interface{}),
+        Info:         wrapped.InternalEvent["template-info"].(model.Info),
         Type:         "dns",
         Host:         types.ToString(wrapped.InternalEvent["host"]),
         Matched:      types.ToString(wrapped.InternalEvent["matched"]),
@@ -6,12 +6,15 @@ import (
     "testing"
 
     "github.com/miekg/dns"
+    "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
-    "github.com/stretchr/testify/require"
 )
 
 func TestResponseToDSLMap(t *testing.T) {
@@ -29,7 +32,7 @@ func TestResponseToDSLMap(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile dns request")
@@ -61,7 +64,7 @@ func TestDNSOperatorMatch(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile dns request")
@@ -144,7 +147,7 @@ func TestDNSOperatorExtract(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile dns request")
@@ -214,7 +217,7 @@ func TestDNSMakeResult(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile dns request")
@@ -3,12 +3,15 @@ package dns
 import (
     "testing"
 
+    "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
-    "github.com/stretchr/testify/require"
 )
 
 func TestDNSExecuteWithResults(t *testing.T) {
@@ -39,7 +42,7 @@ func TestDNSExecuteWithResults(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile dns request")
@@ -3,8 +3,11 @@ package file
 import (
     "testing"
 
-    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
     "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
+    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
 )
 
 func TestFileCompile(t *testing.T) {
@@ -21,7 +24,7 @@ func TestFileCompile(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -6,8 +6,11 @@ import (
     "path"
     "testing"
 
-    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
     "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
+    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
 )
 
 func TestFindInputPaths(t *testing.T) {
@@ -24,7 +27,7 @@ func TestFindInputPaths(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -5,6 +5,7 @@ import (
     "strings"
     "time"
 
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
@@ -136,7 +137,7 @@ func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *out
     data := &output.ResultEvent{
         TemplateID:   types.ToString(wrapped.InternalEvent["template-id"]),
         TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]),
-        Info:         wrapped.InternalEvent["template-info"].(map[string]interface{}),
+        Info:         wrapped.InternalEvent["template-info"].(model.Info),
         Type:         "file",
         Path:         types.ToString(wrapped.InternalEvent["path"]),
         Matched:      types.ToString(wrapped.InternalEvent["matched"]),
@@ -3,12 +3,15 @@ package file
 import (
     "testing"
 
+    "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
-    "github.com/stretchr/testify/require"
 )
 
 func TestResponseToDSLMap(t *testing.T) {
@@ -25,7 +28,7 @@ func TestResponseToDSLMap(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -50,7 +53,7 @@ func TestFileOperatorMatch(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -115,7 +118,7 @@ func TestFileOperatorExtract(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -180,7 +183,7 @@ func TestFileMakeResult(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -6,7 +6,9 @@ import (
     "path"
     "testing"
 
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
@@ -41,7 +43,7 @@ func TestFileExecuteWithResults(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -3,6 +3,7 @@ package headless
 import (
     "time"
 
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
@@ -108,7 +109,7 @@ func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *out
     data := &output.ResultEvent{
         TemplateID:   types.ToString(wrapped.InternalEvent["template-id"]),
         TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]),
-        Info:         wrapped.InternalEvent["template-info"].(map[string]interface{}),
+        Info:         wrapped.InternalEvent["template-info"].(model.Info),
         Type:         "headless",
         Host:         types.ToString(wrapped.InternalEvent["host"]),
         Matched:      types.ToString(wrapped.InternalEvent["matched"]),
@@ -2,6 +2,7 @@ package http
 
 import (
     "context"
+    "fmt"
     "io"
     "io/ioutil"
     "net"
@@ -15,7 +16,6 @@ import (
     "github.com/pkg/errors"
     "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/expressions"
     "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/generators"
-    "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/replacer"
     "github.com/projectdiscovery/nuclei/v2/pkg/protocols/http/race"
     "github.com/projectdiscovery/nuclei/v2/pkg/protocols/http/raw"
     "github.com/projectdiscovery/rawhttp"
@@ -51,23 +51,30 @@ func (r *requestGenerator) Make(baseURL string, dynamicValues map[string]interfa
     }
 
     data, parsed = baseURLWithTemplatePrefs(data, parsed)
-    values := generators.MergeMaps(dynamicValues, map[string]interface{}{
-        "Hostname": parsed.Host,
-    })
-
+    trailingSlash := false
     isRawRequest := len(r.request.Raw) > 0
     if !isRawRequest && strings.HasSuffix(parsed.Path, "/") && strings.Contains(data, "{{BaseURL}}/") {
-        parsed.Path = strings.TrimSuffix(parsed.Path, "/")
+        trailingSlash = true
+    }
+    values := generators.MergeMaps(dynamicValues, generateVariables(parsed, trailingSlash))
+
+    // merge with vars
+    if !r.options.Options.Vars.IsEmpty() {
+        values = generators.MergeMaps(values, r.options.Options.Vars.AsMap())
+    }
+
+    // merge with env vars
+    if r.options.Options.EnvironmentVariables {
+        values = generators.MergeMaps(generators.EnvVars(), values)
     }
-    parsedString := parsed.String()
-    values["BaseURL"] = parsedString
-
     // If data contains \n it's a raw request, process it like raw. Else
     // continue with the template based request flow.
     if isRawRequest {
-        return r.makeHTTPRequestFromRaw(ctx, parsedString, data, values, payloads, interactURL)
+        return r.makeHTTPRequestFromRaw(ctx, parsed.String(), data, values, payloads, interactURL)
     }
-    return r.makeHTTPRequestFromModel(ctx, data, values, interactURL)
+    return r.makeHTTPRequestFromModel(ctx, data, values, payloads, interactURL)
 }
 
 // Total returns the total number of requests for the generator
@@ -96,23 +103,38 @@ func baseURLWithTemplatePrefs(data string, parsed *url.URL) (string, *url.URL) {
 }
 
 // MakeHTTPRequestFromModel creates a *http.Request from a request template
-func (r *requestGenerator) makeHTTPRequestFromModel(ctx context.Context, data string, values map[string]interface{}, interactURL string) (*generatedRequest, error) {
-    final := replacer.Replace(data, values)
+func (r *requestGenerator) makeHTTPRequestFromModel(ctx context.Context, data string, values, generatorValues map[string]interface{}, interactURL string) (*generatedRequest, error) {
     if interactURL != "" {
-        final = r.options.Interactsh.ReplaceMarkers(final, interactURL)
+        data = r.options.Interactsh.ReplaceMarkers(data, interactURL)
+    }
+
+    // Combine the template payloads along with base
+    // request values.
+    finalValues := generators.MergeMaps(generatorValues, values)
+
+    // Evaulate the expressions for the request if any.
+    var err error
+    data, err = expressions.Evaluate(data, finalValues)
+    if err != nil {
+        return nil, errors.Wrap(err, "could not evaluate helper expressions")
+    }
+
+    method, err := expressions.Evaluate(r.request.Method, finalValues)
+    if err != nil {
+        return nil, errors.Wrap(err, "could not evaluate helper expressions")
     }
 
     // Build a request on the specified URL
-    req, err := http.NewRequestWithContext(ctx, r.request.Method, final, nil)
+    req, err := http.NewRequestWithContext(ctx, method, data, nil)
     if err != nil {
         return nil, err
     }
 
-    request, err := r.fillRequest(req, values, interactURL)
+    request, err := r.fillRequest(req, finalValues, interactURL)
     if err != nil {
         return nil, err
     }
-    return &generatedRequest{request: request, original: r.request}, nil
+    return &generatedRequest{request: request, meta: generatorValues, original: r.request}, nil
 }
 
 // makeHTTPRequestFromRaw creates a *http.Request from a raw request
@@ -168,7 +190,7 @@ func (r *requestGenerator) handleRawWithPayloads(ctx context.Context, rawRequest
             req.Host = value
         }
     }
-    request, err := r.fillRequest(req, values, "")
+    request, err := r.fillRequest(req, finalValues, "")
     if err != nil {
         return nil, err
     }
@@ -183,9 +205,13 @@ func (r *requestGenerator) fillRequest(req *http.Request, values map[string]inte
         if interactURL != "" {
             value = r.options.Interactsh.ReplaceMarkers(value, interactURL)
         }
-        req.Header[header] = []string{replacer.Replace(value, values)}
+        value, err := expressions.Evaluate(value, values)
+        if err != nil {
+            return nil, errors.Wrap(err, "could not evaluate helper expressions")
+        }
+        req.Header[header] = []string{value}
         if header == "Host" {
-            req.Host = replacer.Replace(value, values)
+            req.Host = value
         }
     }
 
@@ -200,6 +226,10 @@ func (r *requestGenerator) fillRequest(req *http.Request, values map[string]inte
         if interactURL != "" {
             body = r.options.Interactsh.ReplaceMarkers(body, interactURL)
         }
+        body, err := expressions.Evaluate(body, values)
+        if err != nil {
+            return nil, errors.Wrap(err, "could not evaluate helper expressions")
+        }
         req.Body = ioutil.NopCloser(strings.NewReader(body))
     }
     setHeader(req, "User-Agent", uarand.GetRandom())
@@ -221,3 +251,34 @@ func setHeader(req *http.Request, name, value string) {
         req.Host = value
     }
 }
+
+// generateVariables will create default variables after parsing a url
+func generateVariables(parsed *url.URL, trailingSlash bool) map[string]interface{} {
+    domain := parsed.Host
+    if strings.Contains(parsed.Host, ":") {
+        domain = strings.Split(parsed.Host, ":")[0]
+    }
+
+    port := parsed.Port()
+    if port == "" {
+        if parsed.Scheme == "https" {
+            port = "443"
+        } else if parsed.Scheme == "http" {
+            port = "80"
+        }
+    }
+
+    if trailingSlash {
+        parsed.Path = strings.TrimSuffix(parsed.Path, "/")
+    }
+
+    return map[string]interface{}{
+        "BaseURL":  parsed.String(),
+        "RootURL":  fmt.Sprintf("%s://%s", parsed.Scheme, parsed.Host),
+        "Hostname": parsed.Host,
+        "Host":     domain,
+        "Port":     port,
+        "Path":     parsed.EscapedPath(),
+        "Scheme":   parsed.Scheme,
+    }
+}
@@ -4,7 +4,9 @@ import (
     "net/url"
     "testing"
 
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/stretchr/testify/require"
 )
 
@@ -18,6 +20,44 @@ func TestBaseURLWithTemplatePrefs(t *testing.T) {
     require.Equal(t, "{{BaseURL}}/newpath", data, "could not get correct data")
 }
 
+func TestVariables(t *testing.T) {
+    baseURL := "http://localhost:9001/test/123"
+    parsed, _ := url.Parse(baseURL)
+    values := generateVariables(parsed, true)
+
+    require.Equal(t, values["BaseURL"], parsed.String(), "incorrect baseurl")
+    require.Equal(t, values["RootURL"], "http://localhost:9001", "incorrect rootURL")
+    require.Equal(t, values["Host"], "localhost", "incorrect domain name")
+    require.Equal(t, values["Path"], "/test/123", "incorrect path")
+    require.Equal(t, values["Port"], "9001", "incorrect port number")
+    require.Equal(t, values["Scheme"], "http", "incorrect scheme")
+    require.Equal(t, values["Hostname"], "localhost:9001", "incorrect hostname")
+
+    baseURL = "https://example.com"
+    parsed, _ = url.Parse(baseURL)
+    values = generateVariables(parsed, false)
+
+    require.Equal(t, values["BaseURL"], parsed.String(), "incorrect baseurl")
+    require.Equal(t, values["Host"], "example.com", "incorrect domain name")
+    require.Equal(t, values["RootURL"], "https://example.com", "incorrect rootURL")
+    require.Equal(t, values["Path"], "", "incorrect path")
+    require.Equal(t, values["Port"], "443", "incorrect port number")
+    require.Equal(t, values["Scheme"], "https", "incorrect scheme")
+    require.Equal(t, values["Hostname"], "example.com", "incorrect hostname")
+
+    baseURL = "ftp://foobar.com/"
+    parsed, _ = url.Parse(baseURL)
+    values = generateVariables(parsed, true)
+
+    require.Equal(t, values["BaseURL"], parsed.String(), "incorrect baseurl")
+    require.Equal(t, values["Host"], "foobar.com", "incorrect domain name")
+    require.Equal(t, values["RootURL"], "ftp://foobar.com", "incorrect rootURL")
+    require.Equal(t, values["Path"], "", "incorrect path")
+    require.Equal(t, values["Port"], "", "incorrect port number") // Unsupported protocol results in a blank port
+    require.Equal(t, values["Scheme"], "ftp", "incorrect scheme")
+    require.Equal(t, values["Hostname"], "foobar.com", "incorrect hostname")
+}
+
 func TestMakeRequestFromModal(t *testing.T) {
     options := testutils.DefaultOptions
 
@@ -36,7 +76,7 @@ func TestMakeRequestFromModal(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile http request")
@@ -63,7 +103,7 @@ func TestMakeRequestFromModalTrimSuffixSlash(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
    })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile http request")
@@ -101,7 +141,7 @@ Accept-Encoding: gzip`},
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile http request")
@@ -140,7 +180,7 @@ Accept-Encoding: gzip`},
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID:   templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile http request")
@@ -178,12 +178,19 @@ func (r *Request) GetID() string {
 
 // Compile compiles the protocol request for further execution.
 func (r *Request) Compile(options *protocols.ExecuterOptions) error {
-    client, err := httpclientpool.Get(options.Options, &httpclientpool.Configuration{
+    connectionConfiguration := &httpclientpool.Configuration{
         Threads:         r.Threads,
         MaxRedirects:    r.MaxRedirects,
         FollowRedirects: r.Redirects,
         CookieReuse:     r.CookieReuse,
-    })
+    }
+
+    // if the headers contain "Connection" we need to disable the automatic keep alive of the standard library
+    if _, hasConnectionHeader := r.Headers["Connection"]; hasConnectionHeader {
+        connectionConfiguration.Connection = &httpclientpool.ConnectionConfiguration{DisableKeepAlive: false}
+    }
+
+    client, err := httpclientpool.Get(options.Options, connectionConfiguration)
     if err != nil {
         return errors.Wrap(err, "could not get dns client")
     }
@@ -3,8 +3,11 @@ package http
 import (
     "testing"
 
-    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
     "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
+    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
 )
 
 func TestHTTPCompile(t *testing.T) {
@@ -29,7 +32,7 @@ Accept-Encoding: gzip`},
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile http request")
@@ -50,6 +50,12 @@ func Init(options *types.Options) error {
     return nil
 }
 
+// // Configuration contains the custom configuration options for a connection
+type ConnectionConfiguration struct {
+    // DisableKeepAlive of the connection
+    DisableKeepAlive bool
+}
+
 // Configuration contains the custom configuration options for a client
 type Configuration struct {
     // Threads contains the threads for the client
@@ -60,6 +66,8 @@ type Configuration struct {
     CookieReuse bool
     // FollowRedirects specifies whether to follow redirects
    FollowRedirects bool
+    // Connection defines custom connection configuration
+    Connection *ConnectionConfiguration
 }
 
 // Hash returns the hash of the configuration to allow client pooling
@@ -74,10 +82,17 @@ func (c *Configuration) Hash() string {
     builder.WriteString(strconv.FormatBool(c.FollowRedirects))
     builder.WriteString("r")
     builder.WriteString(strconv.FormatBool(c.CookieReuse))
+    builder.WriteString("c")
+    builder.WriteString(strconv.FormatBool(c.Connection != nil))
     hash := builder.String()
     return hash
 }
 
+// HasCustomOptions checks whether the configuration requires custom settings
+func (c *Configuration) HasStandardOptions() bool {
+    return c.Threads == 0 && c.MaxRedirects == 0 && !c.FollowRedirects && !c.CookieReuse && c.Connection == nil
+}
+
 // GetRawHTTP returns the rawhttp request client
 func GetRawHTTP(options *types.Options) *rawhttp.Client {
     if rawhttpClient == nil {
@@ -90,7 +105,7 @@ func GetRawHTTP(options *types.Options) *rawhttp.Client {
 
 // Get creates or gets a client for the protocol based on custom configuration
 func Get(options *types.Options, configuration *Configuration) (*retryablehttp.Client, error) {
-    if configuration.Threads == 0 && configuration.MaxRedirects == 0 && !configuration.FollowRedirects && !configuration.CookieReuse {
+    if configuration.HasStandardOptions() {
         return normalClient, nil
     }
     return wrappedGet(options, configuration)
@@ -140,6 +155,11 @@ func wrappedGet(options *types.Options, configuration *Configuration) (*retryabl
     followRedirects := configuration.FollowRedirects
     maxRedirects := configuration.MaxRedirects
 
+    // override connection's settings if required
+    if configuration.Connection != nil {
+        disableKeepAlives = configuration.Connection.DisableKeepAlive
+    }
+
     transport := &http.Transport{
         DialContext: Dialer.Dial,
         MaxIdleConns: maxIdleConns,
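The hunks above make the HTTP client pool connection-aware: a template that declares its own "Connection" header gets a client whose keep-alive behaviour is controlled explicitly, and fully default configurations keep using the shared client. A minimal illustrative sketch of that flow, written as if inside the same httpclientpool package — the literal option values and the wrapper function name are assumptions, not part of this commit:

```go
// exampleGetClient is a hypothetical caller of the new configuration surface.
func exampleGetClient(options *types.Options) (*retryablehttp.Client, error) {
	cfg := &Configuration{
		Threads:         10,   // assumed value
		FollowRedirects: true, // assumed value
		// Set only when the template declares its own "Connection" header,
		// mirroring the http Request.Compile hunk earlier in this commit.
		Connection: &ConnectionConfiguration{DisableKeepAlive: false},
	}
	if cfg.HasStandardOptions() {
		// An all-default configuration would fall back to the shared normalClient.
	}
	// Non-default configurations are pooled by cfg.Hash(), which now also
	// encodes whether a custom Connection block is present.
	return Get(options, cfg)
}
```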
@@ -5,6 +5,7 @@ import (
     "strings"
     "time"
 
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
@@ -54,6 +55,8 @@ func (r *Request) Extract(data map[string]interface{}, extractor *extractors.Ext
         return extractor.ExtractRegex(item)
     case extractors.KValExtractor:
         return extractor.ExtractKval(data)
+    case extractors.XPathExtractor:
+        return extractor.ExtractHTML(item)
     case extractors.JSONExtractor:
         return extractor.ExtractJSON(item)
     }
@@ -144,7 +147,7 @@ func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *out
     data := &output.ResultEvent{
         TemplateID: types.ToString(wrapped.InternalEvent["template-id"]),
         TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]),
-        Info: wrapped.InternalEvent["template-info"].(map[string]interface{}),
+        Info: wrapped.InternalEvent["template-info"].(model.Info),
         Type: "http",
         Host: types.ToString(wrapped.InternalEvent["host"]),
         Matched: types.ToString(wrapped.InternalEvent["matched"]),
@@ -5,12 +5,15 @@ import (
     "testing"
     "time"
 
+    "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
-    "github.com/stretchr/testify/require"
 )
 
 func TestResponseToDSLMap(t *testing.T) {
@@ -26,7 +29,7 @@ func TestResponseToDSLMap(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -56,7 +59,7 @@ func TestHTTPOperatorMatch(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -126,7 +129,7 @@ func TestHTTPOperatorExtract(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -237,7 +240,7 @@ func TestHTTPMakeResult(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile file request")
@@ -216,6 +216,10 @@ func (r *Request) ExecuteWithResults(reqURL string, dynamicValues, previous outp
             return err
         }
 
+        // Check if hosts just keep erroring
+        if r.options.HostErrorsCache != nil && r.options.HostErrorsCache.Check(reqURL) {
+            break
+        }
         var gotOutput bool
         r.options.RateLimiter.Take()
         err = r.executeRequest(reqURL, request, previous, func(event *output.InternalWrappedEvent) {
@@ -237,7 +241,10 @@ func (r *Request) ExecuteWithResults(reqURL string, dynamicValues, previous outp
             }
         }, requestCount)
         if err != nil {
-            requestErr = multierr.Append(requestErr, err)
+            if r.options.HostErrorsCache != nil && r.options.HostErrorsCache.CheckError(err) {
+                r.options.HostErrorsCache.MarkFailed(reqURL)
+            }
+            requestErr = err
         }
         requestCount++
         r.options.Progress.IncrementRequests()
@@ -304,9 +311,10 @@ func (r *Request) executeRequest(reqURL string, request *generatedRequest, previ
 
     // For race conditions we can't dump the request body at this point as it's already waiting the open-gate event, already handled with a similar code within the race function
     if !request.original.Race {
-        dumpedRequest, err = dump(request, reqURL)
-        if err != nil {
-            return err
+        var dumpError error
+        dumpedRequest, dumpError = dump(request, reqURL)
+        if dumpError != nil {
+            return dumpError
         }
 
         if r.options.Options.Debug || r.options.Options.DebugRequests {
@@ -314,10 +322,6 @@ func (r *Request) executeRequest(reqURL string, request *generatedRequest, previ
             gologger.Print().Msgf("%s", string(dumpedRequest))
         }
     }
-
-    if resp == nil {
-        err = errors.New("no response got for request")
-    }
     if err != nil {
         // rawhttp doesn't supports draining response bodies.
         if resp != nil && resp.Body != nil && request.rawRequest == nil {
@@ -354,7 +358,7 @@ func (r *Request) executeRequest(reqURL string, request *generatedRequest, previ
             // Ignore body read due to server misconfiguration errors
             if stringsutil.ContainsAny(err.Error(), "gzip: invalid header") {
                 gologger.Warning().Msgf("[%s] Server sent an invalid gzip header and it was not possible to read the uncompressed body for %s: %s", r.options.TemplateID, formedURL, err.Error())
-            } else if !stringsutil.ContainsAny(err.Error(), "unexpected EOF") { // ignore EOF error
+            } else if !stringsutil.ContainsAny(err.Error(), "unexpected EOF", "user canceled") { // ignore EOF and random error
                 return errors.Wrap(err, "could not read http body")
             }
         }
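The hunks above wire an optional host errors cache into HTTP request execution: before each request the target is checked, and errors the cache recognises as host failures mark that host so later requests short-circuit. A hedged sketch of that contract in isolation — the method names Check, CheckError, and MarkFailed come from the hunks; the surrounding helper is illustrative only:

```go
// exampleWithHostErrors shows the host-errors flow introduced above, assuming a
// *hosterrorscache.Cache with the Check/CheckError/MarkFailed methods used in the diff.
func exampleWithHostErrors(cache *hosterrorscache.Cache, reqURL string, doRequest func() error) error {
	// Skip hosts that have already been marked as persistently failing.
	if cache != nil && cache.Check(reqURL) {
		return nil
	}
	if err := doRequest(); err != nil {
		// Only errors the cache classifies as host failures mark the host.
		if cache != nil && cache.CheckError(err) {
			cache.MarkFailed(reqURL)
		}
		return err
	}
	return nil
}
```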
@@ -31,20 +31,39 @@ func (r *Request) newGenerator() *requestGenerator {
 // nextValue returns the next path or the next raw request depending on user input
 // It returns false if all the inputs have been exhausted by the generator instance.
 func (r *requestGenerator) nextValue() (value string, payloads map[string]interface{}, result bool) {
-    // If we have paths, return the next path.
+    // For both raw/path requests, start with the request at current index.
+    // If we are not at the start, then check if the iterator for payloads
+    // has finished if there are any.
+    //
+    // If the iterator has finished for the current request
+    // then reset it and move on to the next value, otherwise use the last request.
+
     if len(r.request.Path) > 0 && r.currentIndex < len(r.request.Path) {
+        if r.payloadIterator != nil {
+            payload, ok := r.payloadIterator.Value()
+            if !ok {
+                r.currentIndex++
+                r.payloadIterator.Reset()
+
+                // No more payloads request for us now.
+                if len(r.request.Path) == r.currentIndex {
+                    return "", nil, false
+                }
+                if item := r.request.Path[r.currentIndex]; item != "" {
+                    newPayload, ok := r.payloadIterator.Value()
+                    return item, newPayload, ok
+                }
+                return "", nil, false
+            }
+            return r.request.Path[r.currentIndex], payload, true
+        }
         if value := r.request.Path[r.currentIndex]; value != "" {
             r.currentIndex++
             return value, nil, true
         }
     }
 
-    // If we have raw requests, start with the request at current index.
-    // If we are not at the start, then check if the iterator for payloads
-    // has finished if there are any.
-    //
-    // If the iterator has finished for the current raw request
-    // then reset it and move on to the next value, otherwise use the last request.
     if len(r.request.Raw) > 0 && r.currentIndex < len(r.request.Raw) {
         if r.payloadIterator != nil {
             payload, ok := r.payloadIterator.Value()
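The rewritten nextValue above now walks payload combinations for plain path requests the same way it already did for raw requests: the payload iterator is drained for the current path, reset, and the generator advances to the next path. A rough sketch of the calling loop this contract serves — the generator variable and its construction are assumptions; only the nextValue signature shown above is used:

```go
// Sketch: consuming a requestGenerator until paths/raw requests and payloads are exhausted.
for {
	value, payloads, ok := generator.nextValue()
	if !ok {
		break // every path (or raw request) has been paired with every payload set
	}
	_ = value    // the path or raw request to build next
	_ = payloads // payload values for this iteration; nil when no payloads are defined
}
```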
@@ -3,8 +3,11 @@ package network
 import (
     "testing"
 
-    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
     "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
+    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
 )
 
 func TestNetworkCompileMake(t *testing.T) {
@@ -20,7 +23,7 @@ func TestNetworkCompileMake(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile network request")
@@ -3,6 +3,7 @@ package network
 import (
     "time"
 
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
@@ -109,7 +110,7 @@ func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *out
     data := &output.ResultEvent{
         TemplateID: types.ToString(wrapped.InternalEvent["template-id"]),
         TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]),
-        Info: wrapped.InternalEvent["template-info"].(map[string]interface{}),
+        Info: wrapped.InternalEvent["template-info"].(model.Info),
         Type: "network",
         Host: types.ToString(wrapped.InternalEvent["host"]),
         Matched: types.ToString(wrapped.InternalEvent["matched"]),
@@ -3,12 +3,15 @@ package network
 import (
     "testing"
 
+    "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
-    "github.com/stretchr/testify/require"
 )
 
 func TestResponseToDSLMap(t *testing.T) {
@@ -24,7 +27,7 @@ func TestResponseToDSLMap(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile network request")
@@ -49,7 +52,7 @@ func TestNetworkOperatorMatch(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile network request")
@@ -112,7 +115,7 @@ func TestNetworkOperatorExtract(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile network request")
@@ -175,7 +178,7 @@ func TestNetworkMakeResult(t *testing.T) {
     }
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err := request.Compile(executerOpts)
     require.Nil(t, err, "could not compile network request")
@@ -8,12 +8,15 @@ import (
     "net/url"
     "testing"
 
+    "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
-    "github.com/stretchr/testify/require"
 )
 
 func TestNetworkExecuteWithResults(t *testing.T) {
@@ -52,7 +55,7 @@ func TestNetworkExecuteWithResults(t *testing.T) {
     request.Inputs = append(request.Inputs, &Input{Data: fmt.Sprintf("GET / HTTP/1.1\r\nHost: %s\r\n\r\n", parsed.Host)})
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     err = request.Compile(executerOpts)
     require.Nil(t, err, "could not compile network request")
@@ -6,9 +6,12 @@ import (
     "path"
     "testing"
 
-    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
-    "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
+    "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
+    "github.com/projectdiscovery/nuclei/v2/pkg/operators"
 )
 
 func TestFindResponses(t *testing.T) {
@@ -19,7 +22,7 @@ func TestFindResponses(t *testing.T) {
     request := &Request{}
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     executerOpts.Operators = []*operators.Operators{{}}
     err := request.Compile(executerOpts)
@@ -5,6 +5,7 @@ import (
     "strings"
     "time"
 
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
@@ -137,7 +138,7 @@ func (r *Request) makeResultEventItem(wrapped *output.InternalWrappedEvent) *out
     data := &output.ResultEvent{
         TemplateID: types.ToString(wrapped.InternalEvent["template-id"]),
         TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]),
-        Info: wrapped.InternalEvent["template-info"].(map[string]interface{}),
+        Info: wrapped.InternalEvent["template-info"].(model.Info),
         Type: "http",
         Path: types.ToString(wrapped.InternalEvent["path"]),
         Matched: types.ToString(wrapped.InternalEvent["matched"]),
@@ -5,12 +5,15 @@ import (
     "testing"
     "time"
 
+    "github.com/stretchr/testify/require"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
-    "github.com/stretchr/testify/require"
 )
 
 func TestResponseToDSLMap(t *testing.T) {
@@ -21,7 +24,7 @@ func TestResponseToDSLMap(t *testing.T) {
     request := &Request{}
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     executerOpts.Operators = []*operators.Operators{{}}
     err := request.Compile(executerOpts)
@@ -47,7 +50,7 @@ func TestHTTPOperatorMatch(t *testing.T) {
     request := &Request{}
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     executerOpts.Operators = []*operators.Operators{{}}
     err := request.Compile(executerOpts)
@@ -113,7 +116,7 @@ func TestHTTPOperatorExtract(t *testing.T) {
     request := &Request{}
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     executerOpts.Operators = []*operators.Operators{{}}
     err := request.Compile(executerOpts)
@@ -166,7 +169,7 @@ func TestHTTPMakeResult(t *testing.T) {
     request := &Request{}
     executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
         ID: templateID,
-        Info: map[string]interface{}{"severity": "low", "name": "test"},
+        Info: model.Info{SeverityHolder: severity.SeverityHolder{Severity: severity.Low}, Name: "test"},
     })
     executerOpts.Operators = []*operators.Operators{{
         Matchers: []*matchers.Matcher{{
@@ -2,12 +2,14 @@ package protocols
 
 import (
     "github.com/projectdiscovery/nuclei/v2/pkg/catalog"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
     "github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
     "github.com/projectdiscovery/nuclei/v2/pkg/progress"
     "github.com/projectdiscovery/nuclei/v2/pkg/projectfile"
+    "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/hosterrorscache"
     "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/interactsh"
     "github.com/projectdiscovery/nuclei/v2/pkg/protocols/headless/engine"
     "github.com/projectdiscovery/nuclei/v2/pkg/reporting"
@@ -34,7 +36,7 @@ type ExecuterOptions struct {
     // TemplatePath is the path of the template for the request
     TemplatePath string
     // TemplateInfo contains information block of the template request
-    TemplateInfo map[string]interface{}
+    TemplateInfo model.Info
     // Output is a writer interface for writing output events from executer.
     Output output.Writer
     // Options contains configuration options for the executer.
@@ -53,8 +55,12 @@ type ExecuterOptions struct {
     Browser *engine.Browser
     // Interactsh is a client for interactsh oob polling server
     Interactsh *interactsh.Client
+    // HostErrorsCache is an optional cache for handling host errors
+    HostErrorsCache *hosterrorscache.Cache
 
     Operators []*operators.Operators // only used by offlinehttp module
+
+    WorkflowLoader model.WorkflowLoader
 }
 
 // Request is an interface implemented any protocol based request generator.
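With TemplateInfo now a typed model.Info (plus the new optional HostErrorsCache and WorkflowLoader fields), callers build the info block as a struct rather than a loose map, exactly as the test hunks earlier in this commit do. A small hedged sketch of that construction — field values are placeholders and the helper function is illustrative only:

```go
// exampleExecuterOptions builds the typed template info block used throughout this commit.
func exampleExecuterOptions() protocols.ExecuterOptions {
	return protocols.ExecuterOptions{
		TemplateID:   "example-template", // placeholder
		TemplatePath: "example.yaml",     // placeholder
		TemplateInfo: model.Info{
			Name:           "test",
			SeverityHolder: severity.SeverityHolder{Severity: severity.Low},
		},
		// HostErrorsCache and WorkflowLoader are optional and may be left unset.
	}
}
```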
@@ -8,6 +8,7 @@ import (
     "crypto/sha1"
     "io/ioutil"
     "os"
+    "reflect"
     "unsafe"
 
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
@@ -106,5 +107,7 @@ func (s *Storage) Index(result *output.ResultEvent) (bool, error) {
 //
 // Reference - https://stackoverflow.com/questions/59209493/how-to-use-unsafe-get-a-byte-slice-from-a-string-without-memory-copy
 func unsafeToBytes(data string) []byte {
-    return *(*[]byte)(unsafe.Pointer(&data))
+    var buf = *(*[]byte)(unsafe.Pointer(&data))
+    (*reflect.SliceHeader)(unsafe.Pointer(&buf)).Cap = len(data)
+    return buf
 }
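The unsafeToBytes change above also pins the capacity of the converted slice: reinterpreting a string header as a slice header leaves the Cap field with whatever happens to sit in adjacent memory, so the extra reflect.SliceHeader write sets it to len(data). A self-contained illustration of the same pattern (standard library only; not part of this commit):

```go
package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

// unsafeToBytes mirrors the fixed helper above: zero-copy string-to-bytes
// conversion with the capacity explicitly set so cap() can be trusted.
func unsafeToBytes(data string) []byte {
	var buf = *(*[]byte)(unsafe.Pointer(&data))
	(*reflect.SliceHeader)(unsafe.Pointer(&buf)).Cap = len(data)
	return buf
}

func main() {
	b := unsafeToBytes("nuclei")
	fmt.Println(len(b), cap(b)) // 6 6
	// The returned bytes alias the string's memory and must be treated as read-only.
}
```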
@@ -10,8 +10,11 @@ import (
 
     "github.com/owenrumney/go-sarif/sarif"
     "github.com/pkg/errors"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
     "github.com/projectdiscovery/nuclei/v2/pkg/reporting/format"
+    "github.com/projectdiscovery/nuclei/v2/pkg/utils"
 )
 
 // Exporter is an exporter for nuclei sarif output format.
@@ -59,8 +62,8 @@ func (i *Exporter) Export(event *output.ResultEvent) error {
     sarifSeverity := getSarifSeverity(event)
 
     var ruleName string
-    if s, ok := event.Info["name"]; ok {
-        ruleName = s.(string)
+    if utils.IsNotBlank(event.Info.Name) {
+        ruleName = event.Info.Name
     }
 
     var templateURL string
@@ -71,8 +74,8 @@ func (i *Exporter) Export(event *output.ResultEvent) error {
     }
 
     var ruleDescription string
-    if d, ok := event.Info["description"]; ok {
-        ruleDescription = d.(string)
+    if utils.IsNotBlank(event.Info.Description) {
+        ruleDescription = event.Info.Description
     }
 
     i.mutex.Lock()
@@ -108,17 +111,12 @@ func (i *Exporter) Export(event *output.ResultEvent) error {
 
 // getSarifSeverity returns the sarif severity
 func getSarifSeverity(event *output.ResultEvent) string {
-    var ruleSeverity string
-    if s, ok := event.Info["severity"]; ok {
-        ruleSeverity = s.(string)
-    }
-
-    switch ruleSeverity {
-    case "info":
+    switch event.Info.SeverityHolder.Severity {
+    case severity.Info:
         return "note"
-    case "low", "medium":
+    case severity.Low, severity.Medium:
         return "warning"
-    case "high", "critical":
+    case severity.High, severity.Critical:
         return "error"
     default:
         return "note"
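getSarifSeverity now switches on the typed severity instead of parsing strings; per the hunk above the mapping is Info to "note", Low/Medium to "warning", High/Critical to "error", with "note" as the fallback. A hedged sketch of an in-package test pinning that mapping — the ResultEvent construction is abbreviated and the test itself is not part of this commit:

```go
func TestGetSarifSeverity(t *testing.T) {
	for _, tc := range []struct {
		holder   severity.SeverityHolder
		expected string
	}{
		{severity.SeverityHolder{Severity: severity.Info}, "note"},
		{severity.SeverityHolder{Severity: severity.Low}, "warning"},
		{severity.SeverityHolder{Severity: severity.Medium}, "warning"},
		{severity.SeverityHolder{Severity: severity.High}, "error"},
		{severity.SeverityHolder{Severity: severity.Critical}, "error"},
	} {
		event := &output.ResultEvent{Info: model.Info{SeverityHolder: tc.holder}}
		// getSarifSeverity is unexported, so this would live in the sarif package.
		require.Equal(t, tc.expected, getSarifSeverity(event))
	}
}
```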
@@ -3,8 +3,10 @@ package format
 import (
     "bytes"
     "fmt"
+    "github.com/projectdiscovery/nuclei/v2/pkg/utils"
     "strings"
 
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
     "github.com/projectdiscovery/nuclei/v2/pkg/types"
 )
@@ -17,9 +19,9 @@ func Summary(event *output.ResultEvent) string {
     builder.WriteString("[")
     builder.WriteString(template)
     builder.WriteString("] [")
-    builder.WriteString(types.ToString(event.Info["severity"]))
+    builder.WriteString(types.ToString(event.Info.SeverityHolder))
     builder.WriteString("] ")
-    builder.WriteString(types.ToString(event.Info["name"]))
+    builder.WriteString(types.ToString(event.Info.Name))
     builder.WriteString(" found on ")
     builder.WriteString(event.Host)
     data := builder.String()
@@ -28,27 +30,28 @@ func Summary(event *output.ResultEvent) string {
 
 // MarkdownDescription formats a short description of the generated
 // event by the nuclei scanner in Markdown format.
-func MarkdownDescription(event *output.ResultEvent) string {
+func MarkdownDescription(event *output.ResultEvent) string { // TODO remove the code duplication: format.go <-> jira.go
     template := GetMatchedTemplate(event)
     builder := &bytes.Buffer{}
     builder.WriteString("**Details**: **")
     builder.WriteString(template)
     builder.WriteString("** ")
+
     builder.WriteString(" matched at ")
     builder.WriteString(event.Host)
+
     builder.WriteString("\n\n**Protocol**: ")
     builder.WriteString(strings.ToUpper(event.Type))
+
     builder.WriteString("\n\n**Full URL**: ")
     builder.WriteString(event.Matched)
+
     builder.WriteString("\n\n**Timestamp**: ")
     builder.WriteString(event.Timestamp.Format("Mon Jan 2 15:04:05 -0700 MST 2006"))
+
     builder.WriteString("\n\n**Template Information**\n\n| Key | Value |\n|---|---|\n")
-    for k, v := range event.Info {
-        if k == "reference" {
-            continue
-        }
-        builder.WriteString(fmt.Sprintf("| %s | %s |\n", k, v))
-    }
+    builder.WriteString(ToMarkdownTableString(&event.Info))
     if event.Request != "" {
         builder.WriteString("\n**Request**\n\n```http\n")
         builder.WriteString(event.Request)
@@ -113,17 +116,29 @@ func MarkdownDescription(event *output.ResultEvent) string {
         builder.WriteString("\n```\n")
         }
     }
-    if d, ok := event.Info["reference"]; ok {
+    reference := event.Info.Reference
+    if !reference.IsEmpty() {
         builder.WriteString("\nReference: \n")
-        switch v := d.(type) {
+
+        /*TODO couldn't the following code replace the logic below?
+        referenceSlice := reference.ToSlice()
+        for i, item := range referenceSlice {
+            builder.WriteString("- ")
+            builder.WriteString(item)
+            if len(referenceSlice)-1 != i {
+                builder.WriteString("\n")
+            }
+        }*/
+
+        switch value := reference.Value.(type) {
         case string:
-            if !strings.HasPrefix(v, "-") {
+            if !strings.HasPrefix(value, "-") {
                 builder.WriteString("- ")
             }
-            builder.WriteString(v)
+            builder.WriteString(value)
         case []interface{}:
-            slice := types.ToStringSlice(v)
+            slice := types.ToStringSlice(value)
             for i, item := range slice {
                 builder.WriteString("- ")
                 builder.WriteString(item)
@@ -154,3 +169,25 @@ func GetMatchedTemplate(event *output.ResultEvent) string {
     template := builder.String()
     return template
 }
+
+func ToMarkdownTableString(templateInfo *model.Info) string {
+    fields := map[string]string{
+        "Name": templateInfo.Name,
+        "Authors": sliceToString(templateInfo.Authors),
+        "Tags": sliceToString(templateInfo.Tags),
+        "Description": templateInfo.Description,
+        "Severity": templateInfo.SeverityHolder.Severity.String(),
+    }
+
+    builder := &bytes.Buffer{}
+    for k, v := range fields {
+        if utils.IsNotBlank(v) {
+            builder.WriteString(fmt.Sprintf("| %s | %s |\n", k, v))
+        }
+    }
+    return builder.String()
+}
+
+func sliceToString(stringSlice model.StringSlice) string {
+    return strings.Join(stringSlice.ToSlice(), ", ")
+}
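ToMarkdownTableString, added above, renders only the non-blank template info fields as Markdown table rows; because the fields are collected in a map, row order is not fixed. A hedged usage sketch — the field values are placeholders, and the exact severity spelling in the output depends on Severity.String():

```go
func exampleTemplateInfoTable() {
	info := &model.Info{
		Name:           "Example template",           // placeholder
		Description:    "Minimal usage illustration", // placeholder
		SeverityHolder: severity.SeverityHolder{Severity: severity.Low},
	}
	fmt.Print(ToMarkdownTableString(info))
	// Expected rows (order may vary, blank fields are skipped), e.g.:
	// | Name | Example template |
	// | Description | Minimal usage illustration |
	// | Severity | low |
}
```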
@@ -4,6 +4,10 @@ import (
     "strings"
 
     "github.com/pkg/errors"
+    "go.uber.org/multierr"
+
+    "github.com/projectdiscovery/nuclei/v2/internal/severity"
+    "github.com/projectdiscovery/nuclei/v2/pkg/model"
     "github.com/projectdiscovery/nuclei/v2/pkg/output"
     "github.com/projectdiscovery/nuclei/v2/pkg/reporting/dedupe"
     "github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/disk"
@@ -11,8 +15,6 @@ import (
     "github.com/projectdiscovery/nuclei/v2/pkg/reporting/trackers/github"
     "github.com/projectdiscovery/nuclei/v2/pkg/reporting/trackers/gitlab"
     "github.com/projectdiscovery/nuclei/v2/pkg/reporting/trackers/jira"
-    "github.com/projectdiscovery/nuclei/v2/pkg/types"
-    "go.uber.org/multierr"
 )
 
 // Options is a configuration file for nuclei reporting module
@@ -36,41 +38,44 @@ type Options struct {
 // Filter filters the received event and decides whether to perform
 // reporting for it or not.
 type Filter struct {
-    Severity string `yaml:"severity"`
-    severity []string
-    Tags string `yaml:"tags"`
-    tags []string
-}
-
-// Compile compiles the filter creating match structures.
-func (f *Filter) Compile() {
-    parts := strings.Split(f.Severity, ",")
-    for _, part := range parts {
-        f.severity = append(f.severity, strings.TrimSpace(part))
-    }
-    parts = strings.Split(f.Tags, ",")
-    for _, part := range parts {
-        f.tags = append(f.tags, strings.TrimSpace(part))
-    }
+    Severities severity.Severities `yaml:"severity"`
+    Tags model.StringSlice `yaml:"tags"`
 }
 
 // GetMatch returns true if a filter matches result event
-func (f *Filter) GetMatch(event *output.ResultEvent) bool {
-    severity := types.ToString(event.Info["severity"])
-    if len(f.severity) > 0 {
-        return stringSliceContains(f.severity, severity)
+func (filter *Filter) GetMatch(event *output.ResultEvent) bool {
+    return isSeverityMatch(event, filter) && isTagMatch(event, filter) // TODO revisit this
+}
+
+func isTagMatch(event *output.ResultEvent, filter *Filter) bool {
+    filterTags := filter.Tags
+    if filterTags.IsEmpty() {
+        return true
     }
-    tags := event.Info["tags"]
-    tagParts := strings.Split(types.ToString(tags), ",")
-    for i, tag := range tagParts {
-        tagParts[i] = strings.TrimSpace(tag)
-    }
-    for _, tag := range f.tags {
-        if stringSliceContains(tagParts, tag) {
+
+    tags := event.Info.Tags.ToSlice()
+    for _, tag := range filterTags.ToSlice() {
+        if stringSliceContains(tags, tag) {
             return true
         }
     }
+
+    return false
+}
+
+func isSeverityMatch(event *output.ResultEvent, filter *Filter) bool {
+    resultEventSeverity := event.Info.SeverityHolder.Severity // TODO review
+
+    if len(filter.Severities) == 0 {
+        return true
+    }
+
+    for _, current := range filter.Severities {
+        if current == resultEventSeverity {
+            return true
+        }
+    }
+
     return false
 }
 
@@ -98,17 +103,6 @@ type Client struct {
 
 // New creates a new nuclei issue tracker reporting client
 func New(options *Options, db string) (*Client, error) {
-    if options == nil {
-        return nil, errors.New("no options passed")
-    }
-
-    if options.AllowList != nil {
-        options.AllowList.Compile()
-    }
-    if options.DenyList != nil {
-        options.DenyList.Compile()
-    }
-
     client := &Client{options: options}
     if options.Github != nil {
         tracker, err := github.New(options.Github)
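The reporting filter now unmarshals `severity` and `tags` directly into typed lists (severity.Severities, model.StringSlice) instead of hand-compiled comma strings, and matching is split into isSeverityMatch and isTagMatch. A hedged sketch of loading such a filter — whether a YAML list or a comma-separated string is accepted depends on the custom unmarshalers of those types, and the raw snippet below is a hypothetical config fragment:

```go
// exampleFilterFromYAML shows how the struct tags above map config keys onto the new fields.
func exampleFilterFromYAML() (*Filter, error) {
	raw := []byte("severity:\n  - high\n  - critical\ntags:\n  - cve\n") // assumed list form
	filter := &Filter{}
	if err := yaml.Unmarshal(raw, filter); err != nil {
		return nil, err
	}
	return filter, nil
}
```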
@ -6,13 +6,14 @@ import (
|
|||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
jira "github.com/andygrunwald/go-jira"
|
"github.com/andygrunwald/go-jira"
|
||||||
|
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/output"
|
"github.com/projectdiscovery/nuclei/v2/pkg/output"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/reporting/format"
|
"github.com/projectdiscovery/nuclei/v2/pkg/reporting/format"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/types"
|
"github.com/projectdiscovery/nuclei/v2/pkg/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Integration is a client for a issue tracker integration
|
// Integration is a client for an issue tracker integration
|
||||||
type Integration struct {
|
type Integration struct {
|
||||||
jira *jira.Client
|
jira *jira.Client
|
||||||
options *Options
|
options *Options
|
||||||
@ -93,31 +94,34 @@ func (i *Integration) CreateIssue(event *output.ResultEvent) error {
|
|||||||
|
|
||||||
// jiraFormatDescription formats a short description of the generated
|
// jiraFormatDescription formats a short description of the generated
|
||||||
// event by the nuclei scanner in Jira format.
|
// event by the nuclei scanner in Jira format.
|
||||||
func jiraFormatDescription(event *output.ResultEvent) string {
|
func jiraFormatDescription(event *output.ResultEvent) string { // TODO remove the code duplication: format.go <-> jira.go
|
||||||
template := format.GetMatchedTemplate(event)
|
template := format.GetMatchedTemplate(event)
|
||||||
|
|
||||||
builder := &bytes.Buffer{}
|
builder := &bytes.Buffer{}
|
||||||
builder.WriteString("*Details*: *")
|
builder.WriteString("*Details*: *")
|
||||||
builder.WriteString(template)
|
builder.WriteString(template)
|
||||||
builder.WriteString("* ")
|
builder.WriteString("* ")
|
||||||
|
|
||||||
builder.WriteString(" matched at ")
|
builder.WriteString(" matched at ")
|
||||||
builder.WriteString(event.Host)
|
builder.WriteString(event.Host)
|
||||||
|
|
||||||
builder.WriteString("\n\n*Protocol*: ")
|
builder.WriteString("\n\n*Protocol*: ")
|
||||||
builder.WriteString(strings.ToUpper(event.Type))
|
builder.WriteString(strings.ToUpper(event.Type))
|
||||||
|
|
||||||
builder.WriteString("\n\n*Full URL*: ")
|
builder.WriteString("\n\n*Full URL*: ")
|
||||||
builder.WriteString(event.Matched)
|
builder.WriteString(event.Matched)
|
||||||
|
|
||||||
builder.WriteString("\n\n*Timestamp*: ")
|
builder.WriteString("\n\n*Timestamp*: ")
|
||||||
builder.WriteString(event.Timestamp.Format("Mon Jan 2 15:04:05 -0700 MST 2006"))
|
builder.WriteString(event.Timestamp.Format("Mon Jan 2 15:04:05 -0700 MST 2006"))
|
||||||
|
|
||||||
builder.WriteString("\n\n*Template Information*\n\n| Key | Value |\n")
|
builder.WriteString("\n\n*Template Information*\n\n| Key | Value |\n")
|
||||||
for k, v := range event.Info {
|
builder.WriteString(format.ToMarkdownTableString(&event.Info))
|
||||||
if k == "reference" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
builder.WriteString(fmt.Sprintf("| %s | %s |\n", k, v))
|
|
||||||
}
|
|
||||||
builder.WriteString("\n*Request*\n\n{code}\n")
|
builder.WriteString("\n*Request*\n\n{code}\n")
|
||||||
builder.WriteString(event.Request)
|
builder.WriteString(event.Request)
|
||||||
builder.WriteString("\n{code}\n\n*Response*\n\n{code}\n")
|
builder.WriteString("\n{code}\n")
|
||||||
|
|
||||||
|
builder.WriteString("\n*Response*\n\n{code}\n")
|
||||||
// If the response is larger than 5 kb, truncate it before writing.
|
// If the response is larger than 5 kb, truncate it before writing.
|
||||||
if len(event.Response) > 5*1024 {
|
if len(event.Response) > 5*1024 {
|
||||||
builder.WriteString(event.Response[:5*1024])
|
builder.WriteString(event.Response[:5*1024])
|
||||||
@ -174,10 +178,22 @@ func jiraFormatDescription(event *output.ResultEvent) string {
|
|||||||
builder.WriteString("\n{code}\n")
|
builder.WriteString("\n{code}\n")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if d, ok := event.Info["reference"]; ok {
|
|
||||||
|
reference := event.Info.Reference
|
||||||
|
if !reference.IsEmpty() {
|
||||||
builder.WriteString("\nReference: \n")
|
builder.WriteString("\nReference: \n")
|
||||||
|
|
||||||
switch v := d.(type) {
|
/*TODO couldn't the following code replace the logic below?
|
||||||
|
referenceSlice := reference.ToSlice()
|
||||||
|
for i, item := range referenceSlice {
|
||||||
|
builder.WriteString("- ")
|
||||||
|
builder.WriteString(item)
|
||||||
|
if len(referenceSlice)-1 != i {
|
||||||
|
builder.WriteString("\n")
|
||||||
|
}
|
||||||
|
}*/
|
||||||
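The commented-out TODO above proposes replacing the type switch that follows with a plain loop over reference.ToSlice(). A self-contained version of that loop, assuming ToSlice() returns []string (as its use with loader.ListTags elsewhere in this commit suggests), could look like:

package main

import (
	"bytes"
	"fmt"
)

// writeReferenceList mirrors the TODO loop: one "- item" line per reference,
// with no trailing newline after the last entry.
func writeReferenceList(builder *bytes.Buffer, references []string) {
	for i, item := range references {
		builder.WriteString("- ")
		builder.WriteString(item)
		if i != len(references)-1 {
			builder.WriteString("\n")
		}
	}
}

func main() {
	b := &bytes.Buffer{}
	writeReferenceList(b, []string{"https://example.com/advisory", "CVE-2021-0000"})
	fmt.Println(b.String())
}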
|
|
||||||
|
switch v := reference.Value.(type) {
|
||||||
case string:
|
case string:
|
||||||
if !strings.HasPrefix(v, "-") {
|
if !strings.HasPrefix(v, "-") {
|
||||||
builder.WriteString("- ")
|
builder.WriteString("- ")
|
||||||
|
|||||||
@ -8,12 +8,13 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/pkg/errors"
|
"github.com/pkg/errors"
|
||||||
|
"gopkg.in/yaml.v2"
|
||||||
|
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
|
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
|
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/executer"
|
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/executer"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/offlinehttp"
|
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/offlinehttp"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/workflows/compile"
|
"github.com/projectdiscovery/nuclei/v2/pkg/utils"
|
||||||
"gopkg.in/yaml.v2"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Parse parses a yaml request template file
|
// Parse parses a yaml request template file
|
||||||
@ -42,10 +43,10 @@ func Parse(filePath string, preprocessor Preprocessor, options protocols.Execute
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, ok := template.Info["name"]; !ok {
|
if utils.IsBlank(template.Info.Name) {
|
||||||
return nil, errors.New("no template name field provided")
|
return nil, errors.New("no template name field provided")
|
||||||
}
|
}
|
||||||
if _, ok := template.Info["author"]; !ok {
|
if template.Info.Authors.IsEmpty() {
|
||||||
return nil, errors.New("no template author field provided")
|
return nil, errors.New("no template author field provided")
|
||||||
}
|
}
|
||||||
|
|
||||||
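The two checks above move template validation from map lookups onto the typed model.Info. A minimal sketch with local stand-in types (the real Info and author slice live in v2/pkg/model, and IsBlank in v2/pkg/utils):

package main

import (
	"errors"
	"fmt"
	"strings"
)

type stringSlice []string

func (s stringSlice) IsEmpty() bool { return len(s) == 0 }

// info is a simplified stand-in for model.Info, covering only the fields
// checked in this hunk.
type info struct {
	Name    string
	Authors stringSlice
}

func isBlank(value string) bool { return strings.TrimSpace(value) == "" }

func validate(i info) error {
	if isBlank(i.Name) {
		return errors.New("no template name field provided")
	}
	if i.Authors.IsEmpty() {
		return errors.New("no template author field provided")
	}
	return nil
}

func main() {
	fmt.Println(validate(info{Name: "Example CVE", Authors: stringSlice{"pdteam"}}))
}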
@ -63,11 +64,7 @@ func Parse(filePath string, preprocessor Preprocessor, options protocols.Execute
|
|||||||
if len(template.Workflows) > 0 {
|
if len(template.Workflows) > 0 {
|
||||||
compiled := &template.Workflow
|
compiled := &template.Workflow
|
||||||
|
|
||||||
loader, err := compile.NewLoader(&options)
|
compileWorkflow(preprocessor, &options, compiled, options.WorkflowLoader)
|
||||||
if err != nil {
|
|
||||||
return nil, errors.Wrap(err, "could not create workflow loader")
|
|
||||||
}
|
|
||||||
compileWorkflow(preprocessor, &options, compiled, loader)
|
|
||||||
template.CompiledWorkflow = compiled
|
template.CompiledWorkflow = compiled
|
||||||
template.CompiledWorkflow.Options = &options
|
template.CompiledWorkflow.Options = &options
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,6 +2,7 @@
|
|||||||
package templates
|
package templates
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"github.com/projectdiscovery/nuclei/v2/pkg/model"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
|
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/dns"
|
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/dns"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/file"
|
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/file"
|
||||||
@ -29,12 +30,10 @@ type Template struct {
|
|||||||
// value: "\"cve-2021-19520\""
|
// value: "\"cve-2021-19520\""
|
||||||
ID string `yaml:"id"`
|
ID string `yaml:"id"`
|
||||||
// description: |
|
// description: |
|
||||||
// Info contains metadata information about the template. At minimum, it
|
// Info contains metadata information about the template.
|
||||||
// should contain `name`, `author`, `severity`, `description`, `tags`. Optionally
|
|
||||||
// you can also specify a list of `references` for the template.
|
|
||||||
// examples:
|
// examples:
|
||||||
// - value: exampleInfoStructure
|
// - value: exampleInfoStructure
|
||||||
Info map[string]interface{} `yaml:"info"`
|
Info model.Info `yaml:"info"`
|
||||||
// description: |
|
// description: |
|
||||||
// Requests contains the http request to make in the template.
|
// Requests contains the http request to make in the template.
|
||||||
// examples:
|
// examples:
|
||||||
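With Info now a model.Info rather than map[string]interface{}, the template's info block is decoded into a typed struct. A hypothetical, simplified equivalent using gopkg.in/yaml.v2 (the field set is inferred from the keys referenced in this commit; the real model.Info differs in its concrete types):

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// info is a strongly typed stand-in for the old map-based Info field.
type info struct {
	Name      string   `yaml:"name"`
	Author    string   `yaml:"author"`
	Severity  string   `yaml:"severity"`
	Reference []string `yaml:"reference"`
}

func main() {
	data := `
name: Example template
author: pdteam
severity: info
reference:
  - https://example.com/advisory
`
	var i info
	if err := yaml.Unmarshal([]byte(data), &i); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", i)
}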
|
|||||||
@ -2,13 +2,13 @@ package templates
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/projectdiscovery/gologger"
|
"github.com/projectdiscovery/gologger"
|
||||||
|
"github.com/projectdiscovery/nuclei/v2/pkg/model"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
|
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/workflows"
|
"github.com/projectdiscovery/nuclei/v2/pkg/workflows"
|
||||||
"github.com/projectdiscovery/nuclei/v2/pkg/workflows/compile"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// compileWorkflow compiles the workflow for execution
|
// compileWorkflow compiles the workflow for execution
|
||||||
func compileWorkflow(preprocessor Preprocessor, options *protocols.ExecuterOptions, workflow *workflows.Workflow, loader compile.WorkflowLoader) {
|
func compileWorkflow(preprocessor Preprocessor, options *protocols.ExecuterOptions, workflow *workflows.Workflow, loader model.WorkflowLoader) {
|
||||||
for _, workflow := range workflow.Workflows {
|
for _, workflow := range workflow.Workflows {
|
||||||
if err := parseWorkflow(preprocessor, workflow, options, loader); err != nil {
|
if err := parseWorkflow(preprocessor, workflow, options, loader); err != nil {
|
||||||
gologger.Warning().Msgf("Could not parse workflow: %v\n", err)
|
gologger.Warning().Msgf("Could not parse workflow: %v\n", err)
|
||||||
@ -18,7 +18,7 @@ func compileWorkflow(preprocessor Preprocessor, options *protocols.ExecuterOptio
|
|||||||
}
|
}
|
||||||
|
|
||||||
// parseWorkflow parses and compiles all templates in a workflow recursively
|
// parseWorkflow parses and compiles all templates in a workflow recursively
|
||||||
func parseWorkflow(preprocessor Preprocessor, workflow *workflows.WorkflowTemplate, options *protocols.ExecuterOptions, loader compile.WorkflowLoader) error {
|
func parseWorkflow(preprocessor Preprocessor, workflow *workflows.WorkflowTemplate, options *protocols.ExecuterOptions, loader model.WorkflowLoader) error {
|
||||||
shouldNotValidate := false
|
shouldNotValidate := false
|
||||||
|
|
||||||
if len(workflow.Subtemplates) > 0 || len(workflow.Matchers) > 0 {
|
if len(workflow.Subtemplates) > 0 || len(workflow.Matchers) > 0 {
|
||||||
@ -45,11 +45,12 @@ func parseWorkflow(preprocessor Preprocessor, workflow *workflows.WorkflowTempla
|
|||||||
}
|
}
|
||||||
|
|
||||||
// parseWorkflowTemplate parses a workflow template creating an executer
|
// parseWorkflowTemplate parses a workflow template creating an executer
|
||||||
func parseWorkflowTemplate(workflow *workflows.WorkflowTemplate, preprocessor Preprocessor, options *protocols.ExecuterOptions, loader compile.WorkflowLoader, noValidate bool) error {
|
func parseWorkflowTemplate(workflow *workflows.WorkflowTemplate, preprocessor Preprocessor, options *protocols.ExecuterOptions, loader model.WorkflowLoader, noValidate bool) error {
|
||||||
var paths []string
|
var paths []string
|
||||||
|
|
||||||
if len(workflow.Tags) > 0 {
|
workflowTags := workflow.Tags
|
||||||
paths = loader.ListTags([]string{workflow.Tags})
|
if !workflowTags.IsEmpty() {
|
||||||
|
paths = loader.ListTags(workflowTags.ToSlice())
|
||||||
} else {
|
} else {
|
||||||
paths = loader.ListTemplates([]string{workflow.Template}, noValidate)
|
paths = loader.ListTemplates([]string{workflow.Template}, noValidate)
|
||||||
}
|
}
|
||||||
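Sketch of the tag/template selection branch above, using a hypothetical loader interface inferred only from the two calls made here (ListTags and ListTemplates); the real WorkflowLoader interface may carry more methods:

package main

import "fmt"

type workflowLoader interface {
	ListTags(tags []string) []string
	ListTemplates(templates []string, noValidate bool) []string
}

// fakeLoader returns canned paths so the selection logic can run standalone.
type fakeLoader struct{}

func (fakeLoader) ListTags(_ []string) []string               { return []string{"cves/tagged-template.yaml"} }
func (fakeLoader) ListTemplates(t []string, _ bool) []string  { return t }

// selectPaths mirrors the branch in this hunk: tags win if present, otherwise
// the single named template is resolved.
func selectPaths(loader workflowLoader, tags []string, template string, noValidate bool) []string {
	if len(tags) > 0 {
		return loader.ListTags(tags)
	}
	return loader.ListTemplates([]string{template}, noValidate)
}

func main() {
	fmt.Println(selectPaths(fakeLoader{}, []string{"cve"}, "", false))
	fmt.Println(selectPaths(fakeLoader{}, nil, "technologies/tech-detect.yaml", true))
}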
@ -67,6 +68,7 @@ func parseWorkflowTemplate(workflow *workflows.WorkflowTemplate, preprocessor Pr
|
|||||||
IssuesClient: options.IssuesClient,
|
IssuesClient: options.IssuesClient,
|
||||||
Interactsh: options.Interactsh,
|
Interactsh: options.Interactsh,
|
||||||
ProjectFile: options.ProjectFile,
|
ProjectFile: options.ProjectFile,
|
||||||
|
HostErrorsCache: options.HostErrorsCache,
|
||||||
}
|
}
|
||||||
template, err := Parse(path, preprocessor, opts)
|
template, err := Parse(path, preprocessor, opts)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@ -6,6 +6,8 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/projectdiscovery/nuclei/v2/internal/severity"
|
||||||
)
|
)
|
||||||
|
|
||||||
// JSONScalarToString converts an interface coming from json to string
|
// JSONScalarToString converts an interface coming from json to string
|
||||||
@ -60,6 +62,10 @@ func ToString(data interface{}) string {
|
|||||||
return strconv.FormatUint(uint64(s), 10)
|
return strconv.FormatUint(uint64(s), 10)
|
||||||
case []byte:
|
case []byte:
|
||||||
return string(s)
|
return string(s)
|
||||||
|
case severity.SeverityHolder:
|
||||||
|
return s.Severity.String()
|
||||||
|
case severity.Severity:
|
||||||
|
return s.String()
|
||||||
case fmt.Stringer:
|
case fmt.Stringer:
|
||||||
return s.String()
|
return s.String()
|
||||||
case error:
|
case error:
|
||||||
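Illustration of the two new cases added above, using local stand-in types that mirror how internal/severity is called here (s.Severity.String() on a holder, s.String() on a severity value). Because the severity cases are inserted before case fmt.Stringer, they match first even though the severity type also satisfies Stringer:

package main

import "fmt"

type severityLevel int

func (s severityLevel) String() string {
	return [...]string{"info", "low", "medium", "high", "critical"}[s]
}

// severityHolder is a stand-in for severity.SeverityHolder: a wrapper whose
// Severity field carries the actual value.
type severityHolder struct{ Severity severityLevel }

func toString(data interface{}) string {
	switch s := data.(type) {
	case severityHolder:
		return s.Severity.String()
	case severityLevel:
		return s.String()
	case fmt.Stringer:
		return s.String()
	default:
		return fmt.Sprintf("%v", s)
	}
}

func main() {
	fmt.Println(toString(severityHolder{Severity: 3})) // high
	fmt.Println(toString(severityLevel(0)))            // info
}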
|
|||||||
@ -1,6 +1,9 @@
|
|||||||
package types
|
package types
|
||||||
|
|
||||||
import "github.com/projectdiscovery/goflags"
|
import (
|
||||||
|
"github.com/projectdiscovery/goflags"
|
||||||
|
"github.com/projectdiscovery/nuclei/v2/internal/severity"
|
||||||
|
)
|
||||||
|
|
||||||
// Options contains the configuration options for nuclei scanner.
|
// Options contains the configuration options for nuclei scanner.
|
||||||
type Options struct {
|
type Options struct {
|
||||||
@ -18,8 +21,10 @@ type Options struct {
|
|||||||
ExcludedTemplates goflags.StringSlice
|
ExcludedTemplates goflags.StringSlice
|
||||||
// CustomHeaders is the list of custom global headers to send with each request.
|
// CustomHeaders is the list of custom global headers to send with each request.
|
||||||
CustomHeaders goflags.StringSlice
|
CustomHeaders goflags.StringSlice
|
||||||
// Severity filters templates based on their severity and only run the matching ones.
|
// Vars is the list of custom global vars
|
||||||
Severity goflags.NormalizedStringSlice
|
Vars goflags.RuntimeMap
|
||||||
|
// Severities filters templates based on their severity and only run the matching ones.
|
||||||
|
Severities severity.Severities
|
||||||
// Author filters templates based on their author and only run the matching ones.
|
// Author filters templates based on their author and only run the matching ones.
|
||||||
Author goflags.NormalizedStringSlice
|
Author goflags.NormalizedStringSlice
|
||||||
// IncludeTags includes specified tags to be run even while being in denylist
|
// IncludeTags includes specified tags to be run even while being in denylist
|
||||||
@ -32,10 +37,10 @@ type Options struct {
|
|||||||
ProjectPath string
|
ProjectPath string
|
||||||
// InteractshURL is the URL for the interactsh server.
|
// InteractshURL is the URL for the interactsh server.
|
||||||
InteractshURL string
|
InteractshURL string
|
||||||
// Target is a single URL/Domain to scan using a template
|
// Target URLs/Domains to scan using a template
|
||||||
Target string
|
Targets goflags.StringSlice
|
||||||
// Targets specifies the targets to scan using templates.
|
// TargetsFilePath specifies the targets from a file to scan using templates.
|
||||||
Targets string
|
TargetsFilePath string
|
||||||
// Output is the file to write found results to.
|
// Output is the file to write found results to.
|
||||||
Output string
|
Output string
|
||||||
// ProxyURL is the URL for the proxy server
|
// ProxyURL is the URL for the proxy server
|
||||||
@ -60,6 +65,8 @@ type Options struct {
|
|||||||
StatsInterval int
|
StatsInterval int
|
||||||
// MetricsPort is the port to show metrics on
|
// MetricsPort is the port to show metrics on
|
||||||
MetricsPort int
|
MetricsPort int
|
||||||
|
// HostMaxErrors is the maximum number of errors allowed for a host
|
||||||
|
HostMaxErrors int
|
||||||
// BulkSize is the of targets analyzed in parallel for each template
|
// BulkSize is the of targets analyzed in parallel for each template
|
||||||
BulkSize int
|
BulkSize int
|
||||||
// TemplateThreads is the number of templates executed in parallel
|
// TemplateThreads is the number of templates executed in parallel
|
||||||
@ -143,4 +150,6 @@ type Options struct {
|
|||||||
UpdateNuclei bool
|
UpdateNuclei bool
|
||||||
// NoUpdateTemplates disables checking for nuclei templates updates
|
// NoUpdateTemplates disables checking for nuclei templates updates
|
||||||
NoUpdateTemplates bool
|
NoUpdateTemplates bool
|
||||||
|
// EnvironmentVariables enables support for environment variables
|
||||||
|
EnvironmentVariables bool
|
||||||
}
|
}
|
||||||
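Rough usage sketch of the renamed and added fields above (Target becomes Targets, Targets becomes TargetsFilePath, Severity becomes Severities, plus HostMaxErrors and EnvironmentVariables), written with local stand-in types instead of the real goflags and severity types, whose constructors may differ:

package main

import "fmt"

// options is a trimmed stand-in for types.Options, keeping only the fields
// touched by this hunk.
type options struct {
	Targets              []string // was: Target string
	TargetsFilePath      string   // was: Targets string
	Severities           []string // stand-in for severity.Severities
	HostMaxErrors        int      // new: per-host error budget
	EnvironmentVariables bool     // new: allow environment variable support
}

func main() {
	opts := options{
		Targets:              []string{"https://example.com", "https://test.example.org"},
		TargetsFilePath:      "targets.txt",
		Severities:           []string{"high", "critical"},
		HostMaxErrors:        30,
		EnvironmentVariables: true,
	}
	fmt.Printf("%+v\n", opts)
}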
|
|||||||
13
v2/pkg/utils/utils.go
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
package utils
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
func IsBlank(value string) bool {
|
||||||
|
return strings.TrimSpace(value) == ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func IsNotBlank(value string) bool {
|
||||||
|
return !IsBlank(value)
|
||||||
|
}
|
||||||
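Direct usage of the two helpers introduced in this new file; the import path follows the file header above:

package main

import (
	"fmt"

	"github.com/projectdiscovery/nuclei/v2/pkg/utils"
)

func main() {
	fmt.Println(utils.IsBlank("   "))       // true: whitespace-only is blank
	fmt.Println(utils.IsNotBlank("nuclei")) // true: non-empty after trimming
}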
@ -55,6 +55,11 @@ func (w *Workflow) runWorkflowStep(template *WorkflowTemplate, input string, res
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
if w.Options.HostErrorsCache != nil {
|
||||||
|
if w.Options.HostErrorsCache.CheckError(err) {
|
||||||
|
w.Options.HostErrorsCache.MarkFailed(input)
|
||||||
|
}
|
||||||
|
}
|
||||||
if len(template.Executers) == 1 {
|
if len(template.Executers) == 1 {
|
||||||
mainErr = err
|
mainErr = err
|
||||||
} else {
|
} else {
|
||||||
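A hypothetical cache shape inferred only from the two calls made in this hunk (CheckError and MarkFailed); the actual HostErrorsCache wired in through ExecuterOptions may look quite different:

package main

import (
	"errors"
	"fmt"
)

type hostErrorsCache interface {
	CheckError(err error) bool // does this error count toward the host budget?
	MarkFailed(host string)    // record one failure for the host
}

// countingCache is a toy implementation that counts failures per host.
type countingCache struct{ failures map[string]int }

func (c *countingCache) CheckError(err error) bool { return err != nil }
func (c *countingCache) MarkFailed(host string)    { c.failures[host]++ }

func main() {
	cache := &countingCache{failures: map[string]int{}}
	var _ hostErrorsCache = cache // compile-time check against the inferred interface

	err := errors.New("connection refused")
	if cache.CheckError(err) {
		cache.MarkFailed("example.com")
	}
	fmt.Println(cache.failures)
}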
|
|||||||
@ -1,6 +1,9 @@
|
|||||||
package workflows
|
package workflows
|
||||||
|
|
||||||
import "github.com/projectdiscovery/nuclei/v2/pkg/protocols"
|
import (
|
||||||
|
"github.com/projectdiscovery/nuclei/v2/pkg/model"
|
||||||
|
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
|
||||||
|
)
|
||||||
|
|
||||||
// Workflow is a workflow to execute with chained requests, etc.
|
// Workflow is a workflow to execute with chained requests, etc.
|
||||||
type Workflow struct {
|
type Workflow struct {
|
||||||
@ -23,7 +26,7 @@ type WorkflowTemplate struct {
|
|||||||
Template string `yaml:"template,omitempty"`
|
Template string `yaml:"template,omitempty"`
|
||||||
// description: |
|
// description: |
|
||||||
// Tags to run templates based on.
|
// Tags to run templates based on.
|
||||||
Tags string `yaml:"tags,omitempty"`
|
Tags model.StringSlice `yaml:"tags,omitempty"`
|
||||||
// description: |
|
// description: |
|
||||||
// Matchers perform name based matching to run subtemplates for a workflow.
|
// Matchers perform name based matching to run subtemplates for a workflow.
|
||||||
Matchers []*Matcher `yaml:"matchers,omitempty"`
|
Matchers []*Matcher `yaml:"matchers,omitempty"`
|
||||||
@ -31,7 +34,7 @@ type WorkflowTemplate struct {
|
|||||||
// Subtemplates are ran if the `template` field Template matches.
|
// Subtemplates are ran if the `template` field Template matches.
|
||||||
Subtemplates []*WorkflowTemplate `yaml:"subtemplates,omitempty"`
|
Subtemplates []*WorkflowTemplate `yaml:"subtemplates,omitempty"`
|
||||||
// Executers perform the actual execution for the workflow template
|
// Executers perform the actual execution for the workflow template
|
||||||
Executers []*ProtocolExecuterPair
|
Executers []*ProtocolExecuterPair `yaml:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ProtocolExecuterPair is a pair of protocol executer and its options
|
// ProtocolExecuterPair is a pair of protocol executer and its options
|
||||||
|
|||||||
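For reference, the yaml:"-" tag added to Executers tells gopkg.in/yaml.v2 to skip the field entirely during marshalling and unmarshalling, keeping runtime-only state out of workflow YAML. A small demonstration with simplified stand-in types:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// workflowTemplate is a trimmed stand-in for the real WorkflowTemplate struct.
type workflowTemplate struct {
	Template  string   `yaml:"template,omitempty"`
	Tags      []string `yaml:"tags,omitempty"`
	Executers []string `yaml:"-"` // runtime-only, never serialized
}

func main() {
	w := workflowTemplate{
		Template:  "technologies/tech-detect.yaml",
		Executers: []string{"http-executer"},
	}
	out, _ := yaml.Marshal(w)
	fmt.Print(string(out)) // only "template:" appears; Executers is omitted
}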