feat: added initial live DAST server implementation (#5772)

* feat: added initial live DAST server implementation

* feat: more logging + misc additions

* feat: auth file support enhancements for more complex scenarios + misc

* feat: added io.Reader support to input providers for http

* feat: added stats db to fuzzing + use sdk for dast server + misc

* feat: more additions and enhancements

* misc changes to live server

* misc

* use utils pprof server

* feat: added simpler stats tracking system

* feat: fixed analyzer timeout issue + missing case fix

* misc changes fix

* feat: changed the logic a bit + misc changes and additions

* feat: re-added slope checks + misc

* feat: added baseline measurements for time based checks

* chore(server): fix typos

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>

* fix(templates): potential DOM XSS

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>

* fix(authx): potential NIL deref

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>

* feat: misc review changes

* removed debug logging

* feat: remove existing cookies only

* feat: lint fixes

* misc

* misc text update

* request endpoint update

* feat: added tracking for status code, waf-detection & grouped errors (#6028)

* feat: added tracking for status code, waf-detection & grouped errors

* lint error fixes

* feat: review changes + moving to package + misc

---------

Co-authored-by: sandeep <8293321+ehsandeep@users.noreply.github.com>

* fix var dump (#5921)

* fix var dump

* fix dump test

* Added filename length restriction for debug mode (-srd flag) (#5931)

Co-authored-by: Andrey Matveenko <an.matveenko@vkteam.ru>

* more updates

* Update pkg/output/stats/waf/waf.go

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>

---------

Co-authored-by: sandeep <8293321+ehsandeep@users.noreply.github.com>
Co-authored-by: Dwi Siswanto <25837540+dwisiswant0@users.noreply.github.com>
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
Co-authored-by: Dogan Can Bakir <65292895+dogancanbakir@users.noreply.github.com>
Co-authored-by: 9flowers <51699499+Lercas@users.noreply.github.com>
Co-authored-by: Andrey Matveenko <an.matveenko@vkteam.ru>
Co-authored-by: Sandeep Singh <sandeep@projectdiscovery.io>
This commit is contained in:
Ice3man 2025-02-13 18:46:28 +05:30 committed by GitHub
parent 31fb7c8963
commit 5f0b7eb19b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
50 changed files with 2505 additions and 380 deletions

View File

@ -185,6 +185,11 @@ func main() {
go func() {
for range c {
gologger.Info().Msgf("CTRL+C pressed: Exiting\n")
if options.DASTServer {
nucleiRunner.Close()
os.Exit(1)
}
gologger.Info().Msgf("Attempting graceful shutdown...")
if options.EnableCloudUpload {
gologger.Info().Msgf("Uploading scan results to cloud...")
@ -358,9 +363,15 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.StringVarP(&options.FuzzingMode, "fuzzing-mode", "fm", "", "overrides fuzzing mode set in template (multiple, single)"),
flagSet.BoolVar(&fuzzFlag, "fuzz", false, "enable loading fuzzing templates (Deprecated: use -dast instead)"),
flagSet.BoolVar(&options.DAST, "dast", false, "enable / run dast (fuzz) nuclei templates"),
flagSet.BoolVarP(&options.DASTServer, "dast-server", "dts", false, "enable dast server mode (live fuzzing)"),
flagSet.BoolVarP(&options.DASTReport, "dast-report", "dtr", false, "write dast scan report to file"),
flagSet.StringVarP(&options.DASTServerToken, "dast-server-token", "dtst", "", "dast server token (optional)"),
flagSet.StringVarP(&options.DASTServerAddress, "dast-server-address", "dtsa", "localhost:9055", "dast server address"),
flagSet.BoolVarP(&options.DisplayFuzzPoints, "display-fuzz-points", "dfp", false, "display fuzz points in the output for debugging"),
flagSet.IntVar(&options.FuzzParamFrequency, "fuzz-param-frequency", 10, "frequency of uninteresting parameters for fuzzing before skipping"),
flagSet.StringVarP(&options.FuzzAggressionLevel, "fuzz-aggression", "fa", "low", "fuzzing aggression level controls payload count for fuzz (low, medium, high)"),
flagSet.StringSliceVarP(&options.Scope, "fuzz-scope", "cs", nil, "in scope url regex to be followed by fuzzer", goflags.FileCommaSeparatedStringSliceOptions),
flagSet.StringSliceVarP(&options.OutOfScope, "fuzz-out-scope", "cos", nil, "out of scope url regex to be excluded by fuzzer", goflags.FileCommaSeparatedStringSliceOptions),
)
flagSet.CreateGroup("uncover", "Uncover",

5
go.mod
View File

@ -51,6 +51,7 @@ require (
github.com/DataDog/gostackparse v0.6.0
github.com/Masterminds/semver/v3 v3.2.1
github.com/Mzack9999/gcache v0.0.0-20230410081825-519e28eab057
github.com/alitto/pond v1.9.2
github.com/antchfx/xmlquery v1.3.17
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
github.com/aws/aws-sdk-go-v2 v1.19.0
@ -75,7 +76,7 @@ require (
github.com/h2non/filetype v1.1.3
github.com/invopop/yaml v0.3.1
github.com/kitabisa/go-ci v1.0.3
github.com/labstack/echo/v4 v4.10.2
github.com/labstack/echo/v4 v4.12.0
github.com/leslie-qiwa/flat v0.0.0-20230424180412-f9d1cf014baa
github.com/lib/pq v1.10.9
github.com/mattn/go-sqlite3 v1.14.22
@ -359,7 +360,7 @@ require (
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jcmturner/gokrb5/v8 v8.4.4
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/labstack/gommon v0.4.0 // indirect
github.com/labstack/gommon v0.4.2 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/nwaples/rardecode v1.1.3 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect

17
go.sum
View File

@ -114,6 +114,8 @@ github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAu
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
github.com/alexbrainman/sspi v0.0.0-20210105120005-909beea2cc74 h1:Kk6a4nehpJ3UuJRqlA3JxYxBZEqCeOmATOvrbT4p9RA=
github.com/alexbrainman/sspi v0.0.0-20210105120005-909beea2cc74/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
github.com/alitto/pond v1.9.2 h1:9Qb75z/scEZVCoSU+osVmQ0I0JOeLfdTDafrbcJ8CLs=
github.com/alitto/pond v1.9.2/go.mod h1:xQn3P/sHTYcU/1BR3i86IGIrilcrGC2LiS+E2+CJWsI=
github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y=
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
@ -692,10 +694,10 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/labstack/echo/v4 v4.10.2 h1:n1jAhnq/elIFTHr1EYpiYtyKgx4RW9ccVgkqByZaN2M=
github.com/labstack/echo/v4 v4.10.2/go.mod h1:OEyqf2//K1DFdE57vw2DRgWY0M7s65IVQO2FzvI4J5k=
github.com/labstack/gommon v0.4.0 h1:y7cvthEAEbU0yHOf4axH8ZG2NH8knB9iNSoTO8dyIk8=
github.com/labstack/gommon v0.4.0/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM=
github.com/labstack/echo/v4 v4.12.0 h1:IKpw49IMryVB2p1a4dzwlhP1O2Tf2E0Ir/450lH+kI0=
github.com/labstack/echo/v4 v4.12.0/go.mod h1:UP9Cr2DJXbOK3Kr9ONYzNowSh7HP0aG0ShAyycHSJvM=
github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0=
github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU=
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
@ -723,12 +725,10 @@ github.com/mackerelio/go-osstat v0.2.4/go.mod h1:Zy+qzGdZs3A9cuIqmgbJvwbmLQH9dJv
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
@ -1104,7 +1104,6 @@ github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijb
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo=
github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/weppos/publicsuffix-go v0.12.0/go.mod h1:z3LCPQ38eedDQSwmsSRW4Y7t2L8Ln16JPQ02lHAdn5k=
@ -1420,10 +1419,7 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@ -1662,7 +1658,6 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools/v3 v3.3.0 h1:MfDY1b1/0xN1CyMlQDac0ziEy9zJQd9CXBRRDHw2jJo=

View File

@ -114,8 +114,13 @@ func GetLazyAuthFetchCallback(opts *AuthLazyFetchOptions) authx.LazyFetchSecret
}
// dynamic values
for k, v := range e.OperatorsResult.DynamicValues {
if len(v) > 0 {
data[k] = v[0]
// Iterate through all the values and choose the
// largest value as the extracted value
for _, value := range v {
oldVal, ok := data[k]
if !ok || len(value) > len(oldVal.(string)) {
data[k] = value
}
}
}
// named extractors

View File

@ -171,6 +171,11 @@ func ValidateOptions(options *types.Options) error {
if options.Validate {
validateTemplatePaths(config.DefaultConfig.TemplatesDirectory, options.Templates, options.Workflows)
}
if options.DAST {
if err := validateDASTOptions(options); err != nil {
return err
}
}
// Verify if any of the client certificate options were set since it requires all three to work properly
if options.HasClientCertificates() {
@ -274,6 +279,14 @@ func validateMissingGitLabOptions(options *types.Options) []string {
return missing
}
// validateDASTOptions verifies DAST-specific configuration values.
// It currently enforces a single rule: when a DAST server token is
// provided, it must be at least 16 characters long.
func validateDASTOptions(options *types.Options) error {
	token := options.DASTServerToken
	if token == "" {
		// No token configured; nothing to validate.
		return nil
	}
	if len(token) < 16 {
		return fmt.Errorf("DAST server token must be at least 16 characters long")
	}
	return nil
}
func createReportingOptions(options *types.Options) (*reporting.Options, error) {
var reportingOptions = &reporting.Options{}
if options.ReportingConfig != "" {

View File

@ -3,8 +3,6 @@ package runner
import (
"context"
"fmt"
"net/http"
_ "net/http/pprof"
"os"
"path/filepath"
"reflect"
@ -13,6 +11,7 @@ import (
"time"
"github.com/projectdiscovery/nuclei/v3/internal/pdcp"
"github.com/projectdiscovery/nuclei/v3/internal/server"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/frequency"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
@ -26,6 +25,7 @@ import (
"github.com/projectdiscovery/utils/env"
fileutil "github.com/projectdiscovery/utils/file"
permissionutil "github.com/projectdiscovery/utils/permission"
pprofutil "github.com/projectdiscovery/utils/pprof"
updateutils "github.com/projectdiscovery/utils/update"
"github.com/logrusorgru/aurora"
@ -41,6 +41,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/nuclei/v3/pkg/core"
"github.com/projectdiscovery/nuclei/v3/pkg/external/customtemplates"
fuzzStats "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
parsers "github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
@ -89,7 +90,7 @@ type Runner struct {
rateLimiter *ratelimit.Limiter
hostErrors hosterrorscache.CacheInterface
resumeCfg *types.ResumeCfg
pprofServer *http.Server
pprofServer *pprofutil.PprofServer
pdcpUploadErrMsg string
inputProvider provider.InputProvider
fuzzFrequencyCache *frequency.Tracker
@ -99,10 +100,10 @@ type Runner struct {
tmpDir string
parser parser.Parser
httpApiEndpoint *httpapi.Server
fuzzStats *fuzzStats.Tracker
dastServer *server.DASTServer
}
const pprofServerAddress = "127.0.0.1:8086"
// New creates a new client for running the enumeration process.
func New(options *types.Options) (*Runner, error) {
runner := &Runner{
@ -219,15 +220,8 @@ func New(options *types.Options) (*Runner, error) {
templates.SeverityColorizer = colorizer.New(runner.colorizer)
if options.EnablePprof {
server := &http.Server{
Addr: pprofServerAddress,
Handler: http.DefaultServeMux,
}
gologger.Info().Msgf("Listening pprof debug server on: %s", pprofServerAddress)
runner.pprofServer = server
go func() {
_ = server.ListenAndServe()
}()
runner.pprofServer = pprofutil.NewPprofServer()
runner.pprofServer.Start()
}
if options.HttpApiEndpoint != "" {
@ -303,6 +297,37 @@ func New(options *types.Options) (*Runner, error) {
}
runner.resumeCfg = resumeCfg
if options.DASTReport || options.DASTServer {
var err error
runner.fuzzStats, err = fuzzStats.NewTracker()
if err != nil {
return nil, errors.Wrap(err, "could not create fuzz stats db")
}
if !options.DASTServer {
dastServer, err := server.NewStatsServer(runner.fuzzStats)
if err != nil {
return nil, errors.Wrap(err, "could not create dast server")
}
runner.dastServer = dastServer
}
}
if runner.fuzzStats != nil {
outputWriter.JSONLogRequestHook = func(request *output.JSONLogRequest) {
if request.Error == "none" || request.Error == "" {
return
}
runner.fuzzStats.RecordErrorEvent(fuzzStats.ErrorEvent{
TemplateID: request.Template,
URL: request.Input,
Error: request.Error,
})
}
}
// setup a proxy writer to automatically upload results to PDCP
runner.output = runner.setupPDCPUpload(outputWriter)
opts := interactsh.DefaultOptions(runner.output, runner.issuesClient, runner.progress)
opts.Debug = runner.options.Debug
opts.NoColor = runner.options.NoColor
@ -369,6 +394,9 @@ func (r *Runner) runStandardEnumeration(executerOpts protocols.ExecutorOptions,
// Close releases all the resources and cleans up
func (r *Runner) Close() {
if r.dastServer != nil {
r.dastServer.Close()
}
if r.httpStats != nil {
r.httpStats.DisplayTopStats(r.options.NoColor)
}
@ -390,7 +418,7 @@ func (r *Runner) Close() {
}
protocolinit.Close()
if r.pprofServer != nil {
_ = r.pprofServer.Shutdown(context.Background())
r.pprofServer.Stop()
}
if r.rateLimiter != nil {
r.rateLimiter.Stop()
@ -449,6 +477,41 @@ func (r *Runner) setupPDCPUpload(writer output.Writer) output.Writer {
// RunEnumeration sets up the input layer for giving input nuclei.
// binary and runs the actual enumeration
func (r *Runner) RunEnumeration() error {
// If the user has asked for DAST server mode, run the live
// DAST fuzzing server.
if r.options.DASTServer {
execurOpts := &server.NucleiExecutorOptions{
Options: r.options,
Output: r.output,
Progress: r.progress,
Catalog: r.catalog,
IssuesClient: r.issuesClient,
RateLimiter: r.rateLimiter,
Interactsh: r.interactsh,
ProjectFile: r.projectFile,
Browser: r.browser,
Colorizer: r.colorizer,
Parser: r.parser,
TemporaryDirectory: r.tmpDir,
FuzzStatsDB: r.fuzzStats,
}
dastServer, err := server.New(&server.Options{
Address: r.options.DASTServerAddress,
Templates: r.options.Templates,
OutputWriter: r.output,
Verbose: r.options.Verbose,
Token: r.options.DASTServerToken,
InScope: r.options.Scope,
OutScope: r.options.OutOfScope,
NucleiExecutorOptions: execurOpts,
})
if err != nil {
return err
}
r.dastServer = dastServer
return dastServer.Start()
}
// If user asked for new templates to be executed, collect the list from the templates' directory.
if r.options.NewTemplates {
if arr := config.DefaultConfig.GetNewAdditions(); len(arr) > 0 {
@ -634,6 +697,14 @@ func (r *Runner) RunEnumeration() error {
Retries: r.options.Retries,
}, "")
if r.dastServer != nil {
go func() {
if err := r.dastServer.Start(); err != nil {
gologger.Error().Msgf("could not start dast server: %v", err)
}
}()
}
enumeration := false
var results *atomic.Bool
results, err = r.runStandardEnumeration(executorOpts, store, executorEngine)
@ -643,6 +714,9 @@ func (r *Runner) RunEnumeration() error {
return err
}
if executorOpts.FuzzStatsDB != nil {
executorOpts.FuzzStatsDB.Close()
}
if r.interactsh != nil {
matched := r.interactsh.Close()
if matched {

122
internal/server/dedupe.go Normal file
View File

@ -0,0 +1,122 @@
package server
import (
"crypto/sha256"
"encoding/hex"
"net/url"
"sort"
"strings"
"sync"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
mapsutil "github.com/projectdiscovery/utils/maps"
)
// dynamicHeaders lists request headers (lowercase keys) that are excluded
// from request hashing in hashRequest, because their values commonly change
// between otherwise identical requests (timestamps, caching validators,
// auth material, correlation IDs, client identity) and would defeat
// deduplication if included.
var dynamicHeaders = map[string]bool{
	"date":                true,
	"if-modified-since":   true,
	"if-unmodified-since": true,
	"cache-control":       true,
	"if-none-match":       true,
	"if-match":            true,
	"authorization":       true,
	"cookie":              true,
	"x-csrf-token":        true,
	"content-length":      true,
	"content-md5":         true,
	"host":                true,
	"x-request-id":        true,
	"x-correlation-id":    true,
	"user-agent":          true,
	"referer":             true,
}
// requestDeduplicator remembers the hashes of requests that have already
// been seen, so each unique request is processed only once.
type requestDeduplicator struct {
	hashes map[string]struct{}
	lock   *sync.RWMutex
}

// newRequestDeduplicator returns an empty deduplicator ready for
// concurrent use.
func newRequestDeduplicator() *requestDeduplicator {
	dedupe := &requestDeduplicator{}
	dedupe.hashes = make(map[string]struct{})
	dedupe.lock = &sync.RWMutex{}
	return dedupe
}
// isDuplicate reports whether req has been seen before. The first call
// for a given request records its hash and returns false; later calls
// with an equivalent request return true. Requests that cannot be hashed
// are conservatively treated as unique and never deduplicated.
func (r *requestDeduplicator) isDuplicate(req *types.RequestResponse) bool {
	hash, err := hashRequest(req)
	if err != nil {
		return false
	}

	// Fast path: look up under the read lock.
	r.lock.RLock()
	_, ok := r.hashes[hash]
	r.lock.RUnlock()
	if ok {
		return true
	}

	// Slow path: re-check under the write lock. Without this second
	// check, two goroutines racing on the same request could both pass
	// the read-locked lookup and both be reported as new.
	r.lock.Lock()
	defer r.lock.Unlock()
	if _, ok := r.hashes[hash]; ok {
		return true
	}
	r.hashes[hash] = struct{}{}
	return false
}
// hashRequest computes a stable SHA256 fingerprint of a request, built
// from its method, normalized URL, non-dynamic headers (sorted by key)
// and body. Two equivalent requests produce the same fingerprint.
func hashRequest(req *types.RequestResponse) (string, error) {
	normalizedURL, err := normalizeURL(req.URL.URL)
	if err != nil {
		return "", err
	}

	var sb strings.Builder
	sb.WriteString(req.Request.Method)
	sb.WriteString(normalizedURL)
	for _, hdr := range sortedNonDynamicHeaders(req.Request.Headers) {
		sb.WriteString(hdr.Key)
		sb.WriteString(hdr.Value)
	}
	if len(req.Request.Body) > 0 {
		sb.Write([]byte(req.Request.Body))
	}

	// Calculate the SHA256 hash
	sum := sha256.Sum256([]byte(sb.String()))
	return hex.EncodeToString(sum[:]), nil
}
func normalizeURL(u *url.URL) (string, error) {
query := u.Query()
sortedQuery := make(url.Values)
for k, v := range query {
sort.Strings(v)
sortedQuery[k] = v
}
u.RawQuery = sortedQuery.Encode()
if u.Path == "" {
u.Path = "/"
}
return u.String(), nil
}
// header is a single request header key/value pair.
type header struct {
	Key   string
	Value string
}

// sortedNonDynamicHeaders drops headers listed in dynamicHeaders and
// returns the remaining ones sorted by key, giving a stable order for
// hashing.
func sortedNonDynamicHeaders(headers mapsutil.OrderedMap[string, string]) []header {
	var result []header
	headers.Iterate(func(key, value string) bool {
		// Skip headers whose values vary between identical requests.
		if dynamicHeaders[strings.ToLower(key)] {
			return true
		}
		result = append(result, header{Key: key, Value: value})
		return true
	})
	sort.Slice(result, func(a, b int) bool {
		return result[a].Key < result[b].Key
	})
	return result
}

View File

@ -0,0 +1,199 @@
package server
import (
"context"
"fmt"
_ "net/http/pprof"
"strings"
"github.com/logrusorgru/aurora"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/frequency"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider/http"
"github.com/projectdiscovery/nuclei/v3/pkg/projectfile"
"gopkg.in/yaml.v3"
"github.com/pkg/errors"
"github.com/projectdiscovery/ratelimit"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/nuclei/v3/pkg/core"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/parser"
parsers "github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/progress"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/globalmatchers"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/hosterrorscache"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/interactsh"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/utils/excludematchers"
browserEngine "github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless/engine"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
)
// nucleiExecutor wraps a fully wired nuclei engine instance that the DAST
// server uses to run fuzzing templates against individual requests.
type nucleiExecutor struct {
	engine       *core.Engine             // core scan engine
	store        *loader.Store            // loaded templates and workflows
	options      *NucleiExecutorOptions   // options this executor was built with
	executorOpts protocols.ExecutorOptions // resolved per-execution options
}

// NucleiExecutorOptions carries the already-initialized components
// (output writer, clients, rate limiter, etc.) that the DAST server
// borrows from the main runner to build its embedded nuclei executor.
type NucleiExecutorOptions struct {
	Options      *types.Options
	Output       output.Writer
	Progress     progress.Progress
	Catalog      catalog.Catalog
	IssuesClient reporting.Client
	RateLimiter  *ratelimit.Limiter
	Interactsh   *interactsh.Client
	ProjectFile  *projectfile.ProjectFile
	Browser      *browserEngine.Browser
	// FuzzStatsDB is the fuzzing statistics tracker; may be nil when
	// stats collection is disabled.
	FuzzStatsDB        *stats.Tracker
	Colorizer          aurora.Aurora
	Parser             parser.Parser
	TemporaryDirectory string
}
// newNucleiExecutor builds a nuclei engine from the supplied options,
// loads templates/workflows and returns an executor ready to scan
// individual requests. It returns an error when the workflow loader or
// template loader cannot be created.
func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
	fuzzFreqCache := frequency.New(frequency.DefaultMaxTrackCount, opts.Options.FuzzParamFrequency)
	resumeCfg := types.NewResumeCfg()

	// Create the executor options which will be used throughout the execution
	// stage by the nuclei engine modules.
	executorOpts := protocols.ExecutorOptions{
		Output:              opts.Output,
		Options:             opts.Options,
		Progress:            opts.Progress,
		Catalog:             opts.Catalog,
		IssuesClient:        opts.IssuesClient,
		RateLimiter:         opts.RateLimiter,
		Interactsh:          opts.Interactsh,
		ProjectFile:         opts.ProjectFile,
		Browser:             opts.Browser,
		Colorizer:           opts.Colorizer,
		ResumeCfg:           resumeCfg,
		ExcludeMatchers:     excludematchers.New(opts.Options.ExcludeMatchers),
		InputHelper:         input.NewHelper(),
		TemporaryDirectory:  opts.TemporaryDirectory,
		Parser:              opts.Parser,
		FuzzParamsFrequency: fuzzFreqCache,
		GlobalMatchers:      globalmatchers.New(),
		FuzzStatsDB:         opts.FuzzStatsDB,
	}

	if opts.Options.ShouldUseHostError() {
		maxHostError := opts.Options.MaxHostError
		if maxHostError == 30 {
			// 30 is the flag default; fuzzing naturally produces far more
			// errors per host, so raise the ceiling automatically.
			maxHostError = 100 // auto adjust for fuzzing
		}
		if opts.Options.TemplateThreads > maxHostError {
			gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", opts.Options.TemplateThreads)

			maxHostError = opts.Options.TemplateThreads
		}

		cache := hosterrorscache.New(maxHostError, hosterrorscache.DefaultMaxHostsCount, opts.Options.TrackError)
		cache.SetVerbose(opts.Options.Verbose)
		executorOpts.HostErrorsCache = cache
	}

	executorEngine := core.New(opts.Options)
	executorEngine.SetExecuterOptions(executorOpts)

	workflowLoader, err := parsers.NewLoader(&executorOpts)
	if err != nil {
		return nil, errors.Wrap(err, "Could not create loader options.")
	}
	executorOpts.WorkflowLoader = workflowLoader

	// If using input-file flags, only load http fuzzing based templates.
	loaderConfig := loader.NewConfig(opts.Options, opts.Catalog, executorOpts)
	if !strings.EqualFold(opts.Options.InputFileMode, "list") || opts.Options.DAST || opts.Options.DASTServer {
		// if input type is not list (implicitly enable fuzzing)
		opts.Options.DAST = true
	}
	store, err := loader.New(loaderConfig)
	if err != nil {
		// NOTE: this path previously reused the workflow-loader error
		// message; report the actual failure instead.
		return nil, errors.Wrap(err, "Could not create template loader.")
	}
	store.Load()

	return &nucleiExecutor{
		engine:       executorEngine,
		store:        store,
		options:      opts,
		executorOpts: executorOpts,
	}, nil
}
// proxifyRequest is a request for proxify.
// It mirrors the proxify export format so a raw HTTP request can be
// serialized to YAML and fed to the multi-format HTTP input provider.
type proxifyRequest struct {
	URL     string `json:"url"`
	Request struct {
		Header map[string]string `json:"header"`
		Body   string            `json:"body"`
		Raw    string            `json:"raw"`
	} `json:"request"`
}
// ExecuteScan runs all loaded templates and workflows against a single
// target request. The raw HTTP request is wrapped in the proxify YAML
// format and handed to the multi-format input provider, then executed by
// the engine. Match results are not returned to the caller; they flow
// through the configured output writer.
func (n *nucleiExecutor) ExecuteScan(target PostRequestsHandlerRequest) error {
	finalTemplates := []*templates.Template{}
	finalTemplates = append(finalTemplates, n.store.Templates()...)
	finalTemplates = append(finalTemplates, n.store.Workflows()...)

	if len(finalTemplates) == 0 {
		return errors.New("no templates provided for scan")
	}

	// Wrap the raw request in the proxify export structure; only the raw
	// request text is populated here.
	payload := proxifyRequest{
		URL: target.URL,
		Request: struct {
			Header map[string]string `json:"header"`
			Body   string            `json:"body"`
			Raw    string            `json:"raw"`
		}{
			Raw: target.RawHTTP,
		},
	}

	marshalledYaml, err := yaml.Marshal(payload)
	if err != nil {
		return fmt.Errorf("error marshalling yaml: %s", err)
	}

	inputProvider, err := http.NewHttpInputProvider(&http.HttpMultiFormatOptions{
		InputContents: string(marshalledYaml),
		InputMode:     "yaml",
		Options: formats.InputFormatOptions{
			Variables: make(map[string]interface{}),
		},
	})
	if err != nil {
		return errors.Wrap(err, "could not create input provider")
	}

	// We don't care about the result as its a boolean
	// stating whether we got matches or not
	_ = n.engine.ExecuteScanWithOpts(context.Background(), finalTemplates, inputProvider, true)
	return nil
}
// Close releases the resources held by the executor: the fuzzing stats
// database, the interactsh client and the input helper. All of them are
// optional and only closed when present.
func (n *nucleiExecutor) Close() {
	if db := n.executorOpts.FuzzStatsDB; db != nil {
		db.Close()
	}
	if client := n.options.Interactsh; client != nil {
		_ = client.Close()
	}
	if helper := n.executorOpts.InputHelper; helper != nil {
		_ = helper.Close()
	}
}

View File

@ -0,0 +1,58 @@
package server
import (
"path"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/server/scope"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
)
// consumeTaskRequest processes one queued fuzzing task: it parses the raw
// request, applies scheme/path/scope filters and deduplication, and then
// hands the request to the nuclei executor. Rejected requests are logged
// and dropped.
func (s *DASTServer) consumeTaskRequest(req PostRequestsHandlerRequest) {
	defer s.endpointsInQueue.Add(-1)

	parsedReq, err := types.ParseRawRequestWithURL(req.RawHTTP, req.URL)
	if err != nil {
		gologger.Warning().Msgf("Could not parse raw request: %s\n", err)
		return
	}

	scheme := parsedReq.URL.Scheme
	if scheme != "http" && scheme != "https" {
		gologger.Warning().Msgf("Invalid scheme: %s\n", scheme)
		return
	}

	// Check filenames and don't allow non-interesting files
	// (path.Base yields the final path element, i.e. the filename).
	filename := path.Base(parsedReq.URL.Path)
	if filename != "/" && filename != "" && scope.IsUninterestingPath(filename) {
		gologger.Warning().Msgf("Uninteresting path: %s\n", parsedReq.URL.Path)
		return
	}

	inScope, err := s.scopeManager.Validate(parsedReq.URL.URL)
	if err != nil {
		gologger.Warning().Msgf("Could not validate scope: %s\n", err)
		return
	}
	if !inScope {
		gologger.Warning().Msgf("Request is out of scope: %s %s\n", parsedReq.Request.Method, parsedReq.URL.String())
		return
	}

	if s.deduplicator.isDuplicate(parsedReq) {
		gologger.Warning().Msgf("Duplicate request detected: %s %s\n", parsedReq.Request.Method, parsedReq.URL.String())
		return
	}

	gologger.Verbose().Msgf("Fuzzing request: %s %s\n", parsedReq.Request.Method, parsedReq.URL.String())

	s.endpointsBeingTested.Add(1)
	defer s.endpointsBeingTested.Add(-1)

	// Fuzz the request finally
	if err := s.nucleiExecutor.ExecuteScan(req); err != nil {
		gologger.Warning().Msgf("Could not run nuclei: %s\n", err)
		return
	}
}

View File

@ -0,0 +1,33 @@
package scope
import (
	"path"
	"strings"
)
// IsUninterestingPath reports whether the given URI path points to a
// static or otherwise non-fuzzable resource (images, media, archives,
// styles, source maps, etc.) based on its file extension. The comparison
// is case-insensitive, so paths like "/logo.PNG" are excluded too
// (the original check was case-sensitive and missed uppercase extensions).
func IsUninterestingPath(uriPath string) bool {
	extension := strings.ToLower(path.Ext(uriPath))
	_, excluded := excludedExtensions[extension]
	return excluded
}

// excludedExtensions is the set of file extensions considered not worth
// fuzzing. NOTE: path.Ext returns only the final dot-segment, so the
// multi-part entries (".min.js", ".js.map", ".min.js.map", ...) are in
// practice matched through their final extension (".js", ".map"); they
// are kept here for documentation of intent.
var excludedExtensions = map[string]struct{}{
	".jpg": {}, ".jpeg": {}, ".png": {}, ".gif": {}, ".bmp": {}, ".tiff": {}, ".ico": {},
	".mp4": {}, ".avi": {}, ".mov": {}, ".wmv": {}, ".flv": {}, ".mkv": {}, ".webm": {},
	".mp3": {}, ".wav": {}, ".aac": {}, ".flac": {}, ".ogg": {}, ".wma": {},
	".zip": {}, ".rar": {}, ".7z": {}, ".tar": {}, ".gz": {}, ".bz2": {},
	".exe": {}, ".bin": {}, ".iso": {}, ".img": {},
	".doc": {}, ".docx": {}, ".xls": {}, ".xlsx": {}, ".ppt": {}, ".pptx": {},
	".pdf": {}, ".psd": {}, ".ai": {}, ".eps": {}, ".indd": {},
	".swf": {}, ".fla": {}, ".css": {}, ".scss": {}, ".less": {},
	".js": {}, ".ts": {}, ".jsx": {}, ".tsx": {},
	".xml": {}, ".json": {}, ".yaml": {}, ".yml": {},
	".csv": {}, ".txt": {}, ".log": {}, ".md": {},
	".ttf": {}, ".otf": {}, ".woff": {}, ".woff2": {}, ".eot": {},
	".svg": {}, ".svgz": {}, ".webp": {}, ".tif": {},
	".mpg": {}, ".mpeg": {}, ".weba": {},
	".m4a": {}, ".m4v": {}, ".3gp": {}, ".3g2": {},
	".ogv": {}, ".ogm": {}, ".oga": {}, ".ogx": {},
	".srt": {}, ".min.js": {}, ".min.css": {}, ".js.map": {},
	".min.js.map": {}, ".chunk.css.map": {}, ".hub.js.map": {},
	".hub.css.map": {}, ".map": {},
}

View File

@ -0,0 +1,77 @@
// From Katana
package scope
import (
"fmt"
"net/url"
"regexp"
)
// Manager manages scope for the crawling process.
//
// URLs are checked against the out-of-scope regexes first; a match there
// always rejects. If any in-scope regexes exist, at least one must match.
// With no rules configured at all, every URL is considered in scope.
type Manager struct {
	inScope    []*regexp.Regexp
	outOfScope []*regexp.Regexp
	noScope    bool
}

// NewManager returns a new scope manager for crawling.
//
// Each element of inScope and outOfScope is compiled as a regular
// expression; an invalid pattern aborts construction with a wrapped error.
func NewManager(inScope, outOfScope []string) (*Manager, error) {
	manager := &Manager{}
	for _, regex := range inScope {
		compiled, err := regexp.Compile(regex)
		if err != nil {
			// %w keeps the underlying regexp error inspectable via errors.Is/As
			return nil, fmt.Errorf("could not compile regex %s: %w", regex, err)
		}
		manager.inScope = append(manager.inScope, compiled)
	}
	for _, regex := range outOfScope {
		compiled, err := regexp.Compile(regex)
		if err != nil {
			return nil, fmt.Errorf("could not compile regex %s: %w", regex, err)
		}
		manager.outOfScope = append(manager.outOfScope, compiled)
	}
	// No rules configured: treat every URL as in scope.
	if len(manager.inScope) == 0 && len(manager.outOfScope) == 0 {
		manager.noScope = true
	}
	return manager, nil
}

// Validate returns true if the URL matches scope rules.
func (m *Manager) Validate(URL *url.URL) (bool, error) {
	if m.noScope {
		return true, nil
	}
	return m.validateURL(URL.String())
}

// validateURL applies out-of-scope rules first (any match rejects), then
// requires an in-scope match when in-scope rules are present.
func (m *Manager) validateURL(URL string) (bool, error) {
	for _, item := range m.outOfScope {
		if item.MatchString(URL) {
			return false, nil
		}
	}
	// Only out-of-scope rules configured: anything not rejected passes.
	if len(m.inScope) == 0 {
		return true, nil
	}
	for _, item := range m.inScope {
		if item.MatchString(URL) {
			return true, nil
		}
	}
	return false, nil
}

View File

@ -0,0 +1,26 @@
package scope
import (
"testing"
urlutil "github.com/projectdiscovery/utils/url"
"github.com/stretchr/testify/require"
)
// TestManagerValidate verifies that a manager with one in-scope and one
// out-of-scope rule accepts matching URLs and rejects excluded ones.
func TestManagerValidate(t *testing.T) {
	t.Run("url", func(t *testing.T) {
		manager, err := NewManager([]string{`example`}, []string{`logout\.php`})
		require.NoError(t, err, "could not create scope manager")

		// a URL matching the in-scope rule must validate
		inScopeURL, _ := urlutil.Parse("https://test.com/index.php/example")
		validated, err := manager.Validate(inScopeURL.URL)
		require.NoError(t, err, "could not validate url")
		require.True(t, validated, "could not get correct in-scope validation")

		// a URL hitting the out-of-scope rule must be rejected
		outScopeURL, _ := urlutil.Parse("https://test.com/logout.php")
		validated, err = manager.Validate(outScopeURL.URL)
		require.NoError(t, err, "could not validate url")
		require.False(t, validated, "could not get correct out-scope validation")
	})
}

296
internal/server/server.go Normal file
View File

@ -0,0 +1,296 @@
package server
import (
_ "embed"
"fmt"
"html/template"
"net/http"
"net/url"
"strings"
"sync/atomic"
"time"
"github.com/alitto/pond"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/server/scope"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/utils/env"
)
// DASTServer is a server that performs execution of fuzzing templates
// on user input passed to the API.
type DASTServer struct {
	echo         *echo.Echo       // HTTP server and router
	options      *Options         // server configuration
	tasksPool    *pond.WorkerPool // bounded pool executing queued fuzz tasks
	deduplicator *requestDeduplicator
	scopeManager *scope.Manager // in/out-of-scope URL filtering
	startTime    time.Time      // server start, reported on /stats

	// metrics
	endpointsInQueue     atomic.Int64 // requests accepted but not yet started
	endpointsBeingTested atomic.Int64 // requests currently being fuzzed

	nucleiExecutor *nucleiExecutor // underlying nuclei engine wrapper
}
// Options contains the configuration options for the server.
type Options struct {
	// Address is the address to bind the server to
	Address string
	// Token is the token to use for authentication (optional)
	Token string
	// Templates is the list of templates to use for fuzzing
	Templates []string
	// Verbose is a flag that controls verbose output
	Verbose bool

	// Scope fields for fuzzer
	// InScope and OutScope are regex patterns used to build the scope.Manager.
	InScope  []string
	OutScope []string

	// OutputWriter receives fuzzing results.
	OutputWriter output.Writer

	// NucleiExecutorOptions configures the embedded nuclei executor.
	NucleiExecutorOptions *NucleiExecutorOptions
}
// New creates a new instance of the DAST server.
//
// It wires up the worker pool, request deduplicator, nuclei executor and
// scope manager from the provided options, and logs the API endpoints.
func New(options *Options) (*DASTServer, error) {
	// If the user has specified no templates, use the default ones
	// for DAST only.
	if len(options.Templates) == 0 {
		options.Templates = []string{"dast/"}
	}
	// Disable bulk mode and single threaded execution
	// by auto adjusting in case of default values
	if options.NucleiExecutorOptions.Options.BulkSize == 25 && options.NucleiExecutorOptions.Options.TemplateThreads == 25 {
		options.NucleiExecutorOptions.Options.BulkSize = 1
		options.NucleiExecutorOptions.Options.TemplateThreads = 1
	}
	maxWorkers := env.GetEnvOrDefault[int]("FUZZ_MAX_WORKERS", 1)
	bufferSize := env.GetEnvOrDefault[int]("FUZZ_BUFFER_SIZE", 10000)

	server := &DASTServer{
		options:      options,
		tasksPool:    pond.New(maxWorkers, bufferSize),
		deduplicator: newRequestDeduplicator(),
		startTime:    time.Now(),
	}
	server.setupHandlers(false)

	executor, err := newNucleiExecutor(options.NucleiExecutorOptions)
	if err != nil {
		return nil, err
	}
	server.nucleiExecutor = executor

	scopeManager, err := scope.NewManager(
		options.InScope,
		options.OutScope,
	)
	if err != nil {
		return nil, err
	}
	server.scopeManager = scopeManager

	gologger.Debug().Msgf("Using %d parallel tasks with %d buffer", maxWorkers, bufferSize)

	// Annotate the logged API URL when token auth is enabled. This suffix
	// was previously built into a strings.Builder but never printed.
	var builder strings.Builder
	if options.Token != "" {
		builder.WriteString(" (with token)")
	}
	gologger.Info().Msgf("DAST Server API: %s%s", server.buildURL("/fuzz"), builder.String())
	gologger.Info().Msgf("DAST Server Stats URL: %s", server.buildURL("/stats"))
	return server, nil
}
// NewStatsServer creates a stats-only DAST server exposing the /stats and
// /stats.json endpoints for an externally managed fuzzing run.
func NewStatsServer(fuzzStatsDB *stats.Tracker) (*DASTServer, error) {
	server := &DASTServer{
		// Use empty (non-nil) options so methods that read s.options —
		// setupHandlers, buildURL, getStats — do not dereference nil.
		options: &Options{},
		nucleiExecutor: &nucleiExecutor{
			executorOpts: protocols.ExecutorOptions{
				FuzzStatsDB: fuzzStatsDB,
			},
		},
	}
	server.setupHandlers(true)
	gologger.Info().Msgf("Stats UI URL: %s", server.buildURL("/stats"))
	return server, nil
}
// Close shuts down the nuclei executor and the HTTP listener, then waits
// up to one minute for queued fuzzing tasks to drain.
//
// Every field is nil-guarded so Close is safe on a stats-only server
// (NewStatsServer) where the task pool and executor store are not set up.
func (s *DASTServer) Close() {
	if s.nucleiExecutor != nil {
		s.nucleiExecutor.Close()
	}
	if s.echo != nil {
		_ = s.echo.Close()
	}
	if s.tasksPool != nil {
		s.tasksPool.StopAndWaitFor(1 * time.Minute)
	}
}
// buildURL constructs a printable http URL for the given endpoint on the
// configured listen address, appending the auth token as a query parameter
// when one is configured.
func (s *DASTServer) buildURL(endpoint string) string {
	values := make(url.Values)
	// options may be nil on a partially-initialized server; guard before
	// reading the address or token.
	var address string
	if s.options != nil {
		address = s.options.Address
		if s.options.Token != "" {
			values.Set("token", s.options.Token)
		}
	}
	// Use url.URL struct to safely construct the URL
	u := &url.URL{
		Scheme:   "http",
		Host:     address,
		Path:     endpoint,
		RawQuery: values.Encode(),
	}
	return u.String()
}
// setupHandlers configures the echo HTTP server: panic recovery, optional
// request logging, CORS, optional token auth, and the route table. When
// onlyStats is true, the /fuzz ingestion endpoint is not registered.
func (s *DASTServer) setupHandlers(onlyStats bool) {
	e := echo.New()
	e.Use(middleware.Recover())
	// options may be nil on a partially-initialized server; guard reads.
	if s.options != nil && s.options.Verbose {
		cfg := middleware.DefaultLoggerConfig
		cfg.Skipper = func(c echo.Context) bool {
			// Skip /stats and /stats.json
			return c.Request().URL.Path == "/stats" || c.Request().URL.Path == "/stats.json"
		}
		e.Use(middleware.LoggerWithConfig(cfg))
	}
	e.Use(middleware.CORS())

	if s.options != nil && s.options.Token != "" {
		e.Use(middleware.KeyAuthWithConfig(middleware.KeyAuthConfig{
			KeyLookup: "query:token",
			Validator: func(key string, c echo.Context) (bool, error) {
				return key == s.options.Token, nil
			},
		}))
	}
	e.HideBanner = true

	// POST /fuzz - Queue a request for fuzzing
	if !onlyStats {
		e.POST("/fuzz", s.handleRequest)
	}
	e.GET("/stats", s.handleStats)
	e.GET("/stats.json", s.handleStatsJSON)

	s.echo = e
}
// Start runs the API server on the configured address until it is closed.
// A graceful shutdown (http.ErrServerClosed) is not reported as an error.
func (s *DASTServer) Start() error {
	err := s.echo.Start(s.options.Address)
	if err == nil || err == http.ErrServerClosed {
		return nil
	}
	return err
}
// PostRequestsHandlerRequest is the request body for the /fuzz POST handler.
type PostRequestsHandlerRequest struct {
	// RawHTTP is the raw HTTP request to fuzz.
	RawHTTP string `json:"raw_http"`
	// URL is the target URL the raw request applies to.
	URL string `json:"url"`
}
// handleRequest accepts a raw HTTP request for fuzzing, validates it and
// queues it on the worker pool. It responds 200 immediately; fuzzing
// happens asynchronously.
func (s *DASTServer) handleRequest(c echo.Context) error {
	var req PostRequestsHandlerRequest
	if err := c.Bind(&req); err != nil {
		// Log through gologger (not stdout) and return a structured 400
		// instead of bubbling the raw bind error up the middleware chain.
		gologger.Warning().Msgf("could not bind fuzz request: %s", err)
		return c.JSON(http.StatusBadRequest, map[string]string{"error": fmt.Sprintf("could not parse request: %s", err)})
	}

	// Validate the request
	if req.RawHTTP == "" || req.URL == "" {
		gologger.Warning().Msgf("fuzz request is missing required fields")
		return c.JSON(http.StatusBadRequest, map[string]string{"error": "missing required fields"})
	}

	s.endpointsInQueue.Add(1)
	s.tasksPool.Submit(func() {
		s.consumeTaskRequest(req)
	})
	return c.NoContent(http.StatusOK)
}
// StatsResponse is the aggregate payload served by the /stats (HTML) and
// /stats.json endpoints.
type StatsResponse struct {
	DASTServerInfo            DASTServerInfo     `json:"dast_server_info"`
	DASTScanStatistics        DASTScanStatistics `json:"dast_scan_statistics"`
	DASTScanStatusStatistics  map[string]int64   `json:"dast_scan_status_statistics"`
	DASTScanSeverityBreakdown map[string]int64   `json:"dast_scan_severity_breakdown"`
	DASTScanErrorStatistics   map[string]int64   `json:"dast_scan_error_statistics"`
	DASTScanStartTime         time.Time          `json:"dast_scan_start_time"`
}

// DASTServerInfo describes the running server: version info, API endpoint
// and whether token auth is enabled.
type DASTServerInfo struct {
	NucleiVersion         string `json:"nuclei_version"`
	NucleiTemplateVersion string `json:"nuclei_template_version"`
	NucleiDastServerAPI   string `json:"nuclei_dast_server_api"`
	// NOTE(review): the json tag misspells "server" as "sever"; kept as-is
	// because changing the key would break existing API consumers.
	ServerAuthEnabled bool `json:"sever_auth_enabled"`
}

// DASTScanStatistics holds counters for the ongoing scan: queue depth and
// cumulative totals sourced from the fuzz stats tracker.
type DASTScanStatistics struct {
	EndpointsInQueue      int64 `json:"endpoints_in_queue"`
	EndpointsBeingTested  int64 `json:"endpoints_being_tested"`
	TotalTemplatesLoaded  int64 `json:"total_dast_templates_loaded"`
	TotalTemplatesTested  int64 `json:"total_dast_templates_tested"`
	TotalMatchedResults   int64 `json:"total_matched_results"`
	TotalComponentsTested int64 `json:"total_components_tested"`
	TotalEndpointsTested  int64 `json:"total_endpoints_tested"`
	TotalFuzzedRequests   int64 `json:"total_fuzzed_requests"`
	TotalErroredRequests  int64 `json:"total_errored_requests"`
}
// getStats assembles the live statistics payload for the stats endpoints.
//
// Counters backed by the fuzz stats database are only populated when a
// tracker is attached, and template counts are skipped when no template
// store is loaded — both are the case in stats-only mode (NewStatsServer),
// where the previous code dereferenced a nil store.
func (s *DASTServer) getStats() (StatsResponse, error) {
	cfg := config.DefaultConfig

	var authEnabled bool
	if s.options != nil {
		authEnabled = s.options.Token != ""
	}
	resp := StatsResponse{
		DASTServerInfo: DASTServerInfo{
			NucleiVersion:         config.Version,
			NucleiTemplateVersion: cfg.TemplateVersion,
			NucleiDastServerAPI:   s.buildURL("/fuzz"),
			ServerAuthEnabled:     authEnabled,
		},
		DASTScanStartTime: s.startTime,
		DASTScanStatistics: DASTScanStatistics{
			EndpointsInQueue:     s.endpointsInQueue.Load(),
			EndpointsBeingTested: s.endpointsBeingTested.Load(),
		},
	}
	// The template store is only present on a fully initialized executor;
	// stats-only servers leave it nil.
	if s.nucleiExecutor != nil && s.nucleiExecutor.store != nil {
		resp.DASTScanStatistics.TotalTemplatesLoaded = int64(len(s.nucleiExecutor.store.Templates()))
	}
	if s.nucleiExecutor != nil && s.nucleiExecutor.executorOpts.FuzzStatsDB != nil {
		fuzzStats := s.nucleiExecutor.executorOpts.FuzzStatsDB.GetStats()
		resp.DASTScanSeverityBreakdown = fuzzStats.SeverityCounts
		resp.DASTScanStatusStatistics = fuzzStats.StatusCodes
		resp.DASTScanStatistics.TotalMatchedResults = fuzzStats.TotalMatchedResults
		resp.DASTScanStatistics.TotalComponentsTested = fuzzStats.TotalComponentsTested
		resp.DASTScanStatistics.TotalEndpointsTested = fuzzStats.TotalEndpointsTested
		resp.DASTScanStatistics.TotalFuzzedRequests = fuzzStats.TotalFuzzedRequests
		resp.DASTScanStatistics.TotalTemplatesTested = fuzzStats.TotalTemplatesTested
		resp.DASTScanStatistics.TotalErroredRequests = fuzzStats.TotalErroredRequests
		resp.DASTScanErrorStatistics = fuzzStats.ErrorGroupedStats
	}
	return resp, nil
}
// indexTemplate holds the embedded HTML template used to render the
// human-readable /stats dashboard page.
//
//go:embed templates/index.html
var indexTemplate string
func (s *DASTServer) handleStats(c echo.Context) error {
stats, err := s.getStats()
if err != nil {
return c.JSON(500, map[string]string{"error": err.Error()})
}
tmpl, err := template.New("index").Parse(indexTemplate)
if err != nil {
return c.JSON(500, map[string]string{"error": err.Error()})
}
return tmpl.Execute(c.Response().Writer, stats)
}
// handleStatsJSON serves the scan statistics as pretty-printed JSON.
func (s *DASTServer) handleStatsJSON(c echo.Context) error {
	payload, err := s.getStats()
	if err != nil {
		return c.JSON(500, map[string]string{"error": err.Error()})
	}
	return c.JSONPretty(200, payload, " ")
}

View File

@ -0,0 +1,342 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>DAST Scan Report</title>
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/bootstrap-icons/1.11.3/font/bootstrap-icons.css" integrity="sha512-ywmPbuxGS4cJ7GxwCX+bCJweeext047ZYU2HP52WWKbpJnF4/Zzfr2Bo19J4CWPXZmleVusQ9d//RB5bq0RP7w==" crossorigin="anonymous" referrerpolicy="no-referrer" />
<style>
@import url('https://fonts.googleapis.com/css2?family=Geist+Mono:wght@400;500&display=swap');
:root {
--bg-color: #0a0a0a;
--text-color: #33ff00;
--header-color: #00cc00;
--border-color: #1a1a1a;
--box-bg: #0f0f0f;
--critical: #ff0000;
--high: #ff4400;
--medium: #ffcc00;
--low: #00ff00;
--info: #00ccff;
--muted: #999999;
}
body {
font-family: 'Geist Mono', 'Courier New', monospace;
background: var(--bg-color);
color: var(--text-color);
line-height: 1.5;
padding: 20px;
max-width: 1200px;
margin: 0 auto;
position: relative;
}
.report-header {
border-bottom: 1px solid var(--text-color);
margin-bottom: 20px;
padding: 10px 0;
}
.ascii-header {
color: var(--header-color);
white-space: pre;
font-size: 16px;
margin-bottom: 15px;
line-height: 1.2;
}
.timestamp {
color: var(--muted);
margin-bottom: 20px;
}
.section {
margin: 25px 0;
border: 1px solid var(--border-color);
padding: 15px;
background: var(--box-bg);
}
.section-header {
color: var(--header-color);
margin-bottom: 15px;
padding-bottom: 5px;
border-bottom: 1px solid var(--border-color);
}
.terminal-line {
font-family: 'Courier New', monospace;
margin: 5px 0;
}
.key {
color: var(--muted);
display: inline-block;
width: 200px;
}
.value {
color: var(--text-color);
}
.grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
gap: 15px;
margin-top: 15px;
}
.stat-box {
background: var(--box-bg);
padding: 10px;
border-left: 2px solid var(--text-color);
}
.severity-critical { border-left-color: var(--critical); }
.severity-high { border-left-color: var(--high); }
.severity-medium { border-left-color: var(--medium); }
.severity-low { border-left-color: var(--low); }
.severity-info { border-left-color: var(--info); }
.progress-bar {
width: 100%;
height: 2px;
background: var(--border-color);
margin-top: 5px;
}
.progress-fill {
height: 100%;
background: var(--text-color);
transition: width 0.3s ease;
}
@media (max-width: 768px) {
.grid {
grid-template-columns: 1fr;
}
}
/* Add these new CSS variables for light theme */
[data-theme="light"] {
--bg-color: #ffffff;
--text-color: #2a2a2a;
--header-color: #087f5b;
--border-color: #e0e0e0;
--box-bg: #f8f9fa;
--muted: #6c757d;
--critical: #dc3545;
--high: #fd7e14;
--medium: #ffc107;
--low: #198754;
--info: #0dcaf0;
}
/* Add styles for the controls container */
.controls {
position: absolute;
top: 20px;
right: 20px;
display: flex;
gap: 15px;
align-items: center;
z-index: 100;
}
.theme-toggle, .json-button {
background: var(--box-bg);
border: 1px solid var(--border-color);
color: var(--text-color);
padding: 8px 15px;
cursor: pointer;
font-family: 'Geist Mono', monospace;
font-size: 14px;
transition: all 0.3s ease;
display: flex;
align-items: center;
gap: 8px;
border-radius: 4px;
}
.theme-toggle:hover, .json-button:hover {
border-color: var(--text-color);
background: var(--border-color);
}
/* Add styles for icons */
.theme-icon {
font-size: 1.1em;
}
/* Update stat box styles for better light theme contrast */
[data-theme="light"] .stat-box {
box-shadow: 0 1px 3px rgba(0,0,0,0.1);
}
[data-theme="light"] .section {
box-shadow: 0 1px 3px rgba(0,0,0,0.1);
}
.error-table {
width: 100%;
margin-top: 10px;
}
.error-row {
display: flex;
justify-content: space-between;
align-items: flex-start;
padding: 8px 0;
border-bottom: 1px solid var(--border-color);
}
.error-row:last-child {
border-bottom: none;
}
.error-message {
flex: 1;
padding-right: 20px;
word-break: break-word;
color: var(--muted);
}
.error-count {
white-space: nowrap;
color: var(--muted);
margin-right: 20px;
}
</style>
</head>
<body>
<div class="controls">
<button class="theme-toggle" onclick="toggleTheme()">
<i class="bi bi-moon-fill theme-icon" id="theme-icon"></i>
</button>
<button class="json-button" onclick="toggleJSON()">
<i class="bi bi-code-slash"></i>
JSON
</button>
</div>
<div class="report-header">
<div class="ascii-header">
__ _
____ __ _______/ /__ (_)
/ __ \/ / / / ___/ / _ \/ /
/ / / / /_/ / /__/ / __/ /
/_/ /_/\__,_/\___/_/\___/_/ {{.DASTServerInfo.NucleiVersion}}
projectdiscovery.io
Dynamic Application Security Testing (DAST) API Server
</div>
<div class="timestamp">[+] Server started at: <span id="datetime">{{.DASTScanStartTime}}</span></div>
</div>
<div class="section">
<div class="section-header">[*] Server Configuration</div>
<div class="terminal-line"><span class="key">Nuclei Version</span><span class="value">{{.DASTServerInfo.NucleiVersion}}</span></div>
<div class="terminal-line"><span class="key">Template Version</span><span class="value">{{.DASTServerInfo.NucleiTemplateVersion}}</span></div>
<div class="terminal-line"><span class="key">DAST Server API</span><span class="value">{{.DASTServerInfo.NucleiDastServerAPI}}</span></div>
<div class="terminal-line"><span class="key">Auth Status</span><span class="value">{{if .DASTServerInfo.ServerAuthEnabled}}ENABLED{{else}}DISABLED{{end}}</span></div>
</div>
<div class="section">
<div class="section-header">[+] Scan Progress</div>
<div class="terminal-line"><span class="key">Total Results</span><span class="value">{{.DASTScanStatistics.TotalMatchedResults}} findings</span></div>
<div class="terminal-line"><span class="key">Endpoints In Queue</span><span class="value">{{.DASTScanStatistics.EndpointsInQueue}}</span></div>
<div class="terminal-line"><span class="key">Currently Testing</span><span class="value">{{.DASTScanStatistics.EndpointsBeingTested}}</span></div>
<div class="terminal-line"><span class="key">Components Tested</span><span class="value">{{.DASTScanStatistics.TotalComponentsTested}}</span></div>
<div class="terminal-line"><span class="key">Endpoints Tested</span><span class="value">{{.DASTScanStatistics.TotalEndpointsTested}}</span></div>
<div class="terminal-line"><span class="key">Templates Loaded</span><span class="value">{{.DASTScanStatistics.TotalTemplatesLoaded}}</span></div>
<div class="terminal-line"><span class="key">Templates Tested</span><span class="value">{{.DASTScanStatistics.TotalTemplatesTested}}</span></div>
<div class="terminal-line"><span class="key">Total Requests</span><span class="value">{{.DASTScanStatistics.TotalFuzzedRequests}}</span></div>
<div class="terminal-line"><span class="key">Total Errors</span><span class="value">{{.DASTScanStatistics.TotalErroredRequests}}</span></div>
</div>
<div class="section">
<div class="section-header">[!] Security Findings</div>
<div class="grid">
<div class="stat-box severity-critical">
<div class="key">Critical</div>
<div class="value">{{index .DASTScanSeverityBreakdown "critical"}} findings</div>
</div>
<div class="stat-box severity-high">
<div class="key">High</div>
<div class="value">{{index .DASTScanSeverityBreakdown "high"}} findings</div>
</div>
<div class="stat-box severity-medium">
<div class="key">Medium</div>
<div class="value">{{index .DASTScanSeverityBreakdown "medium"}} findings</div>
</div>
<div class="stat-box severity-low">
<div class="key">Low</div>
<div class="value">{{index .DASTScanSeverityBreakdown "low"}} findings</div>
</div>
<div class="stat-box severity-info">
<div class="key">Info</div>
<div class="value">{{index .DASTScanSeverityBreakdown "info"}} findings</div>
</div>
</div>
</div>
<div class="section">
<div class="section-header">[-] Status Codes Breakdown</div>
<!-- Status Codes Breakdown -->
<div class="terminal-line"><span class="key">Response Codes</span></div>
{{range $status, $count := .DASTScanStatusStatistics}}
<div class="terminal-line"><span class="key">&nbsp;&nbsp;{{$status}}</span><span class="value">{{$count}} times</span></div>
{{end}}
</div>
<div class="section">
<div class="section-header">[-] Error Breakdown</div>
<div class="error-table">
{{range $error, $count := .DASTScanErrorStatistics}}
<div class="error-row">
<div class="error-message">{{$error}}</div>
<div class="error-count">{{$count}} times</div>
</div>
{{end}}
</div>
</div>
<script>
// Theme toggle functionality
// Switches between the dark (default) and light theme by toggling the
// data-theme attribute on <body>, persists the choice in localStorage,
// and swaps the moon/sun icon on the toggle button.
function toggleTheme() {
    const body = document.body;
    const themeIcon = document.getElementById('theme-icon');
    const currentTheme = body.getAttribute('data-theme');

    if (currentTheme === 'light') {
        // Light -> dark: removing the attribute restores the dark defaults.
        body.removeAttribute('data-theme');
        localStorage.setItem('theme', 'dark');
        themeIcon.className = 'bi bi-moon-fill theme-icon';
    } else {
        body.setAttribute('data-theme', 'light');
        localStorage.setItem('theme', 'light');
        themeIcon.className = 'bi bi-sun-fill theme-icon';
    }
}

// Load saved theme preference
// Re-applies a previously persisted light-theme choice on page load;
// with no saved preference, the dark defaults stay in effect.
document.addEventListener('DOMContentLoaded', () => {
    const savedTheme = localStorage.getItem('theme');
    const themeIcon = document.getElementById('theme-icon');

    if (savedTheme === 'light') {
        document.body.setAttribute('data-theme', 'light');
        themeIcon.className = 'bi bi-sun-fill theme-icon';
    }
});

// Navigates to the JSON variant of the current page by appending ".json"
// to the path (e.g. /stats -> /stats.json).
function toggleJSON() {
    const url = new URL(window.location.href);
    url.pathname = url.pathname + '.json';
    window.location.href = url.toString();
}
</script>
</body>
</html>

View File

@ -2,6 +2,7 @@ package authx
import (
"net/http"
"slices"
"github.com/projectdiscovery/retryablehttp-go"
)
@ -33,11 +34,27 @@ func (s *CookiesAuthStrategy) Apply(req *http.Request) {
// ApplyOnRR applies the cookies auth strategy to the retryable request
func (s *CookiesAuthStrategy) ApplyOnRR(req *retryablehttp.Request) {
existingCookies := req.Cookies()
for _, newCookie := range s.Data.Cookies {
for i, existing := range existingCookies {
if existing.Name == newCookie.Key {
existingCookies = slices.Delete(existingCookies, i, i+1)
break
}
}
}
// Clear and reset remaining cookies
req.Header.Del("Cookie")
for _, cookie := range existingCookies {
req.AddCookie(cookie)
}
// Add new cookies
for _, cookie := range s.Data.Cookies {
c := &http.Cookie{
req.AddCookie(&http.Cookie{
Name: cookie.Key,
Value: cookie.Value,
}
req.AddCookie(c)
})
}
}

View File

@ -9,6 +9,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/replacer"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
errorutil "github.com/projectdiscovery/utils/errors"
sliceutil "github.com/projectdiscovery/utils/slice"
)
type LazyFetchSecret func(d *Dynamic) error
@ -22,7 +23,8 @@ var (
// ex: username and password are dynamic secrets, the actual secret is the token obtained
// after authenticating with the username and password
type Dynamic struct {
Secret `yaml:",inline"` // this is a static secret that will be generated after the dynamic secret is resolved
*Secret `yaml:",inline"` // this is a static secret that will be generated after the dynamic secret is resolved
Secrets []*Secret `yaml:"secrets"`
TemplatePath string `json:"template" yaml:"template"`
Variables []KV `json:"variables" yaml:"variables"`
Input string `json:"input" yaml:"input"` // (optional) target for the dynamic secret
@ -33,6 +35,22 @@ type Dynamic struct {
error error `json:"-" yaml:"-"` // error if any
}
// GetDomainAndDomainRegex collects the literal domains and domain regexes
// from all secrets (the list plus the optional inline secret) and returns
// both deduplicated.
func (d *Dynamic) GetDomainAndDomainRegex() ([]string, []string) {
	var domains, domainRegex []string

	collect := func(s *Secret) {
		domains = append(domains, s.Domains...)
		domainRegex = append(domainRegex, s.DomainsRegex...)
	}
	for _, secret := range d.Secrets {
		collect(secret)
	}
	if d.Secret != nil {
		collect(d.Secret)
	}
	return sliceutil.Dedupe(domains), sliceutil.Dedupe(domainRegex)
}
func (d *Dynamic) UnmarshalJSON(data []byte) error {
if err := json.Unmarshal(data, &d); err != nil {
return err
@ -41,7 +59,7 @@ func (d *Dynamic) UnmarshalJSON(data []byte) error {
if err := json.Unmarshal(data, &s); err != nil {
return err
}
d.Secret = s
d.Secret = &s
return nil
}
@ -54,9 +72,18 @@ func (d *Dynamic) Validate() error {
if len(d.Variables) == 0 {
return errorutil.New("variables are required for dynamic secret")
}
d.skipCookieParse = true // skip cookie parsing in dynamic secrets during validation
if err := d.Secret.Validate(); err != nil {
return err
if d.Secret != nil {
d.Secret.skipCookieParse = true // skip cookie parsing in dynamic secrets during validation
if err := d.Secret.Validate(); err != nil {
return err
}
}
for _, secret := range d.Secrets {
secret.skipCookieParse = true
if err := secret.Validate(); err != nil {
return err
}
}
return nil
}
@ -74,76 +101,98 @@ func (d *Dynamic) SetLazyFetchCallback(callback LazyFetchSecret) {
return fmt.Errorf("no extracted values found for dynamic secret")
}
// evaluate headers
for i, header := range d.Headers {
if strings.Contains(header.Value, "{{") {
header.Value = replacer.Replace(header.Value, d.Extracted)
if d.Secret != nil {
if err := d.applyValuesToSecret(d.Secret); err != nil {
return err
}
if strings.Contains(header.Key, "{{") {
header.Key = replacer.Replace(header.Key, d.Extracted)
}
d.Headers[i] = header
}
// evaluate cookies
for i, cookie := range d.Cookies {
if strings.Contains(cookie.Value, "{{") {
cookie.Value = replacer.Replace(cookie.Value, d.Extracted)
}
if strings.Contains(cookie.Key, "{{") {
cookie.Key = replacer.Replace(cookie.Key, d.Extracted)
}
if strings.Contains(cookie.Raw, "{{") {
cookie.Raw = replacer.Replace(cookie.Raw, d.Extracted)
}
d.Cookies[i] = cookie
}
// evaluate query params
for i, query := range d.Params {
if strings.Contains(query.Value, "{{") {
query.Value = replacer.Replace(query.Value, d.Extracted)
}
if strings.Contains(query.Key, "{{") {
query.Key = replacer.Replace(query.Key, d.Extracted)
}
d.Params[i] = query
}
// check username, password and token
if strings.Contains(d.Username, "{{") {
d.Username = replacer.Replace(d.Username, d.Extracted)
}
if strings.Contains(d.Password, "{{") {
d.Password = replacer.Replace(d.Password, d.Extracted)
}
if strings.Contains(d.Token, "{{") {
d.Token = replacer.Replace(d.Token, d.Extracted)
}
// now attempt to parse the cookies
d.skipCookieParse = false
for i, cookie := range d.Cookies {
if cookie.Raw != "" {
if err := cookie.Parse(); err != nil {
return fmt.Errorf("[%s] invalid raw cookie in cookiesAuth: %s", d.TemplatePath, err)
}
d.Cookies[i] = cookie
for _, secret := range d.Secrets {
if err := d.applyValuesToSecret(secret); err != nil {
return err
}
}
return nil
}
}
// GetStrategy returns the auth strategy for the dynamic secret
func (d *Dynamic) GetStrategy() AuthStrategy {
// applyValuesToSecret substitutes the extracted dynamic values into every
// templated field of the secret (headers, cookies, params, credentials)
// and then parses any raw cookie strings.
func (d *Dynamic) applyValuesToSecret(secret *Secret) error {
	// eval replaces {{placeholders}} with extracted values; strings
	// without a marker are returned untouched.
	eval := func(value string) string {
		if strings.Contains(value, "{{") {
			return replacer.Replace(value, d.Extracted)
		}
		return value
	}

	// evaluate headers
	for i := range secret.Headers {
		secret.Headers[i].Value = eval(secret.Headers[i].Value)
		secret.Headers[i].Key = eval(secret.Headers[i].Key)
	}
	// evaluate cookies
	for i := range secret.Cookies {
		secret.Cookies[i].Value = eval(secret.Cookies[i].Value)
		secret.Cookies[i].Key = eval(secret.Cookies[i].Key)
		secret.Cookies[i].Raw = eval(secret.Cookies[i].Raw)
	}
	// evaluate query params
	for i := range secret.Params {
		secret.Params[i].Value = eval(secret.Params[i].Value)
		secret.Params[i].Key = eval(secret.Params[i].Key)
	}
	// check username, password and token
	secret.Username = eval(secret.Username)
	secret.Password = eval(secret.Password)
	secret.Token = eval(secret.Token)

	// now attempt to parse the cookies
	secret.skipCookieParse = false
	for i, cookie := range secret.Cookies {
		if cookie.Raw != "" {
			if err := cookie.Parse(); err != nil {
				return fmt.Errorf("[%s] invalid raw cookie in cookiesAuth: %s", d.TemplatePath, err)
			}
			secret.Cookies[i] = cookie
		}
	}
	return nil
}
// GetStrategies returns the auth strategies for the dynamic secret,
// lazily fetching the secret values on first use. It returns nil when the
// fetch failed or when no secrets are configured.
func (d *Dynamic) GetStrategies() []AuthStrategy {
	if !d.fetched {
		// best-effort: failures are surfaced via d.error below
		_ = d.Fetch(true)
	}
	if d.error != nil {
		return nil
	}
	// nil slice (not empty) is preserved for no-secret configurations
	var strategies []AuthStrategy
	if d.Secret != nil {
		strategies = append(strategies, d.Secret.GetStrategy())
	}
	for _, secret := range d.Secrets {
		strategies = append(strategies, secret.GetStrategy())
	}
	return strategies
}
// Fetch fetches the dynamic secret

View File

@ -24,16 +24,22 @@ type DynamicAuthStrategy struct {
// Apply applies the strategy to the request
func (d *DynamicAuthStrategy) Apply(req *http.Request) {
strategy := d.Dynamic.GetStrategy()
if strategy != nil {
strategy.Apply(req)
strategies := d.Dynamic.GetStrategies()
if strategies == nil {
return
}
for _, s := range strategies {
if s == nil {
continue
}
s.Apply(req)
}
}
// ApplyOnRR applies the strategy to the retryable request
func (d *DynamicAuthStrategy) ApplyOnRR(req *retryablehttp.Request) {
strategy := d.Dynamic.GetStrategy()
if strategy != nil {
strategy.ApplyOnRR(req)
strategy := d.Dynamic.GetStrategies()
for _, s := range strategy {
s.ApplyOnRR(req)
}
}

View File

@ -85,8 +85,10 @@ func (f *FileAuthProvider) init() {
}
}
for _, dynamic := range f.store.Dynamic {
if len(dynamic.DomainsRegex) > 0 {
for _, domain := range dynamic.DomainsRegex {
domain, domainsRegex := dynamic.GetDomainAndDomainRegex()
if len(domainsRegex) > 0 {
for _, domain := range domainsRegex {
if f.compiled == nil {
f.compiled = make(map[*regexp.Regexp][]authx.AuthStrategy)
}
@ -101,7 +103,7 @@ func (f *FileAuthProvider) init() {
}
}
}
for _, domain := range dynamic.Domains {
for _, domain := range domain {
if f.domains == nil {
f.domains = make(map[string][]authx.AuthStrategy)
}

View File

@ -542,7 +542,8 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
// Skip DAST filter when loading auth templates
if store.ID() != AuthStoreId && store.config.ExecutorOptions.Options.DAST {
// check if the template is a DAST template
if parsed.IsFuzzing() {
// also allow global matchers template to be loaded
if parsed.IsFuzzing() || parsed.Options.GlobalMatchers != nil && parsed.Options.GlobalMatchers.HasMatchers() {
loadTemplate(parsed)
}
} else if len(parsed.RequestsHeadless) > 0 && !store.config.ExecutorOptions.Options.Headless {

View File

@ -81,18 +81,11 @@ func ApplyPayloadTransformations(value string) string {
}
const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
const (
letterIdxBits = 6 // 6 bits to represent a letter index
letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits
)
func randStringBytesMask(n int) string {
b := make([]byte, n)
for i := 0; i < n; {
if idx := int(random.Int63() & letterIdxMask); idx < len(letterBytes) {
b[i] = letterBytes[idx]
i++
}
for i := range b {
b[i] = letterBytes[random.Intn(len(letterBytes))]
}
return string(b)
}

View File

@ -15,15 +15,17 @@ import (
)
// Analyzer is a time delay analyzer for the fuzzer
type Analyzer struct{}
type Analyzer struct {
}
const (
DefaultSleepDuration = int(5)
DefaultSleepDuration = int(7)
DefaultRequestsLimit = int(4)
DefaultTimeCorrelationErrorRange = float64(0.15)
DefaultTimeSlopeErrorRange = float64(0.30)
DefaultLowSleepTimeSeconds = float64(3)
defaultSleepTimeDuration = 5 * time.Second
defaultSleepTimeDuration = 7 * time.Second
)
var _ analyzers.Analyzer = &Analyzer{}
@ -129,11 +131,19 @@ func (a *Analyzer) Analyze(options *analyzers.Options) (bool, string, error) {
}
return timeTaken, nil
}
// Check the baseline delay of the request by doing two requests
baselineDelay, err := getBaselineDelay(reqSender)
if err != nil {
return false, "", err
}
matched, matchReason, err := checkTimingDependency(
requestsLimit,
sleepDuration,
timeCorrelationErrorRange,
timeSlopeErrorRange,
baselineDelay,
reqSender,
)
if err != nil {
@ -145,16 +155,39 @@ func (a *Analyzer) Analyze(options *analyzers.Options) (bool, string, error) {
return false, "", nil
}
// getBaselineDelay measures the server's baseline response time by issuing
// a few zero-sleep requests and averaging the observed delays.
func getBaselineDelay(reqSender timeDelayRequestSender) (float64, error) {
	const samples = 3

	// Use zero or a very small delay to measure baseline
	var total float64
	for i := 0; i < samples; i++ {
		delay, err := reqSender(0)
		if err != nil {
			return 0, errors.Wrap(err, "could not get baseline delay")
		}
		total += delay
	}
	return total / samples, nil
}
// doHTTPRequestWithTimeTracing does a http request with time tracing
func doHTTPRequestWithTimeTracing(req *retryablehttp.Request, httpclient *retryablehttp.Client) (float64, error) {
var ttfb time.Duration
var start time.Time
var serverTime time.Duration
var wroteRequest time.Time
trace := &httptrace.ClientTrace{
GotFirstResponseByte: func() { ttfb = time.Since(start) },
WroteHeaders: func() {
wroteRequest = time.Now()
},
GotFirstResponseByte: func() {
serverTime = time.Since(wroteRequest)
},
}
req = req.WithContext(httptrace.WithClientTrace(req.Context(), trace))
start = time.Now()
resp, err := httpclient.Do(req)
if err != nil {
return 0, errors.Wrap(err, "could not do request")
@ -164,5 +197,5 @@ func doHTTPRequestWithTimeTracing(req *retryablehttp.Request, httpclient *retrya
if err != nil {
return 0, errors.Wrap(err, "could not read response body")
}
return ttfb.Seconds(), nil
return serverTime.Seconds(), nil
}

View File

@ -6,6 +6,10 @@
// Advantages of this approach are many compared to the old approach of
// heuristics of sleep time.
//
// NOTE: This algorithm has been heavily modified after being introduced
// in nuclei. Now the logic has several bug fixes and improvements and
// has been evolving to be more stable.
//
// As we are building a statistical model, we can predict if the delay
// is random or not very quickly. Also, the payloads are alternated to send
// a very high sleep and a very low sleep. This way the comparison is
@ -24,10 +28,18 @@ import (
"errors"
"fmt"
"math"
"strings"
)
type timeDelayRequestSender func(delay int) (float64, error)
// requestsSentMetadata is used to store the delay requested
// and delay received for each request
type requestsSentMetadata struct {
delay int
delayReceived float64
}
// checkTimingDependency checks the timing dependency for a given request
//
// It alternates and sends first a high request, then a low request. Each time
@ -37,6 +49,7 @@ func checkTimingDependency(
highSleepTimeSeconds int,
correlationErrorRange float64,
slopeErrorRange float64,
baselineDelay float64,
requestSender timeDelayRequestSender,
) (bool, string, error) {
if requestsLimit < 2 {
@ -46,38 +59,60 @@ func checkTimingDependency(
regression := newSimpleLinearRegression()
requestsLeft := requestsLimit
var requestsSent []requestsSentMetadata
for {
if requestsLeft <= 0 {
break
}
isCorrelationPossible, err := sendRequestAndTestConfidence(regression, highSleepTimeSeconds, requestSender)
isCorrelationPossible, delayRecieved, err := sendRequestAndTestConfidence(regression, highSleepTimeSeconds, requestSender, baselineDelay)
if err != nil {
return false, "", err
}
if !isCorrelationPossible {
return false, "", nil
}
// Check the delay is greater than baseline by seconds requested
if delayRecieved < baselineDelay+float64(highSleepTimeSeconds)*0.8 {
return false, "", nil
}
requestsSent = append(requestsSent, requestsSentMetadata{
delay: highSleepTimeSeconds,
delayReceived: delayRecieved,
})
isCorrelationPossible, err = sendRequestAndTestConfidence(regression, 1, requestSender)
isCorrelationPossibleSecond, delayRecievedSecond, err := sendRequestAndTestConfidence(regression, int(DefaultLowSleepTimeSeconds), requestSender, baselineDelay)
if err != nil {
return false, "", err
}
if !isCorrelationPossible {
if !isCorrelationPossibleSecond {
return false, "", nil
}
if delayRecievedSecond < baselineDelay+float64(DefaultLowSleepTimeSeconds)*0.8 {
return false, "", nil
}
requestsLeft = requestsLeft - 2
requestsSent = append(requestsSent, requestsSentMetadata{
delay: int(DefaultLowSleepTimeSeconds),
delayReceived: delayRecievedSecond,
})
}
result := regression.IsWithinConfidence(correlationErrorRange, 1.0, slopeErrorRange)
if result {
resultReason := fmt.Sprintf(
"[time_delay] made %d requests successfully, with a regression slope of %.2f and correlation %.2f",
var resultReason strings.Builder
resultReason.WriteString(fmt.Sprintf(
"[time_delay] made %d requests (baseline: %.2fs) successfully, with a regression slope of %.2f and correlation %.2f",
requestsLimit,
baselineDelay,
regression.slope,
regression.correlation,
)
return result, resultReason, nil
))
for _, request := range requestsSent {
resultReason.WriteString(fmt.Sprintf("\n - delay: %ds, delayReceived: %fs", request.delay, request.delayReceived))
}
return result, resultReason.String(), nil
}
return result, "", nil
}
@ -87,35 +122,33 @@ func sendRequestAndTestConfidence(
regression *simpleLinearRegression,
delay int,
requestSender timeDelayRequestSender,
) (bool, error) {
baselineDelay float64,
) (bool, float64, error) {
delayReceived, err := requestSender(delay)
if err != nil {
return false, err
return false, 0, err
}
if delayReceived < float64(delay) {
return false, nil
return false, 0, nil
}
regression.AddPoint(float64(delay), delayReceived)
regression.AddPoint(float64(delay), delayReceived-baselineDelay)
if !regression.IsWithinConfidence(0.3, 1.0, 0.5) {
return false, nil
return false, delayReceived, nil
}
return true, nil
return true, delayReceived, nil
}
// simpleLinearRegression is a simple linear regression model that can be updated at runtime.
// It is based on the same algorithm in ZAP for doing timing checks.
//
// NOTE(review): the original declaration was garbled in this view (old
// Welford-style fields interleaved with the new running-sum fields, closing
// brace cut off); the correlation field is inferred from its use in the
// methods below — confirm upstream.
type simpleLinearRegression struct {
	count float64

	// Running sums used to derive variance and covariance incrementally.
	sumX  float64
	sumY  float64
	sumXX float64
	sumYY float64
	sumXY float64

	slope       float64
	intercept   float64
	correlation float64
}
@ -124,39 +157,52 @@ type simpleLinearRegression struct {
// newSimpleLinearRegression returns a regression model with all statistics
// zeroed until data points are added.
func newSimpleLinearRegression() *simpleLinearRegression {
	return &simpleLinearRegression{
		// Start everything at zero until we have data
		slope:       0.0,
		intercept:   0.0,
		correlation: 0.0,
	}
}
func (o *simpleLinearRegression) AddPoint(x, y float64) {
independentResidualAdjustment := x - o.independentSum/o.count
dependentResidualAdjustment := y - o.dependentSum/o.count
o.count += 1
o.independentSum += x
o.dependentSum += y
o.sumX += x
o.sumY += y
o.sumXX += x * x
o.sumYY += y * y
o.sumXY += x * y
if math.IsNaN(independentResidualAdjustment) {
// Need at least two points for meaningful calculation
if o.count < 2 {
return
}
independentResidual := x - o.independentSum/o.count
dependentResidual := y - o.dependentSum/o.count
n := o.count
meanX := o.sumX / n
meanY := o.sumY / n
o.independentVarianceN += independentResidual * independentResidualAdjustment
o.dependentVarianceN += dependentResidual * dependentResidualAdjustment
o.sampleCovarianceN += independentResidual * dependentResidualAdjustment
// Compute sample variances and covariance
varX := (o.sumXX - n*meanX*meanX) / (n - 1)
varY := (o.sumYY - n*meanY*meanY) / (n - 1)
covXY := (o.sumXY - n*meanX*meanY) / (n - 1)
o.slope = o.sampleCovarianceN / o.independentVarianceN
o.correlation = o.slope * math.Sqrt(o.independentVarianceN/o.dependentVarianceN)
o.correlation *= o.correlation
// If varX is zero, slope cannot be computed meaningfully.
// This would mean all X are the same, so handle that edge case.
if varX == 0 {
o.slope = 0.0
o.intercept = meanY // Just the mean
o.correlation = 0.0 // No correlation since all X are identical
return
}
// NOTE: zap had the reverse formula, changed it to the correct one
// for intercept. Verify if this is correct.
o.intercept = o.dependentSum/o.count - o.slope*(o.independentSum/o.count)
if math.IsNaN(o.correlation) {
o.correlation = 1
o.slope = covXY / varX
o.intercept = meanY - o.slope*meanX
// If varX or varY are zero, we cannot compute correlation properly.
if varX > 0 && varY > 0 {
o.correlation = covXY / (math.Sqrt(varX) * math.Sqrt(varY))
} else {
o.correlation = 0.0
}
}
@ -164,8 +210,17 @@ func (o *simpleLinearRegression) Predict(x float64) float64 {
return o.slope*x + o.intercept
}
func (o *simpleLinearRegression) IsWithinConfidence(correlationErrorRange float64, expectedSlope float64, slopeErrorRange float64,
) bool {
return o.correlation > 1.0-correlationErrorRange &&
math.Abs(expectedSlope-o.slope) < slopeErrorRange
func (o *simpleLinearRegression) IsWithinConfidence(correlationErrorRange float64, expectedSlope float64, slopeErrorRange float64) bool {
if o.count < 2 {
return true
}
// Check if slope is within error range of expected slope
// Also consider cases where slope is approximately 2x of expected slope
// as this can happen with time-based responses
slopeDiff := math.Abs(expectedSlope - o.slope)
slope2xDiff := math.Abs(expectedSlope*2 - o.slope)
if slopeDiff > slopeErrorRange && slope2xDiff > slopeErrorRange {
return false
}
return o.correlation > 1.0-correlationErrorRange
}

View File

@ -3,141 +3,498 @@
package time
import (
"math"
"math/rand"
"reflect"
"testing"
"time"
"github.com/stretchr/testify/require"
)
// Shared tolerances for the timing checks: maximum allowed deviation from
// perfect correlation and from the expected regression slope.
const (
	correlationErrorRange = float64(0.1)
	slopeErrorRange       = float64(0.2)
)
// This test suite verifies the timing dependency detection algorithm by testing various scenarios:
//
// Test Categories:
// 1. Perfect Linear Cases
// - TestPerfectLinear: Basic case with slope=1, no noise
// - TestPerfectLinearSlopeOne_NoNoise: Similar to above but with different parameters
// - TestPerfectLinearSlopeTwo_NoNoise: Tests detection of slope=2 relationship
//
// 2. Noisy Cases
// - TestLinearWithNoise: Verifies detection works with moderate noise (±0.2s)
// - TestNoisyLinear: Similar but with different noise parameters
// - TestHighNoiseConcealsSlope: Verifies detection fails with extreme noise (±5s)
//
// 3. No Correlation Cases
// - TestNoCorrelation: Basic case where delay has no effect
// - TestNoCorrelationHighBaseline: High baseline (~15s) masks any delay effect
// - TestNegativeSlopeScenario: Verifies detection rejects negative correlations
//
// 4. Edge Cases
// - TestMinimalData: Tests behavior with minimal data points (2 requests)
// - TestLargeNumberOfRequests: Tests stability with many data points (20 requests)
// - TestChangingBaseline: Tests detection with shifting baseline mid-test
// - TestHighBaselineLowSlope: Tests detection of subtle correlations (slope=0.85)
//
// ZAP Test Cases:
//
// 1. Alternating Sequence Tests
// - TestAlternatingSequences: Verifies correct alternation between high and low delays
//
// 2. Non-Injectable Cases
// - TestNonInjectableQuickFail: Tests quick failure when response time < requested delay
// - TestSlowNonInjectableCase: Tests early termination with consistently high response times
// - TestRealWorldNonInjectableCase: Tests behavior with real-world response patterns
//
// 3. Error Tolerance Tests
// - TestSmallErrorDependence: Verifies detection works with small random variations
//
// Key Parameters Tested:
// - requestsLimit: Number of requests to make (2-20)
// - highSleepTimeSeconds: Maximum delay to test (typically 5s)
// - correlationErrorRange: Acceptable deviation from perfect correlation (0.05-0.3)
// - slopeErrorRange: Acceptable deviation from expected slope (0.1-1.5)
//
// The test suite uses various mock senders (perfectLinearSender, noCorrelationSender, etc.)
// to simulate different timing behaviors and verify the detection algorithm works correctly
// across a wide range of scenarios.
// rng is a package-level random source seeded once per test run.
var rng = rand.New(rand.NewSource(time.Now().UnixNano()))
// perfectLinearSender builds a mock sender with a perfect linear
// relationship: observed delay = baseline + requested delay (slope 1, no noise).
func perfectLinearSender(baseline float64) func(delay int) (float64, error) {
	return func(requested int) (float64, error) {
		// Tiny artificial pause to mimic network overhead.
		time.Sleep(10 * time.Millisecond)
		observed := baseline + float64(requested)
		return observed, nil
	}
}
func Test_should_generate_alternating_sequences(t *testing.T) {
// noCorrelationSender builds a mock sender whose response time ignores the
// requested delay entirely: observed = baseline plus uniform noise in
// [-noiseAmplitude, noiseAmplitude] (no noise when the amplitude is zero).
func noCorrelationSender(baseline, noiseAmplitude float64) func(int) (float64, error) {
	return func(_ int) (float64, error) {
		time.Sleep(10 * time.Millisecond)
		var jitter float64
		if noiseAmplitude > 0 {
			jitter = (rand.Float64()*2 - 1) * noiseAmplitude
		}
		return baseline + jitter, nil
	}
}
// noisyLinearSender builds a mock sender with a linear relationship plus a
// deterministic +0.2s perturbation: observed = baseline + delay + 0.2.
func noisyLinearSender(baseline float64) func(delay int) (float64, error) {
	return func(requested int) (float64, error) {
		time.Sleep(10 * time.Millisecond)
		// The "noise" here is a fixed offset, which keeps tests deterministic.
		const offset = 0.2
		return baseline + float64(requested) + offset, nil
	}
}
// TestPerfectLinear verifies detection of a perfect slope=1 relationship
// (observed = baseline + requested delay) with no noise.
func TestPerfectLinear(t *testing.T) {
	// Expect near-perfect correlation and slope ~ 1.0
	requestsLimit := 6 // 3 pairs: enough data for stable regression
	highSleepTimeSeconds := 5
	corrErrRange := 0.1
	slopeErrRange := 0.2
	baseline := 5.0
	sender := perfectLinearSender(5.0) // baseline 5s, observed = 5s + requested_delay
	match, reason, err := checkTimingDependency(
		requestsLimit,
		highSleepTimeSeconds,
		corrErrRange,
		slopeErrRange,
		baseline,
		sender,
	)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
	if !match {
		t.Fatalf("Expected a match but got none. Reason: %s", reason)
	}
}
// TestNoCorrelation verifies no match is reported when the requested delay
// has no influence on the observed response time.
func TestNoCorrelation(t *testing.T) {
	// Expect no match because requested delay doesn't influence observed delay
	requestsLimit := 6
	highSleepTimeSeconds := 5
	corrErrRange := 0.1
	slopeErrRange := 0.5
	baseline := 8.0
	sender := noCorrelationSender(8.0, 0.1) // constant ~8s response, ±0.1s noise
	match, reason, err := checkTimingDependency(
		requestsLimit,
		highSleepTimeSeconds,
		corrErrRange,
		slopeErrRange,
		baseline,
		sender,
	)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
	if match {
		t.Fatalf("Expected no match but got one. Reason: %s", reason)
	}
}
// TestNoisyLinear verifies a strong positive correlation is still detected
// when observations carry a small perturbation, given relaxed tolerances.
func TestNoisyLinear(t *testing.T) {
	// Even with some noise, it should detect a strong positive correlation if
	// we allow a slightly bigger margin for slope/correlation.
	requestsLimit := 10 // More requests to average out noise
	highSleepTimeSeconds := 5
	corrErrRange := 0.2 // allow some lower correlation due to noise
	slopeErrRange := 0.5 // slope may deviate slightly
	baseline := 2.0
	sender := noisyLinearSender(2.0) // baseline 2s, observed ~ 2s + requested_delay ±0.2
	match, reason, err := checkTimingDependency(
		requestsLimit,
		highSleepTimeSeconds,
		corrErrRange,
		slopeErrRange,
		baseline,
		sender,
	)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
	// We expect a match since it's still roughly linear. The slope should be close to 1.
	if !match {
		t.Fatalf("Expected a match in noisy linear test but got none. Reason: %s", reason)
	}
}
// TestMinimalData verifies the algorithm behaves gracefully with the minimum
// allowed number of requests (one high/low pair).
func TestMinimalData(t *testing.T) {
	// With too few requests, correlation might not be stable.
	// Here, we send only 2 requests (1 pair) and see if the logic handles it gracefully.
	requestsLimit := 2
	highSleepTimeSeconds := 5
	corrErrRange := 0.3
	slopeErrRange := 0.5
	baseline := 5.0
	// Perfect linear sender again
	sender := perfectLinearSender(5.0)
	match, reason, err := checkTimingDependency(
		requestsLimit,
		highSleepTimeSeconds,
		corrErrRange,
		slopeErrRange,
		baseline,
		sender,
	)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
	if !match {
		t.Fatalf("Expected match but got none. Reason: %s", reason)
	}
}
// linearSender builds a mock sender computing
// observed = baseline + slope*delay + noise, where noise is drawn uniformly
// from [-noiseAmplitude, noiseAmplitude] when the amplitude is positive.
func linearSender(baseline, slope, noiseAmplitude float64) func(int) (float64, error) {
	return func(d int) (float64, error) {
		time.Sleep(10 * time.Millisecond)
		var jitter float64
		if noiseAmplitude > 0 {
			jitter = (rand.Float64()*2 - 1) * noiseAmplitude
		}
		return baseline + slope*float64(d) + jitter, nil
	}
}
// negativeSlopeSender builds a mock sender where a larger requested delay
// yields a SMALLER observed time: observed = baseline - 2*delay.
func negativeSlopeSender(baseline float64) func(int) (float64, error) {
	return func(d int) (float64, error) {
		time.Sleep(10 * time.Millisecond)
		observed := baseline - float64(d)*2.0
		return observed, nil
	}
}
// TestPerfectLinearSlopeOne_NoNoise verifies detection of slope=1 with zero
// noise under moderately strict tolerances.
func TestPerfectLinearSlopeOne_NoNoise(t *testing.T) {
	baseline := 2.0
	match, reason, err := checkTimingDependency(
		10, // requestsLimit
		5, // highSleepTimeSeconds
		0.1, // correlationErrorRange
		0.2, // slopeErrorRange (allowing slope between 0.8 and 1.2)
		baseline,
		linearSender(baseline, 1.0, 0.0),
	)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
	if !match {
		t.Fatalf("Expected a match for perfect linear slope=1. Reason: %s", reason)
	}
}
// TestPerfectLinearSlopeTwo_NoNoise verifies a slope=2 relationship is still
// detected when the slope tolerance is widened accordingly.
func TestPerfectLinearSlopeTwo_NoNoise(t *testing.T) {
	baseline := 2.0
	// slope=2 means observed = baseline + 2*requested_delay
	match, reason, err := checkTimingDependency(
		10,
		5,
		0.1, // correlation must still be good
		1.5, // allow slope in range (0.5 to 2.5), we should be close to 2.0 anyway
		baseline,
		linearSender(baseline, 2.0, 0.0),
	)
	if err != nil {
		t.Fatalf("Error: %v", err)
	}
	if !match {
		t.Fatalf("Expected a match for slope=2. Reason: %s", reason)
	}
}
// TestLinearWithNoise verifies detection of slope=1 under ±0.2s random noise
// with relaxed correlation/slope tolerances.
func TestLinearWithNoise(t *testing.T) {
	baseline := 5.0
	// slope=1 but with noise ±0.2 seconds
	match, reason, err := checkTimingDependency(
		12,
		5,
		0.2, // correlationErrorRange relaxed to account for noise
		0.5, // slopeErrorRange also relaxed
		baseline,
		linearSender(baseline, 1.0, 0.2),
	)
	if err != nil {
		t.Fatalf("Error: %v", err)
	}
	if !match {
		t.Fatalf("Expected a match for noisy linear data. Reason: %s", reason)
	}
}
// TestNoCorrelationHighBaseline verifies a large constant baseline (~15s)
// with no delay dependence is correctly rejected.
func TestNoCorrelationHighBaseline(t *testing.T) {
	baseline := 15.0
	// baseline ~15s, requested delays won't matter
	match, reason, err := checkTimingDependency(
		10,
		5,
		0.1, // correlation should be near zero, so no match expected
		0.5,
		baseline,
		noCorrelationSender(baseline, 0.1),
	)
	if err != nil {
		t.Fatalf("Error: %v", err)
	}
	if match {
		t.Fatalf("Expected no match for no correlation scenario. Got: %s", reason)
	}
}
// TestNegativeSlopeScenario verifies that a negative correlation (higher
// requested delay yields shorter responses) is rejected.
func TestNegativeSlopeScenario(t *testing.T) {
	baseline := 10.0
	// Increasing delay decreases observed time
	match, reason, err := checkTimingDependency(
		10,
		5,
		0.2,
		0.5,
		baseline,
		negativeSlopeSender(baseline),
	)
	if err != nil {
		t.Fatalf("Error: %v", err)
	}
	if match {
		t.Fatalf("Expected no match in negative slope scenario. Reason: %s", reason)
	}
}
// TestLargeNumberOfRequests verifies numerical stability with many data
// points and very strict tolerances.
func TestLargeNumberOfRequests(t *testing.T) {
	baseline := 1.0
	// 20 requests, slope=1.0, no noise. Should be very stable and produce a very high correlation.
	match, reason, err := checkTimingDependency(
		20,
		5,
		0.05, // very strict correlation requirement
		0.1, // very strict slope range
		baseline,
		linearSender(baseline, 1.0, 0.0),
	)
	if err != nil {
		t.Fatalf("Error: %v", err)
	}
	if !match {
		t.Fatalf("Expected a strong match with many requests and perfect linearity. Reason: %s", reason)
	}
}
// TestHighBaselineLowSlope verifies a subtler correlation (slope=0.85) on top
// of a high 15s baseline is still detected.
func TestHighBaselineLowSlope(t *testing.T) {
	baseline := 15.0
	match, reason, err := checkTimingDependency(
		10,
		5,
		0.2,
		0.2, // sender slope is 0.85; |1.0 - 0.85| = 0.15 fits this tolerance
		baseline,
		linearSender(baseline, 0.85, 0.0),
	)
	if err != nil {
		t.Fatalf("Error: %v", err)
	}
	if !match {
		t.Fatalf("Expected a match for slope=0.5 linear scenario. Reason: %s", reason)
	}
}
// TestHighNoiseConcealsSlope verifies that extreme noise (±5s) prevents a
// reliable correlation from being established under strict tolerances.
func TestHighNoiseConcealsSlope(t *testing.T) {
	baseline := 5.0
	// slope=1, but noise=5 seconds is huge and might conceal the correlation.
	// With large noise, the test may fail to detect correlation.
	match, reason, err := checkTimingDependency(
		12,
		5,
		0.1, // still strict
		0.2, // still strict
		baseline,
		linearSender(baseline, 1.0, 5.0),
	)
	if err != nil {
		t.Fatalf("Error: %v", err)
	}
	// Expect no match because the noise level is too high to establish a reliable correlation.
	if match {
		t.Fatalf("Expected no match due to extreme noise. Reason: %s", reason)
	}
}
func TestAlternatingSequences(t *testing.T) {
baseline := 0.0
var generatedDelays []float64
reqSender := func(delay int) (float64, error) {
generatedDelays = append(generatedDelays, float64(delay))
return float64(delay), nil
}
matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
require.NoError(t, err)
require.True(t, matched)
require.EqualValues(t, []float64{15, 1, 15, 1}, generatedDelays)
match, reason, err := checkTimingDependency(
4, // requestsLimit
15, // highSleepTimeSeconds
0.1, // correlationErrorRange
0.2, // slopeErrorRange
baseline,
reqSender,
)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
if !match {
t.Fatalf("Expected a match but got none. Reason: %s", reason)
}
// Verify alternating sequence of delays
expectedDelays := []float64{15, 3, 15, 3}
if !reflect.DeepEqual(generatedDelays, expectedDelays) {
t.Fatalf("Expected delays %v but got %v", expectedDelays, generatedDelays)
}
}
func Test_should_giveup_non_injectable(t *testing.T) {
func TestNonInjectableQuickFail(t *testing.T) {
baseline := 0.5
var timesCalled int
reqSender := func(delay int) (float64, error) {
timesCalled++
return 0.5, nil
return 0.5, nil // Return value less than delay
}
match, _, err := checkTimingDependency(
4, // requestsLimit
15, // highSleepTimeSeconds
0.1, // correlationErrorRange
0.2, // slopeErrorRange
baseline,
reqSender,
)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
if match {
t.Fatal("Expected no match for non-injectable case")
}
if timesCalled != 1 {
t.Fatalf("Expected quick fail after 1 call, got %d calls", timesCalled)
}
matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
require.NoError(t, err)
require.False(t, matched)
require.Equal(t, 1, timesCalled)
}
func Test_should_giveup_slow_non_injectable(t *testing.T) {
func TestSlowNonInjectableCase(t *testing.T) {
baseline := 10.0
rng := rand.New(rand.NewSource(time.Now().UnixNano()))
var timesCalled int
reqSender := func(delay int) (float64, error) {
timesCalled++
return 10 + rng.Float64()*0.5, nil
}
matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
require.NoError(t, err)
require.False(t, matched)
require.LessOrEqual(t, timesCalled, 3)
match, _, err := checkTimingDependency(
4, // requestsLimit
15, // highSleepTimeSeconds
0.1, // correlationErrorRange
0.2, // slopeErrorRange
baseline,
reqSender,
)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
if match {
t.Fatal("Expected no match for slow non-injectable case")
}
if timesCalled > 3 {
t.Fatalf("Expected early termination (≤3 calls), got %d calls", timesCalled)
}
}
func Test_should_giveup_slow_non_injectable_realworld(t *testing.T) {
var timesCalled int
var iteration = 0
counts := []float64{21, 11, 21, 11}
func TestRealWorldNonInjectableCase(t *testing.T) {
baseline := 0.0
var iteration int
counts := []float64{11, 21, 11, 21, 11}
reqSender := func(delay int) (float64, error) {
timesCalled++
iteration++
return counts[iteration-1], nil
}
matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
require.NoError(t, err)
require.False(t, matched)
require.LessOrEqual(t, timesCalled, 4)
match, _, err := checkTimingDependency(
4, // requestsLimit
15, // highSleepTimeSeconds
0.1, // correlationErrorRange
0.2, // slopeErrorRange
baseline,
reqSender,
)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
if match {
t.Fatal("Expected no match for real-world non-injectable case")
}
if iteration > 4 {
t.Fatalf("Expected ≤4 iterations, got %d", iteration)
}
}
func Test_should_detect_dependence_with_small_error(t *testing.T) {
func TestSmallErrorDependence(t *testing.T) {
baseline := 0.0
rng := rand.New(rand.NewSource(time.Now().UnixNano()))
reqSender := func(delay int) (float64, error) {
return float64(delay) + rng.Float64()*0.5, nil
}
matched, reason, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
require.NoError(t, err)
require.True(t, matched)
require.NotEmpty(t, reason)
}
// Test_LinearRegression_Numerical_stability feeds repeated and duplicate
// points on the y=x line and expects slope and correlation to stay exactly 1.
func Test_LinearRegression_Numerical_stability(t *testing.T) {
	variables := [][]float64{
		{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {1, 1}, {2, 2}, {2, 2}, {2, 2},
	}
	slope := float64(1)
	correlation := float64(1)
	regression := newSimpleLinearRegression()
	for _, v := range variables {
		regression.AddPoint(v[0], v[1])
	}
	require.True(t, almostEqual(regression.slope, slope))
	require.True(t, almostEqual(regression.correlation, correlation))
}
// Test_LinearRegression_exact_verify checks the two-point case: (1,1),(2,3)
// has slope 2 and perfect correlation.
func Test_LinearRegression_exact_verify(t *testing.T) {
	variables := [][]float64{
		{1, 1}, {2, 3},
	}
	slope := float64(2)
	correlation := float64(1)
	regression := newSimpleLinearRegression()
	for _, v := range variables {
		regression.AddPoint(v[0], v[1])
	}
	require.True(t, almostEqual(regression.slope, slope))
	require.True(t, almostEqual(regression.correlation, correlation))
}
// Test_LinearRegression_known_verify checks the model against precomputed
// slope/correlation values for a fixed noisy dataset.
func Test_LinearRegression_known_verify(t *testing.T) {
	variables := [][]float64{
		{1, 1.348520581}, {2, 2.524046187}, {3, 3.276944688}, {4, 4.735374498}, {5, 5.150291657},
	}
	slope := float64(0.981487046)
	correlation := float64(0.979228906)
	regression := newSimpleLinearRegression()
	for _, v := range variables {
		regression.AddPoint(v[0], v[1])
	}
	require.True(t, almostEqual(regression.slope, slope))
	require.True(t, almostEqual(regression.correlation, correlation))
}
// Test_LinearRegression_nonlinear_verify checks that exponential data
// produces a noticeably imperfect linear correlation (< 0.9).
func Test_LinearRegression_nonlinear_verify(t *testing.T) {
	variables := [][]float64{
		{1, 2}, {2, 4}, {3, 8}, {4, 16}, {5, 32},
	}
	regression := newSimpleLinearRegression()
	for _, v := range variables {
		regression.AddPoint(v[0], v[1])
	}
	require.Less(t, regression.correlation, 0.9)
}
// float64EqualityThreshold is the absolute tolerance used by almostEqual.
const float64EqualityThreshold = 1e-8
func almostEqual(a, b float64) bool {
return math.Abs(a-b) <= float64EqualityThreshold
match, reason, err := checkTimingDependency(
4, // requestsLimit
15, // highSleepTimeSeconds
0.1, // correlationErrorRange
0.2, // slopeErrorRange
baseline,
reqSender,
)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
if !match {
t.Fatalf("Expected match for small error case. Reason: %s", reason)
}
}

View File

@ -67,8 +67,8 @@ const (
// Components is the list of registered component types, in the order they
// are attempted during fuzzing.
//
// NOTE(review): the diff view listed RequestPathComponent twice (its old and
// new positions); the reordered list below keeps the new position after
// RequestHeaderComponent — confirm upstream.
var Components = []string{
	RequestBodyComponent,
	RequestQueryComponent,
	RequestHeaderComponent,
	RequestPathComponent,
	RequestCookieComponent,
}

View File

@ -52,10 +52,6 @@ func (c *Cookie) Parse(req *retryablehttp.Request) (bool, error) {
// Iterate iterates through the component
func (c *Cookie) Iterate(callback func(key string, value interface{}) error) (err error) {
c.value.parsed.Iterate(func(key string, value any) bool {
// Skip ignored cookies
if _, ok := defaultIgnoredCookieKeys[key]; ok {
return ok
}
if errx := callback(key, value); errx != nil {
err = errx
return false
@ -85,6 +81,7 @@ func (c *Cookie) Delete(key string) error {
// Rebuild returns a new request with the
// component rebuilt
func (c *Cookie) Rebuild() (*retryablehttp.Request, error) {
// TODO: Fix cookie duplication with auth-file
cloned := c.req.Clone(context.Background())
cloned.Header.Del("Cookie")
@ -106,47 +103,3 @@ func (c *Cookie) Clone() Component {
req: c.req.Clone(context.Background()),
}
}
// A list of cookies that are essential to the request and
// must not be fuzzed. Keys are matched exactly (case-sensitive).
var defaultIgnoredCookieKeys = map[string]struct{}{
	// Load balancer stickiness cookies
	"awsELB":     {},
	"AWSALB":     {},
	"AWSALBCORS": {},
	// Analytics / tracking cookies
	"__utma":  {},
	"__utmb":  {},
	"__utmc":  {},
	"__utmt":  {},
	"__utmz":  {},
	"_ga":     {},
	"_gat":    {},
	"_gid":    {},
	"_gcl_au": {},
	"_fbp":    {},
	"fr":      {},
	"__hstc":     {},
	"hubspotutk": {},
	"__hssc":     {},
	"__hssrc":    {},
	"mp_mixpanel__c": {},
	// Session / monitoring cookies
	"JSESSIONID": {},
	"NREUM":      {},
	"_pk_id":   {},
	"_pk_ref":  {},
	"_pk_ses":  {},
	"_pk_cvar": {},
	"_pk_hsr":  {},
	"_hjIncludedInSample": {},
	// CDN / WAF cookies
	"__cfduid":   {},
	"cf_use_ob":  {},
	"cf_ob_info": {},
	// Product analytics / A-B testing cookies
	"intercom-session":           {},
	"optimizelyEndUserId":        {},
	"optimizelySegments":         {},
	"optimizelyBuckets":          {},
	"optimizelyPendingLogEvents": {},
	// Video platform cookies
	"YSC":                {},
	"VISITOR_INFO1_LIVE": {},
	"PREF":               {},
	"GPS":                {},
}

View File

@ -9,6 +9,7 @@ import (
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/component"
fuzzStats "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/expressions"
@ -122,6 +123,18 @@ func (rule *Rule) Execute(input *ExecuteRuleInput) (err error) {
return nil
})
}
if rule.options.FuzzStatsDB != nil {
_ = component.Iterate(func(key string, value interface{}) error {
rule.options.FuzzStatsDB.RecordComponentEvent(fuzzStats.ComponentEvent{
URL: input.Input.MetaInput.Target(),
ComponentType: componentName,
ComponentName: fmt.Sprintf("%v", value),
})
return nil
})
}
finalComponentList = append(finalComponentList, component)
}
if len(displayDebugFuzzPoints) > 0 {

15
pkg/fuzz/stats/db.go Normal file
View File

@ -0,0 +1,15 @@
package stats
import (
_ "embed"
_ "github.com/mattn/go-sqlite3"
)
// StatsDatabase is the storage backend contract for fuzzing statistics.
type StatsDatabase interface {
	// Close releases any resources held by the backend.
	Close()
	// InsertComponent records a fuzzed component sighting.
	InsertComponent(event ComponentEvent) error
	// InsertMatchedRecord records a fuzzing request and its outcome.
	InsertMatchedRecord(event FuzzingEvent) error
	// InsertError records an errored fuzzing request.
	InsertError(event ErrorEvent) error
}

24
pkg/fuzz/stats/db_test.go Normal file
View File

@ -0,0 +1,24 @@
package stats
import (
"testing"
"github.com/stretchr/testify/require"
)
// Test_NewStatsDatabase exercises the simple stats store with a single
// matched-record insertion.
func Test_NewStatsDatabase(t *testing.T) {
	db, err := NewSimpleStats()
	require.NoError(t, err)

	err = db.InsertMatchedRecord(FuzzingEvent{
		URL:           "http://localhost:8080/login",
		TemplateID:    "apache-struts2-001",
		ComponentType: "path",
		ComponentName: "/login",
		PayloadSent:   "/login'\"><",
		StatusCode:    401,
	})
	require.NoError(t, err)
	//os.Remove("test.stats.db")
}

164
pkg/fuzz/stats/simple.go Normal file
View File

@ -0,0 +1,164 @@
package stats
import (
"fmt"
"net/http"
"net/url"
"strings"
"sync"
"sync/atomic"
)
// simpleStats is an in-memory, concurrency-safe collector of fuzzing
// statistics: totals via atomic counters, breakdowns and dedupe sets via
// sync.Maps. The zero value is ready to use.
type simpleStats struct {
	totalComponentsTested atomic.Int64
	totalEndpointsTested  atomic.Int64
	totalFuzzedRequests   atomic.Int64
	totalMatchedResults   atomic.Int64
	totalTemplatesTested  atomic.Int64
	totalErroredRequests  atomic.Int64

	statusCodes    sync.Map // int status code -> *atomic.Int64 count
	severityCounts sync.Map // severity string -> *atomic.Int64 count

	// Membership sets backing the unique totals above (key -> true).
	componentsUniqueMap sync.Map
	endpointsUniqueMap  sync.Map
	templatesUniqueMap  sync.Map
	errorGroupedStats   sync.Map // error text -> *atomic.Int64 count
}
// NewSimpleStats creates a new in-memory stats collector. The error return
// is kept for interface stability; it is currently always nil.
func NewSimpleStats() (*simpleStats, error) {
	// Every field (atomic counters and sync.Maps) is usable at its zero
	// value, so no explicit per-field initialization is needed.
	return &simpleStats{}, nil
}
// Close is a no-op for the in-memory collector; nothing to flush or release.
func (s *simpleStats) Close() {}
// InsertComponent records a fuzzed component and the endpoint it was seen
// on, incrementing each unique counter only on first sight.
func (s *simpleStats) InsertComponent(event ComponentEvent) error {
	// Validate the URL up front so a parse failure does not leave a
	// half-recorded event (component counted, endpoint not).
	parsedURL, err := url.Parse(event.URL)
	if err != nil {
		return err
	}

	// LoadOrStore makes first-sight detection atomic; the previous
	// Load-then-Store sequence could double count under concurrency.
	componentKey := fmt.Sprintf("%s_%s", event.ComponentName, event.ComponentType)
	if _, loaded := s.componentsUniqueMap.LoadOrStore(componentKey, true); !loaded {
		s.totalComponentsTested.Add(1)
	}

	endpointsKey := fmt.Sprintf("%s_%s", event.siteName, parsedURL.Path)
	if _, loaded := s.endpointsUniqueMap.LoadOrStore(endpointsKey, true); !loaded {
		s.totalEndpointsTested.Add(1)
	}
	return nil
}
// InsertMatchedRecord counts a fuzzed request and its status code; when the
// event matched, the result and its severity are also counted. The template
// is counted once per unique TemplateID.
func (s *simpleStats) InsertMatchedRecord(event FuzzingEvent) error {
	s.totalFuzzedRequests.Add(1)
	s.incrementStatusCode(event.StatusCode)

	if event.Matched {
		s.totalMatchedResults.Add(1)
		s.incrementSeverityCount(event.Severity)
	}

	// LoadOrStore makes first-sight detection atomic so concurrent calls
	// cannot double-count the same template.
	if _, loaded := s.templatesUniqueMap.LoadOrStore(event.TemplateID, true); !loaded {
		s.totalTemplatesTested.Add(1)
	}
	return nil
}
// InsertError counts an errored request and groups the count by the exact
// error message text.
func (s *simpleStats) InsertError(event ErrorEvent) error {
	s.totalErroredRequests.Add(1)
	// LoadOrStore keeps counter creation race-free across goroutines.
	value, _ := s.errorGroupedStats.LoadOrStore(event.Error, &atomic.Int64{})
	if counter, ok := value.(*atomic.Int64); ok {
		counter.Add(1)
	}
	return nil
}
// SimpleStatsResponse is a point-in-time snapshot of the collected
// fuzzing statistics.
type SimpleStatsResponse struct {
	TotalMatchedResults   int64
	TotalComponentsTested int64
	TotalEndpointsTested  int64
	TotalFuzzedRequests   int64
	TotalTemplatesTested  int64
	TotalErroredRequests  int64
	// Breakdown maps: formatted status code ("404_NOT_FOUND"), severity
	// label, and error message text, each to its occurrence count.
	StatusCodes       map[string]int64
	SeverityCounts    map[string]int64
	ErrorGroupedStats map[string]int64
}
// GetStatistics returns a snapshot of all counters and breakdown maps.
// Counters are read atomically, but the snapshot as a whole is not a
// single consistent point in time.
func (s *simpleStats) GetStatistics() SimpleStatsResponse {
	// collect flattens a sync.Map of *atomic.Int64 counters into a plain
	// map, rendering each key with keyFn.
	collect := func(m *sync.Map, keyFn func(any) string) map[string]int64 {
		out := make(map[string]int64)
		m.Range(func(k, v any) bool {
			if counter, ok := v.(*atomic.Int64); ok {
				out[keyFn(k)] = counter.Load()
			}
			return true
		})
		return out
	}

	return SimpleStatsResponse{
		TotalMatchedResults:   s.totalMatchedResults.Load(),
		TotalComponentsTested: s.totalComponentsTested.Load(),
		TotalEndpointsTested:  s.totalEndpointsTested.Load(),
		TotalFuzzedRequests:   s.totalFuzzedRequests.Load(),
		TotalTemplatesTested:  s.totalTemplatesTested.Load(),
		TotalErroredRequests:  s.totalErroredRequests.Load(),
		StatusCodes:           collect(&s.statusCodes, func(k any) string { return formatStatusCode(k.(int)) }),
		SeverityCounts:        collect(&s.severityCounts, func(k any) string { return k.(string) }),
		ErrorGroupedStats:     collect(&s.errorGroupedStats, func(k any) string { return k.(string) }),
	}
}
// incrementStatusCode bumps the counter for the given HTTP status code,
// creating the counter atomically on first use.
func (s *simpleStats) incrementStatusCode(statusCode int) {
	value, _ := s.statusCodes.LoadOrStore(statusCode, &atomic.Int64{})
	if counter, ok := value.(*atomic.Int64); ok {
		counter.Add(1)
	}
}
// incrementSeverityCount bumps the counter for the given severity label,
// creating the counter atomically on first use.
func (s *simpleStats) incrementSeverityCount(severity string) {
	value, _ := s.severityCounts.LoadOrStore(severity, &atomic.Int64{})
	if counter, ok := value.(*atomic.Int64); ok {
		counter.Add(1)
	}
}
// formatStatusCode renders a status code as "CODE_UPPER_SNAKE_TEXT",
// e.g. 404 -> "404_NOT_FOUND". Unknown codes yield "CODE_".
func formatStatusCode(code int) string {
	label := strings.ReplaceAll(http.StatusText(code), " ", "_")
	return fmt.Sprintf("%d_%s", code, strings.ToTitle(label))
}

106
pkg/fuzz/stats/stats.go Normal file
View File

@ -0,0 +1,106 @@
// Package stats implements a statistics recording module for
// nuclei fuzzing.
package stats
import (
"fmt"
"log"
"net/url"
"github.com/pkg/errors"
)
// Tracker is a stats tracker module for fuzzing server
type Tracker struct {
	// database is the in-memory stats store backing this tracker.
	database *simpleStats
}
// NewTracker creates a new tracker instance backed by the in-memory
// simple stats store.
func NewTracker() (*Tracker, error) {
	store, err := NewSimpleStats()
	if err != nil {
		return nil, errors.Wrap(err, "could not create new tracker")
	}
	return &Tracker{database: store}, nil
}
// GetStats returns a snapshot of the currently collected statistics.
func (t *Tracker) GetStats() SimpleStatsResponse {
	return t.database.GetStatistics()
}
// Close closes the tracker and its underlying stats store.
func (t *Tracker) Close() {
	t.database.Close()
}
// FuzzingEvent is a fuzzing event
type FuzzingEvent struct {
	URL           string // target URL the fuzzed request was sent to
	ComponentType string // fuzzed component identifier (callers set this from the component's Name())
	ComponentName string // fuzzed parameter within the component (callers set this from the request Parameter)
	TemplateID    string // ID of the template that generated the request
	PayloadSent   string // payload value injected for this request
	StatusCode    int    // HTTP status code of the response
	Matched       bool   // whether the template matched on this response
	RawRequest    string // raw dump of the request sent
	RawResponse   string // raw dump of the response received
	Severity      string // severity string of the originating template

	// siteName is the host:port identity derived from URL by
	// RecordResultEvent before insertion; callers must not set it.
	siteName string
}
// RecordResultEvent stores a fuzzing result event, deriving the
// site name (host:port) from the event URL before insertion.
// Insertion failures are logged rather than propagated.
func (t *Tracker) RecordResultEvent(event FuzzingEvent) {
	event.siteName = getCorrectSiteName(event.URL)
	insertErr := t.database.InsertMatchedRecord(event)
	if insertErr != nil {
		log.Printf("could not insert matched record: %s", insertErr)
	}
}
// ComponentEvent describes a component observed while testing a
// target URL.
type ComponentEvent struct {
	URL           string // target URL the component belongs to
	ComponentType string // type of the component
	ComponentName string // name of the component
	// siteName is the host:port identity derived from URL by
	// RecordComponentEvent before insertion; callers must not set it.
	siteName string
}
// RecordComponentEvent stores a tested-component event, deriving the
// site name (host:port) from the event URL before insertion.
// Insertion failures are logged rather than propagated.
func (t *Tracker) RecordComponentEvent(event ComponentEvent) {
	event.siteName = getCorrectSiteName(event.URL)
	insertErr := t.database.InsertComponent(event)
	if insertErr != nil {
		log.Printf("could not insert component record: %s", insertErr)
	}
}
// ErrorEvent is an error observed while fuzzing a target.
type ErrorEvent struct {
	TemplateID string // ID of the template that produced the error
	URL        string // target URL the error occurred against
	Error      string // error message text
}
// RecordErrorEvent stores an error event. Insertion failures are
// logged rather than propagated.
func (t *Tracker) RecordErrorEvent(event ErrorEvent) {
	insertErr := t.database.InsertError(event)
	if insertErr != nil {
		log.Printf("could not insert error record: %s", insertErr)
	}
}
// getCorrectSiteName derives a site identity (host:port) from a URL.
// When the URL carries no explicit port, the scheme's default port
// (443 for https, 80 for http) is appended; other schemes keep the
// bare host. An unparsable URL yields the empty string.
func getCorrectSiteName(originalURL string) string {
	parsed, parseErr := url.Parse(originalURL)
	if parseErr != nil {
		return ""
	}
	site := parsed.Host
	if parsed.Port() != "" {
		// Explicit port already present in host:port form.
		return site
	}
	switch parsed.Scheme {
	case "https":
		site = fmt.Sprintf("%s:443", site)
	case "http":
		site = fmt.Sprintf("%s:80", site)
	}
	return site
}

View File

@ -2,7 +2,7 @@ package burp
import (
"encoding/base64"
"os"
"io"
"strings"
"github.com/pkg/errors"
@ -35,14 +35,8 @@ func (j *BurpFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
func (j *BurpFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
file, err := os.Open(input)
if err != nil {
return errors.Wrap(err, "could not open data file")
}
defer file.Close()
items, err := burpxml.Parse(file, true)
func (j *BurpFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
items, err := burpxml.Parse(input, true)
if err != nil {
return errors.Wrap(err, "could not decode burp xml schema")
}

View File

@ -1,6 +1,7 @@
package burp
import (
"os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@ -14,10 +15,14 @@ func TestBurpParse(t *testing.T) {
var gotMethodsToURLs []string
err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
file, err := os.Open(proxifyInputFile)
require.Nilf(t, err, "error opening proxify input file: %v", err)
defer file.Close()
err = format.Parse(file, func(request *types.RequestResponse) bool {
gotMethodsToURLs = append(gotMethodsToURLs, request.URL.String())
return false
})
}, proxifyInputFile)
if err != nil {
t.Fatal(err)
}

View File

@ -2,6 +2,7 @@ package formats
import (
"errors"
"io"
"os"
"strings"
@ -35,7 +36,7 @@ type Format interface {
Name() string
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
Parse(input string, resultsCb ParseReqRespCallback) error
Parse(input io.Reader, resultsCb ParseReqRespCallback, filePath string) error
// SetOptions sets the options for the input format
SetOptions(options InputFormatOptions)
}

View File

@ -2,7 +2,6 @@ package json
import (
"io"
"os"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
@ -46,14 +45,8 @@ func (j *JSONFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
func (j *JSONFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
file, err := os.Open(input)
if err != nil {
return errors.Wrap(err, "could not open json file")
}
defer file.Close()
decoder := json.NewDecoder(file)
func (j *JSONFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
decoder := json.NewDecoder(input)
for {
var request proxifyRequest
err := decoder.Decode(&request)

View File

@ -1,6 +1,7 @@
package json
import (
"os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@ -41,11 +42,15 @@ func TestJSONFormatterParse(t *testing.T) {
proxifyInputFile := "../testdata/ginandjuice.proxify.json"
file, err := os.Open(proxifyInputFile)
require.Nilf(t, err, "error opening proxify input file: %v", err)
defer file.Close()
var urls []string
err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
err = format.Parse(file, func(request *types.RequestResponse) bool {
urls = append(urls, request.URL.String())
return false
})
}, proxifyInputFile)
if err != nil {
t.Fatal(err)
}

View File

@ -1,6 +1,8 @@
package openapi
import (
"io"
"github.com/getkin/kin-openapi/openapi3"
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats"
@ -29,9 +31,9 @@ func (j *OpenAPIFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
func (j *OpenAPIFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
func (j *OpenAPIFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
loader := openapi3.NewLoader()
schema, err := loader.LoadFromFile(input)
schema, err := loader.LoadFromIoReader(input)
if err != nil {
return errors.Wrap(err, "could not decode openapi 3.0 schema")
}

View File

@ -1,6 +1,7 @@
package openapi
import (
"os"
"strings"
"testing"
@ -41,11 +42,15 @@ func TestOpenAPIParser(t *testing.T) {
gotMethodsToURLs := make(map[string][]string)
err := format.Parse(proxifyInputFile, func(rr *types.RequestResponse) bool {
file, err := os.Open(proxifyInputFile)
require.Nilf(t, err, "error opening proxify input file: %v", err)
defer file.Close()
err = format.Parse(file, func(rr *types.RequestResponse) bool {
gotMethodsToURLs[rr.Request.Method] = append(gotMethodsToURLs[rr.Request.Method],
strings.Replace(rr.URL.String(), baseURL, "{{baseUrl}}", 1))
return false
})
}, proxifyInputFile)
if err != nil {
t.Fatal(err)
}

View File

@ -2,7 +2,6 @@ package swagger
import (
"io"
"os"
"path"
"github.com/getkin/kin-openapi/openapi2"
@ -12,6 +11,7 @@ import (
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats/openapi"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
)
@ -38,24 +38,19 @@ func (j *SwaggerFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
func (j *SwaggerFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
file, err := os.Open(input)
if err != nil {
return errors.Wrap(err, "could not open data file")
}
defer file.Close()
func (j *SwaggerFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
schemav2 := &openapi2.T{}
ext := path.Ext(input)
ext := path.Ext(filePath)
var err error
if ext == ".yaml" || ext == ".yml" {
data, err_data := io.ReadAll(file)
if err_data != nil {
var data []byte
data, err = io.ReadAll(input)
if err != nil {
return errors.Wrap(err, "could not read data file")
}
err = yaml.Unmarshal(data, schemav2)
} else {
err = json.NewDecoder(file).Decode(schemav2)
err = json.NewDecoder(input).Decode(schemav2)
}
if err != nil {
return errors.Wrap(err, "could not decode openapi 2.0 schema")

View File

@ -1,6 +1,7 @@
package swagger
import (
"os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@ -14,10 +15,14 @@ func TestSwaggerAPIParser(t *testing.T) {
var gotMethodsToURLs []string
err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
file, err := os.Open(proxifyInputFile)
require.Nilf(t, err, "error opening proxify input file: %v", err)
defer file.Close()
err = format.Parse(file, func(request *types.RequestResponse) bool {
gotMethodsToURLs = append(gotMethodsToURLs, request.URL.String())
return false
})
}, proxifyInputFile)
if err != nil {
t.Fatal(err)
}

View File

@ -2,7 +2,6 @@ package yaml
import (
"io"
"os"
"strings"
"github.com/pkg/errors"
@ -46,14 +45,8 @@ func (j *YamlMultiDocFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
func (j *YamlMultiDocFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
file, err := os.Open(input)
if err != nil {
return errors.Wrap(err, "could not open json file")
}
defer file.Close()
decoder := YamlUtil.NewDecoder(file)
func (j *YamlMultiDocFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
decoder := YamlUtil.NewDecoder(input)
for {
var request proxifyRequest
err := decoder.Decode(&request)

View File

@ -1,6 +1,7 @@
package yaml
import (
"os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@ -17,11 +18,15 @@ func TestYamlFormatterParse(t *testing.T) {
"https://ginandjuice.shop/users/3",
}
file, err := os.Open(proxifyInputFile)
require.Nilf(t, err, "error opening proxify input file: %v", err)
defer file.Close()
var urls []string
err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
err = format.Parse(file, func(request *types.RequestResponse) bool {
urls = append(urls, request.URL.String())
return false
})
}, proxifyInputFile)
require.Nilf(t, err, "error parsing yaml file: %v", err)
require.Len(t, urls, len(expectedUrls), "invalid number of urls")
require.ElementsMatch(t, urls, expectedUrls, "invalid urls")

View File

@ -1,6 +1,9 @@
package http
import (
"bytes"
"io"
"os"
"strings"
"github.com/pkg/errors"
@ -23,17 +26,25 @@ type HttpMultiFormatOptions struct {
InputFile string
// InputMode is the mode of input
InputMode string
// optional input reader
InputContents string
}
// HttpInputProvider implements an input provider for nuclei that loads
// inputs from multiple formats like burp, openapi, postman,proxify, etc.
type HttpInputProvider struct {
format formats.Format
inputData []byte
inputFile string
count int64
}
// NewHttpInputProvider creates a new input provider for nuclei from a file
// or an input string
//
// The first preference is given to input file if provided
// otherwise it will use the input string
func NewHttpInputProvider(opts *HttpMultiFormatOptions) (*HttpInputProvider, error) {
var format formats.Format
for _, provider := range providersList {
@ -48,14 +59,40 @@ func NewHttpInputProvider(opts *HttpMultiFormatOptions) (*HttpInputProvider, err
// Do a first pass over the input to identify any errors
// and get the count of the input file as well
count := int64(0)
parseErr := format.Parse(opts.InputFile, func(request *types.RequestResponse) bool {
var inputFile *os.File
var inputReader io.Reader
if opts.InputFile != "" {
file, err := os.Open(opts.InputFile)
if err != nil {
return nil, errors.Wrap(err, "could not open input file")
}
inputFile = file
inputReader = file
} else {
inputReader = strings.NewReader(opts.InputContents)
}
defer func() {
if inputFile != nil {
inputFile.Close()
}
}()
data, err := io.ReadAll(inputReader)
if err != nil {
return nil, errors.Wrap(err, "could not read input file")
}
if len(data) == 0 {
return nil, errors.New("input file is empty")
}
parseErr := format.Parse(bytes.NewReader(data), func(request *types.RequestResponse) bool {
count++
return false
})
}, opts.InputFile)
if parseErr != nil {
return nil, errors.Wrap(parseErr, "could not parse input file")
}
return &HttpInputProvider{format: format, inputFile: opts.InputFile, count: count}, nil
return &HttpInputProvider{format: format, inputData: data, inputFile: opts.InputFile, count: count}, nil
}
// Count returns the number of items for input provider
@ -65,12 +102,12 @@ func (i *HttpInputProvider) Count() int64 {
// Iterate over all inputs in order
func (i *HttpInputProvider) Iterate(callback func(value *contextargs.MetaInput) bool) {
err := i.format.Parse(i.inputFile, func(request *types.RequestResponse) bool {
err := i.format.Parse(bytes.NewReader(i.inputData), func(request *types.RequestResponse) bool {
metaInput := contextargs.NewMetaInput()
metaInput.ReqResp = request
metaInput.Input = request.URL.String()
return callback(metaInput)
})
}, i.inputFile)
if err != nil {
gologger.Warning().Msgf("Could not parse input file while iterating: %s\n", err)
}

View File

@ -75,6 +75,10 @@ type StandardWriter struct {
DisableStdout bool
AddNewLinesOutputFile bool // by default this is only done for stdout
KeysToRedact []string
// JSONLogRequestHook is a hook that can be used to log request/response
// when using custom server code with output
JSONLogRequestHook func(*JSONLogRequest)
}
var _ Writer = &StandardWriter{}
@ -352,7 +356,7 @@ type JSONLogRequest struct {
// Request writes a log the requests trace log
func (w *StandardWriter) Request(templatePath, input, requestType string, requestErr error) {
if w.traceFile == nil && w.errorFile == nil {
if w.traceFile == nil && w.errorFile == nil && w.JSONLogRequestHook == nil {
return
}
@ -366,6 +370,10 @@ func (w *StandardWriter) Request(templatePath, input, requestType string, reques
return
}
if w.JSONLogRequestHook != nil {
w.JSONLogRequestHook(request)
}
if w.traceFile != nil {
_, _ = w.traceFile.Write(data)
}

View File

@ -42,7 +42,12 @@ func NewWafDetector() *WafDetector {
if waf.Regex == "" {
continue
}
store.regexCache[id] = regexp.MustCompile(waf.Regex)
compiled, err := regexp.Compile(waf.Regex)
if err != nil {
log.Printf("invalid WAF regex for %s: %v", id, err)
continue
}
store.regexCache[id] = compiled
}
return store
}

View File

@ -320,8 +320,8 @@ func (request *Request) Compile(options *protocols.ExecutorOptions) error {
timeoutVal = 5
}
// Add 3x buffer to the timeout
customTimeout = int(math.Ceil(float64(timeoutVal) * 3))
// Add 5x buffer to the timeout
customTimeout = int(math.Ceil(float64(timeoutVal) * 5))
}
if customTimeout > 0 {
connectionConfiguration.Connection.CustomMaxTimeout = time.Duration(customTimeout) * time.Second

View File

@ -113,6 +113,7 @@ func (c *Configuration) Clone() *Configuration {
if c.Connection != nil {
cloneConnection := &ConnectionConfiguration{
DisableKeepAlive: c.Connection.DisableKeepAlive,
CustomMaxTimeout: c.Connection.CustomMaxTimeout,
}
if c.Connection.HasCookieJar() {
cookiejar := *c.Connection.GetCookieJar()

View File

@ -20,6 +20,7 @@ import (
"github.com/projectdiscovery/fastdialer/fastdialer"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/analyzers"
fuzzStats "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/operators"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
@ -1017,6 +1018,21 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ
callback(event)
if request.options.FuzzStatsDB != nil && generatedRequest.fuzzGeneratedRequest.Request != nil {
request.options.FuzzStatsDB.RecordResultEvent(fuzzStats.FuzzingEvent{
URL: input.MetaInput.Target(),
TemplateID: request.options.TemplateID,
ComponentType: generatedRequest.fuzzGeneratedRequest.Component.Name(),
ComponentName: generatedRequest.fuzzGeneratedRequest.Parameter,
PayloadSent: generatedRequest.fuzzGeneratedRequest.Value,
StatusCode: respChain.Response().StatusCode,
Matched: event.HasResults(),
RawRequest: string(dumpedRequest),
RawResponse: respChain.FullResponse().String(),
Severity: request.options.TemplateInfo.SeverityHolder.Severity.String(),
})
}
// Skip further responses if we have stop-at-first-match and a match
if (request.options.Options.StopAtFirstMatch || request.options.StopAtFirstMatch || request.StopAtFirstMatch) && event.HasResults() {
return nil

View File

@ -14,6 +14,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/frequency"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
"github.com/projectdiscovery/nuclei/v3/pkg/js/compiler"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/parser"
@ -99,6 +100,8 @@ type ExecutorOptions struct {
InputHelper *input.Helper
// FuzzParamsFrequency is a cache for parameter frequency
FuzzParamsFrequency *frequency.Tracker
// FuzzStatsDB is a database for fuzzing stats
FuzzStatsDB *stats.Tracker
Operators []*operators.Operators // only used by offlinehttp module

View File

@ -79,7 +79,7 @@ func (sc *ScanEventsCharts) Start(addr string) {
e := echo.New()
e.HideBanner = true
e.GET("/concurrency", sc.ConcurrencyVsTime)
e.GET("/requests", sc.TotalRequestsOverTime)
e.GET("/fuzz", sc.TotalRequestsOverTime)
e.GET("/slow", sc.TopSlowTemplates)
e.GET("/rps", sc.RequestsVSInterval)
e.GET("/", sc.AllCharts)

View File

@ -423,6 +423,18 @@ type Options struct {
ProbeConcurrency int
// Dast only runs DAST templates
DAST bool
// DASTServer is the flag to start nuclei as a DAST server
DASTServer bool
// DASTServerToken is the token optional for the dast server
DASTServerToken string
// DASTServerAddress is the address for the dast server
DASTServerAddress string
// DASTReport enables dast report server & final report generation
DASTReport bool
// Scope contains a list of regexes for in-scope URLS
Scope goflags.StringSlice
// OutOfScope contains a list of regexes for out-scope URLS
OutOfScope goflags.StringSlice
// HttpApiEndpoint is the experimental http api endpoint
HttpApiEndpoint string
// ListTemplateProfiles lists all available template profiles