Merge pull request #5813 from projectdiscovery/dev

v3.3.6
Sandeep Singh 2024-11-21 23:33:29 +05:30 committed by GitHub
commit 419f08f61c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
92 changed files with 1656 additions and 643 deletions

View File

@ -1,10 +1,12 @@
name: 🤖 dep auto merge name: 🤖 Auto Merge
on: on:
pull_request: pull_request_review:
branches: types: [submitted]
- dev workflow_run:
workflow_dispatch: workflows: ["♾️ Compatibility Check"]
types:
- completed
permissions: permissions:
pull-requests: write pull-requests: write
@ -12,11 +14,11 @@ permissions:
repository-projects: write repository-projects: write
jobs: jobs:
automerge: auto-merge:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.actor == 'dependabot[bot]' if: github.actor == 'dependabot[bot]'
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
with: with:
token: ${{ secrets.DEPENDABOT_PAT }} token: ${{ secrets.DEPENDABOT_PAT }}

View File

@ -1,70 +0,0 @@
name: 🔨 Build Test
on:
pull_request:
paths:
- '**.go'
- '**.mod'
workflow_dispatch:
jobs:
build:
name: Test Builds
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: ${{ matrix.os }}
steps:
- name: Check out code
uses: actions/checkout@v4
- name: Set up Go
uses: projectdiscovery/actions/setup/go@v1
# required for running python code in py-snippet.yaml integration test
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.10'
- name: Verify Go modules
run: make verify
- name: Build
run: go build .
working-directory: cmd/nuclei/
- name: Test
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
PDCP_API_KEY: "${{ secrets.PDCP_API_KEY }}"
run: go test ./...
- name: Integration Tests
timeout-minutes: 50
env:
GH_ACTION: true
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
PDCP_API_KEY: "${{ secrets.PDCP_API_KEY }}"
run: |
chmod +x run.sh
bash run.sh ${{ matrix.os }}
working-directory: integration_tests/
- name: Race Condition Tests
if: ${{ matrix.os != 'windows-latest' }} # known issue: https://github.com/golang/go/issues/46099
run: go run -race . -l ../functional-test/targets.txt -id tech-detect,tls-version
working-directory: cmd/nuclei/
- name: Example SDK Simple
run: go run .
working-directory: examples/simple/
# Temporarily disabled very flaky in github actions
# - name: Example SDK Advanced
# run: go run .
# working-directory: examples/advanced/
- name: Example SDK with speed control
run: go run .
working-directory: examples/with_speed_control/

View File

@ -1,39 +0,0 @@
name: 🚨 CodeQL Analysis
on:
pull_request:
paths:
- '**.go'
- '**.mod'
workflow_dispatch:
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest-16-cores
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'go' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
- name: Autobuild
uses: github/codeql-action/autobuild@v2
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

View File

@ -0,0 +1,19 @@
name: ♾️ Compatibility Check
on:
pull_request:
types: [opened, synchronize]
branches:
- dev
jobs:
check:
if: github.actor == 'dependabot[bot]'
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- run: go mod download && go mod verify && go vet ./...

View File

@ -1,4 +1,4 @@
name: 🌥 Docker Push name: 🐳 Docker Push
on: on:
workflow_run: workflow_run:
@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest-16-cores runs-on: ubuntu-latest-16-cores
steps: steps:
- name: Git Checkout - name: Git Checkout
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Get GitHub tag - name: Get GitHub tag
id: meta id: meta

View File

@ -1,31 +0,0 @@
name: 🧪 Functional Test
on:
pull_request:
paths:
- '**.go'
workflow_dispatch:
jobs:
functional:
name: Functional Test
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
steps:
- name: Check out code
uses: actions/checkout@v4
- name: Set up Go
uses: projectdiscovery/actions/setup/go@v1
- name: Functional Tests
env:
GH_ACTION: true
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
run: |
chmod +x run.sh
bash run.sh ${{ matrix.os }}
working-directory: cmd/functional-test

27 .github/workflows/generate-docs.yaml vendored Normal file
View File

@ -0,0 +1,27 @@
name: ⏰ Generate Docs
on:
push:
branches:
- dev
workflow_dispatch:
jobs:
publish-docs:
if: "${{ !endsWith(github.actor, '[bot]') }}"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/setup/git@v1
- run: make syntax-docs
- run: git status -s | wc -l | xargs -I {} echo CHANGES={} >> $GITHUB_OUTPUT
id: status
- uses: projectdiscovery/actions/commit@v1
if: steps.status.outputs.CHANGES > 0
with:
files: |
SYNTAX-REFERENCE.md
nuclei-jsonschema.json
message: 'docs: update syntax & JSON schema 🤖'
- run: git push origin $GITHUB_REF

View File

@ -1,22 +0,0 @@
name: 🙏🏻 Lint Test
on:
pull_request:
paths:
- '**.go'
- '**.mod'
workflow_dispatch:
jobs:
lint:
name: Lint Test
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Go
uses: projectdiscovery/actions/setup/go@v1
- name: Run golangci-lint
uses: projectdiscovery/actions/golangci-lint@v1

25 .github/workflows/perf-test.yaml vendored Normal file
View File

@ -0,0 +1,25 @@
name: 🔨 Performance Test
on:
schedule:
- cron: '0 0 * * 0' # Weekly
workflow_dispatch:
jobs:
perf-test:
strategy:
matrix:
count: [50, 100, 150]
runs-on: ubuntu-latest
if: github.repository == 'projectdiscovery/nuclei'
env:
LIST_FILE: "/tmp/targets-${{ matrix.count }}.txt"
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- run: make verify
- name: Generate list
run: for i in {1..${{ matrix.count }}}; do echo "https://scanme.sh/?_=${i}" >> "${LIST_FILE}"; done
- run: go run -race . -l "${LIST_FILE}"
working-directory: cmd/nuclei/

View File

@ -1,31 +0,0 @@
name: 🔨 Performance Test
on:
workflow_dispatch:
schedule:
# Weekly
- cron: '0 0 * * 0'
jobs:
build:
name: Test Performance
strategy:
matrix:
os: [ubuntu-latest, macOS-latest]
runs-on: ${{ matrix.os }}
if: github.repository == 'projectdiscovery/nuclei'
steps:
- name: Check out code
uses: actions/checkout@v4
- name: Set up Go
uses: projectdiscovery/actions/setup/go@v1
- name: Verify Go modules
run: make verify
# Max GH exection time 6H => timeout after that
- name: Running performance with big list
run: go run -race . -l ../functional-test/targets-150.txt
working-directory: cmd/nuclei/

View File

@ -1,47 +0,0 @@
name: ⏰ Publish Docs
on:
push:
branches:
- dev
workflow_dispatch:
jobs:
docs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: "Set up Go"
uses: projectdiscovery/actions/setup/go@v1
- name: "Set up Git"
uses: projectdiscovery/actions/setup/git@v1
- name: Generate YAML Syntax Documentation
id: generate-docs
run: |
if ! which dstdocgen > /dev/null; then
echo -e "Command dstdocgen not found! Installing\c"
go install github.com/projectdiscovery/yamldoc-go/cmd/docgen/dstdocgen@main
fi
go generate pkg/templates/templates.go
go build -o "cmd/docgen/docgen" cmd/docgen/docgen.go
./cmd/docgen/docgen SYNTAX-REFERENCE.md nuclei-jsonschema.json
git status -s | wc -l | xargs -I {} echo CHANGES={} >> $GITHUB_OUTPUT
- name: Commit files
if: steps.generate-docs.outputs.CHANGES > 0
run: |
git add SYNTAX-REFERENCE.md nuclei-jsonschema.json
git commit -m "Auto Generate Syntax Docs + JSONSchema [$(date)] :robot:" -a
- name: Push changes
if: steps.generate-docs.outputs.CHANGES > 0
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: ${{ github.ref }}

View File

@ -1,23 +0,0 @@
name: 🔨 Release Test
on:
pull_request:
paths:
- '**.go'
- '**.mod'
workflow_dispatch:
jobs:
release-test:
runs-on: ubuntu-latest-16-cores
steps:
- name: "Check out code"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Go
uses: projectdiscovery/actions/setup/go@v1
- name: Release snapshot
uses: projectdiscovery/actions/goreleaser@v1

View File

@ -1,4 +1,4 @@
name: 🎉 Release Binary name: 🎉 Release
on: on:
push: push:
@ -13,10 +13,7 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
- uses: projectdiscovery/actions/setup/go@v1
- name: Set up Go
uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/goreleaser@v1 - uses: projectdiscovery/actions/goreleaser@v1
with: with:
release: true release: true

View File

@ -1,22 +0,0 @@
name: 🛠 Template Validate
on:
pull_request:
paths:
- '**.go'
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest-16-cores
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- name: Template Validation
run: |
go run . -ut
go run . -validate
go run . -validate -w workflows
working-directory: cmd/nuclei/

135 .github/workflows/tests.yaml vendored Normal file
View File

@ -0,0 +1,135 @@
name: 🔨 Tests
on:
push:
branches: ["dev"]
paths:
- '**.go'
- '**.mod'
pull_request:
paths:
- '**.go'
- '**.mod'
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
lint:
name: "Lint"
if: "${{ !endsWith(github.actor, '[bot]') }}"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/golangci-lint@v1
tests:
name: "Tests"
needs: ["lint"]
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: "${{ matrix.os }}"
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- run: make vet
- run: make build
- run: make test
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
PDCP_API_KEY: "${{ secrets.PDCP_API_KEY }}"
- run: go run -race . -l ../functional-test/targets.txt -id tech-detect,tls-version
if: ${{ matrix.os != 'windows-latest' }} # known issue: https://github.com/golang/go/issues/46099
working-directory: cmd/nuclei/
sdk:
name: "Run example SDK"
needs: ["tests"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- name: "Simple"
run: go run .
working-directory: examples/simple/
# - run: go run . # Temporarily disabled very flaky in github actions
# working-directory: examples/advanced/
- name: "with Speed Control"
run: go run .
working-directory: examples/with_speed_control/
integration:
name: "Integration tests"
needs: ["tests"]
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- run: bash run.sh "${{ matrix.os }}"
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
PDCP_API_KEY: "${{ secrets.PDCP_API_KEY }}"
timeout-minutes: 50
working-directory: integration_tests/
functional:
name: "Functional tests"
needs: ["tests"]
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- run: bash run.sh "${{ matrix.os }}"
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
working-directory: cmd/functional-test/
validate:
name: "Template validate"
needs: ["tests"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- run: make template-validate
codeql:
name: "CodeQL analysis"
needs: ["tests"]
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
steps:
- uses: actions/checkout@v4
- uses: github/codeql-action/init@v2
with:
languages: 'go'
- uses: github/codeql-action/autobuild@v2
- uses: github/codeql-action/analyze@v2
release:
name: "Release test"
needs: ["tests"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/goreleaser@v1

1 .gitignore vendored
View File

@ -10,6 +10,7 @@
.vscode .vscode
# Binaries # Binaries
/bin/*
**/bindgen **/bindgen
**/debug-* **/debug-*
**/docgen **/docgen

View File

@ -381,7 +381,7 @@ func (r *Request) Type() templateTypes.ProtocolType {
} }
``` ```
Almost all of these protocols have boilerplate functions for which default implementations have been provided in the `providers` package. Examples are the implementation of `Match`, `Extract`, `MakeResultEvent`, GetCompiledOperators`, etc. which are almost same throughout Nuclei protocols code. It is enough to copy-paste them unless customization is required. Almost all of these protocols have boilerplate functions for which default implementations have been provided in the `providers` package. Examples are the implementation of `Match`, `Extract`, `MakeResultEvent`, `GetCompiledOperators`, etc. which are almost same throughout Nuclei protocols code. It is enough to copy-paste them unless customization is required.
`eventcreator` package offers `CreateEventWithAdditionalOptions` function which can be used to create result events after doing request execution. `eventcreator` package offers `CreateEventWithAdditionalOptions` function which can be used to create result events after doing request execution.
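The boilerplate described above is easiest to see as a skeleton. Below is a hypothetical, heavily simplified Go sketch (not the actual Nuclei API) of the shape such a protocol `Request` takes, with `Type`, `GetCompiledOperators`, `Match`, and `Extract` delegating to shared defaults; all type names here are illustrative stand-ins.

```go
// Hypothetical sketch only: stand-in types, not Nuclei's real interfaces.
package customproto

// ProtocolType stands in for templateTypes.ProtocolType.
type ProtocolType int

const TypeCustom ProtocolType = 1

// Operators stands in for the compiled matchers/extractors of a request.
type Operators struct{}

// Request is a minimal protocol request carrying its compiled operators.
type Request struct {
	CompiledOperators *Operators
}

// Type mirrors the Type() method shown in the diff above.
func (r *Request) Type() ProtocolType { return TypeCustom }

// GetCompiledOperators is one of the copy-paste boilerplate methods the text
// mentions: it just exposes the operators compiled at template load time.
func (r *Request) GetCompiledOperators() []*Operators {
	return []*Operators{r.CompiledOperators}
}

// Match and Extract would normally delegate to the default helpers provided
// in the providers package instead of reimplementing logic per protocol.
func (r *Request) Match(data map[string]interface{}) bool       { return false }
func (r *Request) Extract(data map[string]interface{}) []string { return nil }
```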

View File

@ -15,8 +15,9 @@ ifneq ($(shell go env GOOS),darwin)
endif endif
.PHONY: all build build-stats clean devtools-all devtools-bindgen devtools-scrapefuncs .PHONY: all build build-stats clean devtools-all devtools-bindgen devtools-scrapefuncs
.PHONY: devtools-tsgen docs dsl-docs functional fuzzplayground go-build integration .PHONY: devtools-tsgen docs docgen dsl-docs functional fuzzplayground go-build syntax-docs
.PHONY: jsupdate-all jsupdate-bindgen jsupdate-tsgen memogen scan-charts test tidy ts verify .PHONY: integration jsupdate-all jsupdate-bindgen jsupdate-tsgen memogen scan-charts test
.PHONY: tidy ts verify download vet template-validate
all: build all: build
@ -28,42 +29,38 @@ go-build:
$(GOBUILD) $(GOFLAGS) -ldflags '${LDFLAGS}' $(GOBUILD_ADDITIONAL_ARGS) \ $(GOBUILD) $(GOFLAGS) -ldflags '${LDFLAGS}' $(GOBUILD_ADDITIONAL_ARGS) \
-o '${GOBUILD_OUTPUT}' $(GOBUILD_PACKAGES) -o '${GOBUILD_OUTPUT}' $(GOBUILD_PACKAGES)
build: GOBUILD_OUTPUT = nuclei build: GOBUILD_OUTPUT = ./bin/nuclei
build: GOBUILD_PACKAGES = cmd/nuclei/main.go build: GOBUILD_PACKAGES = cmd/nuclei/main.go
build: go-build build: go-build
build-stats: GOBUILD_OUTPUT = nuclei-stats build-stats: GOBUILD_OUTPUT = ./bin/nuclei-stats
build-stats: GOBUILD_PACKAGES = cmd/nuclei/main.go build-stats: GOBUILD_PACKAGES = cmd/nuclei/main.go
build-stats: GOBUILD_ADDITIONAL_ARGS = -tags=stats build-stats: GOBUILD_ADDITIONAL_ARGS = -tags=stats
build-stats: go-build build-stats: go-build
scan-charts: GOBUILD_OUTPUT = scan-charts scan-charts: GOBUILD_OUTPUT = ./bin/scan-charts
scan-charts: GOBUILD_PACKAGES = cmd/scan-charts/main.go scan-charts: GOBUILD_PACKAGES = cmd/scan-charts/main.go
scan-charts: go-build scan-charts: go-build
docs: GOBUILD_OUTPUT = cmd/docgen/docgen docgen: GOBUILD_OUTPUT = ./bin/docgen
docs: GOBUILD_PACKAGES = cmd/docgen/docgen.go docgen: GOBUILD_PACKAGES = cmd/docgen/docgen.go
docs: bin = dstdocgen docgen: bin = dstdocgen
docs: docgen:
@if ! which $(bin) >/dev/null; then \ @if ! which $(bin) >/dev/null; then \
read -p "${bin} not found. Do you want to install it? (y/n) " answer; \ echo "Command $(bin) not found! Installing..."; \
if [ "$$answer" = "y" ]; then \ go install -v github.com/projectdiscovery/yamldoc-go/cmd/docgen/$(bin)@latest; \
echo "Installing ${bin}..."; \
go get -v github.com/projectdiscovery/yamldoc-go/cmd/docgen/$(bin); \
go install -v github.com/projectdiscovery/yamldoc-go/cmd/docgen/$(bin); \
else \
echo "Please install ${bin} manually."; \
exit 1; \
fi \
fi fi
# TODO: FIX THIS PANIC
# TODO: Handle the panic, so that we just need to run `go install $(bin)@latest` (line 51-52)
$(GOCMD) generate pkg/templates/templates.go $(GOCMD) generate pkg/templates/templates.go
$(GOBUILD) -o "${GOBUILD_OUTPUT}" $(GOBUILD_PACKAGES) $(GOBUILD) -o "${GOBUILD_OUTPUT}" $(GOBUILD_PACKAGES)
./$(GOBUILD_OUTPUT) docs.md nuclei-jsonschema.json
git reset --hard # line 59 docs: docgen
docs:
./bin/docgen docs.md nuclei-jsonschema.json
syntax-docs: docgen
syntax-docs:
./bin/docgen SYNTAX-REFERENCE.md nuclei-jsonschema.json
test: GOFLAGS = -race -v test: GOFLAGS = -race -v
test: test:
@ -78,30 +75,36 @@ functional:
tidy: tidy:
$(GOMOD) tidy $(GOMOD) tidy
verify: tidy download:
$(GOMOD) download
verify: download
$(GOMOD) verify $(GOMOD) verify
devtools-bindgen: GOBUILD_OUTPUT = bindgen vet: verify
$(GOCMD) vet ./...
devtools-bindgen: GOBUILD_OUTPUT = ./bin/bindgen
devtools-bindgen: GOBUILD_PACKAGES = pkg/js/devtools/bindgen/cmd/bindgen/main.go devtools-bindgen: GOBUILD_PACKAGES = pkg/js/devtools/bindgen/cmd/bindgen/main.go
devtools-bindgen: go-build devtools-bindgen: go-build
devtools-tsgen: GOBUILD_OUTPUT = tsgen devtools-tsgen: GOBUILD_OUTPUT = ./bin/tsgen
devtools-tsgen: GOBUILD_PACKAGES = pkg/js/devtools/tsgen/cmd/tsgen/main.go devtools-tsgen: GOBUILD_PACKAGES = pkg/js/devtools/tsgen/cmd/tsgen/main.go
devtools-tsgen: go-build devtools-tsgen: go-build
devtools-scrapefuncs: GOBUILD_OUTPUT = scrapefuncs devtools-scrapefuncs: GOBUILD_OUTPUT = ./bin/scrapefuncs
devtools-scrapefuncs: GOBUILD_PACKAGES = pkg/js/devtools/scrapefuncs/main.go devtools-scrapefuncs: GOBUILD_PACKAGES = pkg/js/devtools/scrapefuncs/main.go
devtools-scrapefuncs: go-build devtools-scrapefuncs: go-build
devtools-all: devtools-bindgen devtools-tsgen devtools-scrapefuncs devtools-all: devtools-bindgen devtools-tsgen devtools-scrapefuncs
jsupdate-bindgen: GOBUILD_OUTPUT = bindgen jsupdate-bindgen: GOBUILD_OUTPUT = ./bin/bindgen
jsupdate-bindgen: GOBUILD_PACKAGES = pkg/js/devtools/bindgen/cmd/bindgen/main.go jsupdate-bindgen: GOBUILD_PACKAGES = pkg/js/devtools/bindgen/cmd/bindgen/main.go
jsupdate-bindgen: go-build jsupdate-bindgen: go-build
jsupdate-bindgen: jsupdate-bindgen:
./$(GOBUILD_OUTPUT) -dir pkg/js/libs -out pkg/js/generated ./$(GOBUILD_OUTPUT) -dir pkg/js/libs -out pkg/js/generated
jsupdate-tsgen: GOBUILD_OUTPUT = tsgen jsupdate-tsgen: GOBUILD_OUTPUT = ./bin/tsgen
jsupdate-tsgen: GOBUILD_PACKAGES = pkg/js/devtools/tsgen/cmd/tsgen/main.go jsupdate-tsgen: GOBUILD_PACKAGES = pkg/js/devtools/tsgen/cmd/tsgen/main.go
jsupdate-tsgen: go-build jsupdate-tsgen: go-build
jsupdate-tsgen: jsupdate-tsgen:
@ -111,18 +114,24 @@ jsupdate-all: jsupdate-bindgen jsupdate-tsgen
ts: jsupdate-tsgen ts: jsupdate-tsgen
fuzzplayground: GOBUILD_OUTPUT = fuzzplayground fuzzplayground: GOBUILD_OUTPUT = ./bin/fuzzplayground
fuzzplayground: GOBUILD_PACKAGES = cmd/tools/fuzzplayground/main.go fuzzplayground: GOBUILD_PACKAGES = cmd/tools/fuzzplayground/main.go
fuzzplayground: LDFLAGS = -s -w fuzzplayground: LDFLAGS = -s -w
fuzzplayground: go-build fuzzplayground: go-build
memogen: GOBUILD_OUTPUT = memogen memogen: GOBUILD_OUTPUT = ./bin/memogen
memogen: GOBUILD_PACKAGES = cmd/memogen/memogen.go memogen: GOBUILD_PACKAGES = cmd/memogen/memogen.go
memogen: go-build memogen: go-build
memogen: memogen:
./$(GOBUILD_OUTPUT) -src pkg/js/libs -tpl cmd/memogen/function.tpl ./$(GOBUILD_OUTPUT) -src pkg/js/libs -tpl cmd/memogen/function.tpl
dsl-docs: GOBUILD_OUTPUT = scrapefuncs dsl-docs: GOBUILD_OUTPUT = ./bin/scrapefuncs
dsl-docs: GOBUILD_PACKAGES = pkg/js/devtools/scrapefuncs/main.go dsl-docs: GOBUILD_PACKAGES = pkg/js/devtools/scrapefuncs/main.go
dsl-docs: dsl-docs:
./$(GOBUILD_OUTPUT) -out dsl.md ./$(GOBUILD_OUTPUT) -out dsl.md
template-validate: build
template-validate:
./bin/nuclei -ut
./bin/nuclei -validate
./bin/nuclei -validate -w workflows

View File

@ -1373,6 +1373,19 @@ Fuzzing describes schema to fuzz http requests
<div class="dd"> <div class="dd">
<code>analyzer</code> <i><a href="#analyzersanalyzertemplate">analyzers.AnalyzerTemplate</a></i>
</div>
<div class="dt">
Analyzer is an analyzer to use for matching the response.
</div>
<hr />
<div class="dd">
<code>self-contained</code> <i>bool</i> <code>self-contained</code> <i>bool</i>
</div> </div>
@ -2025,6 +2038,59 @@ Appears in:
## analyzers.AnalyzerTemplate
AnalyzerTemplate is the template for the analyzer
Appears in:
- <code><a href="#httprequest">http.Request</a>.analyzer</code>
<hr />
<div class="dd">
<code>name</code> <i>string</i>
</div>
<div class="dt">
Name is the name of the analyzer to use
Valid values:
- <code>time_delay</code>
</div>
<hr />
<div class="dd">
<code>parameters</code> <i>map[string]interface{}</i>
</div>
<div class="dt">
Parameters is the parameters for the analyzer
Parameters are different for each analyzer. For example, you can customize
time_delay analyzer with sleep_duration, time_slope_error_range, etc. Refer
to the docs for each analyzer to get an idea about parameters.
</div>
<hr />
## SignatureTypeHolder ## SignatureTypeHolder
SignatureTypeHolder is used to hold internal type of the signature SignatureTypeHolder is used to hold internal type of the signature
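Read together, the two documented analyzer fields suggest a small structure. A minimal sketch, assuming YAML tags and field types that match the docs above (the real analyzers package may differ):

```go
// Sketch of the documented analyzer template fields; yaml tags are assumptions.
package analyzers

// AnalyzerTemplate holds the analyzer name (currently only "time_delay" is
// documented) and its analyzer-specific parameters, e.g. sleep_duration or
// time_slope_error_range for the time_delay analyzer.
type AnalyzerTemplate struct {
	Name       string                 `yaml:"name"`
	Parameters map[string]interface{} `yaml:"parameters"`
}
```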

View File

@ -8,6 +8,7 @@ import (
"os" "os"
"strings" "strings"
"github.com/kitabisa/go-ci"
"github.com/logrusorgru/aurora" "github.com/logrusorgru/aurora"
"github.com/pkg/errors" "github.com/pkg/errors"
@ -17,7 +18,6 @@ import (
var ( var (
success = aurora.Green("[✓]").String() success = aurora.Green("[✓]").String()
failed = aurora.Red("[✘]").String() failed = aurora.Red("[✘]").String()
githubAction = os.Getenv("GH_ACTION") == "true"
mainNucleiBinary = flag.String("main", "", "Main Branch Nuclei Binary") mainNucleiBinary = flag.String("main", "", "Main Branch Nuclei Binary")
devNucleiBinary = flag.String("dev", "", "Dev Branch Nuclei Binary") devNucleiBinary = flag.String("dev", "", "Dev Branch Nuclei Binary")
@ -45,7 +45,7 @@ func runFunctionalTests(debug bool) (error, bool) {
errored, failedTestCases := runTestCases(file, debug) errored, failedTestCases := runTestCases(file, debug)
if githubAction { if ci.IsCI() {
fmt.Println("::group::Failed tests with debug") fmt.Println("::group::Failed tests with debug")
for _, failedTestCase := range failedTestCases { for _, failedTestCase := range failedTestCases {
_ = runTestCase(failedTestCase, true) _ = runTestCase(failedTestCase, true)
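The change above swaps a hand-rolled GH_ACTION environment check for the go-ci library. A minimal sketch of the pattern, using the ci.IsCI() call that appears in the diff; the surrounding main() and the re-run step are illustrative only:

```go
package main

import (
	"fmt"

	ci "github.com/kitabisa/go-ci"
)

func main() {
	// ci.IsCI() detects any supported CI environment, replacing the old
	// GH_ACTION == "true" check that only covered GitHub Actions.
	if ci.IsCI() {
		// ::group:: / ::endgroup:: create collapsible sections in Actions logs.
		fmt.Println("::group::Failed tests with debug")
		// re-run failed test cases with debug output here
		fmt.Println("::endgroup::")
	}
}
```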

View File

@ -99,7 +99,7 @@ type codePreCondition struct{}
// Execute executes a test case and returns an error if occurred // Execute executes a test case and returns an error if occurred
func (h *codePreCondition) Execute(filePath string) error { func (h *codePreCondition) Execute(filePath string) error {
results, err := testutils.RunNucleiArgsWithEnvAndGetResults(debug, getEnvValues(), "-t", filePath, "-u", "input", "-code") results, err := testutils.RunNucleiArgsWithEnvAndGetResults(debug, getEnvValues(), "-t", filePath, "-u", "input", "-code", "-esc")
if err != nil { if err != nil {
return err return err
} }

View File

@ -15,7 +15,7 @@ type fileWithOrMatcher struct{}
// Execute executes a test case and returns an error if occurred // Execute executes a test case and returns an error if occurred
func (h *fileWithOrMatcher) Execute(filePath string) error { func (h *fileWithOrMatcher) Execute(filePath string) error {
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "protocols/file/data/", debug) results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "protocols/file/data/", debug, "-file")
if err != nil { if err != nil {
return err return err
} }
@ -27,7 +27,7 @@ type fileWithAndMatcher struct{}
// Execute executes a test case and returns an error if occurred // Execute executes a test case and returns an error if occurred
func (h *fileWithAndMatcher) Execute(filePath string) error { func (h *fileWithAndMatcher) Execute(filePath string) error {
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "protocols/file/data/", debug) results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "protocols/file/data/", debug, "-file")
if err != nil { if err != nil {
return err return err
} }
@ -39,7 +39,7 @@ type fileWithExtractor struct{}
// Execute executes a test case and returns an error if occurred // Execute executes a test case and returns an error if occurred
func (h *fileWithExtractor) Execute(filePath string) error { func (h *fileWithExtractor) Execute(filePath string) error {
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "protocols/file/data/", debug) results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "protocols/file/data/", debug, "-file")
if err != nil { if err != nil {
return err return err
} }

View File

@ -22,7 +22,7 @@ var flowTestcases = []TestCaseInfo{
type conditionalFlow struct{} type conditionalFlow struct{}
func (t *conditionalFlow) Execute(filePath string) error { func (t *conditionalFlow) Execute(filePath string) error {
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "blog.projectdiscovery.io", debug) results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "cloud.projectdiscovery.io", debug)
if err != nil { if err != nil {
return err return err
} }

View File

@ -155,7 +155,7 @@ func (h *httpInteractshRequest) Execute(filePath string) error {
return err return err
} }
return expectResultsCount(results, 1) return expectResultsCount(results, 1, 2)
} }
type httpDefaultMatcherCondition struct{} type httpDefaultMatcherCondition struct{}
@ -952,7 +952,7 @@ func (h *httpRequestSelfContained) Execute(filePath string) error {
}() }()
defer server.Close() defer server.Close()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug) results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc")
if err != nil { if err != nil {
return err return err
} }
@ -988,7 +988,7 @@ func (h *httpRequestSelfContainedWithParams) Execute(filePath string) error {
}() }()
defer server.Close() defer server.Close()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug) results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc")
if err != nil { if err != nil {
return err return err
} }
@ -1031,7 +1031,7 @@ func (h *httpRequestSelfContainedFileInput) Execute(filePath string) error {
} }
defer FileLoc.Close() defer FileLoc.Close()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-V", "test="+FileLoc.Name()) results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-V", "test="+FileLoc.Name(), "-esc")
if err != nil { if err != nil {
return err return err
} }
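The interactsh test now passes two acceptable counts, `expectResultsCount(results, 1, 2)`, which implies a variadic helper. A plausible shape, assumed from the call site rather than taken from the repository:

```go
package main

import "fmt"

// expectResultsCount treats any of the given counts as acceptable, which fits
// interactsh-style tests where the number of callbacks can legitimately vary.
func expectResultsCount(results []string, expected ...int) error {
	for _, want := range expected {
		if len(results) == want {
			return nil
		}
	}
	return fmt.Errorf("incorrect number of results %d, expected one of %v", len(results), expected)
}

func main() {
	fmt.Println(expectResultsCount([]string{"hit-1", "hit-2"}, 1, 2)) // <nil>
}
```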

View File

@ -7,6 +7,7 @@ import (
"runtime" "runtime"
"strings" "strings"
"github.com/kitabisa/go-ci"
"github.com/logrusorgru/aurora" "github.com/logrusorgru/aurora"
"github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger"
@ -23,7 +24,6 @@ type TestCaseInfo struct {
var ( var (
debug = os.Getenv("DEBUG") == "true" debug = os.Getenv("DEBUG") == "true"
githubAction = os.Getenv("GH_ACTION") == "true"
customTests = os.Getenv("TESTS") customTests = os.Getenv("TESTS")
protocol = os.Getenv("PROTO") protocol = os.Getenv("PROTO")
@ -103,12 +103,20 @@ func main() {
failedTestTemplatePaths := runTests(customTestsList) failedTestTemplatePaths := runTests(customTestsList)
if len(failedTestTemplatePaths) > 0 { if len(failedTestTemplatePaths) > 0 {
if githubAction { if ci.IsCI() {
// run failed tests again assuming they are flaky
// if they fail as well only then we assume that there is an actual issue
fmt.Println("::group::Running failed tests again")
failedTestTemplatePaths = runTests(failedTestTemplatePaths)
fmt.Println("::endgroup::")
if len(failedTestTemplatePaths) > 0 {
debug = true debug = true
fmt.Println("::group::Failed integration tests in debug mode") fmt.Println("::group::Failed integration tests in debug mode")
_ = runTests(failedTestTemplatePaths) _ = runTests(failedTestTemplatePaths)
fmt.Println("::endgroup::") fmt.Println("::endgroup::")
} }
}
os.Exit(1) os.Exit(1)
} }
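The split-diff rendering above makes the new control flow hard to follow, so here is the same retry-then-debug pattern as a self-contained sketch; runTests is a stand-in for the harness function of the same name:

```go
package main

import (
	"fmt"
	"os"

	ci "github.com/kitabisa/go-ci"
)

var debug = os.Getenv("DEBUG") == "true"

// runTests stands in for the harness call that returns failing template paths.
func runTests(paths []string) []string { return nil }

func main() {
	failed := runTests(nil)
	if len(failed) > 0 {
		if ci.IsCI() {
			// Re-run failed tests once, assuming they are flaky; only if they
			// fail again is there considered to be a real issue.
			fmt.Println("::group::Running failed tests again")
			failed = runTests(failed)
			fmt.Println("::endgroup::")
			if len(failed) > 0 {
				debug = true
				fmt.Println("::group::Failed integration tests in debug mode")
				_ = runTests(failed)
				fmt.Println("::endgroup::")
			}
		}
		os.Exit(1)
	}
}
```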

View File

@ -14,7 +14,7 @@ type multiProtoDynamicExtractor struct{}
// Execute executes a test case and returns an error if occurred // Execute executes a test case and returns an error if occurred
func (h *multiProtoDynamicExtractor) Execute(templatePath string) error { func (h *multiProtoDynamicExtractor) Execute(templatePath string) error {
results, err := testutils.RunNucleiTemplateAndGetResults(templatePath, "blog.projectdiscovery.io", debug) results, err := testutils.RunNucleiTemplateAndGetResults(templatePath, "docs.projectdiscovery.io", debug)
if err != nil { if err != nil {
return err return err
} }

View File

@ -119,7 +119,7 @@ func (h *networkRequestSelContained) Execute(filePath string) error {
_, _ = conn.Write([]byte("Authentication successful")) _, _ = conn.Write([]byte("Authentication successful"))
}) })
defer ts.Close() defer ts.Close()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug) results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc")
if err != nil { if err != nil {
return err return err
} }

View File

@ -263,6 +263,8 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.BoolVar(&options.SignTemplates, "sign", false, "signs the templates with the private key defined in NUCLEI_SIGNATURE_PRIVATE_KEY env variable"), flagSet.BoolVar(&options.SignTemplates, "sign", false, "signs the templates with the private key defined in NUCLEI_SIGNATURE_PRIVATE_KEY env variable"),
flagSet.BoolVar(&options.EnableCodeTemplates, "code", false, "enable loading code protocol-based templates"), flagSet.BoolVar(&options.EnableCodeTemplates, "code", false, "enable loading code protocol-based templates"),
flagSet.BoolVarP(&options.DisableUnsignedTemplates, "disable-unsigned-templates", "dut", false, "disable running unsigned templates or templates with mismatched signature"), flagSet.BoolVarP(&options.DisableUnsignedTemplates, "disable-unsigned-templates", "dut", false, "disable running unsigned templates or templates with mismatched signature"),
flagSet.BoolVarP(&options.EnableSelfContainedTemplates, "enable-self-contained", "esc", false, "enable loading self-contained templates"),
flagSet.BoolVar(&options.EnableFileTemplates, "file", false, "enable loading file templates"),
) )
flagSet.CreateGroup("filters", "Filtering", flagSet.CreateGroup("filters", "Filtering",
@ -492,6 +494,11 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
options.DAST = true options.DAST = true
} }
// All cloud-based templates depend on both code and self-contained templates.
if options.EnableCodeTemplates {
options.EnableSelfContainedTemplates = true
}
// api key hierarchy: cli flag > env var > .pdcp/credential file // api key hierarchy: cli flag > env var > .pdcp/credential file
if pdcpauth == "true" { if pdcpauth == "true" {
runner.AuthWithPDCP() runner.AuthWithPDCP()
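The hunk above both registers the new -esc and -file flags and makes -code imply self-contained templates. A minimal sketch of that implication, with a stand-in Options struct rather than Nuclei's types.Options:

```go
package main

import "fmt"

// Options stands in for the relevant fields of Nuclei's options type.
type Options struct {
	EnableCodeTemplates          bool
	EnableSelfContainedTemplates bool
	EnableFileTemplates          bool
}

// normalize applies the dependency from the diff: all cloud-based templates
// depend on both code and self-contained templates, so -code implies -esc.
func normalize(opts *Options) {
	if opts.EnableCodeTemplates {
		opts.EnableSelfContainedTemplates = true
	}
}

func main() {
	opts := &Options{EnableCodeTemplates: true}
	normalize(opts)
	fmt.Println("self-contained enabled:", opts.EnableSelfContainedTemplates)
}
```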

View File

@ -69,6 +69,7 @@ func init() {
// need to set headless to true for headless templates // need to set headless to true for headless templates
defaultOpts.Headless = true defaultOpts.Headless = true
defaultOpts.EnableCodeTemplates = true defaultOpts.EnableCodeTemplates = true
defaultOpts.EnableSelfContainedTemplates = true
if err := protocolstate.Init(defaultOpts); err != nil { if err := protocolstate.Init(defaultOpts); err != nil {
gologger.Fatal().Msgf("Could not initialize protocol state: %s\n", err) gologger.Fatal().Msgf("Could not initialize protocol state: %s\n", err)
} }

42 go.mod
View File

@ -21,11 +21,11 @@ require (
github.com/pkg/errors v0.9.1 github.com/pkg/errors v0.9.1
github.com/projectdiscovery/clistats v0.1.1 github.com/projectdiscovery/clistats v0.1.1
github.com/projectdiscovery/fastdialer v0.2.9 github.com/projectdiscovery/fastdialer v0.2.9
github.com/projectdiscovery/hmap v0.0.59 github.com/projectdiscovery/hmap v0.0.67
github.com/projectdiscovery/interactsh v1.2.0 github.com/projectdiscovery/interactsh v1.2.0
github.com/projectdiscovery/rawhttp v0.1.67 github.com/projectdiscovery/rawhttp v0.1.74
github.com/projectdiscovery/retryabledns v1.0.77 github.com/projectdiscovery/retryabledns v1.0.85
github.com/projectdiscovery/retryablehttp-go v1.0.78 github.com/projectdiscovery/retryablehttp-go v1.0.86
github.com/projectdiscovery/yamldoc-go v1.0.4 github.com/projectdiscovery/yamldoc-go v1.0.4
github.com/remeh/sizedwaitgroup v1.0.0 github.com/remeh/sizedwaitgroup v1.0.0
github.com/rs/xid v1.5.0 github.com/rs/xid v1.5.0
@ -38,9 +38,9 @@ require (
github.com/weppos/publicsuffix-go v0.30.2 github.com/weppos/publicsuffix-go v0.30.2
github.com/xanzy/go-gitlab v0.107.0 github.com/xanzy/go-gitlab v0.107.0
go.uber.org/multierr v1.11.0 go.uber.org/multierr v1.11.0
golang.org/x/net v0.29.0 golang.org/x/net v0.30.0
golang.org/x/oauth2 v0.22.0 golang.org/x/oauth2 v0.22.0
golang.org/x/text v0.18.0 golang.org/x/text v0.19.0
gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v2 v2.4.0
) )
@ -81,24 +81,24 @@ require (
github.com/microsoft/go-mssqldb v1.6.0 github.com/microsoft/go-mssqldb v1.6.0
github.com/ory/dockertest/v3 v3.10.0 github.com/ory/dockertest/v3 v3.10.0
github.com/praetorian-inc/fingerprintx v1.1.9 github.com/praetorian-inc/fingerprintx v1.1.9
github.com/projectdiscovery/dsl v0.2.1 github.com/projectdiscovery/dsl v0.3.3
github.com/projectdiscovery/fasttemplate v0.0.2 github.com/projectdiscovery/fasttemplate v0.0.2
github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb
github.com/projectdiscovery/goflags v0.1.64 github.com/projectdiscovery/goflags v0.1.65
github.com/projectdiscovery/gologger v1.1.24 github.com/projectdiscovery/gologger v1.1.31
github.com/projectdiscovery/gostruct v0.0.2 github.com/projectdiscovery/gostruct v0.0.2
github.com/projectdiscovery/gozero v0.0.2 github.com/projectdiscovery/gozero v0.0.3
github.com/projectdiscovery/httpx v1.6.8 github.com/projectdiscovery/httpx v1.6.9
github.com/projectdiscovery/mapcidr v1.1.34 github.com/projectdiscovery/mapcidr v1.1.34
github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5 github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5
github.com/projectdiscovery/ratelimit v0.0.56 github.com/projectdiscovery/ratelimit v0.0.61
github.com/projectdiscovery/rdap v0.9.1-0.20221108103045-9865884d1917 github.com/projectdiscovery/rdap v0.9.1-0.20221108103045-9865884d1917
github.com/projectdiscovery/sarif v0.0.1 github.com/projectdiscovery/sarif v0.0.1
github.com/projectdiscovery/tlsx v1.1.7 github.com/projectdiscovery/tlsx v1.1.8
github.com/projectdiscovery/uncover v1.0.9 github.com/projectdiscovery/uncover v1.0.9
github.com/projectdiscovery/useragent v0.0.71 github.com/projectdiscovery/useragent v0.0.78
github.com/projectdiscovery/utils v0.2.11 github.com/projectdiscovery/utils v0.2.18
github.com/projectdiscovery/wappalyzergo v0.1.18 github.com/projectdiscovery/wappalyzergo v0.2.2
github.com/redis/go-redis/v9 v9.1.0 github.com/redis/go-redis/v9 v9.1.0
github.com/seh-msft/burpxml v1.0.1 github.com/seh-msft/burpxml v1.0.1
github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466 github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466
@ -107,7 +107,7 @@ require (
github.com/yassinebenaid/godump v0.10.0 github.com/yassinebenaid/godump v0.10.0
github.com/zmap/zgrab2 v0.1.8-0.20230806160807-97ba87c0e706 github.com/zmap/zgrab2 v0.1.8-0.20230806160807-97ba87c0e706
go.mongodb.org/mongo-driver v1.17.0 go.mongodb.org/mongo-driver v1.17.0
golang.org/x/term v0.24.0 golang.org/x/term v0.25.0
gopkg.in/yaml.v3 v3.0.1 gopkg.in/yaml.v3 v3.0.1
moul.io/http2curl v1.0.0 moul.io/http2curl v1.0.0
) )
@ -210,7 +210,7 @@ require (
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/projectdiscovery/asnmap v1.1.1 // indirect github.com/projectdiscovery/asnmap v1.1.1 // indirect
github.com/projectdiscovery/cdncheck v1.1.0 // indirect github.com/projectdiscovery/cdncheck v1.1.0 // indirect
github.com/projectdiscovery/freeport v0.0.6 // indirect github.com/projectdiscovery/freeport v0.0.7 // indirect
github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb // indirect github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb // indirect
github.com/projectdiscovery/machineid v0.0.0-20240226150047-2e2c51e35983 // indirect github.com/projectdiscovery/machineid v0.0.0-20240226150047-2e2c51e35983 // indirect
github.com/refraction-networking/utls v1.6.7 // indirect github.com/refraction-networking/utls v1.6.7 // indirect
@ -272,7 +272,7 @@ require (
github.com/goburrow/cache v0.1.4 // indirect github.com/goburrow/cache v0.1.4 // indirect
github.com/gobwas/httphead v0.1.0 // indirect github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect github.com/gobwas/pool v0.2.1 // indirect
github.com/golang-jwt/jwt/v4 v4.5.0 // indirect github.com/golang-jwt/jwt/v4 v4.5.1 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/snappy v0.0.4 // indirect github.com/golang/snappy v0.0.4 // indirect
github.com/google/go-querystring v1.1.0 // indirect github.com/google/go-querystring v1.1.0 // indirect
@ -313,10 +313,10 @@ require (
go.etcd.io/bbolt v1.3.10 // indirect go.etcd.io/bbolt v1.3.10 // indirect
go.uber.org/zap v1.25.0 // indirect go.uber.org/zap v1.25.0 // indirect
goftp.io/server/v2 v2.0.1 // indirect goftp.io/server/v2 v2.0.1 // indirect
golang.org/x/crypto v0.27.0 // indirect golang.org/x/crypto v0.28.0 // indirect
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842
golang.org/x/mod v0.17.0 // indirect golang.org/x/mod v0.17.0 // indirect
golang.org/x/sys v0.25.0 // indirect golang.org/x/sys v0.26.0 // indirect
golang.org/x/time v0.6.0 // indirect golang.org/x/time v0.6.0 // indirect
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d
google.golang.org/protobuf v1.34.2 // indirect google.golang.org/protobuf v1.34.2 // indirect

84 go.sum
View File

@ -427,8 +427,8 @@ github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69
github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo=
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
@ -845,28 +845,28 @@ github.com/projectdiscovery/cdncheck v1.1.0 h1:qDITidmJsejzpk3rMkauCh6sjI2GH9hW/
github.com/projectdiscovery/cdncheck v1.1.0/go.mod h1:sZ8U4MjHSsyaTVjBbYWHT1cwUVvUYwDX1W+WvWRicIc= github.com/projectdiscovery/cdncheck v1.1.0/go.mod h1:sZ8U4MjHSsyaTVjBbYWHT1cwUVvUYwDX1W+WvWRicIc=
github.com/projectdiscovery/clistats v0.1.1 h1:8mwbdbwTU4aT88TJvwIzTpiNeow3XnAB72JIg66c8wE= github.com/projectdiscovery/clistats v0.1.1 h1:8mwbdbwTU4aT88TJvwIzTpiNeow3XnAB72JIg66c8wE=
github.com/projectdiscovery/clistats v0.1.1/go.mod h1:4LtTC9Oy//RiuT1+76MfTg8Hqs7FQp1JIGBM3nHK6a0= github.com/projectdiscovery/clistats v0.1.1/go.mod h1:4LtTC9Oy//RiuT1+76MfTg8Hqs7FQp1JIGBM3nHK6a0=
github.com/projectdiscovery/dsl v0.2.1 h1:TK3KD4jsg4YbvY7WJqnz1QyH4AOvAwezeBFOX97Evgk= github.com/projectdiscovery/dsl v0.3.3 h1:4Ij5S86cHlb6xFrS7+5zAiJPeBt5h970XBTHqeTkpyU=
github.com/projectdiscovery/dsl v0.2.1/go.mod h1:IRQXsmi5/g1dDZ79//A9t2vrRtxm4frRSd5t8CZVSbI= github.com/projectdiscovery/dsl v0.3.3/go.mod h1:DAjSeaogLM9f0Ves2zDc/vbJrfcv+kEmS51p0dLLaPI=
github.com/projectdiscovery/fastdialer v0.2.9 h1:vDCqxVMCyUu3oVEizEK1K8K+CCcLkVDW3X2HfiWaVFA= github.com/projectdiscovery/fastdialer v0.2.9 h1:vDCqxVMCyUu3oVEizEK1K8K+CCcLkVDW3X2HfiWaVFA=
github.com/projectdiscovery/fastdialer v0.2.9/go.mod h1:mYv5QaNBDDSHlZO9DI0niRMw+G5hUzwIhs8QixSElUI= github.com/projectdiscovery/fastdialer v0.2.9/go.mod h1:mYv5QaNBDDSHlZO9DI0niRMw+G5hUzwIhs8QixSElUI=
github.com/projectdiscovery/fasttemplate v0.0.2 h1:h2cISk5xDhlJEinlBQS6RRx0vOlOirB2y3Yu4PJzpiA= github.com/projectdiscovery/fasttemplate v0.0.2 h1:h2cISk5xDhlJEinlBQS6RRx0vOlOirB2y3Yu4PJzpiA=
github.com/projectdiscovery/fasttemplate v0.0.2/go.mod h1:XYWWVMxnItd+r0GbjA1GCsUopMw1/XusuQxdyAIHMCw= github.com/projectdiscovery/fasttemplate v0.0.2/go.mod h1:XYWWVMxnItd+r0GbjA1GCsUopMw1/XusuQxdyAIHMCw=
github.com/projectdiscovery/freeport v0.0.6 h1:ROqzuXN8JPqkGdBueb3ah691nS2g2p7r3/3x2E33GbI= github.com/projectdiscovery/freeport v0.0.7 h1:Q6uXo/j8SaV/GlAHkEYQi8WQoPXyJWxyspx+aFmz9Qk=
github.com/projectdiscovery/freeport v0.0.6/go.mod h1:T2kIy+WrbyxBIhI8V3Y9aeNGnuhnM8tEUSK/cm9GjAg= github.com/projectdiscovery/freeport v0.0.7/go.mod h1:cOhWKvNBe9xM6dFJ3RrrLvJ5vXx2NQ36SecuwjenV2k=
github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb h1:rutG906Drtbpz4DwU5mhGIeOhRcktDH4cGQitGUMAsg= github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb h1:rutG906Drtbpz4DwU5mhGIeOhRcktDH4cGQitGUMAsg=
github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb/go.mod h1:FLjF1DmZ+POoGEiIQdWuYVwS++C/GwpX8YaCsTSm1RY= github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb/go.mod h1:FLjF1DmZ+POoGEiIQdWuYVwS++C/GwpX8YaCsTSm1RY=
github.com/projectdiscovery/goflags v0.1.64 h1:FDfwdt9N97Hi8OuhbkDlKtVttpc/CRMIWQVa08VsHsI= github.com/projectdiscovery/goflags v0.1.65 h1:rjoj+5lP/FDzgeM0WILUTX9AOOnw0J0LXtl8P1SVeGE=
github.com/projectdiscovery/goflags v0.1.64/go.mod h1:3FyHIVQtnycNOc1LE3O1jj/XR5XuMdF9QfHd0ujhnX4= github.com/projectdiscovery/goflags v0.1.65/go.mod h1:cg6+yrLlaekP1hnefBc/UXbH1YGWa0fuzEW9iS1aG4g=
github.com/projectdiscovery/gologger v1.1.24 h1:TmA4k9sA6ZvfyRnprZKQ0Uq34g//u5R9yTDPL9IzTOQ= github.com/projectdiscovery/gologger v1.1.31 h1:FlZi1RsDoRtOkj9+a1PhcOmwD3NdRpDyjp/0/fmpQ/s=
github.com/projectdiscovery/gologger v1.1.24/go.mod h1:JA0JMJ+ply+J2wD062TN4h85thm6/28jAlrntwccKVU= github.com/projectdiscovery/gologger v1.1.31/go.mod h1:zVbkxOmWuh1GEyr6dviEPNwH/GMWdnJrMUSOJbRmDqI=
github.com/projectdiscovery/gostruct v0.0.2 h1:s8gP8ApugGM4go1pA+sVlPDXaWqNP5BBDDSv7VEdG1M= github.com/projectdiscovery/gostruct v0.0.2 h1:s8gP8ApugGM4go1pA+sVlPDXaWqNP5BBDDSv7VEdG1M=
github.com/projectdiscovery/gostruct v0.0.2/go.mod h1:H86peL4HKwMXcQQtEa6lmC8FuD9XFt6gkNR0B/Mu5PE= github.com/projectdiscovery/gostruct v0.0.2/go.mod h1:H86peL4HKwMXcQQtEa6lmC8FuD9XFt6gkNR0B/Mu5PE=
github.com/projectdiscovery/gozero v0.0.2 h1:8fJeaCjxL9tpm33uG/RsCQs6HGM/NE6eA3cjkilRQ+E= github.com/projectdiscovery/gozero v0.0.3 h1:tsYkrSvWw4WdIUJyisd4MB1vRiw1X57TuVVk3p8Z3G8=
github.com/projectdiscovery/gozero v0.0.2/go.mod h1:d8bZvDWW07LWNYWrwjZ4OO1I0cpkfqaysyDfSs9ibK8= github.com/projectdiscovery/gozero v0.0.3/go.mod h1:MpJ37Dsh94gy2EKqaemdeh+CzduGVB2SDfhr6Upsjew=
github.com/projectdiscovery/hmap v0.0.59 h1:xWCr/GY2QJanFzwKydh/EkGdOKM4iAcN9hQvvCMgO6A= github.com/projectdiscovery/hmap v0.0.67 h1:PG09AyXH6mchdZCdxAS7WkZz0xxsOsIxJOmEixEmnzI=
github.com/projectdiscovery/hmap v0.0.59/go.mod h1:uHhhnPmvq9qXvCjBSQXCBAlmA1r8JGufP775IkBSbgs= github.com/projectdiscovery/hmap v0.0.67/go.mod h1:WxK8i2J+wcdimIXCgpYzfj9gKxCqRqOM4KENDRzGgAA=
github.com/projectdiscovery/httpx v1.6.8 h1:k0Y5g3ue/7QbDP0+LykIxp/VhPDLfau3UEUyuxtP7qE= github.com/projectdiscovery/httpx v1.6.9 h1:ihyFclesLjvQpiJpRIlAYeebapyIbOI/arDAvvy1ES8=
github.com/projectdiscovery/httpx v1.6.8/go.mod h1:7BIsDxyRwkBjthqFmEajXrA5f3yb4tlVfLmpNdf0ZXA= github.com/projectdiscovery/httpx v1.6.9/go.mod h1:zQtX5CtcDYXzIRWne1ztCVtqG0sXCnx84tFwfMHoB8Q=
github.com/projectdiscovery/interactsh v1.2.0 h1:Al6jHiR+Usl9egYJDLJaWNHOcH8Rugk8gWMasc8Cmw8= github.com/projectdiscovery/interactsh v1.2.0 h1:Al6jHiR+Usl9egYJDLJaWNHOcH8Rugk8gWMasc8Cmw8=
github.com/projectdiscovery/interactsh v1.2.0/go.mod h1:Wxt0fnzxsfrAZQQlpVrf3xMatP4OXZaZbjuDkIQKdYY= github.com/projectdiscovery/interactsh v1.2.0/go.mod h1:Wxt0fnzxsfrAZQQlpVrf3xMatP4OXZaZbjuDkIQKdYY=
github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb h1:MGtI4oE12ruWv11ZlPXXd7hl/uAaQZrFvrIDYDeVMd8= github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb h1:MGtI4oE12ruWv11ZlPXXd7hl/uAaQZrFvrIDYDeVMd8=
@ -879,30 +879,30 @@ github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5 h1:L/e8z8yw
github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5/go.mod h1:pGW2ncnTxTxHtP9wzcIJAB+3/NMp6IiuQWd2NK7K+oc= github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5/go.mod h1:pGW2ncnTxTxHtP9wzcIJAB+3/NMp6IiuQWd2NK7K+oc=
github.com/projectdiscovery/networkpolicy v0.0.9 h1:IrlDoYZagNNO8y+7iZeHT8k5izE+nek7TdtvEBwCxqk= github.com/projectdiscovery/networkpolicy v0.0.9 h1:IrlDoYZagNNO8y+7iZeHT8k5izE+nek7TdtvEBwCxqk=
github.com/projectdiscovery/networkpolicy v0.0.9/go.mod h1:XFJ2Lnv8BE/ziQCFjBHMsH1w6VmkPiQtk+NlBpdMU7M= github.com/projectdiscovery/networkpolicy v0.0.9/go.mod h1:XFJ2Lnv8BE/ziQCFjBHMsH1w6VmkPiQtk+NlBpdMU7M=
github.com/projectdiscovery/ratelimit v0.0.56 h1:WliU7NvfMb5hK/IJjOFlWIXU1G7+QRylMhSybaSCTI8= github.com/projectdiscovery/ratelimit v0.0.61 h1:n9PD4Z4Y6cLeT2rn9IiOAA0I/kIZE/D7z7z5X/WQds8=
github.com/projectdiscovery/ratelimit v0.0.56/go.mod h1:GbnAo+MbB4R/4jiOI1mH4KAJfovmrPnq4NElcI99fvs= github.com/projectdiscovery/ratelimit v0.0.61/go.mod h1:u7DxBBcUzFg4Cb2s5yabmtCMJs+ojulNpNrSLtftoKg=
github.com/projectdiscovery/rawhttp v0.1.67 h1:HYzathMk3c8Y83hYjHM4GCBFbz/G+vABe0Lz6ajaowY= github.com/projectdiscovery/rawhttp v0.1.74 h1:ahE23GwPyFDBSofmo92MuW439P4x20GBYwOFqejY5G8=
github.com/projectdiscovery/rawhttp v0.1.67/go.mod h1:5viJ6odzc9ZuEFppj/E7HdX+u99FoYlvXnhHyTNc7N0= github.com/projectdiscovery/rawhttp v0.1.74/go.mod h1:xEqBY17CHgGmMfuLOWYntjFQ9crb4PG1xoNgexcAq4g=
github.com/projectdiscovery/rdap v0.9.1-0.20221108103045-9865884d1917 h1:m03X4gBVSorSzvmm0bFa7gDV4QNSOWPL/fgZ4kTXBxk= github.com/projectdiscovery/rdap v0.9.1-0.20221108103045-9865884d1917 h1:m03X4gBVSorSzvmm0bFa7gDV4QNSOWPL/fgZ4kTXBxk=
github.com/projectdiscovery/rdap v0.9.1-0.20221108103045-9865884d1917/go.mod h1:JxXtZC9e195awe7EynrcnBJmFoad/BNDzW9mzFkK8Sg= github.com/projectdiscovery/rdap v0.9.1-0.20221108103045-9865884d1917/go.mod h1:JxXtZC9e195awe7EynrcnBJmFoad/BNDzW9mzFkK8Sg=
github.com/projectdiscovery/retryabledns v1.0.77 h1:rCFSiTPNI7h9PP1uUvrmcq/6XVZVdxpha2H1ioArKpk= github.com/projectdiscovery/retryabledns v1.0.85 h1:9aLPWu0bcmtK8bPm/JJyfts28hgWf74UPsSG0KMXrqo=
github.com/projectdiscovery/retryabledns v1.0.77/go.mod h1:ce1JTjAOxLujqITtA5VLlbhLRVubx+GETYasivapai4= github.com/projectdiscovery/retryabledns v1.0.85/go.mod h1:cZe0rydjby+ns2oIY7JmywHvtkwWxPzp3PuQz1rV50E=
github.com/projectdiscovery/retryablehttp-go v1.0.78 h1:If7/XjCWk893YrnTMaW69TNMsfE1Er3i1SWOkWbEk4E= github.com/projectdiscovery/retryablehttp-go v1.0.86 h1:r/rqVrT/fSMe6/syIq1FGd8do/vt+Kgca9pFegyHG88=
github.com/projectdiscovery/retryablehttp-go v1.0.78/go.mod h1:NqzTdnGihSRkF9c/aXo+3qTJXEeNwnOuza6GrlPo9qw= github.com/projectdiscovery/retryablehttp-go v1.0.86/go.mod h1:upk8ItKt9hayUp6Z7E60tH314BAnIUQ5y4KS4x9R90g=
github.com/projectdiscovery/sarif v0.0.1 h1:C2Tyj0SGOKbCLgHrx83vaE6YkzXEVrMXYRGLkKCr/us= github.com/projectdiscovery/sarif v0.0.1 h1:C2Tyj0SGOKbCLgHrx83vaE6YkzXEVrMXYRGLkKCr/us=
github.com/projectdiscovery/sarif v0.0.1/go.mod h1:cEYlDu8amcPf6b9dSakcz2nNnJsoz4aR6peERwV+wuQ= github.com/projectdiscovery/sarif v0.0.1/go.mod h1:cEYlDu8amcPf6b9dSakcz2nNnJsoz4aR6peERwV+wuQ=
github.com/projectdiscovery/stringsutil v0.0.2 h1:uzmw3IVLJSMW1kEg8eCStG/cGbYYZAja8BH3LqqJXMA= github.com/projectdiscovery/stringsutil v0.0.2 h1:uzmw3IVLJSMW1kEg8eCStG/cGbYYZAja8BH3LqqJXMA=
github.com/projectdiscovery/stringsutil v0.0.2/go.mod h1:EJ3w6bC5fBYjVou6ryzodQq37D5c6qbAYQpGmAy+DC0= github.com/projectdiscovery/stringsutil v0.0.2/go.mod h1:EJ3w6bC5fBYjVou6ryzodQq37D5c6qbAYQpGmAy+DC0=
github.com/projectdiscovery/tlsx v1.1.7 h1:eSsl/SmTDL/z2CMeSrbssk4f/9oOotMP1SgXl3yynSM= github.com/projectdiscovery/tlsx v1.1.8 h1:Y+VkOp6JmUBb4tci1Fbz9U7ELEQ2irFhm+HS58tHruM=
github.com/projectdiscovery/tlsx v1.1.7/go.mod h1:g66QQ4/y4tLVjoGbzWIv+Q6xwFzxfJbEDx86Y1dYHDM= github.com/projectdiscovery/tlsx v1.1.8/go.mod h1:6u/dbLuMsLzmux58AWnAB24qh2+Trk0auCK2I9B17Vo=
github.com/projectdiscovery/uncover v1.0.9 h1:s5RbkD/V4r8QcPkys4gTTqMuRSgXq0JprejqLSopN9Y= github.com/projectdiscovery/uncover v1.0.9 h1:s5RbkD/V4r8QcPkys4gTTqMuRSgXq0JprejqLSopN9Y=
github.com/projectdiscovery/uncover v1.0.9/go.mod h1:2PUF3SpB5QNIJ8epaB2xbRzkPaxEAWRDm3Ir2ijt81U= github.com/projectdiscovery/uncover v1.0.9/go.mod h1:2PUF3SpB5QNIJ8epaB2xbRzkPaxEAWRDm3Ir2ijt81U=
github.com/projectdiscovery/useragent v0.0.71 h1:Q02L3LV15ztOQ6FfmVSqVmOd5QhvzI+yAgYOc/32Nvg= github.com/projectdiscovery/useragent v0.0.78 h1:YpgiY3qXpzygFA88SWVseAyWeV9ZKrIpDkfOY+mQ/UY=
github.com/projectdiscovery/useragent v0.0.71/go.mod h1:DHPruFLCvCvkd2qqPwwQZrP9sziv0lxQJ0R1rE1fa8E= github.com/projectdiscovery/useragent v0.0.78/go.mod h1:SQgk2DZu1qCvYqBRYWs2sjenXqLEDnRw65wJJoolwZ4=
github.com/projectdiscovery/utils v0.2.11 h1:TO7fBG5QI256sn1YuTD87yn4+4OjGJ2wT1772uEnp4Q= github.com/projectdiscovery/utils v0.2.18 h1:uV5JIYKIq8gXdu9wrCeUq3yqPiSCokTrKuLuZwXMSSw=
github.com/projectdiscovery/utils v0.2.11/go.mod h1:W0E74DWkKxlcyKS5XwcAwiob7+smoszPPi1NgX3vZyk= github.com/projectdiscovery/utils v0.2.18/go.mod h1:gcKxBTK1eNF+K8vzD62sMMVFf1eJoTgEiS81mp7CQjI=
github.com/projectdiscovery/wappalyzergo v0.1.18 h1:fFgETis0HcsNE7wREaUPYP45JqIyHgGorJaVp1RH7g4= github.com/projectdiscovery/wappalyzergo v0.2.2 h1:AQT6+oo++HOcseTFSTa2en08vWv5miE/NgnJlqL1lCQ=
github.com/projectdiscovery/wappalyzergo v0.1.18/go.mod h1:/hzgxkBFTMe2wDbA93nFfoMjULw7/vIZ9QPSAnCgUa8= github.com/projectdiscovery/wappalyzergo v0.2.2/go.mod h1:k3aujwFsLcB24ppzwNE0lYpV3tednKGJVTbk4JgrhmI=
github.com/projectdiscovery/yamldoc-go v1.0.4 h1:eZoESapnMw6WAHiVgRwNqvbJEfNHEH148uthhFbG5jE= github.com/projectdiscovery/yamldoc-go v1.0.4 h1:eZoESapnMw6WAHiVgRwNqvbJEfNHEH148uthhFbG5jE=
github.com/projectdiscovery/yamldoc-go v1.0.4/go.mod h1:8PIPRcUD55UbtQdcfFR1hpIGRWG0P7alClXNGt1TBik= github.com/projectdiscovery/yamldoc-go v1.0.4/go.mod h1:8PIPRcUD55UbtQdcfFR1hpIGRWG0P7alClXNGt1TBik=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
@ -1212,8 +1212,8 @@ golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70= golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -1306,8 +1306,8 @@ golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo= golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4=
golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0= golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -1408,8 +1408,8 @@ golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
@ -1422,8 +1422,8 @@ golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM= golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24=
golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -1440,8 +1440,8 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=

View File

@ -14,7 +14,7 @@ dns:
matchers: matchers:
- type: word - type: word
words: words:
- "ghost.io" - ".vercel-dns.com"
internal: true internal: true
http: http:
@ -25,4 +25,4 @@ http:
matchers: matchers:
- type: word - type: word
words: words:
- "ghost.io" - "html>"

View File

@ -23,4 +23,4 @@ code:
- type: dsl - type: dsl
dsl: dsl:
- true - true
# digest: 4a0a0047304502200307590191cb7c766b6c21e5777d345bdddf7adf9d6da8f7d336d585d9ac4a8b022100fd30fb0c7722778eb3d861d60e721d805925b8d8df2b979ef2104c35ec57d5cb:4a3eb6b4988d95847d4203be25ed1d46 # digest: 490a00463044022048c083c338c0195f5012122d40c1009d2e2030c583e56558e0d6249a41e6f3f4022070656adf748f4874018d7a01fce116db10a3acd1f9b03e12a83906fb625b5c50:4a3eb6b4988d95847d4203be25ed1d46

View File

@ -20,4 +20,4 @@ code:
- type: word - type: word
words: words:
- "hello from input baz" - "hello from input baz"
# digest: 4a0a0047304502203fe1d7d52bc2a41886d576a90c82c3be42078baaa4b46e1f3d8519665d6f88b202210081feb82c41150c5b218e226fc4f299ded19f42ba01ef34ba60b0634b4ea6ee12:4a3eb6b4988d95847d4203be25ed1d46 # digest: 4b0a00483046022100cbbdb7214f669d111b671d271110872dc8af2ab41cf5c312b6e4f64126f55337022100a60547952a0c2bea58388f2d2effe8ad73cd6b6fc92e73eb3c8f88beab6105ec:4a3eb6b4988d95847d4203be25ed1d46

View File

@ -18,4 +18,4 @@ code:
- type: word - type: word
words: words:
- "hello from input" - "hello from input"
# digest: 4b0a00483046022100afb5ebff14a40e7f9b679ffc4d93ce7849e33eb398ebb47f2e757cd24831f9dd02210089ffa21b2763e99ebce95dfc5b91e1e62da4ccdc9d2ad5c48584fa350ba335af:4a3eb6b4988d95847d4203be25ed1d46 # digest: 4a0a00473045022032b81e8bb7475abf27639b0ced71355497166d664698021f26498e7031d62a23022100e99ccde578bfc0b658f16427ae9a3d18922849d3ba3e022032ea0d2a8e77fadb:4a3eb6b4988d95847d4203be25ed1d46

View File

@ -26,4 +26,4 @@ code:
part: interactsh_protocol part: interactsh_protocol
words: words:
- "http" - "http"
# digest: 4b0a00483046022100939f83e74d43932a5bd792b1fd2c100eec2df60f2b2a8dd56b5c8ef5faa92b17022100f93031b0de373af7d78e623968ea5a2d67c4561ef70e3e6da15aef7e5c853115:4a3eb6b4988d95847d4203be25ed1d46 # digest: 4a0a0047304502201a5dd0eddfab4f02588a5a8ac1947a5fa41fed80b59d698ad5cc00456296efb6022100fe6e608e38c060964800f5f863a7cdc93f686f2d0f4b52854f73948b808b4511:4a3eb6b4988d95847d4203be25ed1d46

View File

@ -21,4 +21,4 @@ code:
- type: word - type: word
words: words:
- "hello from input" - "hello from input"
# digest: 4a0a00473045022100b8e676ce0c57b60c233a0203539dec20457bbb5f1790d351a5d45405b6668b2602204b1f2fa18e7db099f05329009597ceb2d9b7337562c1a676e8d50ea2f1c6fcbe:4a3eb6b4988d95847d4203be25ed1d46 # digest: 4b0a00483046022100ced1702728cc68f906c4c7d2c4d05ed071bfabee1e36eec7ebecbeca795a170c022100d20fd41796f130a8f9c4972fee85386d67d61eb5fc1119b1afe2a851eb2f3e65:4a3eb6b4988d95847d4203be25ed1d46

View File

@ -13,7 +13,7 @@ dns:
- type: dsl - type: dsl
name: blogid name: blogid
dsl: dsl:
- trim_suffix(cname,'.ghost.io') - trim_suffix(cname,'.vercel-dns.com')
internal: true internal: true
http: http:
@ -24,6 +24,6 @@ http:
matchers: matchers:
- type: dsl - type: dsl
dsl: dsl:
- contains(body,'ProjectDiscovery.io') # check for http string - contains(body,'introduction') # check for http string
- blogid == 'projectdiscovery' # check for cname (extracted information from dns response) - blogid == 'cname' # check for cname (extracted information from dns response)
condition: and condition: and

View File

@ -7,7 +7,7 @@ info:
variables: variables:
cname_filtered: '{{trim_suffix(dns_cname,".ghost.io")}}' cname_filtered: '{{trim_suffix(dns_cname,".vercel-dns.com")}}'
dns: dns:
- name: "{{FQDN}}" # DNS Request - name: "{{FQDN}}" # DNS Request
@ -24,7 +24,7 @@ http:
matchers: matchers:
- type: dsl - type: dsl
dsl: dsl:
- contains(http_body,'ProjectDiscovery.io') # check for http string - contains(http_body,'introduction') # check for http string
- cname_filtered == 'projectdiscovery' # check for cname (extracted information from dns response) - cname_filtered == 'cname' # check for cname (extracted information from dns response)
- ssl_subject_cn == 'blog.projectdiscovery.io' - ssl_subject_cn == 'docs.projectdiscovery.io'
condition: and condition: and

View File

@ -20,7 +20,7 @@ http:
matchers: matchers:
- type: dsl - type: dsl
dsl: dsl:
- contains(http_body,'ProjectDiscovery.io') # check for http string - contains(http_body,'introduction') # check for http string
- trim_suffix(dns_cname,'.ghost.io') == 'projectdiscovery' # check for cname (extracted information from dns response) - trim_suffix(dns_cname,'.vercel-dns.com') == 'cname' # check for cname (extracted information from dns response)
- ssl_subject_cn == 'blog.projectdiscovery.io' - ssl_subject_cn == 'docs.projectdiscovery.io'
condition: and condition: and

View File

@ -19,4 +19,4 @@ code:
regex: regex:
- 'hello from (.*)' - 'hello from (.*)'
group: 1 group: 1
# digest: 490a00463044022050da011362cf08c2cb81e812c7f86d7282afe0562d4bf00d390f1300d19bc910022029e9d305da69e941ac18797645aecb217abde6557f891e141301b48e89a3c0cd:4a3eb6b4988d95847d4203be25ed1d46 # digest: 490a0046304402206b3648e8d393ac4df82c7d59b1a6ee3731c66c249dbd4d9bf31f0b7f176b37ec02203184d36373e516757c7d708b5799bc16edb1cebc0a64f3442d13ded4b33c42fb:4a3eb6b4988d95847d4203be25ed1d46

View File

@ -18,4 +18,4 @@ code:
- type: word - type: word
words: words:
- "hello from first" - "hello from first"
# digest: 4b0a00483046022100b3b8759c0df028455eb59b1433ac240e5d4604b011bb0c63680bd3cc159ac6f0022100f44aa11b640d11ad0e2902897f4eb51666ab3cd83c31dfd2590f6e43391e39b0:4a3eb6b4988d95847d4203be25ed1d46 # digest: 490a0046304402204cbb1bdf8370e49bb930b17460fb35e15f285a3b48b165736ac0e7ba2f9bc0fb022067c134790c4a2cf646b195aa4488e2c222266436e6bda47931908a28807bdb81:4a3eb6b4988d95847d4203be25ed1d46

View File

@ -3,6 +3,7 @@ package runner
import ( import (
"context" "context"
"fmt" "fmt"
"strings"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider/authx" "github.com/projectdiscovery/nuclei/v3/pkg/authprovider/authx"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog" "github.com/projectdiscovery/nuclei/v3/pkg/catalog"
@ -10,9 +11,12 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/output" "github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols" "github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/generators"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/helpers/writer" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/helpers/writer"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/replacer"
"github.com/projectdiscovery/nuclei/v3/pkg/scan" "github.com/projectdiscovery/nuclei/v3/pkg/scan"
"github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/utils/env"
errorutil "github.com/projectdiscovery/utils/errors" errorutil "github.com/projectdiscovery/utils/errors"
) )
@ -75,7 +79,25 @@ func GetLazyAuthFetchCallback(opts *AuthLazyFetchOptions) authx.LazyFetchSecret
vars := map[string]interface{}{} vars := map[string]interface{}{}
mainCtx := context.Background() mainCtx := context.Background()
ctx := scan.NewScanContext(mainCtx, contextargs.NewWithInput(mainCtx, d.Input)) ctx := scan.NewScanContext(mainCtx, contextargs.NewWithInput(mainCtx, d.Input))
cliVars := map[string]interface{}{}
if opts.ExecOpts.Options != nil {
// gets variables passed from cli -v and -env-vars
cliVars = generators.BuildPayloadFromOptions(opts.ExecOpts.Options)
}
for _, v := range d.Variables { for _, v := range d.Variables {
// Check if the template has any env variables and expand them
if strings.HasPrefix(v.Value, "$") {
env.ExpandWithEnv(&v.Value)
}
if strings.Contains(v.Value, "{{") {
// if a variable has a value like {{username}}, replace it with the value from cliVars
// variables:
// - key: username
// value: {{username}}
v.Value = replacer.Replace(v.Value, cliVars)
}
vars[v.Key] = v.Value vars[v.Key] = v.Value
ctx.Input.Add(v.Key, v.Value) ctx.Input.Add(v.Key, v.Value)
} }
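As an aside, a minimal sketch of the two expansion steps introduced in this hunk (the variable values and the CLI var are hypothetical; env.ExpandWithEnv and replacer.Replace are the helpers used above):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/replacer"
	"github.com/projectdiscovery/utils/env"
)

func main() {
	// Step 1: values starting with "$" are expanded from the process environment.
	token := "$API_TOKEN" // hypothetical template variable value
	if strings.HasPrefix(token, "$") {
		env.ExpandWithEnv(&token)
	}

	// Step 2: {{name}} placeholders are filled from CLI-provided vars
	// (the same map built by generators.BuildPayloadFromOptions above).
	user := "{{username}}"
	cliVars := map[string]interface{}{"username": "admin"} // hypothetical CLI value
	if strings.Contains(user, "{{") {
		user = replacer.Replace(user, cliVars)
	}
	fmt.Println(token, user) // value of $API_TOKEN, then "admin"
}
```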

View File

@ -304,12 +304,19 @@ func createReportingOptions(options *types.Options) (*reporting.Options, error)
OmitRaw: options.OmitRawRequests, OmitRaw: options.OmitRawRequests,
} }
} }
// Combine options.
if options.JSONLExport != "" { if options.JSONLExport != "" {
// Combine the CLI options with the config file options, with the CLI options taking precedence
if reportingOptions.JSONLExporter != nil {
reportingOptions.JSONLExporter.File = options.JSONLExport
reportingOptions.JSONLExporter.OmitRaw = options.OmitRawRequests
} else {
reportingOptions.JSONLExporter = &jsonl.Options{ reportingOptions.JSONLExporter = &jsonl.Options{
File: options.JSONLExport, File: options.JSONLExport,
OmitRaw: options.OmitRawRequests, OmitRaw: options.OmitRawRequests,
} }
} }
}
reportingOptions.OmitRaw = options.OmitRawRequests reportingOptions.OmitRaw = options.OmitRawRequests
return reportingOptions, nil return reportingOptions, nil

View File

@ -392,6 +392,9 @@ func (r *Runner) Close() {
if r.tmpDir != "" { if r.tmpDir != "" {
_ = os.RemoveAll(r.tmpDir) _ = os.RemoveAll(r.tmpDir)
} }
// this is a no-op unless nuclei is built with the stats build tag
events.Close()
} }
// setupPDCPUpload sets up the PDCP upload writer // setupPDCPUpload sets up the PDCP upload writer
@ -727,6 +730,8 @@ func (r *Runner) displayExecutionInfo(store *loader.Store) {
stats.ForceDisplayWarning(templates.ExcludedCodeTmplStats) stats.ForceDisplayWarning(templates.ExcludedCodeTmplStats)
stats.ForceDisplayWarning(templates.ExludedDastTmplStats) stats.ForceDisplayWarning(templates.ExludedDastTmplStats)
stats.ForceDisplayWarning(templates.TemplatesExcludedStats) stats.ForceDisplayWarning(templates.TemplatesExcludedStats)
stats.ForceDisplayWarning(templates.ExcludedFileStats)
stats.ForceDisplayWarning(templates.ExcludedSelfContainedStats)
} }
if tmplCount == 0 && workflowCount == 0 { if tmplCount == 0 && workflowCount == 0 {

View File

@ -380,6 +380,23 @@ func WithSandboxOptions(allowLocalFileAccess bool, restrictLocalNetworkAccess bo
func EnableCodeTemplates() NucleiSDKOptions { func EnableCodeTemplates() NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
e.opts.EnableCodeTemplates = true e.opts.EnableCodeTemplates = true
e.opts.EnableSelfContainedTemplates = true
return nil
}
}
// EnableSelfContainedTemplates allows loading/executing self-contained templates
func EnableSelfContainedTemplates() NucleiSDKOptions {
return func(e *NucleiEngine) error {
e.opts.EnableSelfContainedTemplates = true
return nil
}
}
// EnableFileTemplates allows loading/executing file protocol templates
func EnableFileTemplates() NucleiSDKOptions {
return func(e *NucleiEngine) error {
e.opts.EnableFileTemplates = true
return nil return nil
} }
} }
@ -392,6 +409,25 @@ func WithHeaders(headers []string) NucleiSDKOptions {
} }
} }
// WithVars allows setting custom variables to use in templates/workflows context
func WithVars(vars []string) NucleiSDKOptions {
// Create a goflags.RuntimeMap
runtimeVars := goflags.RuntimeMap{}
for _, v := range vars {
err := runtimeVars.Set(v)
if err != nil {
return func(e *NucleiEngine) error {
return err
}
}
}
return func(e *NucleiEngine) error {
e.opts.Vars = runtimeVars
return nil
}
}
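For context, a minimal SDK sketch combining the new options defined above; it mirrors the TestWithVarsNuclei test added later in this commit (the template path, var value, and target are illustrative only):

```go
package main

import (
	"context"

	nuclei "github.com/projectdiscovery/nuclei/v3/lib"
)

func main() {
	ne, err := nuclei.NewNucleiEngineCtx(
		context.Background(),
		nuclei.EnableSelfContainedTemplates(), // self-contained templates are now opt-in
		nuclei.WithTemplatesOrWorkflows(nuclei.TemplateSources{Templates: []string{"http/token-spray/api-1forge.yaml"}}),
		nuclei.WithVars([]string{"token=foobar"}), // custom vars available in template context
	)
	if err != nil {
		panic(err)
	}
	defer ne.Close()

	ne.LoadTargets([]string{"scanme.sh"}, true) // true = probe non-HTTP targets for http/https
	if err := ne.ExecuteWithCallback(nil); err != nil {
		panic(err)
	}
}
```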
// EnablePassiveMode allows enabling passive HTTP response processing mode // EnablePassiveMode allows enabling passive HTTP response processing mode
func EnablePassiveMode() NucleiSDKOptions { func EnablePassiveMode() NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {

View File

@ -7,6 +7,7 @@ import (
"os" "os"
"testing" "testing"
"github.com/kitabisa/go-ci"
nuclei "github.com/projectdiscovery/nuclei/v3/lib" nuclei "github.com/projectdiscovery/nuclei/v3/lib"
"github.com/remeh/sizedwaitgroup" "github.com/remeh/sizedwaitgroup"
) )
@ -78,9 +79,10 @@ func ExampleThreadSafeNucleiEngine() {
func TestMain(m *testing.M) { func TestMain(m *testing.M) {
// this file only contains testable examples https://go.dev/blog/examples // this file only contains testable examples https://go.dev/blog/examples
// and actual functionality tests are in sdk_test.go // and actual functionality tests are in sdk_test.go
if os.Getenv("GH_ACTION") != "" || os.Getenv("CI") != "" { if ci.IsCI() {
// no need to run this test on github actions // no need to run this test on github actions
return return
} }
os.Exit(m.Run()) os.Exit(m.Run())
} }

View File

@ -112,6 +112,14 @@ func (e *NucleiEngine) GetTemplates() []*templates.Template {
return e.store.Templates() return e.store.Templates()
} }
// GetWorkflows returns all nuclei workflows that are loaded
func (e *NucleiEngine) GetWorkflows() []*templates.Template {
if !e.templatesLoaded {
_ = e.LoadAllTemplates()
}
return e.store.Workflows()
}
// LoadTargets(urls/domains/ips only) adds targets to the nuclei engine // LoadTargets(urls/domains/ips only) adds targets to the nuclei engine
func (e *NucleiEngine) LoadTargets(targets []string, probeNonHttp bool) { func (e *NucleiEngine) LoadTargets(targets []string, probeNonHttp bool) {
for _, target := range targets { for _, target := range targets {
@ -271,6 +279,11 @@ func (e *NucleiEngine) Engine() *core.Engine {
return e.engine return e.engine
} }
// Store returns the loader store of the nuclei engine
func (e *NucleiEngine) Store() *loader.Store {
return e.store
}
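A small, hedged companion sketch for the new accessors above (the engine value is assumed to be constructed as in the SDK example earlier in this diff):

```go
package example

import (
	"fmt"

	nuclei "github.com/projectdiscovery/nuclei/v3/lib"
)

// summarize reports how many templates and workflows an engine has loaded,
// using the GetTemplates/GetWorkflows accessors, and grabs the underlying
// loader store via Store for lower-level inspection.
func summarize(ne *nuclei.NucleiEngine) {
	fmt.Printf("templates: %d, workflows: %d\n", len(ne.GetTemplates()), len(ne.GetWorkflows()))
	_ = ne.Store()
}
```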
// NewNucleiEngineCtx creates a new nuclei engine instance with given context // NewNucleiEngineCtx creates a new nuclei engine instance with given context
func NewNucleiEngineCtx(ctx context.Context, options ...NucleiSDKOptions) (*NucleiEngine, error) { func NewNucleiEngineCtx(ctx context.Context, options ...NucleiSDKOptions) (*NucleiEngine, error) {
// default options // default options

View File

@ -3,11 +3,12 @@ package nuclei
import ( import (
"context" "context"
"fmt" "fmt"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
"strings" "strings"
"sync" "sync"
"time" "time"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
"github.com/logrusorgru/aurora" "github.com/logrusorgru/aurora"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger"

View File

@ -133,3 +133,37 @@ func TestThreadSafeNuclei(t *testing.T) {
fn() fn()
} }
} }
func TestWithVarsNuclei(t *testing.T) {
fn := func() {
defer func() {
// resources like leveldb have a delay to commit in-memory resources
// to disk, typically 1-2 seconds, so we wait for 2 seconds
time.Sleep(2 * time.Second)
goleak.VerifyNone(t, knownLeaks...)
}()
ne, err := nuclei.NewNucleiEngineCtx(
context.TODO(),
nuclei.EnableSelfContainedTemplates(),
nuclei.WithTemplatesOrWorkflows(nuclei.TemplateSources{Templates: []string{"http/token-spray/api-1forge.yaml"}}),
nuclei.WithVars([]string{"token=foobar"}),
nuclei.WithVerbosity(nuclei.VerbosityOptions{Debug: true}),
)
require.Nil(t, err)
ne.LoadTargets([]string{"scanme.sh"}, true) // probe http/https target is set to true here
err = ne.ExecuteWithCallback(nil)
require.Nil(t, err)
defer ne.Close()
}
// this is a shared test so it needs to be run as a separate process
if env.GetEnvOrDefault("TestWithVarsNuclei", false) {
cmd := exec.Command(os.Args[0], "-test.run=TestWithVarsNuclei")
cmd.Env = append(os.Environ(), "TestWithVarsNuclei=true")
out, err := cmd.CombinedOutput()
if err != nil {
t.Fatalf("process ran with error %s, output: %s", err, out)
}
} else {
fn()
}
}

View File

@ -3,6 +3,22 @@
"$id": "https://templates.-template", "$id": "https://templates.-template",
"$ref": "#/$defs/templates.Template", "$ref": "#/$defs/templates.Template",
"$defs": { "$defs": {
"analyzers.AnalyzerTemplate": {
"properties": {
"name": {
"type": "string"
},
"parameters": {
"$ref": "#/$defs/map[string]interface {}"
}
},
"additionalProperties": false,
"type": "object",
"required": [
"name",
"parameters"
]
},
"code.Request": { "code.Request": {
"properties": { "properties": {
"matchers": { "matchers": {
@ -785,6 +801,11 @@
"title": "fuzzin rules for http fuzzing", "title": "fuzzin rules for http fuzzing",
"description": "Fuzzing describes rule schema to fuzz http requests" "description": "Fuzzing describes rule schema to fuzz http requests"
}, },
"analyzer": {
"$ref": "#/$defs/analyzers.AnalyzerTemplate",
"title": "analyzer for http request",
"description": "Analyzer for HTTP Request"
},
"self-contained": { "self-contained": {
"type": "boolean" "type": "boolean"
}, },

View File

@ -31,7 +31,7 @@ const (
CLIConfigFileName = "config.yaml" CLIConfigFileName = "config.yaml"
ReportingConfigFilename = "reporting-config.yaml" ReportingConfigFilename = "reporting-config.yaml"
// Version is the current version of nuclei // Version is the current version of nuclei
Version = `v3.3.5` Version = `v3.3.6`
// Directory Names of custom templates // Directory Names of custom templates
CustomS3TemplatesDirName = "s3" CustomS3TemplatesDirName = "s3"
CustomGitHubTemplatesDirName = "github" CustomGitHubTemplatesDirName = "github"

View File

@ -352,6 +352,7 @@ func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string
if err != nil { if err != nil {
if isParsingError("Error occurred parsing template %s: %s\n", templatePath, err) { if isParsingError("Error occurred parsing template %s: %s\n", templatePath, err) {
areTemplatesValid = false areTemplatesValid = false
continue
} }
} else if template == nil { } else if template == nil {
// NOTE(dwisiswant0): possibly global matchers template. // NOTE(dwisiswant0): possibly global matchers template.
@ -488,6 +489,17 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
stats.Increment(templates.SkippedUnsignedStats) stats.Increment(templates.SkippedUnsignedStats)
return return
} }
if parsed.SelfContained && !store.config.ExecutorOptions.Options.EnableSelfContainedTemplates {
stats.Increment(templates.ExcludedSelfContainedStats)
return
}
if parsed.HasFileProtocol() && !store.config.ExecutorOptions.Options.EnableFileTemplates {
stats.Increment(templates.ExcludedFileStats)
return
}
// if template has request signature like aws then only signed and verified templates are allowed // if template has request signature like aws then only signed and verified templates are allowed
if parsed.UsesRequestSignature() && !parsed.Verified { if parsed.UsesRequestSignature() && !parsed.Verified {
stats.Increment(templates.SkippedRequestSignatureStats) stats.Increment(templates.SkippedRequestSignatureStats)

View File

@ -137,33 +137,59 @@ getRepo:
// download the git repo to a given path // download the git repo to a given path
func (ctr *customTemplateGitHubRepo) cloneRepo(clonePath, githubToken string) error { func (ctr *customTemplateGitHubRepo) cloneRepo(clonePath, githubToken string) error {
r, err := git.PlainClone(clonePath, false, &git.CloneOptions{ cloneOpts := &git.CloneOptions{
URL: ctr.gitCloneURL, URL: ctr.gitCloneURL,
Auth: getAuth(ctr.owner, githubToken), Auth: getAuth(ctr.owner, githubToken),
}) SingleBranch: true,
Depth: 1,
}
err := cloneOpts.Validate()
if err != nil {
return err
}
r, err := git.PlainClone(clonePath, false, cloneOpts)
if err != nil { if err != nil {
return errors.Errorf("%s/%s: %s", ctr.owner, ctr.reponame, err.Error()) return errors.Errorf("%s/%s: %s", ctr.owner, ctr.reponame, err.Error())
} }
// Add the user as well in the config. By default, user is not set // Add the user as well in the config. By default, user is not set
config, _ := r.Storer.Config() config, _ := r.Storer.Config()
config.User.Name = ctr.owner config.User.Name = ctr.owner
return r.SetConfig(config) return r.SetConfig(config)
} }
// performs the git pull on given repo // performs the git pull on given repo
func (ctr *customTemplateGitHubRepo) pullChanges(repoPath, githubToken string) error { func (ctr *customTemplateGitHubRepo) pullChanges(repoPath, githubToken string) error {
pullOpts := &git.PullOptions{
RemoteName: "origin",
Auth: getAuth(ctr.owner, githubToken),
SingleBranch: true,
Depth: 1,
}
err := pullOpts.Validate()
if err != nil {
return err
}
r, err := git.PlainOpen(repoPath) r, err := git.PlainOpen(repoPath)
if err != nil { if err != nil {
return err return err
} }
w, err := r.Worktree() w, err := r.Worktree()
if err != nil { if err != nil {
return err return err
} }
err = w.Pull(&git.PullOptions{RemoteName: "origin", Auth: getAuth(ctr.owner, githubToken)})
err = w.Pull(pullOpts)
if err != nil { if err != nil {
return errors.Errorf("%s/%s: %s", ctr.owner, ctr.reponame, err.Error()) return errors.Errorf("%s/%s: %s", ctr.owner, ctr.reponame, err.Error())
} }
return nil return nil
} }

View File

@ -22,14 +22,12 @@ func TestDownloadCustomTemplatesFromGitHub(t *testing.T) {
config.DefaultConfig.SetTemplatesDir(templatesDirectory) config.DefaultConfig.SetTemplatesDir(templatesDirectory)
options := testutils.DefaultOptions options := testutils.DefaultOptions
options.GitHubTemplateRepo = []string{"projectdiscovery/nuclei-templates", "ehsandeep/nuclei-templates"} options.GitHubTemplateRepo = []string{"projectdiscovery/nuclei-templates-test"}
options.GitHubToken = os.Getenv("GITHUB_TOKEN")
ctm, err := NewCustomTemplatesManager(options) ctm, err := NewCustomTemplatesManager(options)
require.Nil(t, err, "could not create custom templates manager") require.Nil(t, err, "could not create custom templates manager")
ctm.Download(context.Background()) ctm.Download(context.Background())
require.DirExists(t, filepath.Join(templatesDirectory, "github", "projectdiscovery", "nuclei-templates"), "cloned directory does not exists") require.DirExists(t, filepath.Join(templatesDirectory, "github", "projectdiscovery", "nuclei-templates-test"), "cloned directory does not exists")
require.DirExists(t, filepath.Join(templatesDirectory, "github", "ehsandeep", "nuclei-templates"), "cloned directory does not exists")
} }

View File

@ -0,0 +1,103 @@
package analyzers
import (
"math/rand"
"strconv"
"strings"
"time"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz"
"github.com/projectdiscovery/retryablehttp-go"
)
// Analyzer is an interface for all the analyzers
// that can be used for the fuzzer
type Analyzer interface {
// Name returns the name of the analyzer
Name() string
// ApplyInitialTransformation applies the transformation to the initial payload.
ApplyInitialTransformation(data string, params map[string]interface{}) string
// Analyze is the main function for the analyzer
Analyze(options *Options) (bool, string, error)
}
// AnalyzerTemplate is the template for the analyzer
type AnalyzerTemplate struct {
// description: |
// Name is the name of the analyzer to use
// values:
// - time_delay
Name string `json:"name" yaml:"name"`
// description: |
// Parameters is the parameters for the analyzer
//
// Parameters are different for each analyzer. For example, you can customize
// the time_delay analyzer with sleep_duration, time_slope_error_range, etc. Refer
// to the docs for each analyzer for the available parameters.
Parameters map[string]interface{} `json:"parameters" yaml:"parameters"`
}
var (
analyzers map[string]Analyzer
)
// RegisterAnalyzer registers a new analyzer
func RegisterAnalyzer(name string, analyzer Analyzer) {
analyzers[name] = analyzer
}
// GetAnalyzer returns the analyzer for a given name
func GetAnalyzer(name string) Analyzer {
return analyzers[name]
}
func init() {
analyzers = make(map[string]Analyzer)
}
// Options contains the options for the analyzer
type Options struct {
FuzzGenerated fuzz.GeneratedRequest
HttpClient *retryablehttp.Client
ResponseTimeDelay time.Duration
AnalyzerParameters map[string]interface{}
}
var (
random = rand.New(rand.NewSource(time.Now().UnixNano()))
)
// ApplyPayloadTransformations applies the payload transformations to the payload
// It supports the below payloads -
// - [RANDNUM] => random number between 1000 and 9999
// - [RANDSTR] => random string of 4 characters
func ApplyPayloadTransformations(value string) string {
randomInt := GetRandomInteger()
randomStr := randStringBytesMask(4)
value = strings.ReplaceAll(value, "[RANDNUM]", strconv.Itoa(randomInt))
value = strings.ReplaceAll(value, "[RANDSTR]", randomStr)
return value
}
const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
const (
letterIdxBits = 6 // 6 bits to represent a letter index
letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits
)
func randStringBytesMask(n int) string {
b := make([]byte, n)
for i := 0; i < n; {
if idx := int(random.Int63() & letterIdxMask); idx < len(letterBytes) {
b[i] = letterBytes[idx]
i++
}
}
return string(b)
}
// GetRandomInteger returns a random integer between 1000 and 9999
func GetRandomInteger() int {
return random.Intn(9000) + 1000
}
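As a sketch of how the registry above is intended to be used, here is a hypothetical no-op analyzer that satisfies the Analyzer interface and registers itself via init (the name "noop" and its behaviour are invented for illustration):

```go
package myanalyzer

import (
	"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/analyzers"
)

// noopAnalyzer is a hypothetical analyzer that never reports a match.
type noopAnalyzer struct{}

// Name returns the registry name of the analyzer.
func (a *noopAnalyzer) Name() string { return "noop" }

// ApplyInitialTransformation returns the payload unchanged.
func (a *noopAnalyzer) ApplyInitialTransformation(data string, params map[string]interface{}) string {
	return data
}

// Analyze never matches; a real analyzer would inspect options.FuzzGenerated,
// re-send requests via options.HttpClient, and so on.
func (a *noopAnalyzer) Analyze(options *analyzers.Options) (bool, string, error) {
	return false, "", nil
}

func init() {
	analyzers.RegisterAnalyzer("noop", &noopAnalyzer{})
}
```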

View File

@ -0,0 +1,168 @@
package time
import (
"fmt"
"io"
"net/http/httptrace"
"strconv"
"strings"
"time"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/analyzers"
"github.com/projectdiscovery/retryablehttp-go"
)
// Analyzer is a time delay analyzer for the fuzzer
type Analyzer struct{}
const (
DefaultSleepDuration = int(5)
DefaultRequestsLimit = int(4)
DefaultTimeCorrelationErrorRange = float64(0.15)
DefaultTimeSlopeErrorRange = float64(0.30)
defaultSleepTimeDuration = 5 * time.Second
)
var _ analyzers.Analyzer = &Analyzer{}
func init() {
analyzers.RegisterAnalyzer("time_delay", &Analyzer{})
}
// Name is the name of the analyzer
func (a *Analyzer) Name() string {
return "time_delay"
}
// ApplyInitialTransformation applies the transformation to the initial payload.
//
// It supports the below payloads -
// - [SLEEPTIME] => sleep_duration
// - [INFERENCE] => Inference payload for time delay analyzer
//
// It also applies the payload transformations to the payload
// which includes [RANDNUM] and [RANDSTR]
func (a *Analyzer) ApplyInitialTransformation(data string, params map[string]interface{}) string {
duration := DefaultSleepDuration
if len(params) > 0 {
if v, ok := params["sleep_duration"]; ok {
duration, ok = v.(int)
if !ok {
duration = DefaultSleepDuration
gologger.Warning().Msgf("Invalid sleep_duration parameter type, using default value: %d", duration)
}
}
}
data = strings.ReplaceAll(data, "[SLEEPTIME]", strconv.Itoa(duration))
data = analyzers.ApplyPayloadTransformations(data)
// Also support [INFERENCE] for the time delay analyzer
if strings.Contains(data, "[INFERENCE]") {
randInt := analyzers.GetRandomInteger()
data = strings.ReplaceAll(data, "[INFERENCE]", fmt.Sprintf("%d=%d", randInt, randInt))
}
return data
}
func (a *Analyzer) parseAnalyzerParameters(params map[string]interface{}) (int, int, float64, float64, error) {
requestsLimit := DefaultRequestsLimit
sleepDuration := DefaultSleepDuration
timeCorrelationErrorRange := DefaultTimeCorrelationErrorRange
timeSlopeErrorRange := DefaultTimeSlopeErrorRange
if len(params) == 0 {
return requestsLimit, sleepDuration, timeCorrelationErrorRange, timeSlopeErrorRange, nil
}
var ok bool
for k, v := range params {
switch k {
case "sleep_duration":
sleepDuration, ok = v.(int)
case "requests_limit":
requestsLimit, ok = v.(int)
case "time_correlation_error_range":
timeCorrelationErrorRange, ok = v.(float64)
case "time_slope_error_range":
timeSlopeErrorRange, ok = v.(float64)
}
if !ok {
return 0, 0, 0, 0, errors.Errorf("invalid parameter type for %s", k)
}
}
return requestsLimit, sleepDuration, timeCorrelationErrorRange, timeSlopeErrorRange, nil
}
// Analyze is the main function for the analyzer
func (a *Analyzer) Analyze(options *analyzers.Options) (bool, string, error) {
if options.ResponseTimeDelay < defaultSleepTimeDuration {
return false, "", nil
}
// Parse parameters for this analyzer if any or use default values
requestsLimit, sleepDuration, timeCorrelationErrorRange, timeSlopeErrorRange, err :=
a.parseAnalyzerParameters(options.AnalyzerParameters)
if err != nil {
return false, "", err
}
reqSender := func(delay int) (float64, error) {
gr := options.FuzzGenerated
replaced := strings.ReplaceAll(gr.OriginalPayload, "[SLEEPTIME]", strconv.Itoa(delay))
replaced = a.ApplyInitialTransformation(replaced, options.AnalyzerParameters)
if err := gr.Component.SetValue(gr.Key, replaced); err != nil {
return 0, errors.Wrap(err, "could not set value in component")
}
rebuilt, err := gr.Component.Rebuild()
if err != nil {
return 0, errors.Wrap(err, "could not rebuild request")
}
gologger.Verbose().Msgf("[%s] Sending request with %d delay for: %s", a.Name(), delay, rebuilt.URL.String())
timeTaken, err := doHTTPRequestWithTimeTracing(rebuilt, options.HttpClient)
if err != nil {
return 0, errors.Wrap(err, "could not do request with time tracing")
}
return timeTaken, nil
}
matched, matchReason, err := checkTimingDependency(
requestsLimit,
sleepDuration,
timeCorrelationErrorRange,
timeSlopeErrorRange,
reqSender,
)
if err != nil {
return false, "", err
}
if matched {
return true, matchReason, nil
}
return false, "", nil
}
// doHTTPRequestWithTimeTracing does a http request with time tracing
func doHTTPRequestWithTimeTracing(req *retryablehttp.Request, httpclient *retryablehttp.Client) (float64, error) {
var ttfb time.Duration
var start time.Time
trace := &httptrace.ClientTrace{
GotFirstResponseByte: func() { ttfb = time.Since(start) },
}
req = req.WithContext(httptrace.WithClientTrace(req.Context(), trace))
start = time.Now()
resp, err := httpclient.Do(req)
if err != nil {
return 0, errors.Wrap(err, "could not do request")
}
_, err = io.ReadAll(resp.Body)
if err != nil {
return 0, errors.Wrap(err, "could not read response body")
}
return ttfb.Seconds(), nil
}
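A brief, hedged sketch of the payload rewriting entry point above, driven through the analyzers registry (the SQL-flavoured payload string is illustrative only):

```go
package main

import (
	"fmt"

	"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/analyzers"
	_ "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/analyzers/time" // registers "time_delay" via init()
)

func main() {
	a := analyzers.GetAnalyzer("time_delay")
	// [SLEEPTIME] is replaced by the configured sleep_duration (default 5),
	// and any [RANDNUM]/[RANDSTR]/[INFERENCE] markers are expanded as well.
	payload := a.ApplyInitialTransformation("1 AND SLEEP([SLEEPTIME])", map[string]interface{}{"sleep_duration": 7})
	fmt.Println(payload) // 1 AND SLEEP(7)
}
```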

View File

@ -0,0 +1,171 @@
// Package time implements a time delay analyzer using linear
// regression heuristics inspired by ZAP to discover time-based issues.
//
// The approach is the one used in ZAP for timing-based checks. It has
// several advantages over the older heuristic of simply measuring the
// observed sleep time.
//
// Because a statistical model is built incrementally, it can quickly
// tell whether an observed delay is random. The payloads also alternate
// between a very high and a very low sleep, which makes it faster to
// rule out negative cases; only plausible cases are sent for further
// verification.
//
// For more details on the algorithm, follow the links below:
// - https://groups.google.com/g/zaproxy-develop/c/KGSkNHlLtqk
// - https://github.com/zaproxy/zap-extensions/pull/5053
//
// This file has been implemented from its original version. It was originally licensed under the Apache License 2.0 (see LICENSE file for details).
// The original algorithm is implemented in ZAP Active Scanner.
package time
import (
"errors"
"fmt"
"math"
)
type timeDelayRequestSender func(delay int) (float64, error)
// checkTimingDependency checks the timing dependency for a given request
//
// It alternates between sending a high-sleep request and a low-sleep request,
// checking each time whether the application's delay can be predictably controlled.
func checkTimingDependency(
requestsLimit int,
highSleepTimeSeconds int,
correlationErrorRange float64,
slopeErrorRange float64,
requestSender timeDelayRequestSender,
) (bool, string, error) {
if requestsLimit < 2 {
return false, "", errors.New("requests limit should be at least 2")
}
regression := newSimpleLinearRegression()
requestsLeft := requestsLimit
for {
if requestsLeft <= 0 {
break
}
isCorrelationPossible, err := sendRequestAndTestConfidence(regression, highSleepTimeSeconds, requestSender)
if err != nil {
return false, "", err
}
if !isCorrelationPossible {
return false, "", nil
}
isCorrelationPossible, err = sendRequestAndTestConfidence(regression, 1, requestSender)
if err != nil {
return false, "", err
}
if !isCorrelationPossible {
return false, "", nil
}
requestsLeft = requestsLeft - 2
}
result := regression.IsWithinConfidence(correlationErrorRange, 1.0, slopeErrorRange)
if result {
resultReason := fmt.Sprintf(
"[time_delay] made %d requests successfully, with a regression slope of %.2f and correlation %.2f",
requestsLimit,
regression.slope,
regression.correlation,
)
return result, resultReason, nil
}
return result, "", nil
}
// sendRequestAndTestConfidence sends a request and tests the confidence of delay
func sendRequestAndTestConfidence(
regression *simpleLinearRegression,
delay int,
requestSender timeDelayRequestSender,
) (bool, error) {
delayReceived, err := requestSender(delay)
if err != nil {
return false, err
}
if delayReceived < float64(delay) {
return false, nil
}
regression.AddPoint(float64(delay), delayReceived)
if !regression.IsWithinConfidence(0.3, 1.0, 0.5) {
return false, nil
}
return true, nil
}
// simpleLinearRegression is a simple linear regression model that can be updated at runtime.
// It is based on the same algorithm in ZAP for doing timing checks.
type simpleLinearRegression struct {
count float64
independentSum float64
dependentSum float64
// Variances
independentVarianceN float64
dependentVarianceN float64
sampleCovarianceN float64
slope float64
intercept float64
correlation float64
}
func newSimpleLinearRegression() *simpleLinearRegression {
return &simpleLinearRegression{
slope: 1,
correlation: 1,
}
}
func (o *simpleLinearRegression) AddPoint(x, y float64) {
independentResidualAdjustment := x - o.independentSum/o.count
dependentResidualAdjustment := y - o.dependentSum/o.count
o.count += 1
o.independentSum += x
o.dependentSum += y
if math.IsNaN(independentResidualAdjustment) {
return
}
independentResidual := x - o.independentSum/o.count
dependentResidual := y - o.dependentSum/o.count
o.independentVarianceN += independentResidual * independentResidualAdjustment
o.dependentVarianceN += dependentResidual * dependentResidualAdjustment
o.sampleCovarianceN += independentResidual * dependentResidualAdjustment
o.slope = o.sampleCovarianceN / o.independentVarianceN
o.correlation = o.slope * math.Sqrt(o.independentVarianceN/o.dependentVarianceN)
o.correlation *= o.correlation
// NOTE: zap had the reverse formula, changed it to the correct one
// for intercept. Verify if this is correct.
o.intercept = o.dependentSum/o.count - o.slope*(o.independentSum/o.count)
if math.IsNaN(o.correlation) {
o.correlation = 1
}
}
func (o *simpleLinearRegression) Predict(x float64) float64 {
return o.slope*x + o.intercept
}
func (o *simpleLinearRegression) IsWithinConfidence(correlationErrorRange float64, expectedSlope float64, slopeErrorRange float64,
) bool {
return o.correlation > 1.0-correlationErrorRange &&
math.Abs(expectedSlope-o.slope) < slopeErrorRange
}
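For reference, the incremental updates above maintain the usual least-squares sums; as a sketch of the quantities involved (with $S_{xx}$, $S_{yy}$, $S_{xy}$ the running variance/covariance accumulators kept in the struct):

$$
\text{slope} = \frac{S_{xy}}{S_{xx}}, \qquad
\text{correlation} = \left(\frac{S_{xy}}{S_{xx}}\sqrt{\tfrac{S_{xx}}{S_{yy}}}\right)^{2} = \frac{S_{xy}^{2}}{S_{xx}\,S_{yy}}, \qquad
\text{intercept} = \bar{y} - \text{slope}\cdot\bar{x}
$$

IsWithinConfidence then accepts the timing hypothesis only when correlation > 1 - correlationErrorRange and |expectedSlope - slope| < slopeErrorRange.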

View File

@ -0,0 +1,143 @@
// Tests ported from ZAP Java version of the algorithm
package time
import (
"math"
"math/rand"
"testing"
"time"
"github.com/stretchr/testify/require"
)
const (
correlationErrorRange = float64(0.1)
slopeErrorRange = float64(0.2)
)
var rng = rand.New(rand.NewSource(time.Now().UnixNano()))
func Test_should_generate_alternating_sequences(t *testing.T) {
var generatedDelays []float64
reqSender := func(delay int) (float64, error) {
generatedDelays = append(generatedDelays, float64(delay))
return float64(delay), nil
}
matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
require.NoError(t, err)
require.True(t, matched)
require.EqualValues(t, []float64{15, 1, 15, 1}, generatedDelays)
}
func Test_should_giveup_non_injectable(t *testing.T) {
var timesCalled int
reqSender := func(delay int) (float64, error) {
timesCalled++
return 0.5, nil
}
matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
require.NoError(t, err)
require.False(t, matched)
require.Equal(t, 1, timesCalled)
}
func Test_should_giveup_slow_non_injectable(t *testing.T) {
var timesCalled int
reqSender := func(delay int) (float64, error) {
timesCalled++
return 10 + rng.Float64()*0.5, nil
}
matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
require.NoError(t, err)
require.False(t, matched)
require.LessOrEqual(t, timesCalled, 3)
}
func Test_should_giveup_slow_non_injectable_realworld(t *testing.T) {
var timesCalled int
var iteration = 0
counts := []float64{21, 11, 21, 11}
reqSender := func(delay int) (float64, error) {
timesCalled++
iteration++
return counts[iteration-1], nil
}
matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
require.NoError(t, err)
require.False(t, matched)
require.LessOrEqual(t, timesCalled, 4)
}
func Test_should_detect_dependence_with_small_error(t *testing.T) {
reqSender := func(delay int) (float64, error) {
return float64(delay) + rng.Float64()*0.5, nil
}
matched, reason, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
require.NoError(t, err)
require.True(t, matched)
require.NotEmpty(t, reason)
}
func Test_LinearRegression_Numerical_stability(t *testing.T) {
variables := [][]float64{
{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {1, 1}, {2, 2}, {2, 2}, {2, 2},
}
slope := float64(1)
correlation := float64(1)
regression := newSimpleLinearRegression()
for _, v := range variables {
regression.AddPoint(v[0], v[1])
}
require.True(t, almostEqual(regression.slope, slope))
require.True(t, almostEqual(regression.correlation, correlation))
}
func Test_LinearRegression_exact_verify(t *testing.T) {
variables := [][]float64{
{1, 1}, {2, 3},
}
slope := float64(2)
correlation := float64(1)
regression := newSimpleLinearRegression()
for _, v := range variables {
regression.AddPoint(v[0], v[1])
}
require.True(t, almostEqual(regression.slope, slope))
require.True(t, almostEqual(regression.correlation, correlation))
}
func Test_LinearRegression_known_verify(t *testing.T) {
variables := [][]float64{
{1, 1.348520581}, {2, 2.524046187}, {3, 3.276944688}, {4, 4.735374498}, {5, 5.150291657},
}
slope := float64(0.981487046)
correlation := float64(0.979228906)
regression := newSimpleLinearRegression()
for _, v := range variables {
regression.AddPoint(v[0], v[1])
}
require.True(t, almostEqual(regression.slope, slope))
require.True(t, almostEqual(regression.correlation, correlation))
}
func Test_LinearRegression_nonlinear_verify(t *testing.T) {
variables := [][]float64{
{1, 2}, {2, 4}, {3, 8}, {4, 16}, {5, 32},
}
regression := newSimpleLinearRegression()
for _, v := range variables {
regression.AddPoint(v[0], v[1])
}
require.Less(t, regression.correlation, 0.9)
}
const float64EqualityThreshold = 1e-8
func almostEqual(a, b float64) bool {
return math.Abs(a-b) <= float64EqualityThreshold
}

View File

@ -7,7 +7,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/dataformat" "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/dataformat"
"github.com/projectdiscovery/retryablehttp-go" "github.com/projectdiscovery/retryablehttp-go"
"github.com/projectdiscovery/utils/maps" mapsutil "github.com/projectdiscovery/utils/maps"
urlutil "github.com/projectdiscovery/utils/url" urlutil "github.com/projectdiscovery/utils/url"
) )

View File

@ -50,6 +50,11 @@ type ExecuteRuleInput struct {
BaseRequest *retryablehttp.Request BaseRequest *retryablehttp.Request
// DisplayFuzzPoints is a flag to display fuzz points // DisplayFuzzPoints is a flag to display fuzz points
DisplayFuzzPoints bool DisplayFuzzPoints bool
// ApplyPayloadInitialTransformation is an optional function
// to transform the payload initially based on analyzer rules
ApplyPayloadInitialTransformation func(string, map[string]interface{}) string
AnalyzerParams map[string]interface{}
} }
// GeneratedRequest is a single generated request for rule // GeneratedRequest is a single generated request for rule
@ -64,6 +69,15 @@ type GeneratedRequest struct {
Component component.Component Component component.Component
// Parameter being fuzzed // Parameter being fuzzed
Parameter string Parameter string
// Key is the key for the request
Key string
// Value is the value for the request
Value string
// OriginalValue is the original value for the request
OriginalValue string
// OriginalPayload is the original payload for the request
OriginalPayload string
} }
// Execute executes a fuzzing rule accepting a callback on which // Execute executes a fuzzing rule accepting a callback on which
@ -216,7 +230,9 @@ func (rule *Rule) executeRuleValues(input *ExecuteRuleInput, ruleComponent compo
// if we are only fuzzing values // if we are only fuzzing values
if len(rule.Fuzz.Value) > 0 { if len(rule.Fuzz.Value) > 0 {
for _, value := range rule.Fuzz.Value { for _, value := range rule.Fuzz.Value {
if err := rule.executePartRule(input, ValueOrKeyValue{Value: value}, ruleComponent); err != nil { originalPayload := value
if err := rule.executePartRule(input, ValueOrKeyValue{Value: value, OriginalPayload: originalPayload}, ruleComponent); err != nil {
if component.IsErrSetValue(err) { if component.IsErrSetValue(err) {
// these are errors due to format restrictions // these are errors due to format restrictions
// ex: fuzzing string value in a json int field // ex: fuzzing string value in a json int field
@ -257,7 +273,7 @@ func (rule *Rule) executeRuleValues(input *ExecuteRuleInput, ruleComponent compo
if err != nil { if err != nil {
return err return err
} }
if gotErr := rule.execWithInput(input, req, input.InteractURLs, ruleComponent, "", ""); gotErr != nil { if gotErr := rule.execWithInput(input, req, input.InteractURLs, ruleComponent, "", "", "", "", "", ""); gotErr != nil {
return gotErr return gotErr
} }
} }

View File

@ -42,14 +42,14 @@ func (rule *Rule) executePartComponent(input *ExecuteRuleInput, payload ValueOrK
return rule.executePartComponentOnKV(input, payload, ruleComponent.Clone()) return rule.executePartComponentOnKV(input, payload, ruleComponent.Clone())
} else { } else {
// for value only fuzzing // for value only fuzzing
return rule.executePartComponentOnValues(input, payload.Value, ruleComponent.Clone()) return rule.executePartComponentOnValues(input, payload.Value, payload.OriginalPayload, ruleComponent.Clone())
} }
} }
// executePartComponentOnValues executes this rule on a given component and payload // executePartComponentOnValues executes this rule on a given component and payload
// this supports both single and multiple [ruleType] modes // this supports both single and multiple [ruleType] modes
// i.e if component has multiple values, they can be replaced once or all depending on mode // i.e if component has multiple values, they can be replaced once or all depending on mode
func (rule *Rule) executePartComponentOnValues(input *ExecuteRuleInput, payloadStr string, ruleComponent component.Component) error { func (rule *Rule) executePartComponentOnValues(input *ExecuteRuleInput, payloadStr, originalPayload string, ruleComponent component.Component) error {
finalErr := ruleComponent.Iterate(func(key string, value interface{}) error { finalErr := ruleComponent.Iterate(func(key string, value interface{}) error {
valueStr := types.ToString(value) valueStr := types.ToString(value)
if !rule.matchKeyOrValue(key, valueStr) { if !rule.matchKeyOrValue(key, valueStr) {
@ -57,8 +57,13 @@ func (rule *Rule) executePartComponentOnValues(input *ExecuteRuleInput, payloadS
return nil return nil
} }
var evaluated string var evaluated, originalEvaluated string
evaluated, input.InteractURLs = rule.executeEvaluate(input, key, valueStr, payloadStr, input.InteractURLs) evaluated, input.InteractURLs = rule.executeEvaluate(input, key, valueStr, payloadStr, input.InteractURLs)
if input.ApplyPayloadInitialTransformation != nil {
evaluated = input.ApplyPayloadInitialTransformation(evaluated, input.AnalyzerParams)
originalEvaluated, _ = rule.executeEvaluate(input, key, valueStr, originalPayload, input.InteractURLs)
}
if err := ruleComponent.SetValue(key, evaluated); err != nil { if err := ruleComponent.SetValue(key, evaluated); err != nil {
// gologger.Warning().Msgf("could not set value due to format restriction original(%s, %s[%T]) , new(%s,%s[%T])", key, valueStr, value, key, evaluated, evaluated) // gologger.Warning().Msgf("could not set value due to format restriction original(%s, %s[%T]) , new(%s,%s[%T])", key, valueStr, value, key, evaluated, evaluated)
return nil return nil
@ -70,7 +75,7 @@ func (rule *Rule) executePartComponentOnValues(input *ExecuteRuleInput, payloadS
return err return err
} }
if qerr := rule.execWithInput(input, req, input.InteractURLs, ruleComponent, key, valueStr); qerr != nil { if qerr := rule.execWithInput(input, req, input.InteractURLs, ruleComponent, key, valueStr, originalEvaluated, valueStr, key, evaluated); qerr != nil {
return qerr return qerr
} }
// fmt.Printf("executed with value: %s\n", evaluated) // fmt.Printf("executed with value: %s\n", evaluated)
@ -92,7 +97,7 @@ func (rule *Rule) executePartComponentOnValues(input *ExecuteRuleInput, payloadS
if err != nil { if err != nil {
return err return err
} }
if qerr := rule.execWithInput(input, req, input.InteractURLs, ruleComponent, "", ""); qerr != nil { if qerr := rule.execWithInput(input, req, input.InteractURLs, ruleComponent, "", "", "", "", "", ""); qerr != nil {
err = qerr err = qerr
return err return err
} }
@ -127,7 +132,7 @@ func (rule *Rule) executePartComponentOnKV(input *ExecuteRuleInput, payload Valu
return err return err
} }
if qerr := rule.execWithInput(input, req, input.InteractURLs, ruleComponent, key, value); qerr != nil { if qerr := rule.execWithInput(input, req, input.InteractURLs, ruleComponent, key, value, "", "", "", ""); qerr != nil {
return err return err
} }
@ -146,12 +151,13 @@ func (rule *Rule) executePartComponentOnKV(input *ExecuteRuleInput, payload Valu
} }
// execWithInput executes a rule with input via callback // execWithInput executes a rule with input via callback
func (rule *Rule) execWithInput(input *ExecuteRuleInput, httpReq *retryablehttp.Request, interactURLs []string, component component.Component, parameter, parameterValue string) error { func (rule *Rule) execWithInput(input *ExecuteRuleInput, httpReq *retryablehttp.Request, interactURLs []string, component component.Component, parameter, parameterValue, originalPayload, originalValue, key, value string) error {
// If the parameter is a number, replace it with the parameter value // If the parameter is a number, replace it with the parameter value
// or if the parameter is empty and the parameter value is not empty // or if the parameter is empty and the parameter value is not empty
// replace it with the parameter value // replace it with the parameter value
actualParameter := parameter
if _, err := strconv.Atoi(parameter); err == nil || (parameter == "" && parameterValue != "") { if _, err := strconv.Atoi(parameter); err == nil || (parameter == "" && parameterValue != "") {
parameter = parameterValue actualParameter = parameterValue
} }
// If the parameter is frequent, skip it if the option is enabled // If the parameter is frequent, skip it if the option is enabled
if rule.options.FuzzParamsFrequency != nil { if rule.options.FuzzParamsFrequency != nil {
@ -168,7 +174,11 @@ func (rule *Rule) execWithInput(input *ExecuteRuleInput, httpReq *retryablehttp.
InteractURLs: interactURLs, InteractURLs: interactURLs,
DynamicValues: input.Values, DynamicValues: input.Values,
Component: component, Component: component,
Parameter: parameter, Parameter: actualParameter,
Key: key,
Value: value,
OriginalValue: originalValue,
OriginalPayload: originalPayload,
} }
if !input.Callback(request) { if !input.Callback(request) {
return types.ErrNoMoreRequests return types.ErrNoMoreRequests

View File

@ -19,6 +19,8 @@ var (
type ValueOrKeyValue struct { type ValueOrKeyValue struct {
Key string Key string
Value string Value string
OriginalPayload string
} }
func (v *ValueOrKeyValue) IsKV() bool { func (v *ValueOrKeyValue) IsKV() bool {

View File

@ -201,6 +201,7 @@ type ResultEvent struct {
FuzzingMethod string `json:"fuzzing_method,omitempty"` FuzzingMethod string `json:"fuzzing_method,omitempty"`
FuzzingParameter string `json:"fuzzing_parameter,omitempty"` FuzzingParameter string `json:"fuzzing_parameter,omitempty"`
FuzzingPosition string `json:"fuzzing_position,omitempty"` FuzzingPosition string `json:"fuzzing_position,omitempty"`
AnalyzerDetails string `json:"analyzer_details,omitempty"`
FileToIndexPosition map[string]int `json:"-"` FileToIndexPosition map[string]int `json:"-"`
TemplateVerifier string `json:"-"` TemplateVerifier string `json:"-"`

View File

@ -86,6 +86,12 @@ func (request *Request) Compile(options *protocols.ExecutorOptions) error {
Args: request.Args, Args: request.Args,
EarlyCloseFileDescriptor: true, EarlyCloseFileDescriptor: true,
} }
if options.Options.Debug || options.Options.DebugResponse {
// enable debug mode for gozero
gozeroOptions.DebugMode = true
}
engine, err := gozero.New(gozeroOptions) engine, err := gozero.New(gozeroOptions)
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("[%s] engines '%s' not available on host", options.TemplateID, strings.Join(request.Engine, ",")) return errorutil.NewWithErr(err).Msgf("[%s] engines '%s' not available on host", options.TemplateID, strings.Join(request.Engine, ","))
@ -239,7 +245,22 @@ func (request *Request) ExecuteWithResults(input *contextargs.Context, dynamicVa
} }
if request.options.Options.Debug || request.options.Options.DebugRequests { if request.options.Options.Debug || request.options.Options.DebugRequests {
gologger.Debug().Msgf("[%s] Dumped Executed Source Code for %v\n\n%v\n", request.options.TemplateID, input.MetaInput.Input, interpretEnvVars(request.Source, allvars)) gologger.Debug().MsgFunc(func() string {
dashes := strings.Repeat("-", 15)
sb := &strings.Builder{}
sb.WriteString(fmt.Sprintf("[%s] Dumped Executed Source Code for input/stdin: '%v'", request.options.TemplateID, input.MetaInput.Input))
sb.WriteString(fmt.Sprintf("\n%v\n%v\n%v\n", dashes, "Source Code:", dashes))
sb.WriteString(interpretEnvVars(request.Source, allvars))
sb.WriteString("\n")
sb.WriteString(fmt.Sprintf("\n%v\n%v\n%v\n", dashes, "Command Executed:", dashes))
sb.WriteString(interpretEnvVars(gOutput.Command, allvars))
sb.WriteString("\n")
sb.WriteString(fmt.Sprintf("\n%v\n%v\n%v\n", dashes, "Command Output:", dashes))
sb.WriteString(gOutput.DebugData.String())
sb.WriteString("\n")
sb.WriteString("[WRN] Command Output here is stdout+sterr, in response variables they are seperate (use -v -svd flags for more details)")
return sb.String()
})
} }
dataOutputString := fmtStdout(gOutput.Stdout.String()) dataOutputString := fmtStdout(gOutput.Stdout.String())

View File

@ -142,6 +142,9 @@ func (metaInput *MetaInput) Unmarshal(data string) error {
} }
func (metaInput *MetaInput) Clone() *MetaInput { func (metaInput *MetaInput) Clone() *MetaInput {
metaInput.mu.Lock()
defer metaInput.mu.Unlock()
input := NewMetaInput() input := NewMetaInput()
input.Input = metaInput.Input input.Input = metaInput.Input
input.CustomIP = metaInput.CustomIP input.CustomIP = metaInput.CustomIP

View File

@ -19,7 +19,7 @@ var (
) )
func StartActiveMemGuardian(ctx context.Context) { func StartActiveMemGuardian(ctx context.Context) {
if memguardian.DefaultMemGuardian == nil { if memguardian.DefaultMemGuardian == nil || memTimer != nil {
return return
} }

View File

@ -30,7 +30,7 @@ import (
var _ protocols.Request = &Request{} var _ protocols.Request = &Request{}
const errCouldGetHtmlElement = "could get html element" const errCouldNotGetHtmlElement = "could not get html element"
// Type returns the type of the protocol request // Type returns the type of the protocol request
func (request *Request) Type() templateTypes.ProtocolType { func (request *Request) Type() templateTypes.ProtocolType {
@ -117,7 +117,7 @@ func (request *Request) executeRequestWithPayloads(input *contextargs.Context, p
if err != nil { if err != nil {
request.options.Output.Request(request.options.TemplatePath, input.MetaInput.Input, request.Type().String(), err) request.options.Output.Request(request.options.TemplatePath, input.MetaInput.Input, request.Type().String(), err)
request.options.Progress.IncrementFailedRequestsBy(1) request.options.Progress.IncrementFailedRequestsBy(1)
return errors.Wrap(err, errCouldGetHtmlElement) return errors.Wrap(err, errCouldNotGetHtmlElement)
} }
defer instance.Close() defer instance.Close()
@ -130,7 +130,7 @@ func (request *Request) executeRequestWithPayloads(input *contextargs.Context, p
if _, err := url.Parse(input.MetaInput.Input); err != nil { if _, err := url.Parse(input.MetaInput.Input); err != nil {
request.options.Output.Request(request.options.TemplatePath, input.MetaInput.Input, request.Type().String(), err) request.options.Output.Request(request.options.TemplatePath, input.MetaInput.Input, request.Type().String(), err)
request.options.Progress.IncrementFailedRequestsBy(1) request.options.Progress.IncrementFailedRequestsBy(1)
return errors.Wrap(err, errCouldGetHtmlElement) return errors.Wrap(err, errCouldNotGetHtmlElement)
} }
options := &engine.Options{ options := &engine.Options{
Timeout: time.Duration(request.options.Options.PageTimeout) * time.Second, Timeout: time.Duration(request.options.Options.PageTimeout) * time.Second,
@ -146,7 +146,7 @@ func (request *Request) executeRequestWithPayloads(input *contextargs.Context, p
if err != nil { if err != nil {
request.options.Output.Request(request.options.TemplatePath, input.MetaInput.Input, request.Type().String(), err) request.options.Output.Request(request.options.TemplatePath, input.MetaInput.Input, request.Type().String(), err)
request.options.Progress.IncrementFailedRequestsBy(1) request.options.Progress.IncrementFailedRequestsBy(1)
return errors.Wrap(err, errCouldGetHtmlElement) return errors.Wrap(err, errCouldNotGetHtmlElement)
} }
defer page.Close() defer page.Close()

View File

@ -14,6 +14,7 @@ import (
"github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider" "github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/expressions" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/expressions"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/generators" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/generators"
@ -55,6 +56,8 @@ type generatedRequest struct {
// requestURLPattern tracks unmodified request url pattern without values ( it is used for constant vuln_hash) // requestURLPattern tracks unmodified request url pattern without values ( it is used for constant vuln_hash)
// ex: {{BaseURL}}/api/exp?param={{randstr}} // ex: {{BaseURL}}/api/exp?param={{randstr}}
requestURLPattern string requestURLPattern string
fuzzGeneratedRequest fuzz.GeneratedRequest
} }
// setReqURLPattern sets the url request pattern for the generated request // setReqURLPattern sets the url request pattern for the generated request

View File

@ -3,13 +3,18 @@ package http
import ( import (
"bytes" "bytes"
"fmt" "fmt"
"math"
"strings" "strings"
"time"
"github.com/invopop/jsonschema" "github.com/invopop/jsonschema"
json "github.com/json-iterator/go" json "github.com/json-iterator/go"
"github.com/pkg/errors" "github.com/pkg/errors"
_ "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/analyzers/time"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz" "github.com/projectdiscovery/nuclei/v3/pkg/fuzz"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/analyzers"
"github.com/projectdiscovery/nuclei/v3/pkg/operators" "github.com/projectdiscovery/nuclei/v3/pkg/operators"
"github.com/projectdiscovery/nuclei/v3/pkg/operators/matchers" "github.com/projectdiscovery/nuclei/v3/pkg/operators/matchers"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols" "github.com/projectdiscovery/nuclei/v3/pkg/protocols"
@ -126,6 +131,9 @@ type Request struct {
// Fuzzing describes schema to fuzz http requests // Fuzzing describes schema to fuzz http requests
Fuzzing []*fuzz.Rule `yaml:"fuzzing,omitempty" json:"fuzzing,omitempty" jsonschema:"title=fuzzin rules for http fuzzing,description=Fuzzing describes rule schema to fuzz http requests"` Fuzzing []*fuzz.Rule `yaml:"fuzzing,omitempty" json:"fuzzing,omitempty" jsonschema:"title=fuzzin rules for http fuzzing,description=Fuzzing describes rule schema to fuzz http requests"`
// description: |
// Analyzer is an analyzer to use for matching the response.
Analyzer *analyzers.AnalyzerTemplate `yaml:"analyzer,omitempty" json:"analyzer,omitempty" jsonschema:"title=analyzer for http request,description=Analyzer for HTTP Request"`
CompiledOperators *operators.Operators `yaml:"-" json:"-"` CompiledOperators *operators.Operators `yaml:"-" json:"-"`
@ -303,6 +311,21 @@ func (request *Request) Compile(options *protocols.ExecutorOptions) error {
}, },
RedirectFlow: httpclientpool.DontFollowRedirect, RedirectFlow: httpclientpool.DontFollowRedirect,
} }
var customTimeout int
if request.Analyzer != nil && request.Analyzer.Name == "time_delay" {
var timeoutVal int
if timeout, ok := request.Analyzer.Parameters["sleep_duration"]; ok {
timeoutVal, _ = timeout.(int)
} else {
timeoutVal = 5
}
// Add 3x buffer to the timeout
customTimeout = int(math.Ceil(float64(timeoutVal) * 3))
}
if customTimeout > 0 {
connectionConfiguration.Connection.CustomMaxTimeout = time.Duration(customTimeout) * time.Second
}
if request.Redirects || options.Options.FollowRedirects { if request.Redirects || options.Options.FollowRedirects {
connectionConfiguration.RedirectFlow = httpclientpool.FollowAllRedirect connectionConfiguration.RedirectFlow = httpclientpool.FollowAllRedirect
@ -369,6 +392,12 @@ func (request *Request) Compile(options *protocols.ExecutorOptions) error {
} }
} }
if request.Analyzer != nil {
if analyzer := analyzers.GetAnalyzer(request.Analyzer.Name); analyzer == nil {
return errors.Errorf("analyzer %s not found", request.Analyzer.Name)
}
}
// Resolve payload paths from vars if they exists // Resolve payload paths from vars if they exists
for name, payload := range request.options.Options.Vars.AsMap() { for name, payload := range request.options.Options.Vars.AsMap() {
payloadStr, ok := payload.(string) payloadStr, ok := payload.(string)
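For context, a minimal sketch of how the analyzer field added in this file could look inside a template. The fuzzing rule and payload below are illustrative only; the analyzer name time_delay and the sleep_duration parameter are taken from the analyzer documentation added elsewhere in this changeset.

# illustrative template fragment; fuzzing rule and payload are hypothetical
http:
  - method: GET
    path:
      - "{{BaseURL}}"
    fuzzing:
      - part: query
        type: postfix
        mode: single
        fuzz:
          - "' AND SLEEP(5)-- -"
    analyzer:
      name: time_delay
      parameters:
        sleep_duration: 5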

View File

@ -60,6 +60,9 @@ func Init(options *types.Options) error {
type ConnectionConfiguration struct { type ConnectionConfiguration struct {
// DisableKeepAlive of the connection // DisableKeepAlive of the connection
DisableKeepAlive bool DisableKeepAlive bool
// CustomMaxTimeout is the custom timeout for the connection
// This overrides all other timeouts and is used for accurate time based fuzzing.
CustomMaxTimeout time.Duration
cookiejar *cookiejar.Jar cookiejar *cookiejar.Jar
mu sync.RWMutex mu sync.RWMutex
} }
@ -135,6 +138,10 @@ func (c *Configuration) Hash() string {
builder.WriteString(strconv.FormatBool(c.DisableCookie)) builder.WriteString(strconv.FormatBool(c.DisableCookie))
builder.WriteString("c") builder.WriteString("c")
builder.WriteString(strconv.FormatBool(c.Connection != nil)) builder.WriteString(strconv.FormatBool(c.Connection != nil))
if c.Connection != nil && c.Connection.CustomMaxTimeout > 0 {
builder.WriteString("k")
builder.WriteString(c.Connection.CustomMaxTimeout.String())
}
builder.WriteString("r") builder.WriteString("r")
builder.WriteString(strconv.FormatInt(int64(c.ResponseHeaderTimeout.Seconds()), 10)) builder.WriteString(strconv.FormatInt(int64(c.ResponseHeaderTimeout.Seconds()), 10))
hash := builder.String() hash := builder.String()
@ -247,6 +254,9 @@ func wrappedGet(options *types.Options, configuration *Configuration) (*retryabl
if configuration.ResponseHeaderTimeout != 0 { if configuration.ResponseHeaderTimeout != 0 {
responseHeaderTimeout = configuration.ResponseHeaderTimeout responseHeaderTimeout = configuration.ResponseHeaderTimeout
} }
if configuration.Connection != nil && configuration.Connection.CustomMaxTimeout > 0 {
responseHeaderTimeout = configuration.Connection.CustomMaxTimeout
}
transport := &http.Transport{ transport := &http.Transport{
ForceAttemptHTTP2: options.ForceAttemptHTTP2, ForceAttemptHTTP2: options.ForceAttemptHTTP2,
@ -313,6 +323,9 @@ func wrappedGet(options *types.Options, configuration *Configuration) (*retryabl
} }
if !configuration.NoTimeout { if !configuration.NoTimeout {
httpclient.Timeout = options.GetTimeouts().HttpTimeout httpclient.Timeout = options.GetTimeouts().HttpTimeout
if configuration.Connection != nil && configuration.Connection.CustomMaxTimeout > 0 {
httpclient.Timeout = configuration.Connection.CustomMaxTimeout
}
} }
client := retryablehttp.NewWithHTTPClient(httpclient, retryableHttpOptions) client := retryablehttp.NewWithHTTPClient(httpclient, retryableHttpOptions)
if jar != nil { if jar != nil {

View File

@ -170,6 +170,10 @@ func (request *Request) MakeResultEventItem(wrapped *output.InternalWrappedEvent
if value, ok := wrapped.InternalEvent["global-matchers"]; ok { if value, ok := wrapped.InternalEvent["global-matchers"]; ok {
isGlobalMatchers = value.(bool) isGlobalMatchers = value.(bool)
} }
var analyzerDetails string
if value, ok := wrapped.InternalEvent["analyzer_details"]; ok {
analyzerDetails = value.(string)
}
data := &output.ResultEvent{ data := &output.ResultEvent{
TemplateID: types.ToString(wrapped.InternalEvent["template-id"]), TemplateID: types.ToString(wrapped.InternalEvent["template-id"]),
TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]), TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]),
@ -193,6 +197,7 @@ func (request *Request) MakeResultEventItem(wrapped *output.InternalWrappedEvent
CURLCommand: types.ToString(wrapped.InternalEvent["curl-command"]), CURLCommand: types.ToString(wrapped.InternalEvent["curl-command"]),
TemplateEncoded: request.options.EncodeTemplate(), TemplateEncoded: request.options.EncodeTemplate(),
Error: types.ToString(wrapped.InternalEvent["error"]), Error: types.ToString(wrapped.InternalEvent["error"]),
AnalyzerDetails: analyzerDetails,
} }
return data return data
} }

View File

@ -19,6 +19,7 @@ import (
"github.com/projectdiscovery/fastdialer/fastdialer" "github.com/projectdiscovery/fastdialer/fastdialer"
"github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/analyzers"
"github.com/projectdiscovery/nuclei/v3/pkg/operators" "github.com/projectdiscovery/nuclei/v3/pkg/operators"
"github.com/projectdiscovery/nuclei/v3/pkg/output" "github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols" "github.com/projectdiscovery/nuclei/v3/pkg/protocols"
@ -927,8 +928,26 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ
matchedURL = responseURL matchedURL = responseURL
} }
} }
finalEvent := make(output.InternalEvent) finalEvent := make(output.InternalEvent)
if request.Analyzer != nil {
analyzer := analyzers.GetAnalyzer(request.Analyzer.Name)
analysisMatched, analysisDetails, err := analyzer.Analyze(&analyzers.Options{
FuzzGenerated: generatedRequest.fuzzGeneratedRequest,
HttpClient: request.httpClient,
ResponseTimeDelay: duration,
AnalyzerParameters: request.Analyzer.Parameters,
})
if err != nil {
gologger.Warning().Msgf("Could not analyze response: %v\n", err)
}
if analysisMatched {
finalEvent["analyzer_details"] = analysisDetails
finalEvent["analyzer"] = true
}
}
outputEvent := request.responseToDSLMap(respChain.Response(), input.MetaInput.Input, matchedURL, convUtil.String(dumpedRequest), respChain.FullResponse().String(), respChain.Body().String(), respChain.Headers().String(), duration, generatedRequest.meta) outputEvent := request.responseToDSLMap(respChain.Response(), input.MetaInput.Input, matchedURL, convUtil.String(dumpedRequest), respChain.FullResponse().String(), respChain.Body().String(), respChain.Headers().String(), duration, generatedRequest.meta)
// add response fields to template context and merge templatectx variables to output event // add response fields to template context and merge templatectx variables to output event
request.options.AddTemplateVars(input.MetaInput, request.Type(), request.ID, outputEvent) request.options.AddTemplateVars(input.MetaInput, request.Type(), request.ID, outputEvent)

View File

@ -6,6 +6,7 @@ package http
// -> request.executeGeneratedFuzzingRequest [execute final generated fuzzing request and get result] // -> request.executeGeneratedFuzzingRequest [execute final generated fuzzing request and get result]
import ( import (
"context"
"fmt" "fmt"
"net/http" "net/http"
"strings" "strings"
@ -13,6 +14,7 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz" "github.com/projectdiscovery/nuclei/v3/pkg/fuzz"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/analyzers"
"github.com/projectdiscovery/nuclei/v3/pkg/operators" "github.com/projectdiscovery/nuclei/v3/pkg/operators"
"github.com/projectdiscovery/nuclei/v3/pkg/operators/matchers" "github.com/projectdiscovery/nuclei/v3/pkg/operators/matchers"
"github.com/projectdiscovery/nuclei/v3/pkg/output" "github.com/projectdiscovery/nuclei/v3/pkg/output"
@ -121,7 +123,7 @@ func (request *Request) executeAllFuzzingRules(input *contextargs.Context, value
default: default:
} }
err := rule.Execute(&fuzz.ExecuteRuleInput{ input := &fuzz.ExecuteRuleInput{
Input: input, Input: input,
DisplayFuzzPoints: request.options.Options.DisplayFuzzPoints, DisplayFuzzPoints: request.options.Options.DisplayFuzzPoints,
Callback: func(gr fuzz.GeneratedRequest) bool { Callback: func(gr fuzz.GeneratedRequest) bool {
@ -135,8 +137,14 @@ func (request *Request) executeAllFuzzingRules(input *contextargs.Context, value
return request.executeGeneratedFuzzingRequest(gr, input, callback) return request.executeGeneratedFuzzingRequest(gr, input, callback)
}, },
Values: values, Values: values,
BaseRequest: baseRequest.Clone(input.Context()), BaseRequest: baseRequest.Clone(context.TODO()),
}) }
if request.Analyzer != nil {
analyzer := analyzers.GetAnalyzer(request.Analyzer.Name)
input.ApplyPayloadInitialTransformation = analyzer.ApplyInitialTransformation
input.AnalyzerParams = request.Analyzer.Parameters
}
err := rule.Execute(input)
if err == nil { if err == nil {
applicable = true applicable = true
continue continue
@ -170,6 +178,7 @@ func (request *Request) executeGeneratedFuzzingRequest(gr fuzz.GeneratedRequest,
dynamicValues: gr.DynamicValues, dynamicValues: gr.DynamicValues,
interactshURLs: gr.InteractURLs, interactshURLs: gr.InteractURLs,
original: request, original: request,
fuzzGeneratedRequest: gr,
} }
var gotMatches bool var gotMatches bool
requestErr := request.executeRequest(input, req, gr.DynamicValues, hasInteractMatchers, func(event *output.InternalWrappedEvent) { requestErr := request.executeRequest(input, req, gr.DynamicValues, hasInteractMatchers, func(event *output.InternalWrappedEvent) {

View File

@ -13,13 +13,18 @@ type Exporter struct {
options *Options options *Options
mutex *sync.Mutex mutex *sync.Mutex
rows []output.ResultEvent rows []output.ResultEvent
outputFile *os.File
} }
// Options contains the configuration options for JSONL exporter client // Options contains the configuration options for JSONL exporter client
type Options struct { type Options struct {
// File is the file to export found JSONL result to // File is the file to export found JSONL result to
File string `yaml:"file"` File string `yaml:"file"`
// OmitRaw specifies whether to exclude the raw request and response from the output
OmitRaw bool `yaml:"omit-raw"` OmitRaw bool `yaml:"omit-raw"`
// BatchSize is the number of records to keep in memory before writing them out to the JSONL file, or 0 to disable
// batching (default)
BatchSize int `yaml:"batch-size"`
} }
// New creates a new JSONL exporter integration client based on options. // New creates a new JSONL exporter integration client based on options.
@ -32,8 +37,7 @@ func New(options *Options) (*Exporter, error) {
return exporter, nil return exporter, nil
} }
// Export appends the passed result event to the list of objects to be exported to // Export appends the passed result event to the list of objects to be exported to the resulting JSONL file
// the resulting JSONL file
func (exporter *Exporter) Export(event *output.ResultEvent) error { func (exporter *Exporter) Export(event *output.ResultEvent) error {
exporter.mutex.Lock() exporter.mutex.Lock()
defer exporter.mutex.Unlock() defer exporter.mutex.Unlock()
@ -46,23 +50,36 @@ func (exporter *Exporter) Export(event *output.ResultEvent) error {
// Add the event to the rows // Add the event to the rows
exporter.rows = append(exporter.rows, *event) exporter.rows = append(exporter.rows, *event)
// If the batch size is greater than 0 and the number of rows has reached the batch size, flush them to the file
if exporter.options.BatchSize > 0 && len(exporter.rows) >= exporter.options.BatchSize {
err := exporter.WriteRows()
if err != nil {
// The error is already logged, return it to bubble up to the caller
return err
}
}
return nil return nil
} }
// Close writes the in-memory data to the JSONL file specified by options.JSONLExport // WriteRows writes all rows from the rows list to JSONL file and removes them from the list
// and closes the exporter after operation func (exporter *Exporter) WriteRows() error {
func (exporter *Exporter) Close() error { // Open the file for writing if it's not already.
exporter.mutex.Lock() // This will recreate the file if it exists, but keep the file handle so that batched writes within the same
defer exporter.mutex.Unlock() // execution are appended to the same file.
var err error
if exporter.outputFile == nil {
// Open the JSONL file for writing and create it if it doesn't exist // Open the JSONL file for writing and create it if it doesn't exist
f, err := os.OpenFile(exporter.options.File, os.O_WRONLY|os.O_CREATE, 0644) exporter.outputFile, err = os.OpenFile(exporter.options.File, os.O_WRONLY|os.O_CREATE, 0644)
if err != nil { if err != nil {
return errors.Wrap(err, "failed to create JSONL file") return errors.Wrap(err, "failed to create JSONL file")
} }
}
// Loop through the rows and write them, removing them as they're entered
for len(exporter.rows) > 0 {
row := exporter.rows[0]
// Loop through the rows and convert each to a JSON byte array and write to file
for _, row := range exporter.rows {
// Convert the row to JSON byte array and append a trailing newline. This is treated as a single line in JSONL // Convert the row to JSON byte array and append a trailing newline. This is treated as a single line in JSONL
obj, err := json.Marshal(row) obj, err := json.Marshal(row)
if err != nil { if err != nil {
@ -70,16 +87,36 @@ func (exporter *Exporter) Close() error {
} }
// Add a trailing newline to the JSON byte array to confirm with the JSONL format // Add a trailing newline to the JSON byte array to confirm with the JSONL format
obj = append(obj, '\n') obj = append(obj, ',', '\n')
// Attempt to append the JSON line to file specified in options.JSONLExport // Attempt to append the JSON line to file specified in options.JSONLExport
if _, err = f.Write(obj); err != nil { if _, err = exporter.outputFile.Write(obj); err != nil {
return errors.Wrap(err, "failed to append JSONL line") return errors.Wrap(err, "failed to append JSONL line")
} }
// Remove the item from the list
exporter.rows = exporter.rows[1:]
}
return nil
}
// Close writes the in-memory data to the JSONL file specified by options.JSONLExport and closes the exporter after
// operation
func (exporter *Exporter) Close() error {
exporter.mutex.Lock()
defer exporter.mutex.Unlock()
// Write any remaining rows to the file
// Write all pending rows
err := exporter.WriteRows()
if err != nil {
// The error is already logged, return it to bubble up to the caller
return err
} }
// Close the file // Close the file
if err := f.Close(); err != nil { if err := exporter.outputFile.Close(); err != nil {
return errors.Wrap(err, "failed to close JSONL file") return errors.Wrap(err, "failed to close JSONL file")
} }
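A sketch of how the new batch-size option might be configured for the JSONL exporter. Only the file, omit-raw, and batch-size keys come from the struct tags above; the top-level jsonl-export key is an assumption about the reporting config layout.

# hypothetical reporting-config fragment; top-level key name assumed
jsonl-export:
  file: /tmp/nuclei-results.jsonl
  omit-raw: true
  batch-size: 100

With batch-size set, Export flushes rows to the file every 100 records instead of buffering everything until Close.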

View File

@ -83,7 +83,7 @@ func CreateReportDescription(event *output.ResultEvent, formatter ResultFormatte
} }
} }
if len(event.ExtractedResults) > 0 || len(event.Metadata) > 0 { if len(event.ExtractedResults) > 0 || len(event.Metadata) > 0 || event.AnalyzerDetails != "" {
builder.WriteString("\n") builder.WriteString("\n")
builder.WriteString(formatter.MakeBold("Extra Information")) builder.WriteString(formatter.MakeBold("Extra Information"))
builder.WriteString("\n\n") builder.WriteString("\n\n")
@ -99,6 +99,13 @@ func CreateReportDescription(event *output.ResultEvent, formatter ResultFormatte
} }
builder.WriteString("\n") builder.WriteString("\n")
} }
if event.AnalyzerDetails != "" {
builder.WriteString(formatter.MakeBold("Analyzer Details:"))
builder.WriteString("\n\n")
builder.WriteString(event.AnalyzerDetails)
builder.WriteString("\n")
}
if len(event.Metadata) > 0 { if len(event.Metadata) > 0 {
builder.WriteString(formatter.MakeBold("Metadata:")) builder.WriteString(formatter.MakeBold("Metadata:"))
builder.WriteString("\n\n") builder.WriteString("\n\n")

View File

@ -197,7 +197,7 @@ func New(options *Options, db string, doNotDedupe bool) (Client, error) {
return client, nil return client, nil
} }
// CreateConfigIfNotExists creates report-config if it doesn't exists // CreateConfigIfNotExists creates report-config if it doesn't exist
func CreateConfigIfNotExists() error { func CreateConfigIfNotExists() error {
reportingConfig := config.DefaultConfig.GetReportingConfigFilePath() reportingConfig := config.DefaultConfig.GetReportingConfigFilePath()

View File

@ -8,6 +8,7 @@ import (
"sync" "sync"
"github.com/andygrunwald/go-jira" "github.com/andygrunwald/go-jira"
"github.com/pkg/errors"
"github.com/trivago/tgo/tcontainer" "github.com/trivago/tgo/tcontainer"
"github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger"
@ -241,18 +242,22 @@ func (i *Integration) CreateIssue(event *output.ResultEvent) (*filters.CreateIss
if i.options.UpdateExisting { if i.options.UpdateExisting {
issue, err := i.FindExistingIssue(event) issue, err := i.FindExistingIssue(event)
if err != nil { if err != nil {
return nil, err return nil, errors.Wrap(err, "could not find existing issue")
} else if issue.ID != "" { } else if issue.ID != "" {
_, _, err = i.jira.Issue.AddComment(issue.ID, &jira.Comment{ _, _, err = i.jira.Issue.AddComment(issue.ID, &jira.Comment{
Body: format.CreateReportDescription(event, i, i.options.OmitRaw), Body: format.CreateReportDescription(event, i, i.options.OmitRaw),
}) })
if err != nil { if err != nil {
return nil, err return nil, errors.Wrap(err, "could not add comment to existing issue")
} }
return getIssueResponseFromJira(&issue) return getIssueResponseFromJira(&issue)
} }
} }
return i.CreateNewIssue(event) resp, err := i.CreateNewIssue(event)
if err != nil {
return nil, errors.Wrap(err, "could not create new issue")
}
return resp, nil
} }
func (i *Integration) CloseIssue(event *output.ResultEvent) error { func (i *Integration) CloseIssue(event *output.ResultEvent) error {
@ -297,7 +302,11 @@ func (i *Integration) CloseIssue(event *output.ResultEvent) error {
// FindExistingIssue checks if the issue already exists and returns its ID // FindExistingIssue checks if the issue already exists and returns its ID
func (i *Integration) FindExistingIssue(event *output.ResultEvent) (jira.Issue, error) { func (i *Integration) FindExistingIssue(event *output.ResultEvent) (jira.Issue, error) {
template := format.GetMatchedTemplateName(event) template := format.GetMatchedTemplateName(event)
jql := fmt.Sprintf("summary ~ \"%s\" AND summary ~ \"%s\" AND status != \"%s\" AND project = \"%s\"", template, event.Host, i.options.StatusNot, i.options.ProjectName) project := i.options.ProjectName
if i.options.ProjectID != "" {
project = i.options.ProjectID
}
jql := fmt.Sprintf("summary ~ \"%s\" AND summary ~ \"%s\" AND status != \"%s\" AND project = \"%s\"", template, event.Host, i.options.StatusNot, project)
searchOptions := &jira.SearchOptions{ searchOptions := &jira.SearchOptions{
MaxResults: 1, // if any issue exists, then we won't create a new one MaxResults: 1, // if any issue exists, then we won't create a new one

View File

@ -9,3 +9,6 @@ func AddScanEvent(event ScanEvent) {
func InitWithConfig(config *ScanConfig, statsDirectory string) { func InitWithConfig(config *ScanConfig, statsDirectory string) {
} }
func Close() {
}

View File

@ -23,6 +23,7 @@ type ScanStatsWorker struct {
config *ScanConfig config *ScanConfig
m *sync.Mutex m *sync.Mutex
directory string directory string
file *os.File
enc *json.Encoder enc *json.Encoder
} }
@ -56,7 +57,7 @@ func (s *ScanStatsWorker) initEventsFile() error {
if err != nil { if err != nil {
return err return err
} }
defer f.Close() s.file = f
s.enc = json.NewEncoder(f) s.enc = json.NewEncoder(f)
return nil return nil
} }
@ -79,3 +80,22 @@ func AddScanEvent(event ScanEvent) {
} }
defaultWorker.AddScanEvent(event) defaultWorker.AddScanEvent(event)
} }
// Close closes the file associated with the worker
func (s *ScanStatsWorker) Close() {
s.m.Lock()
defer s.m.Unlock()
if s.file != nil {
_ = s.file.Close()
s.file = nil
}
}
// Close closes the file associated with the default worker
func Close() {
if defaultWorker == nil {
return
}
defaultWorker.Close()
}

View File

@ -10,5 +10,7 @@ const (
ExcludedCodeTmplStats = "code-flag-missing-warnings" ExcludedCodeTmplStats = "code-flag-missing-warnings"
ExludedDastTmplStats = "fuzz-flag-missing-warnings" ExludedDastTmplStats = "fuzz-flag-missing-warnings"
SkippedUnsignedStats = "skipped-unsigned-stats" // tracks loading of unsigned templates SkippedUnsignedStats = "skipped-unsigned-stats" // tracks loading of unsigned templates
ExcludedSelfContainedStats = "excluded-self-contained-stats"
ExcludedFileStats = "excluded-file-stats"
SkippedRequestSignatureStats = "skipped-request-signature-stats" SkippedRequestSignatureStats = "skipped-request-signature-stats"
) )

View File

@ -9,6 +9,8 @@ func init() {
stats.NewEntry(SkippedCodeTmplTamperedStats, "Found %d unsigned or tampered code template (carefully examine before using it & use -sign flag to sign them)") stats.NewEntry(SkippedCodeTmplTamperedStats, "Found %d unsigned or tampered code template (carefully examine before using it & use -sign flag to sign them)")
stats.NewEntry(ExcludedHeadlessTmplStats, "Excluded %d headless template[s] (disabled as default), use -headless option to run headless templates.") stats.NewEntry(ExcludedHeadlessTmplStats, "Excluded %d headless template[s] (disabled as default), use -headless option to run headless templates.")
stats.NewEntry(ExcludedCodeTmplStats, "Excluded %d code template[s] (disabled as default), use -code option to run code templates.") stats.NewEntry(ExcludedCodeTmplStats, "Excluded %d code template[s] (disabled as default), use -code option to run code templates.")
stats.NewEntry(ExcludedSelfContainedStats, "Excluded %d self-contained template[s] (disabled as default), use -esc option to run self-contained templates.")
stats.NewEntry(ExcludedFileStats, "Excluded %d file template[s] (disabled as default), use -file option to run file templates.")
stats.NewEntry(TemplatesExcludedStats, "Excluded %d template[s] with known weak matchers / tags excluded from default run using .nuclei-ignore") stats.NewEntry(TemplatesExcludedStats, "Excluded %d template[s] with known weak matchers / tags excluded from default run using .nuclei-ignore")
stats.NewEntry(ExludedDastTmplStats, "Excluded %d dast template[s] (disabled as default), use -dast option to run dast templates.") stats.NewEntry(ExludedDastTmplStats, "Excluded %d dast template[s] (disabled as default), use -dast option to run dast templates.")
stats.NewEntry(SkippedUnsignedStats, "Skipping %d unsigned template[s]") stats.NewEntry(SkippedUnsignedStats, "Skipping %d unsigned template[s]")

View File

@ -555,3 +555,8 @@ func (template *Template) UnmarshalJSON(data []byte) error {
} }
return nil return nil
} }
// HasFileProtocol returns true if the template has a file protocol section
func (template *Template) HasFileProtocol() bool {
return len(template.RequestsFile) > 0
}
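HasFileProtocol flags templates that declare a file section; per the loader stats added earlier in this changeset, such templates only run when the -file option is passed. A minimal sketch of a file-protocol template, illustrative and not part of this changeset:

# illustrative file-protocol template
id: sample-file-keyword
info:
  name: Sample File Keyword Match
  author: pdteam
  severity: info
file:
  - extensions:
      - all
    matchers:
      - type: word
        words:
          - "password"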

View File

@ -20,6 +20,7 @@ var (
HTTPMethodTypeHolderDoc encoder.Doc HTTPMethodTypeHolderDoc encoder.Doc
FUZZRuleDoc encoder.Doc FUZZRuleDoc encoder.Doc
SliceOrMapSliceDoc encoder.Doc SliceOrMapSliceDoc encoder.Doc
ANALYZERSAnalyzerTemplateDoc encoder.Doc
SignatureTypeHolderDoc encoder.Doc SignatureTypeHolderDoc encoder.Doc
MATCHERSMatcherDoc encoder.Doc MATCHERSMatcherDoc encoder.Doc
MatcherTypeHolderDoc encoder.Doc MatcherTypeHolderDoc encoder.Doc
@ -459,7 +460,7 @@ func init() {
Value: "HTTP response headers in name:value format", Value: "HTTP response headers in name:value format",
}, },
} }
HTTPRequestDoc.Fields = make([]encoder.Doc, 37) HTTPRequestDoc.Fields = make([]encoder.Doc, 38)
HTTPRequestDoc.Fields[0].Name = "path" HTTPRequestDoc.Fields[0].Name = "path"
HTTPRequestDoc.Fields[0].Type = "[]string" HTTPRequestDoc.Fields[0].Type = "[]string"
HTTPRequestDoc.Fields[0].Note = "" HTTPRequestDoc.Fields[0].Note = ""
@ -565,114 +566,119 @@ func init() {
HTTPRequestDoc.Fields[15].Note = "" HTTPRequestDoc.Fields[15].Note = ""
HTTPRequestDoc.Fields[15].Description = "Fuzzing describes schema to fuzz http requests" HTTPRequestDoc.Fields[15].Description = "Fuzzing describes schema to fuzz http requests"
HTTPRequestDoc.Fields[15].Comments[encoder.LineComment] = " Fuzzing describes schema to fuzz http requests" HTTPRequestDoc.Fields[15].Comments[encoder.LineComment] = " Fuzzing describes schema to fuzz http requests"
HTTPRequestDoc.Fields[16].Name = "self-contained" HTTPRequestDoc.Fields[16].Name = "analyzer"
HTTPRequestDoc.Fields[16].Type = "bool" HTTPRequestDoc.Fields[16].Type = "analyzers.AnalyzerTemplate"
HTTPRequestDoc.Fields[16].Note = "" HTTPRequestDoc.Fields[16].Note = ""
HTTPRequestDoc.Fields[16].Description = "SelfContained specifies if the request is self-contained." HTTPRequestDoc.Fields[16].Description = "Analyzer is an analyzer to use for matching the response."
HTTPRequestDoc.Fields[16].Comments[encoder.LineComment] = "SelfContained specifies if the request is self-contained." HTTPRequestDoc.Fields[16].Comments[encoder.LineComment] = "Analyzer is an analyzer to use for matching the response."
HTTPRequestDoc.Fields[17].Name = "signature" HTTPRequestDoc.Fields[17].Name = "self-contained"
HTTPRequestDoc.Fields[17].Type = "SignatureTypeHolder" HTTPRequestDoc.Fields[17].Type = "bool"
HTTPRequestDoc.Fields[17].Note = "" HTTPRequestDoc.Fields[17].Note = ""
HTTPRequestDoc.Fields[17].Description = "Signature is the request signature method" HTTPRequestDoc.Fields[17].Description = "SelfContained specifies if the request is self-contained."
HTTPRequestDoc.Fields[17].Comments[encoder.LineComment] = "Signature is the request signature method" HTTPRequestDoc.Fields[17].Comments[encoder.LineComment] = "SelfContained specifies if the request is self-contained."
HTTPRequestDoc.Fields[17].Values = []string{ HTTPRequestDoc.Fields[18].Name = "signature"
HTTPRequestDoc.Fields[18].Type = "SignatureTypeHolder"
HTTPRequestDoc.Fields[18].Note = ""
HTTPRequestDoc.Fields[18].Description = "Signature is the request signature method"
HTTPRequestDoc.Fields[18].Comments[encoder.LineComment] = "Signature is the request signature method"
HTTPRequestDoc.Fields[18].Values = []string{
"AWS", "AWS",
} }
HTTPRequestDoc.Fields[18].Name = "skip-secret-file" HTTPRequestDoc.Fields[19].Name = "skip-secret-file"
HTTPRequestDoc.Fields[18].Type = "bool"
HTTPRequestDoc.Fields[18].Note = ""
HTTPRequestDoc.Fields[18].Description = "SkipSecretFile skips the authentication or authorization configured in the secret file."
HTTPRequestDoc.Fields[18].Comments[encoder.LineComment] = "SkipSecretFile skips the authentication or authorization configured in the secret file."
HTTPRequestDoc.Fields[19].Name = "cookie-reuse"
HTTPRequestDoc.Fields[19].Type = "bool" HTTPRequestDoc.Fields[19].Type = "bool"
HTTPRequestDoc.Fields[19].Note = "" HTTPRequestDoc.Fields[19].Note = ""
HTTPRequestDoc.Fields[19].Description = "CookieReuse is an optional setting that enables cookie reuse for\nall requests defined in raw section." HTTPRequestDoc.Fields[19].Description = "SkipSecretFile skips the authentication or authorization configured in the secret file."
HTTPRequestDoc.Fields[19].Comments[encoder.LineComment] = "CookieReuse is an optional setting that enables cookie reuse for" HTTPRequestDoc.Fields[19].Comments[encoder.LineComment] = "SkipSecretFile skips the authentication or authorization configured in the secret file."
HTTPRequestDoc.Fields[20].Name = "disable-cookie" HTTPRequestDoc.Fields[20].Name = "cookie-reuse"
HTTPRequestDoc.Fields[20].Type = "bool" HTTPRequestDoc.Fields[20].Type = "bool"
HTTPRequestDoc.Fields[20].Note = "" HTTPRequestDoc.Fields[20].Note = ""
HTTPRequestDoc.Fields[20].Description = "DisableCookie is an optional setting that disables cookie reuse" HTTPRequestDoc.Fields[20].Description = "CookieReuse is an optional setting that enables cookie reuse for\nall requests defined in raw section."
HTTPRequestDoc.Fields[20].Comments[encoder.LineComment] = "DisableCookie is an optional setting that disables cookie reuse" HTTPRequestDoc.Fields[20].Comments[encoder.LineComment] = "CookieReuse is an optional setting that enables cookie reuse for"
HTTPRequestDoc.Fields[21].Name = "read-all" HTTPRequestDoc.Fields[21].Name = "disable-cookie"
HTTPRequestDoc.Fields[21].Type = "bool" HTTPRequestDoc.Fields[21].Type = "bool"
HTTPRequestDoc.Fields[21].Note = "" HTTPRequestDoc.Fields[21].Note = ""
HTTPRequestDoc.Fields[21].Description = "Enables force reading of the entire raw unsafe request body ignoring\nany specified content length headers." HTTPRequestDoc.Fields[21].Description = "DisableCookie is an optional setting that disables cookie reuse"
HTTPRequestDoc.Fields[21].Comments[encoder.LineComment] = "Enables force reading of the entire raw unsafe request body ignoring" HTTPRequestDoc.Fields[21].Comments[encoder.LineComment] = "DisableCookie is an optional setting that disables cookie reuse"
HTTPRequestDoc.Fields[22].Name = "redirects" HTTPRequestDoc.Fields[22].Name = "read-all"
HTTPRequestDoc.Fields[22].Type = "bool" HTTPRequestDoc.Fields[22].Type = "bool"
HTTPRequestDoc.Fields[22].Note = "" HTTPRequestDoc.Fields[22].Note = ""
HTTPRequestDoc.Fields[22].Description = "Redirects specifies whether redirects should be followed by the HTTP Client.\n\nThis can be used in conjunction with `max-redirects` to control the HTTP request redirects." HTTPRequestDoc.Fields[22].Description = "Enables force reading of the entire raw unsafe request body ignoring\nany specified content length headers."
HTTPRequestDoc.Fields[22].Comments[encoder.LineComment] = "Redirects specifies whether redirects should be followed by the HTTP Client." HTTPRequestDoc.Fields[22].Comments[encoder.LineComment] = "Enables force reading of the entire raw unsafe request body ignoring"
HTTPRequestDoc.Fields[23].Name = "host-redirects" HTTPRequestDoc.Fields[23].Name = "redirects"
HTTPRequestDoc.Fields[23].Type = "bool" HTTPRequestDoc.Fields[23].Type = "bool"
HTTPRequestDoc.Fields[23].Note = "" HTTPRequestDoc.Fields[23].Note = ""
HTTPRequestDoc.Fields[23].Description = "Redirects specifies whether only redirects to the same host should be followed by the HTTP Client.\n\nThis can be used in conjunction with `max-redirects` to control the HTTP request redirects." HTTPRequestDoc.Fields[23].Description = "Redirects specifies whether redirects should be followed by the HTTP Client.\n\nThis can be used in conjunction with `max-redirects` to control the HTTP request redirects."
HTTPRequestDoc.Fields[23].Comments[encoder.LineComment] = "Redirects specifies whether only redirects to the same host should be followed by the HTTP Client." HTTPRequestDoc.Fields[23].Comments[encoder.LineComment] = "Redirects specifies whether redirects should be followed by the HTTP Client."
HTTPRequestDoc.Fields[24].Name = "pipeline" HTTPRequestDoc.Fields[24].Name = "host-redirects"
HTTPRequestDoc.Fields[24].Type = "bool" HTTPRequestDoc.Fields[24].Type = "bool"
HTTPRequestDoc.Fields[24].Note = "" HTTPRequestDoc.Fields[24].Note = ""
HTTPRequestDoc.Fields[24].Description = "Pipeline defines if the attack should be performed with HTTP 1.1 Pipelining\n\nAll requests must be idempotent (GET/POST). This can be used for race conditions/billions requests." HTTPRequestDoc.Fields[24].Description = "Redirects specifies whether only redirects to the same host should be followed by the HTTP Client.\n\nThis can be used in conjunction with `max-redirects` to control the HTTP request redirects."
HTTPRequestDoc.Fields[24].Comments[encoder.LineComment] = "Pipeline defines if the attack should be performed with HTTP 1.1 Pipelining" HTTPRequestDoc.Fields[24].Comments[encoder.LineComment] = "Redirects specifies whether only redirects to the same host should be followed by the HTTP Client."
HTTPRequestDoc.Fields[25].Name = "unsafe" HTTPRequestDoc.Fields[25].Name = "pipeline"
HTTPRequestDoc.Fields[25].Type = "bool" HTTPRequestDoc.Fields[25].Type = "bool"
HTTPRequestDoc.Fields[25].Note = "" HTTPRequestDoc.Fields[25].Note = ""
HTTPRequestDoc.Fields[25].Description = "Unsafe specifies whether to use rawhttp engine for sending Non RFC-Compliant requests.\n\nThis uses the [rawhttp](https://github.com/projectdiscovery/rawhttp) engine to achieve complete\ncontrol over the request, with no normalization performed by the client." HTTPRequestDoc.Fields[25].Description = "Pipeline defines if the attack should be performed with HTTP 1.1 Pipelining\n\nAll requests must be idempotent (GET/POST). This can be used for race conditions/billions requests."
HTTPRequestDoc.Fields[25].Comments[encoder.LineComment] = "Unsafe specifies whether to use rawhttp engine for sending Non RFC-Compliant requests." HTTPRequestDoc.Fields[25].Comments[encoder.LineComment] = "Pipeline defines if the attack should be performed with HTTP 1.1 Pipelining"
HTTPRequestDoc.Fields[26].Name = "race" HTTPRequestDoc.Fields[26].Name = "unsafe"
HTTPRequestDoc.Fields[26].Type = "bool" HTTPRequestDoc.Fields[26].Type = "bool"
HTTPRequestDoc.Fields[26].Note = "" HTTPRequestDoc.Fields[26].Note = ""
HTTPRequestDoc.Fields[26].Description = "Race determines if all the request have to be attempted at the same time (Race Condition)\n\nThe actual number of requests that will be sent is determined by the `race_count` field." HTTPRequestDoc.Fields[26].Description = "Unsafe specifies whether to use rawhttp engine for sending Non RFC-Compliant requests.\n\nThis uses the [rawhttp](https://github.com/projectdiscovery/rawhttp) engine to achieve complete\ncontrol over the request, with no normalization performed by the client."
HTTPRequestDoc.Fields[26].Comments[encoder.LineComment] = "Race determines if all the request have to be attempted at the same time (Race Condition)" HTTPRequestDoc.Fields[26].Comments[encoder.LineComment] = "Unsafe specifies whether to use rawhttp engine for sending Non RFC-Compliant requests."
HTTPRequestDoc.Fields[27].Name = "req-condition" HTTPRequestDoc.Fields[27].Name = "race"
HTTPRequestDoc.Fields[27].Type = "bool" HTTPRequestDoc.Fields[27].Type = "bool"
HTTPRequestDoc.Fields[27].Note = "" HTTPRequestDoc.Fields[27].Note = ""
HTTPRequestDoc.Fields[27].Description = "ReqCondition automatically assigns numbers to requests and preserves their history.\n\nThis allows matching on them later for multi-request conditions." HTTPRequestDoc.Fields[27].Description = "Race determines if all the request have to be attempted at the same time (Race Condition)\n\nThe actual number of requests that will be sent is determined by the `race_count` field."
HTTPRequestDoc.Fields[27].Comments[encoder.LineComment] = "ReqCondition automatically assigns numbers to requests and preserves their history." HTTPRequestDoc.Fields[27].Comments[encoder.LineComment] = "Race determines if all the request have to be attempted at the same time (Race Condition)"
HTTPRequestDoc.Fields[28].Name = "stop-at-first-match" HTTPRequestDoc.Fields[28].Name = "req-condition"
HTTPRequestDoc.Fields[28].Type = "bool" HTTPRequestDoc.Fields[28].Type = "bool"
HTTPRequestDoc.Fields[28].Note = "" HTTPRequestDoc.Fields[28].Note = ""
HTTPRequestDoc.Fields[28].Description = "StopAtFirstMatch stops the execution of the requests and template as soon as a match is found." HTTPRequestDoc.Fields[28].Description = "ReqCondition automatically assigns numbers to requests and preserves their history.\n\nThis allows matching on them later for multi-request conditions."
HTTPRequestDoc.Fields[28].Comments[encoder.LineComment] = "StopAtFirstMatch stops the execution of the requests and template as soon as a match is found." HTTPRequestDoc.Fields[28].Comments[encoder.LineComment] = "ReqCondition automatically assigns numbers to requests and preserves their history."
HTTPRequestDoc.Fields[29].Name = "skip-variables-check" HTTPRequestDoc.Fields[29].Name = "stop-at-first-match"
HTTPRequestDoc.Fields[29].Type = "bool" HTTPRequestDoc.Fields[29].Type = "bool"
HTTPRequestDoc.Fields[29].Note = "" HTTPRequestDoc.Fields[29].Note = ""
HTTPRequestDoc.Fields[29].Description = "SkipVariablesCheck skips the check for unresolved variables in request" HTTPRequestDoc.Fields[29].Description = "StopAtFirstMatch stops the execution of the requests and template as soon as a match is found."
HTTPRequestDoc.Fields[29].Comments[encoder.LineComment] = "SkipVariablesCheck skips the check for unresolved variables in request" HTTPRequestDoc.Fields[29].Comments[encoder.LineComment] = "StopAtFirstMatch stops the execution of the requests and template as soon as a match is found."
HTTPRequestDoc.Fields[30].Name = "iterate-all" HTTPRequestDoc.Fields[30].Name = "skip-variables-check"
HTTPRequestDoc.Fields[30].Type = "bool" HTTPRequestDoc.Fields[30].Type = "bool"
HTTPRequestDoc.Fields[30].Note = "" HTTPRequestDoc.Fields[30].Note = ""
HTTPRequestDoc.Fields[30].Description = "IterateAll iterates all the values extracted from internal extractors" HTTPRequestDoc.Fields[30].Description = "SkipVariablesCheck skips the check for unresolved variables in request"
HTTPRequestDoc.Fields[30].Comments[encoder.LineComment] = "IterateAll iterates all the values extracted from internal extractors" HTTPRequestDoc.Fields[30].Comments[encoder.LineComment] = "SkipVariablesCheck skips the check for unresolved variables in request"
HTTPRequestDoc.Fields[31].Name = "digest-username" HTTPRequestDoc.Fields[31].Name = "iterate-all"
HTTPRequestDoc.Fields[31].Type = "string" HTTPRequestDoc.Fields[31].Type = "bool"
HTTPRequestDoc.Fields[31].Note = "" HTTPRequestDoc.Fields[31].Note = ""
HTTPRequestDoc.Fields[31].Description = "DigestAuthUsername specifies the username for digest authentication" HTTPRequestDoc.Fields[31].Description = "IterateAll iterates all the values extracted from internal extractors"
HTTPRequestDoc.Fields[31].Comments[encoder.LineComment] = "DigestAuthUsername specifies the username for digest authentication" HTTPRequestDoc.Fields[31].Comments[encoder.LineComment] = "IterateAll iterates all the values extracted from internal extractors"
HTTPRequestDoc.Fields[32].Name = "digest-password" HTTPRequestDoc.Fields[32].Name = "digest-username"
HTTPRequestDoc.Fields[32].Type = "string" HTTPRequestDoc.Fields[32].Type = "string"
HTTPRequestDoc.Fields[32].Note = "" HTTPRequestDoc.Fields[32].Note = ""
HTTPRequestDoc.Fields[32].Description = "DigestAuthPassword specifies the password for digest authentication" HTTPRequestDoc.Fields[32].Description = "DigestAuthUsername specifies the username for digest authentication"
HTTPRequestDoc.Fields[32].Comments[encoder.LineComment] = "DigestAuthPassword specifies the password for digest authentication" HTTPRequestDoc.Fields[32].Comments[encoder.LineComment] = "DigestAuthUsername specifies the username for digest authentication"
HTTPRequestDoc.Fields[33].Name = "disable-path-automerge" HTTPRequestDoc.Fields[33].Name = "digest-password"
HTTPRequestDoc.Fields[33].Type = "bool" HTTPRequestDoc.Fields[33].Type = "string"
HTTPRequestDoc.Fields[33].Note = "" HTTPRequestDoc.Fields[33].Note = ""
HTTPRequestDoc.Fields[33].Description = "DisablePathAutomerge disables merging target url path with raw request path" HTTPRequestDoc.Fields[33].Description = "DigestAuthPassword specifies the password for digest authentication"
HTTPRequestDoc.Fields[33].Comments[encoder.LineComment] = "DisablePathAutomerge disables merging target url path with raw request path" HTTPRequestDoc.Fields[33].Comments[encoder.LineComment] = "DigestAuthPassword specifies the password for digest authentication"
HTTPRequestDoc.Fields[34].Name = "pre-condition" HTTPRequestDoc.Fields[34].Name = "disable-path-automerge"
HTTPRequestDoc.Fields[34].Type = "[]matchers.Matcher" HTTPRequestDoc.Fields[34].Type = "bool"
HTTPRequestDoc.Fields[34].Note = "" HTTPRequestDoc.Fields[34].Note = ""
HTTPRequestDoc.Fields[34].Description = "Fuzz PreCondition is matcher-like field to check if fuzzing should be performed on this request or not" HTTPRequestDoc.Fields[34].Description = "DisablePathAutomerge disables merging target url path with raw request path"
HTTPRequestDoc.Fields[34].Comments[encoder.LineComment] = "Fuzz PreCondition is matcher-like field to check if fuzzing should be performed on this request or not" HTTPRequestDoc.Fields[34].Comments[encoder.LineComment] = "DisablePathAutomerge disables merging target url path with raw request path"
HTTPRequestDoc.Fields[35].Name = "pre-condition-operator" HTTPRequestDoc.Fields[35].Name = "pre-condition"
HTTPRequestDoc.Fields[35].Type = "string" HTTPRequestDoc.Fields[35].Type = "[]matchers.Matcher"
HTTPRequestDoc.Fields[35].Note = "" HTTPRequestDoc.Fields[35].Note = ""
HTTPRequestDoc.Fields[35].Description = "FuzzPreConditionOperator is the operator between multiple PreConditions for fuzzing Default is OR" HTTPRequestDoc.Fields[35].Description = "Fuzz PreCondition is matcher-like field to check if fuzzing should be performed on this request or not"
HTTPRequestDoc.Fields[35].Comments[encoder.LineComment] = "FuzzPreConditionOperator is the operator between multiple PreConditions for fuzzing Default is OR" HTTPRequestDoc.Fields[35].Comments[encoder.LineComment] = "Fuzz PreCondition is matcher-like field to check if fuzzing should be performed on this request or not"
HTTPRequestDoc.Fields[36].Name = "global-matchers" HTTPRequestDoc.Fields[36].Name = "pre-condition-operator"
HTTPRequestDoc.Fields[36].Type = "bool" HTTPRequestDoc.Fields[36].Type = "string"
HTTPRequestDoc.Fields[36].Note = "" HTTPRequestDoc.Fields[36].Note = ""
HTTPRequestDoc.Fields[36].Description = "GlobalMatchers marks matchers as static and applies globally to all result events from other templates" HTTPRequestDoc.Fields[36].Description = "FuzzPreConditionOperator is the operator between multiple PreConditions for fuzzing Default is OR"
HTTPRequestDoc.Fields[36].Comments[encoder.LineComment] = "GlobalMatchers marks matchers as static and applies globally to all result events from other templates" HTTPRequestDoc.Fields[36].Comments[encoder.LineComment] = "FuzzPreConditionOperator is the operator between multiple PreConditions for fuzzing Default is OR"
HTTPRequestDoc.Fields[37].Name = "global-matchers"
HTTPRequestDoc.Fields[37].Type = "bool"
HTTPRequestDoc.Fields[37].Note = ""
HTTPRequestDoc.Fields[37].Description = "GlobalMatchers marks matchers as static and applies globally to all result events from other templates"
HTTPRequestDoc.Fields[37].Comments[encoder.LineComment] = "GlobalMatchers marks matchers as static and applies globally to all result events from other templates"
GENERATORSAttackTypeHolderDoc.Type = "generators.AttackTypeHolder" GENERATORSAttackTypeHolderDoc.Type = "generators.AttackTypeHolder"
GENERATORSAttackTypeHolderDoc.Comments[encoder.LineComment] = " AttackTypeHolder is used to hold internal type of the protocol" GENERATORSAttackTypeHolderDoc.Comments[encoder.LineComment] = " AttackTypeHolder is used to hold internal type of the protocol"
@ -847,6 +853,30 @@ func init() {
} }
SliceOrMapSliceDoc.Fields = make([]encoder.Doc, 0) SliceOrMapSliceDoc.Fields = make([]encoder.Doc, 0)
ANALYZERSAnalyzerTemplateDoc.Type = "analyzers.AnalyzerTemplate"
ANALYZERSAnalyzerTemplateDoc.Comments[encoder.LineComment] = " AnalyzerTemplate is the template for the analyzer"
ANALYZERSAnalyzerTemplateDoc.Description = "AnalyzerTemplate is the template for the analyzer"
ANALYZERSAnalyzerTemplateDoc.AppearsIn = []encoder.Appearance{
{
TypeName: "http.Request",
FieldName: "analyzer",
},
}
ANALYZERSAnalyzerTemplateDoc.Fields = make([]encoder.Doc, 2)
ANALYZERSAnalyzerTemplateDoc.Fields[0].Name = "name"
ANALYZERSAnalyzerTemplateDoc.Fields[0].Type = "string"
ANALYZERSAnalyzerTemplateDoc.Fields[0].Note = ""
ANALYZERSAnalyzerTemplateDoc.Fields[0].Description = "Name is the name of the analyzer to use"
ANALYZERSAnalyzerTemplateDoc.Fields[0].Comments[encoder.LineComment] = "Name is the name of the analyzer to use"
ANALYZERSAnalyzerTemplateDoc.Fields[0].Values = []string{
"time_delay",
}
ANALYZERSAnalyzerTemplateDoc.Fields[1].Name = "parameters"
ANALYZERSAnalyzerTemplateDoc.Fields[1].Type = "map[string]interface{}"
ANALYZERSAnalyzerTemplateDoc.Fields[1].Note = ""
ANALYZERSAnalyzerTemplateDoc.Fields[1].Description = "Parameters is the parameters for the analyzer\n\nParameters are different for each analyzer. For example, you can customize\ntime_delay analyzer with sleep_duration, time_slope_error_range, etc. Refer\nto the docs for each analyzer to get an idea about parameters."
ANALYZERSAnalyzerTemplateDoc.Fields[1].Comments[encoder.LineComment] = "Parameters is the parameters for the analyzer"
SignatureTypeHolderDoc.Type = "SignatureTypeHolder" SignatureTypeHolderDoc.Type = "SignatureTypeHolder"
SignatureTypeHolderDoc.Comments[encoder.LineComment] = " SignatureTypeHolder is used to hold internal type of the signature" SignatureTypeHolderDoc.Comments[encoder.LineComment] = " SignatureTypeHolder is used to hold internal type of the signature"
SignatureTypeHolderDoc.Description = "SignatureTypeHolder is used to hold internal type of the signature" SignatureTypeHolderDoc.Description = "SignatureTypeHolder is used to hold internal type of the signature"
@ -2127,6 +2157,7 @@ func GetTemplateDoc() *encoder.FileDoc {
&HTTPMethodTypeHolderDoc, &HTTPMethodTypeHolderDoc,
&FUZZRuleDoc, &FUZZRuleDoc,
&SliceOrMapSliceDoc, &SliceOrMapSliceDoc,
&ANALYZERSAnalyzerTemplateDoc,
&SignatureTypeHolderDoc, &SignatureTypeHolderDoc,
&MATCHERSMatcherDoc, &MATCHERSMatcherDoc,
&MatcherTypeHolderDoc, &MatcherTypeHolderDoc,

View File

@ -137,7 +137,7 @@ func TestFlowWithConditionPositive(t *testing.T) {
err = Template.Executer.Compile() err = Template.Executer.Compile()
require.Nil(t, err, "could not compile template") require.Nil(t, err, "could not compile template")
input := contextargs.NewWithInput(context.Background(), "blog.projectdiscovery.io") input := contextargs.NewWithInput(context.Background(), "cloud.projectdiscovery.io")
ctx := scan.NewScanContext(context.Background(), input) ctx := scan.NewScanContext(context.Background(), input)
// positive match . expect results also verify that both dns() and http() were executed // positive match . expect results also verify that both dns() and http() were executed
gotresults, err := Template.Executer.Execute(ctx) gotresults, err := Template.Executer.Execute(ctx)
@ -150,7 +150,22 @@ func TestFlowWithNoMatchers(t *testing.T) {
// when using conditional flow with no matchers at all // when using conditional flow with no matchers at all
// we implicitly assume that request was successful and internally changed the result to true (for scope of condition only) // we implicitly assume that request was successful and internally changed the result to true (for scope of condition only)
// testcase-1 : no matchers but contains extractor Template, err := templates.Parse("testcases/condition-flow-no-operators.yaml", nil, executerOpts)
require.Nil(t, err, "could not parse template")
require.True(t, Template.Flow != "", "not a flow template") // this is the classifier for whether the template is a flow template
err = Template.Executer.Compile()
require.Nil(t, err, "could not compile template")
anotherInput := contextargs.NewWithInput(context.Background(), "cloud.projectdiscovery.io")
anotherCtx := scan.NewScanContext(context.Background(), anotherInput)
// positive match . expect results also verify that both dns() and http() were executed
gotresults, err := Template.Executer.Execute(anotherCtx)
require.Nil(t, err, "could not execute template")
require.True(t, gotresults)
t.Run("Contains Extractor", func(t *testing.T) {
Template, err := templates.Parse("testcases/condition-flow-extractors.yaml", nil, executerOpts) Template, err := templates.Parse("testcases/condition-flow-extractors.yaml", nil, executerOpts)
require.Nil(t, err, "could not parse template") require.Nil(t, err, "could not parse template")
@ -159,27 +174,11 @@ func TestFlowWithNoMatchers(t *testing.T) {
err = Template.Executer.Compile() err = Template.Executer.Compile()
require.Nil(t, err, "could not compile template") require.Nil(t, err, "could not compile template")
input := contextargs.NewWithInput(context.Background(), "blog.projectdiscovery.io") input := contextargs.NewWithInput(context.Background(), "scanme.sh")
ctx := scan.NewScanContext(context.Background(), input) ctx := scan.NewScanContext(context.Background(), input)
// positive match . expect results also verify that both dns() and http() were executed // positive match . expect results also verify that both dns() and http() were executed
gotresults, err := Template.Executer.Execute(ctx) gotresults, err := Template.Executer.Execute(ctx)
require.Nil(t, err, "could not execute template") require.Nil(t, err, "could not execute template")
require.True(t, gotresults) require.True(t, gotresults)
})
// testcase-2 : no matchers and no extractors
Template, err = templates.Parse("testcases/condition-flow-no-operators.yaml", nil, executerOpts)
require.Nil(t, err, "could not parse template")
require.True(t, Template.Flow != "", "not a flow template") // this is classifer if template is flow or not
err = Template.Executer.Compile()
require.Nil(t, err, "could not compile template")
anotherInput := contextargs.NewWithInput(context.Background(), "blog.projectdiscovery.io")
anotherCtx := scan.NewScanContext(context.Background(), anotherInput)
// positive match . expect results also verify that both dns() and http() were executed
gotresults, err = Template.Executer.Execute(anotherCtx)
require.Nil(t, err, "could not execute template")
require.True(t, gotresults)
} }

View File

@ -1,29 +1,28 @@
id: ghost-blog-detection id: condition-flow-extractors
info: info:
name: Ghost blog detection name: Condition Flow Extractors
author: pdteam author: pdteam
severity: info severity: info
flow: dns() && http() flow: dns() && http()
dns: dns:
- name: "{{FQDN}}" - name: "{{FQDN}}"
type: CNAME type: A
extractors: extractors:
- type: dsl - type: dsl
name: cname name: a
internal: true internal: true
dsl: dsl:
- cname - a
http: http:
- method: GET - method: GET
path: path:
- "{{BaseURL}}?ref={{cname}}" - "{{BaseURL}}/?ref={{a}}"
matchers: matchers:
- type: word - type: word
words: words:
- "ghost.io" - "ok"

View File

@ -1,13 +1,11 @@
id: ghost-blog-detection id: condition-flow-no-operators
info: info:
name: Ghost blog detection name: Condition Flow No Operators
author: pdteam author: pdteam
severity: info severity: info
flow: dns() && http() flow: dns() && http()
dns: dns:
- name: "{{FQDN}}" - name: "{{FQDN}}"
type: CNAME type: CNAME
@ -15,9 +13,9 @@ dns:
http: http:
- method: GET - method: GET
path: path:
- "{{BaseURL}}?ref={{dns_cname}}" - "{{BaseURL}}/?ref={{dns_cname}}"
matchers: matchers:
- type: word - type: word
words: words:
- "ghost.io" - "html>"

View File

@ -1,6 +1,6 @@
id: ghost-blog-detection id: vercel-hosted-detection
info: info:
name: Ghost blog detection name: Vercel-hosted detection
author: pdteam author: pdteam
severity: info severity: info
@ -14,14 +14,14 @@ dns:
matchers: matchers:
- type: word - type: word
words: words:
- "ghost.io" - "vercel-dns"
http: http:
- method: GET - method: GET
path: path:
- "{{BaseURL}}" - "{{dns_cname}}"
matchers: matchers:
- type: word - type: word
words: words:
- "ghost.io" - "DEPLOYMENT_NOT_FOUND"

View File

@ -56,7 +56,7 @@ func TestMultiProtoWithDynamicExtractor(t *testing.T) {
err = Template.Executer.Compile() err = Template.Executer.Compile()
require.Nil(t, err, "could not compile template") require.Nil(t, err, "could not compile template")
input := contextargs.NewWithInput(context.Background(), "blog.projectdiscovery.io") input := contextargs.NewWithInput(context.Background(), "http://scanme.sh")
ctx := scan.NewScanContext(context.Background(), input) ctx := scan.NewScanContext(context.Background(), input)
gotresults, err := Template.Executer.Execute(ctx) gotresults, err := Template.Executer.Execute(ctx)
require.Nil(t, err, "could not execute template") require.Nil(t, err, "could not execute template")
@ -72,7 +72,7 @@ func TestMultiProtoWithProtoPrefix(t *testing.T) {
err = Template.Executer.Compile() err = Template.Executer.Compile()
require.Nil(t, err, "could not compile template") require.Nil(t, err, "could not compile template")
input := contextargs.NewWithInput(context.Background(), "blog.projectdiscovery.io") input := contextargs.NewWithInput(context.Background(), "https://cloud.projectdiscovery.io/sign-in")
ctx := scan.NewScanContext(context.Background(), input) ctx := scan.NewScanContext(context.Background(), input)
gotresults, err := Template.Executer.Execute(ctx) gotresults, err := Template.Executer.Execute(ctx)
require.Nil(t, err, "could not execute template") require.Nil(t, err, "could not execute template")

View File

@ -7,15 +7,7 @@ info:
dns: dns:
- name: "{{FQDN}}" # DNS Request - name: "{{FQDN}}" # DNS Request
type: cname type: a
extractors:
- type: dsl
name: blogid
dsl:
- trim_suffix(cname,'.ghost.io')
internal: true
http: http:
- method: GET # http request - method: GET # http request
@ -25,6 +17,6 @@ http:
matchers: matchers:
- type: dsl - type: dsl
dsl: dsl:
- contains(body,'ProjectDiscovery.io') # check for http string - body == "ok"
- blogid == 'projectdiscovery' # check for cname (extracted information from dns response) - dns_a == '128.199.158.128' # check for A record (extracted information from dns response)
condition: and condition: and

View File

@ -20,7 +20,7 @@ http:
matchers: matchers:
- type: dsl - type: dsl
dsl: dsl:
- contains(http_body,'ProjectDiscovery.io') # check for http string - contains(http_body, 'ProjectDiscovery Cloud Platform') # check for http string
- trim_suffix(dns_cname,'.ghost.io') == 'projectdiscovery' # check for cname (extracted information from dns response) - dns_cname == 'cname.vercel-dns.com' # check for cname (extracted information from dns response)
- ssl_subject_cn == 'blog.projectdiscovery.io' - ssl_subject_cn == 'cloud.projectdiscovery.io'
condition: and condition: and

View File

@ -383,6 +383,10 @@ type Options struct {
EnableCodeTemplates bool EnableCodeTemplates bool
// DisableUnsignedTemplates disables processing of unsigned templates // DisableUnsignedTemplates disables processing of unsigned templates
DisableUnsignedTemplates bool DisableUnsignedTemplates bool
// EnableSelfContainedTemplates enables processing of self-contained templates
EnableSelfContainedTemplates bool
// EnableFileTemplates enables file templates
EnableFileTemplates bool
// Disables cloud upload // Disables cloud upload
EnableCloudUpload bool EnableCloudUpload bool
// ScanID is the scan ID to use for cloud upload // ScanID is the scan ID to use for cloud upload