Merge branch 'dev' into RDP-Enc-func

Mzack9999 2025-09-25 22:07:17 +02:00
commit 61bd0828dc
373 changed files with 10772 additions and 3223 deletions

View File

@@ -0,0 +1,35 @@
{
"permissions": {
"allow": [
"Bash(find:*)",
"Bash(mkdir:*)",
"Bash(cp:*)",
"Bash(ls:*)",
"Bash(make:*)",
"Bash(go:*)",
"Bash(golangci-lint:*)",
"Bash(git merge:*)",
"Bash(git add:*)",
"Bash(git commit:*)",
"Bash(git push:*)",
"Bash(git pull:*)",
"Bash(git fetch:*)",
"Bash(git checkout:*)",
"WebFetch(*)",
"Write(*)",
"WebSearch(*)",
"MultiEdit(*)",
"Edit(*)",
"Bash(gh:*)",
"Bash(grep:*)",
"Bash(tree:*)",
"Bash(./nuclei:*)",
"WebFetch(domain:github.com)"
],
"deny": [
"Bash(make run:*)",
"Bash(./bin/nuclei:*)"
],
"defaultMode": "acceptEdits"
}
}

76
.github/DISCUSSION_TEMPLATE.md vendored Normal file
View File

@@ -0,0 +1,76 @@
# Nuclei Discussion Guidelines
## Before Creating a Discussion
1. **Search existing discussions and issues** to avoid duplicates
2. **Check the documentation** and README first
3. **Browse the FAQ** and common questions
## Bug Reports in Discussions
When reporting a bug in [Q&A Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/q-a), please include:
### Required Information:
- **Clear title** with `[BUG]` prefix (e.g., "[BUG] Nuclei crashes when...")
- **Current behavior** - What's happening now?
- **Expected behavior** - What should happen instead?
- **Steps to reproduce** - Commands or actions that trigger the issue
- **Environment details**:
- OS and version
- Nuclei version (`nuclei -version`)
- Go version (if installed via `go install`)
- **Log output** - Run with `-verbose` or `-debug` for detailed logs
- **Redact sensitive information** - Remove target URLs, credentials, etc.
### After Discussion:
- Maintainers will review and validate the bug report
- Valid bugs will be converted to issues with proper labels and tracking
- Questions and misconfigurations will be resolved in the discussion
## Feature Requests in Discussions
When requesting a feature in [Ideas Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/ideas), please include:
### Required Information:
- **Clear title** with `[FEATURE]` prefix (e.g., "[FEATURE] Add support for...")
- **Feature description** - What do you want to be added?
- **Use case** - Why is this feature needed? What problem does it solve?
- **Implementation ideas** - If you have suggestions on how it could work
- **Alternatives considered** - What other solutions have you thought about?
### After Discussion:
- Community and maintainers will discuss the feasibility
- Popular and viable features will be converted to issues
- Similar features may be grouped together
- Rejected features will be explained in the discussion
## Getting Help
For general questions, troubleshooting, and "how-to" topics:
- Use [Q&A Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/q-a)
- Join the [Discord server](https://discord.gg/projectdiscovery) #nuclei channel
- Check existing discussions for similar questions
## Discussion to Issue Conversion Process
Only maintainers can convert discussions to issues. The process:
1. **Validation** - Maintainers review the discussion for completeness and validity
2. **Classification** - Determine if it's a bug, feature, enhancement, etc.
3. **Issue creation** - Create a properly formatted issue with appropriate labels
4. **Linking** - Link the issue back to the original discussion
5. **Resolution** - Mark the discussion as resolved or close it
This process ensures:
- High-quality issues that are actionable
- Proper triage and labeling
- Reduced noise in the issue tracker
- Community involvement in the validation process
## Why This Process?
- **Better organization** - Issues contain only validated, actionable items
- **Community input** - Discussions allow for community feedback before escalation
- **Quality control** - Maintainers ensure proper formatting and information
- **Reduced maintenance** - Fewer invalid or duplicate issues to manage
- **Clear separation** - Questions vs. actual bugs/features are clearly distinguished

View File

@@ -2,14 +2,22 @@ blank_issues_enabled: false
  contact_links:
- - name: Ask an question / advise on using nuclei
-   url: https://github.com/projectdiscovery/nuclei/discussions/categories/q-a
-   about: Ask a question or request support for using nuclei
- - name: Share idea / feature to discuss for nuclei
-   url: https://github.com/projectdiscovery/nuclei/discussions/categories/ideas
-   about: Share idea / feature to discuss for nuclei
- - name: Connect with PD Team (Discord)
+ - name: 🐛 Report a Bug (Start with Discussion)
+   url: https://github.com/orgs/projectdiscovery/discussions/new?category=q-a
+   about: Start by reporting your issue in discussions for proper triage. Issues will be created after review to avoid duplicate/invalid reports.
+ - name: 💡 Request a Feature (Start with Discussion)
+   url: https://github.com/orgs/projectdiscovery/discussions/new?category=ideas
+   about: Share your feature idea in discussions first. This helps validate and refine the request before creating an issue.
+ - name: ❓ Ask Questions / Get Help
+   url: https://github.com/orgs/projectdiscovery/discussions
+   about: Get help and ask questions about using Nuclei. Many questions don't require issues.
+ - name: 🔍 Browse Existing Issues
+   url: https://github.com/projectdiscovery/nuclei/issues
+   about: Check existing issues to see if your problem has already been reported or is being worked on.
+ - name: 💬 Connect with PD Team (Discord)
  url: https://discord.gg/projectdiscovery
- about: Connect with PD Team for direct communication
+ about: Join our Discord for real-time discussions and community support on the #nuclei channel.

View File

@@ -0,0 +1,45 @@
# Issue Template References
## Overview
This folder contains the preserved issue templates that are **not** directly accessible to users. These templates serve as references for maintainers when converting discussions to issues.
## New Workflow
### For Users:
1. **All reports start in Discussions** - Users cannot create issues directly
2. Bug reports go to [Q&A Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/q-a)
3. Feature requests go to [Ideas Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/ideas)
4. This helps filter out duplicate questions, invalid reports, and ensures proper triage
### For Maintainers:
1. **Review discussions** in both Q&A and Ideas categories
2. **Validate the reports** - ensure they're actual bugs/valid feature requests
3. **Use reference templates** when converting discussions to issues:
- Copy content from `bug-report-reference.yml` or `feature-request-reference.yml`
- Create a new issue manually with the appropriate template structure
- Link back to the original discussion
- Close the discussion or mark it as resolved
## Benefits
- **Better triage**: Avoid cluttering issues with questions and invalid reports
- **Community involvement**: Discussions allow for community input before creating issues
- **Quality control**: Maintainers can ensure issues follow proper format and contain necessary information
- **Reduced noise**: Only validated, actionable items become issues
## Reference Templates
- `bug-report-reference.yml` - Use when converting bug reports from discussions to issues
- `feature-request-reference.yml` - Use when converting feature requests from discussions to issues
## Converting a Discussion to Issue
1. Identify a valid discussion that needs to become an issue
2. Go to the main repository's Issues tab
3. Click "New Issue"
4. Manually create the issue using the reference template structure
5. Include all relevant information from the discussion
6. Add a comment linking back to the original discussion
7. Apply appropriate labels
8. Close or mark the discussion as resolved with a link to the created issue

View File

@@ -2,6 +2,7 @@ addReviewers: true
  reviewers:
  - dogancanbakir
  - dwisiswant0
+ - mzack9999
  numberOfReviewers: 1
  skipKeywords:

27
.github/stale.yml vendored
View File

@@ -1,27 +0,0 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 7
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
# exemptLabels:
# - pinned
# - security
# Only issues or pull requests with all of these labels are check if stale.
onlyLabels:
- "Status: Abandoned"
- "Type: Question"
# Label to use when marking as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

View File

@@ -18,7 +18,7 @@ jobs:
  runs-on: ubuntu-latest
  if: github.actor == 'dependabot[bot]'
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  with:
  token: ${{ secrets.DEPENDABOT_PAT }}

View File

@@ -13,7 +13,7 @@ jobs:
  permissions:
  contents: write
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go/compat-checks@v1
  with:
  release-test: true

View File

@@ -11,7 +11,7 @@ jobs:
  if: "${{ !endsWith(github.actor, '[bot]') }}"
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
  - uses: projectdiscovery/actions/setup/git@v1
  - run: make syntax-docs

View File

@@ -28,7 +28,7 @@ jobs:
  LIST_FILE: "/tmp/targets-${{ matrix.targets }}.txt"
  PROFILE_MEM: "/tmp/nuclei-profile-${{ matrix.targets }}-targets"
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/git@v1
  - uses: projectdiscovery/actions/setup/go@v1
  - name: Generate list

View File

@@ -16,7 +16,7 @@ jobs:
  env:
  OUTPUT: "/tmp/results.sarif"
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
  - run: go install golang.org/x/vuln/cmd/govulncheck@latest
  - run: govulncheck -scan package -format sarif ./... > $OUTPUT

View File

@@ -11,7 +11,7 @@ jobs:
  env:
  BENCH_OUT: "/tmp/bench.out"
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
  - run: make build-test
  - run: ./bin/nuclei.test -test.run - -test.bench=. -test.benchmem ./cmd/nuclei/ | tee $BENCH_OUT

View File

@@ -16,7 +16,7 @@ jobs:
  LIST_FILE: "/tmp/targets-${{ matrix.count }}.txt"
  PROFILE_MEM: "/tmp/nuclei-perf-test-${{ matrix.count }}"
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
  - run: make verify
  - name: Generate list

View File

@@ -10,7 +10,7 @@ jobs:
  release:
  runs-on: ubuntu-latest-16-cores
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  with:
  fetch-depth: 0
  - uses: projectdiscovery/actions/setup/go@v1

41
.github/workflows/stale.yaml vendored Normal file
View File

@@ -0,0 +1,41 @@
name: 💤 Stale
on:
schedule:
- cron: '0 0 * * 0' # Weekly
jobs:
stale:
runs-on: ubuntu-latest
permissions:
actions: write
contents: write # only for delete-branch option
issues: write
pull-requests: write
steps:
- uses: actions/stale@v10
with:
days-before-stale: 90
days-before-close: 7
stale-issue-label: "Status: Stale"
stale-pr-label: "Status: Stale"
stale-issue-message: >
This issue has been automatically marked as stale because it has not
had recent activity. It will be closed in 7 days if no further
activity occurs. Thank you for your contributions!
stale-pr-message: >
This pull request has been automatically marked as stale due to
inactivity. It will be closed in 7 days if no further activity
occurs. Please update if you wish to keep it open.
close-issue-message: >
This issue has been automatically closed due to inactivity. If you
think this is a mistake or would like to continue the discussion,
please comment or feel free to reopen it.
close-pr-message: >
This pull request has been automatically closed due to inactivity.
If you think this is a mistake or would like to continue working on
it, please comment or feel free to reopen it.
close-issue-label: "Status: Abandoned"
close-pr-label: "Status: Abandoned"
exempt-issue-labels: "Status: Abandoned"
exempt-pr-labels: "Status: Abandoned"

View File

@@ -22,9 +22,9 @@ jobs:
  if: "${{ !endsWith(github.actor, '[bot]') }}"
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
- - uses: projectdiscovery/actions/golangci-lint@v1
+ - uses: projectdiscovery/actions/golangci-lint/v2@v1
  tests:
  name: "Tests"
@@ -35,7 +35,7 @@ jobs:
  os: [ubuntu-latest, windows-latest, macOS-latest]
  runs-on: "${{ matrix.os }}"
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
  - run: make vet
  - run: make build
@@ -52,16 +52,18 @@ jobs:
  needs: ["tests"]
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
  - name: "Simple"
  run: go run .
  working-directory: examples/simple/
  # - run: go run . # Temporarily disabled very flaky in github actions
  #   working-directory: examples/advanced/
- - name: "with Speed Control"
-   run: go run .
-   working-directory: examples/with_speed_control/
+ # TODO: FIX with ExecutionID (ref: https://github.com/projectdiscovery/nuclei/pull/6296)
+ # - name: "with Speed Control"
+ #   run: go run .
+ #   working-directory: examples/with_speed_control/
  integration:
  name: "Integration tests"
@@ -72,7 +74,7 @@ jobs:
  os: [ubuntu-latest, windows-latest, macOS-latest]
  runs-on: ${{ matrix.os }}
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
  - uses: projectdiscovery/actions/setup/python@v1
  - run: bash run.sh "${{ matrix.os }}"
@@ -91,10 +93,10 @@ jobs:
  os: [ubuntu-latest, windows-latest, macOS-latest]
  runs-on: ${{ matrix.os }}
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
  - uses: projectdiscovery/actions/setup/python@v1
- - run: bash run.sh "${{ matrix.os }}"
+ - run: bash run.sh
  env:
  GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
  working-directory: cmd/functional-test/
@@ -104,7 +106,7 @@ jobs:
  needs: ["tests"]
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
  - run: make template-validate
@@ -117,7 +119,7 @@ jobs:
  contents: read
  security-events: write
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: github/codeql-action/init@v3
  with:
  languages: 'go'
@@ -129,7 +131,7 @@ jobs:
  needs: ["tests"]
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - uses: projectdiscovery/actions/setup/go@v1
  - uses: projectdiscovery/actions/goreleaser@v1
@@ -141,7 +143,7 @@ jobs:
  TARGET_URL: "http://scanme.sh/a/?b=c"
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - run: make build
  - name: "Setup environment (push)"
  if: ${{ github.event_name == 'push' }}

2
.gitignore vendored
View File

@@ -28,6 +28,8 @@
  /scrapefunc
  /scrapefuncs
  /tsgen
+ /integration_tests/integration-test
+ /integration_tests/nuclei
  # Templates
  /*.yaml

View File

@@ -38,9 +38,9 @@ builds:
  #   goarch: [amd64]
  archives:
- - format: zip
+ - formats: [zip]
  id: nuclei
- builds: [nuclei-cli]
+ ids: [nuclei-cli]
  name_template: '{{ .ProjectName }}_{{ .Version }}_{{ if eq .Os "darwin" }}macOS{{ else }}{{ .Os }}{{ end }}_{{ .Arch }}'
  checksum:

83
CLAUDE.md Normal file
View File

@@ -0,0 +1,83 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
Nuclei is a modern, high-performance vulnerability scanner built in Go that leverages YAML-based templates for customizable vulnerability detection. It supports multiple protocols (HTTP, DNS, TCP, SSL, WebSocket, WHOIS, JavaScript, Code) and is designed for zero false positives through real-world condition simulation.
## Development Commands
### Building and Testing
- `make build` - Build the main nuclei binary to ./bin/nuclei
- `make test` - Run unit tests with race detection
- `make integration` - Run integration tests (builds and runs test suite)
- `make functional` - Run functional tests
- `make vet` - Run go vet for code analysis
- `make tidy` - Clean up go modules
### Validation and Linting
- `make template-validate` - Validate nuclei templates using the built binary
- `go fmt ./...` - Format Go code
- `go vet ./...` - Static analysis
### Development Tools
- `make devtools-all` - Build all development tools (bindgen, tsgen, scrapefuncs)
- `make jsupdate-all` - Update JavaScript bindings and TypeScript definitions
- `make docs` - Generate documentation
- `make memogen` - Generate memoization code for JavaScript libraries
### Testing Specific Components
- Run single test: `go test -v ./pkg/path/to/package -run TestName`
- Integration tests are in `integration_tests/` and can be run via `make integration`
## Architecture Overview
### Core Components
- **cmd/nuclei** - Main CLI entry point with flag parsing and configuration
- **internal/runner** - Core runner that orchestrates the entire scanning process
- **pkg/core** - Execution engine with work pools and template clustering
- **pkg/templates** - Template parsing, compilation, and management
- **pkg/protocols** - Protocol implementations (HTTP, DNS, Network, etc.)
- **pkg/operators** - Matching and extraction logic (matchers/extractors)
- **pkg/catalog** - Template discovery and loading from disk/remote sources
### Protocol Architecture
Each protocol (HTTP, DNS, Network, etc.) implements:
- Request interface with Compile(), ExecuteWithResults(), Match(), Extract() methods (see the sketch after this list)
- Operators embedding for matching/extraction functionality
- Protocol-specific request building and execution logic
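
A minimal sketch of that interface shape, for orientation only: the method names come from the list above, but the parameter and return types here are assumptions rather than the exact definitions in pkg/protocols.

```go
// Sketch of the per-protocol Request interface described above.
// ExecutorOptions, Matcher, and Extractor are placeholders standing in for
// the real engine types; the actual signatures in pkg/protocols differ.
package protocols

type Request interface {
	// Compile parses and prepares the request defined in a template.
	Compile(options *ExecutorOptions) error
	// ExecuteWithResults runs the compiled request against an input and
	// reports each generated result through the callback.
	ExecuteWithResults(input string, dynamicValues map[string]interface{}, callback func(event map[string]interface{})) error
	// Match applies a template matcher to the collected response data.
	Match(data map[string]interface{}, matcher *Matcher) (bool, []string)
	// Extract applies a template extractor to the collected response data.
	Extract(data map[string]interface{}, extractor *Extractor) map[string]struct{}
}

// Placeholder types used only to keep this sketch self-contained.
type (
	ExecutorOptions struct{}
	Matcher         struct{}
	Extractor       struct{}
)
```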
### Template System
- Templates are YAML files defining vulnerability detection logic
- Compiled into executable requests with operators (matchers/extractors)
- Support for workflows (multi-step template execution)
- Template clustering optimizes identical requests across multiple templates
### Key Execution Flow
1. Template loading and compilation via pkg/catalog/loader
2. Input provider setup for targets
3. Engine creation with work pools for concurrency
4. Template execution with result collection via operators
5. Output writing and reporting integration
### JavaScript Integration
- Custom JavaScript runtime for code protocol templates
- Auto-generated bindings in pkg/js/generated/
- Library implementations in pkg/js/libs/
- Development tools for binding generation in pkg/js/devtools/
## Template Development
- Templates located in separate nuclei-templates repository
- YAML format with info, requests, and operators sections
- Support for multiple protocol types in single template
- Built-in DSL functions for dynamic content generation
- Template validation available via `make template-validate`
## Key Directories
- **lib/** - SDK for embedding nuclei as a library (minimal embedding sketch after this list)
- **examples/** - Usage examples for different scenarios
- **integration_tests/** - Integration test suite with protocol-specific tests
- **pkg/fuzz/** - Fuzzing engine and DAST capabilities
- **pkg/input/** - Input processing for various formats (Burp, OpenAPI, etc.)
- **pkg/reporting/** - Result export and issue tracking integrations
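
As a rough illustration of the lib/ SDK mentioned above, here is a minimal embedding sketch modeled on the pattern in examples/simple; the template filter values and target URL are illustrative assumptions, not copied from that example.

```go
// Minimal sketch of embedding nuclei via the lib/ SDK (see examples/simple).
// The template filter and target below are illustrative only.
package main

import (
	"log"

	nuclei "github.com/projectdiscovery/nuclei/v3/lib"
)

func main() {
	// Create an engine that loads only templates matching the given filters.
	ne, err := nuclei.NewNucleiEngine(
		nuclei.WithTemplateFilters(nuclei.TemplateFilters{Tags: []string{"tech"}}),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer ne.Close()

	// Load scan targets; the second argument controls probing of non-HTTP inputs.
	ne.LoadTargets([]string{"http://scanme.sh"}, false)

	// Run the loaded templates; passing nil uses the SDK's default result handling.
	if err := ne.ExecuteWithCallback(nil); err != nil {
		log.Fatal(err)
	}
}
```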

View File

@@ -1,5 +1,5 @@
  # Build
- FROM golang:1.22-alpine AS builder
+ FROM golang:1.24-alpine AS builder
  RUN apk add build-base
  WORKDIR /app
@@ -13,4 +13,4 @@ FROM alpine:latest
  RUN apk add --no-cache bind-tools chromium ca-certificates
  COPY --from=builder /app/bin/nuclei /usr/local/bin/
  ENTRYPOINT ["nuclei"]

View File

@@ -15,8 +15,8 @@ ifneq ($(shell go env GOOS),darwin)
  endif
  .PHONY: all build build-stats clean devtools-all devtools-bindgen devtools-scrapefuncs
- .PHONY: devtools-tsgen docs docgen dsl-docs functional fuzzplayground go-build syntax-docs
- .PHONY: integration jsupdate-all jsupdate-bindgen jsupdate-tsgen memogen scan-charts test
+ .PHONY: devtools-tsgen docs docgen dsl-docs functional fuzzplayground go-build lint lint-strict syntax-docs
+ .PHONY: integration jsupdate-all jsupdate-bindgen jsupdate-tsgen memogen scan-charts test test-with-lint
  .PHONY: tidy ts verify download vet template-validate
  all: build
@@ -146,5 +146,18 @@ dsl-docs:
  template-validate: build
  template-validate:
  ./bin/nuclei -ut
- ./bin/nuclei -validate -et http/technologies
- ./bin/nuclei -validate -w workflows -et http/technologies
+ ./bin/nuclei -validate \
+   -et .github/ \
+   -et helpers/payloads/ \
+   -et http/technologies \
+   -t dns \
+   -t ssl \
+   -t network \
+   -t http/exposures \
+   -ept code
+ ./bin/nuclei -validate \
+   -w workflows \
+   -et .github/ \
+   -et helpers/payloads/ \
+   -et http/technologies \
+   -ept code

View File

@@ -7,7 +7,7 @@
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">`Korean`</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">`Indonesia`</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">`Spanish`</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_JP.md">`日本語`</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">`Portuguese`</a>
  </div>
@@ -111,7 +111,7 @@ Browse the full Nuclei [**`documentation here`**](https://docs.projectdiscovery.
  ### Installation
- `nuclei` requires **go1.22** to install successfully. Run the following command to get the repo:
+ `nuclei` requires **go1.23** to install successfully. Run the following command to get the repo:
  ```sh
  go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest
@@ -356,6 +356,7 @@ CLOUD:
  AUTHENTICATION:
  -sf, -secret-file string[]  path to config file containing secrets for nuclei authenticated scan
  -ps, -prefetch-secrets      prefetch secrets from the secrets file
+ # NOTE: Headers in secrets files preserve exact casing (useful for case-sensitive APIs)
  EXAMPLES:

View File

@@ -33,7 +33,7 @@
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中文</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">Korean</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">Indonesia</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">Portuguese</a>
  </p>

View File

@@ -31,7 +31,7 @@
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中文</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">Korean</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">Indonesia</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">Portuguese</a>
  </p>

View File

@@ -33,7 +33,7 @@
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中文</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">Korean</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">Indonesia</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">Portuguese</a>
  </p>

View File

@@ -30,7 +30,7 @@
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中国語</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">韓国語</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">インドネシア語</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">スペイン語</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">ポルトガル語</a>
  </p>

View File

@@ -31,7 +31,7 @@
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README.md">English</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中文</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">한국어</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">스페인어</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">포르투갈어</a>
  </p>
@@ -341,7 +341,7 @@ Nuclei를 사용하면 자체 검사 모음으로 테스트 접근 방식을 사
  - 몇 분 안에 수천 개의 호스트를 처리할 수 있음.
  - 간단한 YAML DSL로 사용자 지정 테스트 접근 방식을 쉽게 자동화할 수 있음.
- 버그 바운티 워크플로에 맞는 다른 오픈 소스 프로젝트를 확인할 수 있습니다.: [github.com/projectdiscovery](http://github.com/projectdiscovery), 또한, 우리는 매일 [Chaos에서 DNS 데이터를 갱신해 호스팅합니다.](http://chaos.projectdiscovery.io).
+ 버그 바운티 워크플로에 맞는 다른 오픈 소스 프로젝트를 확인할 수 있습니다.: [github.com/projectdiscovery](http://github.com/projectdiscovery), 또한, 우리는 매일 [Chaos에서 DNS 데이터를 갱신해 호스팅합니다](http://chaos.projectdiscovery.io).
  </td>
  </tr>

View File

@@ -31,7 +31,7 @@
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中文</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">Korean</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">Indonesia</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
  <a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">Portuguese</a>
  </p>

View File

@@ -19,7 +19,9 @@ func writeToFile(filename string, data []byte) {
  if err != nil {
  log.Fatalf("Could not create file %s: %s\n", filename, err)
  }
- defer file.Close()
+ defer func() {
+   _ = file.Close()
+ }()
  _, err = file.Write(data)
  if err != nil {

View File

@@ -27,7 +27,7 @@ var (
  func main() {
  flag.Parse()
- debug := os.Getenv("DEBUG") == "true"
+ debug := os.Getenv("DEBUG") == "true" || os.Getenv("RUNNER_DEBUG") == "1"
  if err, errored := runFunctionalTests(debug); err != nil {
  log.Fatalf("Could not run functional tests: %s\n", err)
@@ -41,7 +41,9 @@ func runFunctionalTests(debug bool) (error, bool) {
  if err != nil {
  return errors.Wrap(err, "could not open test cases"), true
  }
- defer file.Close()
+ defer func() {
+   _ = file.Close()
+ }()
  errored, failedTestCases := runTestCases(file, debug)

View File

@@ -1,27 +1,43 @@
  #!/bin/bash
- # reading os type from arguments
- CURRENT_OS=$1
+ if [ "${RUNNER_OS}" == "Windows" ]; then
+   EXT=".exe"
+ elif [ "${RUNNER_OS}" == "macOS" ]; then
+   if [ "${CI}" == "true" ]; then
+     sudo sysctl -w kern.maxfiles{,perproc}=524288
+     sudo launchctl limit maxfiles 65536 524288
+   fi
- if [ "${CURRENT_OS}" == "windows-latest" ];then
-   extension=.exe
+   ORIGINAL_ULIMIT="$(ulimit -n)"
+   ulimit -n 65536 || true
  fi
- mkdir -p .nuclei-config/nuclei/
- touch .nuclei-config/nuclei/.nuclei-ignore
  echo "::group::Building functional-test binary"
- go build -o functional-test$extension
+ go build -o "functional-test${EXT}"
  echo "::endgroup::"
  echo "::group::Building Nuclei binary from current branch"
- go build -o nuclei_dev$extension ../nuclei
+ go build -o "nuclei-dev${EXT}" ../nuclei
- echo "::endgroup::"
- echo "::group::Installing nuclei templates"
- ./nuclei_dev$extension -update-templates
  echo "::endgroup::"
  echo "::group::Building latest release of nuclei"
- go build -o nuclei$extension -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei
+ go build -o "nuclei${EXT}" -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei
  echo "::endgroup::"
- echo 'Starting Nuclei functional test'
- ./functional-test$extension -main ./nuclei$extension -dev ./nuclei_dev$extension -testcases testcases.txt
+ echo "::group::Installing nuclei templates"
+ eval "./nuclei-dev${EXT} -update-templates"
+ echo "::endgroup::"
+ echo "::group::Validating templates"
+ eval "./nuclei-dev${EXT} -validate"
+ echo "::endgroup::"
+ echo "Starting Nuclei functional test"
+ eval "./functional-test${EXT} -main ./nuclei${EXT} -dev ./nuclei-dev${EXT} -testcases testcases.txt"
+ if [ "${RUNNER_OS}" == "macOS" ]; then
+   ulimit -n "${ORIGINAL_ULIMIT}" || true
+ fi

View File

@@ -23,7 +23,9 @@ func main() {
  if err != nil {
  log.Fatalf("Could not create file: %s\n", err)
  }
- defer file.Close()
+ defer func() {
+   _ = file.Close()
+ }()
  err = filepath.WalkDir(templatesDirectory, func(path string, d fs.DirEntry, err error) error {
  if err != nil || d.IsDir() {

View File

@@ -18,7 +18,9 @@ func (h *customConfigDirTest) Execute(filePath string) error {
  if err != nil {
  return err
  }
- defer os.RemoveAll(customTempDirectory)
+ defer func() {
+   _ = os.RemoveAll(customTempDirectory)
+ }()
  results, err := testutils.RunNucleiBareArgsAndGetResults(debug, []string{"NUCLEI_CONFIG_DIR=" + customTempDirectory}, "-t", filePath, "-u", "8x8exch02.8x8.com")
  if err != nil {
  return err

View File

@@ -21,7 +21,7 @@ type dslVersionWarning struct{}
  func (d *dslVersionWarning) Execute(templatePath string) error {
  router := httprouter.New()
  router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
- fmt.Fprintf(w, "DSL version parsing warning test")
+ _, _ = fmt.Fprintf(w, "DSL version parsing warning test")
  })
  ts := httptest.NewServer(router)
  defer ts.Close()
@@ -37,7 +37,7 @@ type dslShowVersionWarning struct{}
  func (d *dslShowVersionWarning) Execute(templatePath string) error {
  router := httprouter.New()
  router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
- fmt.Fprintf(w, "DSL version parsing warning test")
+ _, _ = fmt.Fprintf(w, "DSL version parsing warning test")
  })
  ts := httptest.NewServer(router)
  defer ts.Close()

View File

@@ -0,0 +1,104 @@
package main
import (
"context"
"fmt"
"log"
"time"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/mongo"
"github.com/testcontainers/testcontainers-go"
mongocontainer "github.com/testcontainers/testcontainers-go/modules/mongodb"
osutil "github.com/projectdiscovery/utils/os"
mongoclient "go.mongodb.org/mongo-driver/mongo"
mongooptions "go.mongodb.org/mongo-driver/mongo/options"
)
const (
dbName = "test"
dbImage = "mongo:8"
)
var exportersTestCases = []TestCaseInfo{
{Path: "exporters/mongo", TestCase: &mongoExporter{}, DisableOn: func() bool {
return osutil.IsWindows() || osutil.IsOSX()
}},
}
type mongoExporter struct{}
func (m *mongoExporter) Execute(filepath string) error {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
defer cancel()
// Start a MongoDB container
mongodbContainer, err := mongocontainer.Run(ctx, dbImage)
defer func() {
if err := testcontainers.TerminateContainer(mongodbContainer); err != nil {
log.Printf("failed to terminate container: %s", err)
}
}()
if err != nil {
return fmt.Errorf("failed to start container: %w", err)
}
connString, err := mongodbContainer.ConnectionString(ctx)
if err != nil {
return fmt.Errorf("failed to get connection string for MongoDB container: %s", err)
}
connString = connString + dbName
// Create a MongoDB exporter and write a test result to the database
opts := mongo.Options{
ConnectionString: connString,
CollectionName: "test",
BatchSize: 1, // Ensure we write the result immediately
}
exporter, err := mongo.New(&opts)
if err != nil {
return fmt.Errorf("failed to create MongoDB exporter: %s", err)
}
defer func() {
if err := exporter.Close(); err != nil {
fmt.Printf("failed to close exporter: %s\n", err)
}
}()
res := &output.ResultEvent{
Request: "test request",
Response: "test response",
}
err = exporter.Export(res)
if err != nil {
return fmt.Errorf("failed to export result event to MongoDB: %s", err)
}
// Verify that the result was written to the database
clientOptions := mongooptions.Client().ApplyURI(connString)
client, err := mongoclient.Connect(ctx, clientOptions)
if err != nil {
return fmt.Errorf("error creating MongoDB client: %s", err)
}
defer func() {
if err := client.Disconnect(ctx); err != nil {
fmt.Printf("failed to disconnect from MongoDB: %s\n", err)
}
}()
collection := client.Database(dbName).Collection(opts.CollectionName)
var actualRes output.ResultEvent
err = collection.FindOne(ctx, map[string]interface{}{"request": res.Request}).Decode(&actualRes)
if err != nil {
return fmt.Errorf("failed to find document in MongoDB: %s", err)
}
if actualRes.Request != res.Request || actualRes.Response != res.Response {
return fmt.Errorf("exported result does not match expected result: got %v, want %v", actualRes, res)
}
return nil
}

View File

@@ -49,7 +49,7 @@ func (t *iterateValuesFlow) Execute(filePath string) error {
  }
  router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
  w.WriteHeader(http.StatusOK)
- _, _ = w.Write([]byte(fmt.Sprint(testemails)))
+ _, _ = fmt.Fprint(w, testemails)
  })
  router.GET("/user/"+getBase64(testemails[0]), func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
  w.WriteHeader(http.StatusOK)

View File

@@ -55,7 +55,7 @@ func (h *httpFuzzQuery) Execute(filePath string) error {
  router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
  w.Header().Set("Content-Type", "text/html")
  value := r.URL.Query().Get("id")
- fmt.Fprintf(w, "This is test matcher text: %v", value)
+ _, _ = fmt.Fprintf(w, "This is test matcher text: %v", value)
  })
  ts := httptest.NewTLSServer(router)
  defer ts.Close()
@@ -75,7 +75,7 @@ func (h *fuzzModeOverride) Execute(filePath string) error {
  router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
  w.Header().Set("Content-Type", "text/html")
  value := r.URL.Query().Get("id")
- fmt.Fprintf(w, "This is test matcher text: %v", value)
+ _, _ = fmt.Fprintf(w, "This is test matcher text: %v", value)
  })
  ts := httptest.NewTLSServer(router)
  defer ts.Close()
@@ -120,7 +120,7 @@ func (h *fuzzTypeOverride) Execute(filePath string) error {
  router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
  w.Header().Set("Content-Type", "text/html")
  value := r.URL.Query().Get("id")
- fmt.Fprintf(w, "This is test matcher text: %v", value)
+ _, _ = fmt.Fprintf(w, "This is test matcher text: %v", value)
  })
  ts := httptest.NewTLSServer(router)
  defer ts.Close()
@@ -164,7 +164,7 @@ func (h *HeadlessFuzzingQuery) Execute(filePath string) error {
  router := httprouter.New()
  router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
  resp := fmt.Sprintf("<html><body>%s</body></html>", r.URL.Query().Get("url"))
- fmt.Fprint(w, resp)
+ _, _ = fmt.Fprint(w, resp)
  })
  ts := httptest.NewTLSServer(router)
  defer ts.Close()
@@ -190,7 +190,7 @@ func (h *fuzzMultipleMode) Execute(filePath string) error {
  }
  w.Header().Set("Content-Type", "text/html")
  resp := fmt.Sprintf("<html><body><h1>This is multi-mode fuzzing test: %v <h1></body></html>", xClientId)
- fmt.Fprint(w, resp)
+ _, _ = fmt.Fprint(w, resp)
  })
  ts := httptest.NewTLSServer(router)
  defer ts.Close()

View File

@@ -82,14 +82,15 @@ func (h *clientCertificate) Execute(filePath string) error {
  return
  }
- fmt.Fprintf(w, "Hello, %s!\n", r.TLS.PeerCertificates[0].Subject)
+ _, _ = fmt.Fprintf(w, "Hello, %s!\n", r.TLS.PeerCertificates[0].Subject)
  })
  _ = os.WriteFile("server.crt", []byte(serverCRT), permissionutil.ConfigFilePermission)
  _ = os.WriteFile("server.key", []byte(serverKey), permissionutil.ConfigFilePermission)
- defer os.Remove("server.crt")
- defer os.Remove("server.key")
+ defer func() {
+   _ = os.Remove("server.crt")
+   _ = os.Remove("server.key")
+ }()
  serverCert, _ := tls.LoadX509KeyPair("server.crt", "server.key")
  certPool := x509.NewCertPool()

View File

@@ -178,7 +178,9 @@ func (h *headlessFileUpload) Execute(filePath string) error {
  return
  }
- defer file.Close()
+ defer func() {
+   _ = file.Close()
+ }()
  content, err := io.ReadAll(file)
  if err != nil {
@@ -235,7 +237,9 @@ func (h *headlessFileUploadNegative) Execute(filePath string) error {
  return
  }
- defer file.Close()
+ defer func() {
+   _ = file.Close()
+ }()
  content, err := io.ReadAll(file)
  if err != nil {

View File

@ -19,7 +19,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/testutils" "github.com/projectdiscovery/nuclei/v3/pkg/testutils"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json" "github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
"github.com/projectdiscovery/retryablehttp-go" "github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
logutil "github.com/projectdiscovery/utils/log" logutil "github.com/projectdiscovery/utils/log"
sliceutil "github.com/projectdiscovery/utils/slice" sliceutil "github.com/projectdiscovery/utils/slice"
stringsutil "github.com/projectdiscovery/utils/strings" stringsutil "github.com/projectdiscovery/utils/strings"
@ -108,7 +108,7 @@ func (h *httpMatcherExtractorDynamicExtractor) Execute(filePath string) error {
<a href="/domains">Domains</a> <a href="/domains">Domains</a>
</body> </body>
</html>` </html>`
fmt.Fprint(w, html) _, _ = fmt.Fprint(w, html)
}) })
router.GET("/domains", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/domains", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
html := `<!DOCTYPE html> html := `<!DOCTYPE html>
@ -121,7 +121,7 @@ func (h *httpMatcherExtractorDynamicExtractor) Execute(filePath string) error {
</body> </body>
</html> </html>
` `
fmt.Fprint(w, html) _, _ = fmt.Fprint(w, html)
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -143,7 +143,7 @@ func (h *httpInteractshRequest) Execute(filePath string) error {
value := r.Header.Get("url") value := r.Header.Get("url")
if value != "" { if value != "" {
if resp, _ := retryablehttp.DefaultClient().Get(value); resp != nil { if resp, _ := retryablehttp.DefaultClient().Get(value); resp != nil {
resp.Body.Close() _ = resp.Body.Close()
} }
} }
}) })
@ -196,7 +196,7 @@ func (d *httpDefaultMatcherCondition) Execute(filePath string) error {
return err return err
} }
if routerErr != nil { if routerErr != nil {
return errorutil.NewWithErr(routerErr).Msgf("failed to send http request to interactsh server") return errkit.Wrap(routerErr, "failed to send http request to interactsh server")
} }
if err := expectResultsCount(results, 1); err != nil { if err := expectResultsCount(results, 1); err != nil {
return err return err
@ -213,7 +213,7 @@ func (h *httpInteractshStopAtFirstMatchRequest) Execute(filePath string) error {
value := r.Header.Get("url") value := r.Header.Get("url")
if value != "" { if value != "" {
if resp, _ := retryablehttp.DefaultClient().Get(value); resp != nil { if resp, _ := retryablehttp.DefaultClient().Get(value); resp != nil {
resp.Body.Close() _ = resp.Body.Close()
} }
} }
}) })
@ -235,7 +235,7 @@ func (h *httpGetHeaders) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
if strings.EqualFold(r.Header.Get("test"), "nuclei") { if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text") _, _ = fmt.Fprintf(w, "This is test headers matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -256,7 +256,7 @@ func (h *httpGetQueryString) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
if strings.EqualFold(r.URL.Query().Get("test"), "nuclei") { if strings.EqualFold(r.URL.Query().Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test querystring matcher text") _, _ = fmt.Fprintf(w, "This is test querystring matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -279,7 +279,7 @@ func (h *httpGetRedirects) Execute(filePath string) error {
http.Redirect(w, r, "/redirected", http.StatusFound) http.Redirect(w, r, "/redirected", http.StatusFound)
}) })
router.GET("/redirected", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/redirected", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test redirects matcher text") _, _ = fmt.Fprintf(w, "This is test redirects matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -329,7 +329,7 @@ func (h *httpDisableRedirects) Execute(filePath string) error {
http.Redirect(w, r, "/redirected", http.StatusMovedPermanently) http.Redirect(w, r, "/redirected", http.StatusMovedPermanently)
}) })
router.GET("/redirected", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/redirected", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test redirects matcher text") _, _ = fmt.Fprintf(w, "This is test redirects matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -348,7 +348,7 @@ type httpGet struct{}
func (h *httpGet) Execute(filePath string) error { func (h *httpGet) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -367,7 +367,7 @@ type httpDSLVariable struct{}
func (h *httpDSLVariable) Execute(filePath string) error { func (h *httpDSLVariable) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -450,7 +450,7 @@ func (h *httpPostBody) Execute(filePath string) error {
return return
} }
if strings.EqualFold(r.Form.Get("username"), "test") && strings.EqualFold(r.Form.Get("password"), "nuclei") { if strings.EqualFold(r.Form.Get("username"), "test") && strings.EqualFold(r.Form.Get("password"), "nuclei") {
fmt.Fprintf(w, "This is test post-body matcher text") _, _ = fmt.Fprintf(w, "This is test post-body matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -485,7 +485,7 @@ func (h *httpPostJSONBody) Execute(filePath string) error {
return return
} }
if strings.EqualFold(obj.Username, "test") && strings.EqualFold(obj.Password, "nuclei") { if strings.EqualFold(obj.Username, "test") && strings.EqualFold(obj.Password, "nuclei") {
fmt.Fprintf(w, "This is test post-json-body matcher text") _, _ = fmt.Fprintf(w, "This is test post-json-body matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -525,7 +525,7 @@ func (h *httpPostMultipartBody) Execute(filePath string) error {
return return
} }
if strings.EqualFold(password[0], "nuclei") && strings.EqualFold(file[0].Filename, "username") { if strings.EqualFold(password[0], "nuclei") && strings.EqualFold(file[0].Filename, "username") {
fmt.Fprintf(w, "This is test post-multipart matcher text") _, _ = fmt.Fprintf(w, "This is test post-multipart matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -555,12 +555,12 @@ func (h *httpRawDynamicExtractor) Execute(filePath string) error {
return return
} }
if strings.EqualFold(r.Form.Get("testing"), "parameter") { if strings.EqualFold(r.Form.Get("testing"), "parameter") {
fmt.Fprintf(w, "Token: 'nuclei'") _, _ = fmt.Fprintf(w, "Token: 'nuclei'")
} }
}) })
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
if strings.EqualFold(r.URL.Query().Get("username"), "nuclei") { if strings.EqualFold(r.URL.Query().Get("username"), "nuclei") {
fmt.Fprintf(w, "Test is test-dynamic-extractor-raw matcher text") _, _ = fmt.Fprintf(w, "Test is test-dynamic-extractor-raw matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -584,7 +584,7 @@ func (h *httpRawGetQuery) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
if strings.EqualFold(r.URL.Query().Get("test"), "nuclei") { if strings.EqualFold(r.URL.Query().Get("test"), "nuclei") {
fmt.Fprintf(w, "Test is test raw-get-query-matcher text") _, _ = fmt.Fprintf(w, "Test is test raw-get-query-matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -604,7 +604,7 @@ type httpRawGet struct{}
func (h *httpRawGet) Execute(filePath string) error { func (h *httpRawGet) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "Test is test raw-get-matcher text") _, _ = fmt.Fprintf(w, "Test is test raw-get-matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -628,12 +628,12 @@ func (h *httpRawWithParams) Execute(filePath string) error {
// we intentionally use params["test"] instead of params.Get("test") to test the case where // we intentionally use params["test"] instead of params.Get("test") to test the case where
// there are multiple parameters with the same name // there are multiple parameters with the same name
if !reflect.DeepEqual(params["key1"], []string{"value1"}) { if !reflect.DeepEqual(params["key1"], []string{"value1"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"value1"}, params["key1"]) errx = errkit.Append(errx, errkit.New("key1 not found in params", "expected", []string{"value1"}, "got", params["key1"]))
} }
if !reflect.DeepEqual(params["key2"], []string{"value2"}) { if !reflect.DeepEqual(params["key2"], []string{"value2"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"value2"}, params["key2"]) errx = errkit.Append(errx, errkit.New("key2 not found in params", "expected", []string{"value2"}, "got", params["key2"]))
} }
fmt.Fprintf(w, "Test is test raw-params-matcher text") _, _ = fmt.Fprintf(w, "Test is test raw-params-matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -685,11 +685,11 @@ func (h *httpRawPayload) Execute(filePath string) error {
routerErr = err routerErr = err
return return
} }
if !(strings.EqualFold(r.Header.Get("another_header"), "bnVjbGVp") || strings.EqualFold(r.Header.Get("another_header"), "Z3Vlc3Q=")) { if !strings.EqualFold(r.Header.Get("another_header"), "bnVjbGVp") && !strings.EqualFold(r.Header.Get("another_header"), "Z3Vlc3Q=") {
return return
} }
if strings.EqualFold(r.Form.Get("username"), "test") && (strings.EqualFold(r.Form.Get("password"), "nuclei") || strings.EqualFold(r.Form.Get("password"), "guest")) { if strings.EqualFold(r.Form.Get("username"), "test") && (strings.EqualFold(r.Form.Get("password"), "nuclei") || strings.EqualFold(r.Form.Get("password"), "guest")) {
fmt.Fprintf(w, "Test is raw-payload matcher text") _, _ = fmt.Fprintf(w, "Test is raw-payload matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
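The header check above is the one place where the condition itself is restructured: the negated disjunction becomes a conjunction of negations. The two forms are equivalent by De Morgan's law, so behaviour is unchanged; a sketch (h stands for the header value):

	h := r.Header.Get("another_header")
	a := strings.EqualFold(h, "bnVjbGVp")
	b := strings.EqualFold(h, "Z3Vlc3Q=")
	// !(a || b) and !a && !b reject exactly the same requests.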
@ -719,7 +719,7 @@ func (h *httpRawPostBody) Execute(filePath string) error {
return return
} }
if strings.EqualFold(r.Form.Get("username"), "test") && strings.EqualFold(r.Form.Get("password"), "nuclei") { if strings.EqualFold(r.Form.Get("username"), "test") && strings.EqualFold(r.Form.Get("password"), "nuclei") {
fmt.Fprintf(w, "Test is test raw-post-body-matcher text") _, _ = fmt.Fprintf(w, "Test is test raw-post-body-matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -829,10 +829,7 @@ func (h *httpPaths) Execute(filepath string) error {
} }
if len(expected) > len(actual) { if len(expected) > len(actual) {
-		actualValuesIndex := len(actual) - 1
-		if actualValuesIndex < 0 {
-			actualValuesIndex = 0
-		}
+		actualValuesIndex := max(len(actual)-1, 0)
 		return fmt.Errorf("missing values : %v", expected[actualValuesIndex:])
} else if len(expected) < len(actual) { } else if len(expected) < len(actual) {
return fmt.Errorf("unexpected values : %v", actual[len(expected)-1:]) return fmt.Errorf("unexpected values : %v", actual[len(expected)-1:])
@ -872,7 +869,7 @@ func (h *httpRawCookieReuse) Execute(filePath string) error {
} }
if strings.EqualFold(cookie.Value, "test") { if strings.EqualFold(cookie.Value, "test") {
fmt.Fprintf(w, "Test is test-cookie-reuse matcher text") _, _ = fmt.Fprintf(w, "Test is test-cookie-reuse matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -950,7 +947,9 @@ func (h *httpRequestSelfContained) Execute(filePath string) error {
go func() { go func() {
_ = server.ListenAndServe() _ = server.ListenAndServe()
}() }()
defer server.Close() defer func() {
_ = server.Close()
}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc") results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc")
if err != nil { if err != nil {
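The repeated defer rewrites across these files share one shape: a bare defer x.Close() drops the error returned by Close implicitly, and the new form wraps the call in a closure so the drop is explicit. A minimal sketch:

	// Before: Close's error is ignored by defer without any marker.
	defer server.Close()

	// After: identical behaviour, but the discarded error is visible to linters.
	defer func() {
		_ = server.Close()
	}()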
@ -972,10 +971,10 @@ func (h *httpRequestSelfContainedWithParams) Execute(filePath string) error {
// we intentionally use params["test"] instead of params.Get("test") to test the case where // we intentionally use params["test"] instead of params.Get("test") to test the case where
// there are multiple parameters with the same name // there are multiple parameters with the same name
if !reflect.DeepEqual(params["something"], []string{"here"}) { if !reflect.DeepEqual(params["something"], []string{"here"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"here"}, params["something"]) errx = errkit.Append(errx, errkit.New("something not found in params", "expected", []string{"here"}, "got", params["something"]))
} }
if !reflect.DeepEqual(params["key"], []string{"value"}) { if !reflect.DeepEqual(params["key"], []string{"value"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"value"}, params["key"]) errx = errkit.Append(errx, errkit.New("key not found in params", "expected", []string{"value"}, "got", params["key"]))
} }
_, _ = w.Write([]byte("This is self-contained response")) _, _ = w.Write([]byte("This is self-contained response"))
}) })
@ -986,7 +985,9 @@ func (h *httpRequestSelfContainedWithParams) Execute(filePath string) error {
go func() { go func() {
_ = server.ListenAndServe() _ = server.ListenAndServe()
}() }()
defer server.Close() defer func() {
_ = server.Close()
}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc") results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc")
if err != nil { if err != nil {
@ -1019,17 +1020,21 @@ func (h *httpRequestSelfContainedFileInput) Execute(filePath string) error {
go func() { go func() {
_ = server.ListenAndServe() _ = server.ListenAndServe()
}() }()
-	defer server.Close()
+	defer func() {
+		_ = server.Close()
+	}()
 	// create temp file
 	FileLoc, err := os.CreateTemp("", "self-contained-payload-*.txt")
 	if err != nil {
-		return errorutil.NewWithErr(err).Msgf("failed to create temp file")
+		return errkit.Wrap(err, "failed to create temp file")
 	}
 	if _, err := FileLoc.Write([]byte("one\ntwo\n")); err != nil {
-		return errorutil.NewWithErr(err).Msgf("failed to write payload to temp file")
+		return errkit.Wrap(err, "failed to write payload to temp file")
 	}
-	defer FileLoc.Close()
+	defer func() {
+		_ = FileLoc.Close()
+	}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-V", "test="+FileLoc.Name(), "-esc") results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-V", "test="+FileLoc.Name(), "-esc")
if err != nil { if err != nil {
@ -1041,7 +1046,7 @@ func (h *httpRequestSelfContainedFileInput) Execute(filePath string) error {
} }
if !sliceutil.ElementsMatch(gotReqToEndpoints, []string{"/one", "/two", "/one", "/two"}) { if !sliceutil.ElementsMatch(gotReqToEndpoints, []string{"/one", "/two", "/one", "/two"}) {
return errorutil.NewWithTag(filePath, "expected requests to be sent to `/one` and `/two` endpoints but were sent to `%v`", gotReqToEndpoints) return errkit.New("expected requests to be sent to `/one` and `/two` endpoints but were sent to `%v`", gotReqToEndpoints, "filePath", filePath)
} }
return nil return nil
} }
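These hunks also migrate error handling from the older errorutil helpers to errkit (both live in projectdiscovery/utils). A self-contained sketch of the call shapes used in this diff, assuming errkit.Append tolerates a nil aggregate the way the test code relies on:

	package main

	import (
		"fmt"

		"github.com/projectdiscovery/utils/errkit"
	)

	func check(params map[string][]string) error {
		var errx error
		if len(params["key1"]) == 0 {
			// errkit.New takes a message plus optional key/value context;
			// errkit.Append aggregates errors (replaces errorutil.WrapfWithNil chains).
			errx = errkit.Append(errx, errkit.New("key1 not found in params", "got", params["key1"]))
		}
		if errx != nil {
			// errkit.Wrap replaces errorutil.NewWithErr(err).Msgf(...).
			return errkit.Wrap(errx, "parameter validation failed")
		}
		return nil
	}

	func main() {
		fmt.Println(check(map[string][]string{}))
	}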
@ -1052,7 +1057,7 @@ type httpGetCaseInsensitive struct{}
func (h *httpGetCaseInsensitive) Execute(filePath string) error { func (h *httpGetCaseInsensitive) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "THIS IS TEST MATCHER TEXT") _, _ = fmt.Fprintf(w, "THIS IS TEST MATCHER TEXT")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -1071,7 +1076,7 @@ type httpGetCaseInsensitiveCluster struct{}
func (h *httpGetCaseInsensitiveCluster) Execute(filesPath string) error { func (h *httpGetCaseInsensitiveCluster) Execute(filesPath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -1154,7 +1159,7 @@ type httpStopAtFirstMatch struct{}
func (h *httpStopAtFirstMatch) Execute(filePath string) error { func (h *httpStopAtFirstMatch) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test") _, _ = fmt.Fprintf(w, "This is test")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -1173,7 +1178,7 @@ type httpStopAtFirstMatchWithExtractors struct{}
func (h *httpStopAtFirstMatchWithExtractors) Execute(filePath string) error { func (h *httpStopAtFirstMatchWithExtractors) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test") _, _ = fmt.Fprintf(w, "This is test")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -1192,7 +1197,7 @@ type httpVariables struct{}
func (h *httpVariables) Execute(filePath string) error { func (h *httpVariables) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "%s\n%s\n%s", r.Header.Get("Test"), r.Header.Get("Another"), r.Header.Get("Email")) _, _ = fmt.Fprintf(w, "%s\n%s\n%s", r.Header.Get("Test"), r.Header.Get("Another"), r.Header.Get("Email"))
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -1294,7 +1299,7 @@ func (h *httpRedirectMatchURL) Execute(filePath string) error {
_, _ = w.Write([]byte("This is test redirects matcher text")) _, _ = w.Write([]byte("This is test redirects matcher text"))
}) })
router.GET("/redirected", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/redirected", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test redirects matcher text") _, _ = fmt.Fprintf(w, "This is test redirects matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -1342,7 +1347,7 @@ func (h *annotationTimeout) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
time.Sleep(4 * time.Second) time.Sleep(4 * time.Second)
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
}) })
ts := httptest.NewTLSServer(router) ts := httptest.NewTLSServer(router)
defer ts.Close() defer ts.Close()
@ -1362,7 +1367,7 @@ func (h *customAttackType) Execute(filePath string) error {
got := []string{} got := []string{}
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
got = append(got, r.URL.RawQuery) got = append(got, r.URL.RawQuery)
fmt.Fprintf(w, "This is test custom payload") _, _ = fmt.Fprintf(w, "This is test custom payload")
}) })
ts := httptest.NewTLSServer(router) ts := httptest.NewTLSServer(router)
defer ts.Close() defer ts.Close()
@ -1410,7 +1415,7 @@ func (h *httpCLBodyWithoutHeader) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.Header()["Content-Length"] = []string{"-1"} w.Header()["Content-Length"] = []string{"-1"}
fmt.Fprintf(w, "this is a test") _, _ = fmt.Fprintf(w, "this is a test")
}) })
ts := httptest.NewTLSServer(router) ts := httptest.NewTLSServer(router)
defer ts.Close() defer ts.Close()
@ -1430,7 +1435,7 @@ func (h *httpCLBodyWithHeader) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.Header()["Content-Length"] = []string{"50000"} w.Header()["Content-Length"] = []string{"50000"}
fmt.Fprintf(w, "this is a test") _, _ = fmt.Fprintf(w, "this is a test")
}) })
ts := httptest.NewTLSServer(router) ts := httptest.NewTLSServer(router)
defer ts.Close() defer ts.Close()
@ -1449,7 +1454,7 @@ type ConstantWithCliVar struct{}
func (h *ConstantWithCliVar) Execute(filePath string) error { func (h *ConstantWithCliVar) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprint(w, r.URL.Query().Get("p")) _, _ = fmt.Fprint(w, r.URL.Query().Get("p"))
}) })
ts := httptest.NewTLSServer(router) ts := httptest.NewTLSServer(router)
defer ts.Close() defer ts.Close()
@ -1486,10 +1491,10 @@ type httpDisablePathAutomerge struct{}
func (h *httpDisablePathAutomerge) Execute(filePath string) error { func (h *httpDisablePathAutomerge) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/api/v1/test", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/api/v1/test", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprint(w, r.URL.Query().Get("id")) _, _ = fmt.Fprint(w, r.URL.Query().Get("id"))
}) })
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprint(w, "empty path in raw request") _, _ = fmt.Fprint(w, "empty path in raw request")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -1523,10 +1528,10 @@ func (h *httpPreprocessor) Execute(filePath string) error {
value := r.URL.RequestURI() value := r.URL.RequestURI()
if re.MatchString(value) { if re.MatchString(value) {
w.WriteHeader(http.StatusOK) w.WriteHeader(http.StatusOK)
fmt.Fprint(w, "ok") _, _ = fmt.Fprint(w, "ok")
} else { } else {
w.WriteHeader(http.StatusBadRequest) w.WriteHeader(http.StatusBadRequest)
fmt.Fprint(w, "not ok") _, _ = fmt.Fprint(w, "not ok")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -1547,11 +1552,11 @@ func (h *httpMultiRequest) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/ping", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/ping", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.WriteHeader(http.StatusOK) w.WriteHeader(http.StatusOK)
fmt.Fprint(w, "ping") _, _ = fmt.Fprint(w, "ping")
}) })
router.GET("/pong", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/pong", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.WriteHeader(http.StatusOK) w.WriteHeader(http.StatusOK)
fmt.Fprint(w, "pong") _, _ = fmt.Fprint(w, "pong")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()


@ -4,6 +4,7 @@ import (
"flag" "flag"
"fmt" "fmt"
"os" "os"
"regexp"
"runtime" "runtime"
"strings" "strings"
@ -56,6 +57,7 @@ var (
"flow": flowTestcases, "flow": flowTestcases,
"javascript": jsTestcases, "javascript": jsTestcases,
"matcher-status": matcherStatusTestcases, "matcher-status": matcherStatusTestcases,
"exporters": exportersTestCases,
} }
// flakyTests are run with a retry count of 3 // flakyTests are run with a retry count of 3
flakyTests = map[string]bool{ flakyTests = map[string]bool{
@ -89,7 +91,9 @@ func main() {
// start fuzz playground server // start fuzz playground server
defer fuzzplayground.Cleanup() defer fuzzplayground.Cleanup()
server := fuzzplayground.GetPlaygroundServer() server := fuzzplayground.GetPlaygroundServer()
defer server.Close() defer func() {
_ = server.Close()
}()
go func() { go func() {
if err := server.Start("localhost:8082"); err != nil { if err := server.Start("localhost:8082"); err != nil {
if !strings.Contains(err.Error(), "Server closed") { if !strings.Contains(err.Error(), "Server closed") {
@ -208,7 +212,7 @@ func execute(testCase testutils.TestCase, templatePath string) (string, error) {
} }
func expectResultsCount(results []string, expectedNumbers ...int) error { func expectResultsCount(results []string, expectedNumbers ...int) error {
results = filterHeadlessLogs(results) results = filterLines(results)
match := sliceutil.Contains(expectedNumbers, len(results)) match := sliceutil.Contains(expectedNumbers, len(results))
if !match { if !match {
return fmt.Errorf("incorrect number of results: %d (actual) vs %v (expected) \nResults:\n\t%s\n", len(results), expectedNumbers, strings.Join(results, "\n\t")) // nolint:all return fmt.Errorf("incorrect number of results: %d (actual) vs %v (expected) \nResults:\n\t%s\n", len(results), expectedNumbers, strings.Join(results, "\n\t")) // nolint:all
@ -222,6 +226,13 @@ func normalizeSplit(str string) []string {
}) })
} }
// filterLines applies all filtering functions to the results
func filterLines(results []string) []string {
results = filterHeadlessLogs(results)
results = filterUnsignedTemplatesWarnings(results)
return results
}
// if chromium is not installed go-rod installs it in .cache directory // if chromium is not installed go-rod installs it in .cache directory
// this function filters out the logs from download and installation // this function filters out the logs from download and installation
func filterHeadlessLogs(results []string) []string { func filterHeadlessLogs(results []string) []string {
@ -235,3 +246,16 @@ func filterHeadlessLogs(results []string) []string {
} }
return filtered return filtered
} }
// filterUnsignedTemplatesWarnings filters out warning messages about unsigned templates
func filterUnsignedTemplatesWarnings(results []string) []string {
filtered := []string{}
unsignedTemplatesRegex := regexp.MustCompile(`Loading \d+ unsigned templates for scan\. Use with caution\.`)
for _, result := range results {
if unsignedTemplatesRegex.MatchString(result) {
continue
}
filtered = append(filtered, result)
}
return filtered
}
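filterLines composes the existing headless-log filter with the new unsigned-templates filter, and expectResultsCount now runs every result set through it so warning lines are not counted as findings. A hypothetical usage sketch:

	in := []string{
		"[WRN] Loading 3 unsigned templates for scan. Use with caution.",
		"https://example.com/login [http-matcher]",
	}
	out := filterLines(in)
	// The warning matches the unsigned-templates regex and is dropped;
	// only the actual finding remains, so len(out) == 1.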


@ -15,13 +15,17 @@ var jsTestcases = []TestCaseInfo{
{Path: "protocols/javascript/ssh-server-fingerprint.yaml", TestCase: &javascriptSSHServerFingerprint{}, DisableOn: func() bool { return osutils.IsWindows() || osutils.IsOSX() }}, {Path: "protocols/javascript/ssh-server-fingerprint.yaml", TestCase: &javascriptSSHServerFingerprint{}, DisableOn: func() bool { return osutils.IsWindows() || osutils.IsOSX() }},
{Path: "protocols/javascript/net-multi-step.yaml", TestCase: &networkMultiStep{}}, {Path: "protocols/javascript/net-multi-step.yaml", TestCase: &networkMultiStep{}},
{Path: "protocols/javascript/net-https.yaml", TestCase: &javascriptNetHttps{}}, {Path: "protocols/javascript/net-https.yaml", TestCase: &javascriptNetHttps{}},
{Path: "protocols/javascript/oracle-auth-test.yaml", TestCase: &javascriptOracleAuthTest{}, DisableOn: func() bool { return osutils.IsWindows() || osutils.IsOSX() }},
{Path: "protocols/javascript/vnc-pass-brute.yaml", TestCase: &javascriptVncPassBrute{}},
} }
 var (
 	redisResource  *dockertest.Resource
 	sshResource    *dockertest.Resource
+	oracleResource *dockertest.Resource
+	vncResource    *dockertest.Resource
 	pool           *dockertest.Pool
 	defaultRetry   = 3
 )
type javascriptNetHttps struct{} type javascriptNetHttps struct{}
@ -98,6 +102,71 @@ func (j *javascriptSSHServerFingerprint) Execute(filePath string) error {
return multierr.Combine(errs...) return multierr.Combine(errs...)
} }
type javascriptOracleAuthTest struct{}
func (j *javascriptOracleAuthTest) Execute(filePath string) error {
if oracleResource == nil || pool == nil {
// skip test as oracle is not running
return nil
}
tempPort := oracleResource.GetPort("1521/tcp")
finalURL := "localhost:" + tempPort
defer purge(oracleResource)
errs := []error{}
for i := 0; i < defaultRetry; i++ {
results := []string{}
var err error
_ = pool.Retry(func() error {
//let ssh server start
time.Sleep(3 * time.Second)
results, err = testutils.RunNucleiTemplateAndGetResults(filePath, finalURL, debug)
return nil
})
if err != nil {
return err
}
if err := expectResultsCount(results, 1); err == nil {
return nil
} else {
errs = append(errs, err)
}
}
return multierr.Combine(errs...)
}
type javascriptVncPassBrute struct{}
func (j *javascriptVncPassBrute) Execute(filePath string) error {
if vncResource == nil || pool == nil {
// skip test as vnc is not running
return nil
}
tempPort := vncResource.GetPort("5900/tcp")
finalURL := "localhost:" + tempPort
defer purge(vncResource)
errs := []error{}
for i := 0; i < defaultRetry; i++ {
results := []string{}
var err error
_ = pool.Retry(func() error {
//let ssh server start
time.Sleep(3 * time.Second)
results, err = testutils.RunNucleiTemplateAndGetResults(filePath, finalURL, debug)
return nil
})
if err != nil {
return err
}
if err := expectResultsCount(results, 1); err == nil {
return nil
} else {
errs = append(errs, err)
}
}
return multierr.Combine(errs...)
}
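	// Note on the retry shape used by both new cases above: the closure passed to
	// pool.Retry always returns nil, so Retry performs a single attempt after the
	// fixed 3-second sleep; the surrounding for i := 0; i < defaultRetry; i++ loop
	// is what actually re-runs the template when the result count does not match.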
// purge any given resource if it is not nil // purge any given resource if it is not nil
func purge(resource *dockertest.Resource) { func purge(resource *dockertest.Resource) {
if resource != nil && pool != nil { if resource != nil && pool != nil {
@ -163,4 +232,41 @@ func init() {
if err := sshResource.Expire(30); err != nil { if err := sshResource.Expire(30); err != nil {
log.Printf("Could not expire resource: %s", err) log.Printf("Could not expire resource: %s", err)
} }
// setup a temporary oracle instance
oracleResource, err = pool.RunWithOptions(&dockertest.RunOptions{
Repository: "gvenzl/oracle-xe",
Tag: "latest",
Env: []string{
"ORACLE_PASSWORD=mysecret",
},
Platform: "linux/amd64",
})
if err != nil {
log.Printf("Could not start Oracle resource: %s", err)
return
}
// by default expire after 30 sec
if err := oracleResource.Expire(30); err != nil {
log.Printf("Could not expire Oracle resource: %s", err)
}
// setup a temporary vnc server
vncResource, err = pool.RunWithOptions(&dockertest.RunOptions{
Repository: "dorowu/ubuntu-desktop-lxde-vnc",
Tag: "latest",
Env: []string{
"VNC_PASSWORD=mysecret",
},
Platform: "linux/amd64",
})
if err != nil {
log.Printf("Could not start resource: %s", err)
return
}
// by default expire after 30 sec
if err := vncResource.Expire(30); err != nil {
log.Printf("Could not expire resource: %s", err)
}
} }
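A brief note on the container lifecycle added here: Expire(30) asks dockertest to hard-remove each container after 30 seconds as a safety net, while the purge helper defined earlier in this file removes it explicitly as soon as the test finishes. A sketch of how the two combine for the VNC resource (the Oracle resource follows the same pattern):

	if err := vncResource.Expire(30); err != nil { // hard TTL enforced by dockertest
		log.Printf("Could not expire resource: %s", err)
	}
	// ...and inside the test case:
	defer purge(vncResource) // explicit cleanup via pool.Purge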


@ -48,9 +48,9 @@ func (h *goIntegrationTest) Execute(templatePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
if strings.EqualFold(r.Header.Get("test"), "nuclei") { if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text") _, _ = fmt.Fprintf(w, "This is test headers matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -68,17 +68,21 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
cache := hosterrorscache.New(30, hosterrorscache.DefaultMaxHostsCount, nil) cache := hosterrorscache.New(30, hosterrorscache.DefaultMaxHostsCount, nil)
defer cache.Close() defer cache.Close()
+	defaultOpts := types.DefaultOptions()
+	defaultOpts.ExecutionId = "test"
 	mockProgress := &testutils.MockProgressClient{}
-	reportingClient, err := reporting.New(&reporting.Options{}, "", false)
+	reportingClient, err := reporting.New(&reporting.Options{ExecutionId: defaultOpts.ExecutionId}, "", false)
 	if err != nil {
 		return nil, err
 	}
 	defer reportingClient.Close()
-	defaultOpts := types.DefaultOptions()
 	_ = protocolstate.Init(defaultOpts)
 	_ = protocolinit.Init(defaultOpts)
+	defer protocolstate.Close(defaultOpts.ExecutionId)
defaultOpts.Templates = goflags.StringSlice{templatePath} defaultOpts.Templates = goflags.StringSlice{templatePath}
defaultOpts.ExcludeTags = config.ReadIgnoreFile().Tags defaultOpts.ExcludeTags = config.ReadIgnoreFile().Tags
@ -100,7 +104,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
ratelimiter := ratelimit.New(context.Background(), 150, time.Second) ratelimiter := ratelimit.New(context.Background(), 150, time.Second)
defer ratelimiter.Stop() defer ratelimiter.Stop()
executerOpts := protocols.ExecutorOptions{ executerOpts := &protocols.ExecutorOptions{
Output: outputWriter, Output: outputWriter,
Options: defaultOpts, Options: defaultOpts,
Progress: mockProgress, Progress: mockProgress,
@ -116,7 +120,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
engine := core.New(defaultOpts) engine := core.New(defaultOpts)
engine.SetExecuterOptions(executerOpts) engine.SetExecuterOptions(executerOpts)
workflowLoader, err := parsers.NewLoader(&executerOpts) workflowLoader, err := parsers.NewLoader(executerOpts)
if err != nil { if err != nil {
log.Fatalf("Could not create workflow loader: %s\n", err) log.Fatalf("Could not create workflow loader: %s\n", err)
} }
@ -128,7 +132,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
} }
store.Load() store.Load()
_ = engine.Execute(context.Background(), store.Templates(), provider.NewSimpleInputProviderWithUrls(templateURL)) _ = engine.Execute(context.Background(), store.Templates(), provider.NewSimpleInputProviderWithUrls(defaultOpts.ExecutionId, templateURL))
engine.WorkPool().Wait() // Wait for the scan to finish engine.WorkPool().Wait() // Wait for the scan to finish
return results, nil return results, nil
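The executeNucleiAsLibrary changes share one theme: everything that is scoped per run is now keyed by an ExecutionId set on the options before any other component is built, and ExecutorOptions is passed around as a pointer so parsers.NewLoader and the engine see the same instance. A condensed sketch of the ordering these hunks assume:

	defaultOpts := types.DefaultOptions()
	defaultOpts.ExecutionId = "test" // any unique string; cmd/nuclei uses xid
	reportingClient, _ := reporting.New(&reporting.Options{ExecutionId: defaultOpts.ExecutionId}, "", false)
	defer reportingClient.Close()
	_ = protocolstate.Init(defaultOpts)
	_ = protocolinit.Init(defaultOpts)
	defer protocolstate.Close(defaultOpts.ExecutionId)
	// ExecutorOptions is now built as a pointer: &protocols.ExecutorOptions{...}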


@ -10,7 +10,7 @@ import (
"github.com/julienschmidt/httprouter" "github.com/julienschmidt/httprouter"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils" "github.com/projectdiscovery/nuclei/v3/pkg/testutils"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
permissionutil "github.com/projectdiscovery/utils/permission" permissionutil "github.com/projectdiscovery/utils/permission"
) )
@ -31,9 +31,9 @@ func (h *remoteTemplateList) Execute(templateList string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
if strings.EqualFold(r.Header.Get("test"), "nuclei") { if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text") _, _ = fmt.Fprintf(w, "This is test headers matcher text")
} }
}) })
@ -55,7 +55,9 @@ func (h *remoteTemplateList) Execute(templateList string) error {
if err != nil { if err != nil {
return err return err
} }
defer os.Remove("test-config.yaml") defer func() {
_ = os.Remove("test-config.yaml")
}()
results, err := testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-template-url", ts.URL+"/template_list", "-config", "test-config.yaml") results, err := testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-template-url", ts.URL+"/template_list", "-config", "test-config.yaml")
if err != nil { if err != nil {
@ -72,9 +74,9 @@ func (h *excludedTemplate) Execute(templateList string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
if strings.EqualFold(r.Header.Get("test"), "nuclei") { if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text") _, _ = fmt.Fprintf(w, "This is test headers matcher text")
} }
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
@ -95,9 +97,9 @@ func (h *remoteTemplateListNotAllowed) Execute(templateList string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
if strings.EqualFold(r.Header.Get("test"), "nuclei") { if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text") _, _ = fmt.Fprintf(w, "This is test headers matcher text")
} }
}) })
@ -130,9 +132,9 @@ func (h *remoteWorkflowList) Execute(workflowList string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
if strings.EqualFold(r.Header.Get("test"), "nuclei") { if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text") _, _ = fmt.Fprintf(w, "This is test headers matcher text")
} }
}) })
@ -154,7 +156,9 @@ func (h *remoteWorkflowList) Execute(workflowList string) error {
if err != nil { if err != nil {
return err return err
} }
defer os.Remove("test-config.yaml") defer func() {
_ = os.Remove("test-config.yaml")
}()
results, err := testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-workflow-url", ts.URL+"/workflow_list", "-config", "test-config.yaml") results, err := testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-workflow-url", ts.URL+"/workflow_list", "-config", "test-config.yaml")
if err != nil { if err != nil {
@ -177,7 +181,9 @@ func (h *nonExistentTemplateList) Execute(nonExistingTemplateList string) error
if err != nil { if err != nil {
return err return err
} }
defer os.Remove("test-config.yaml") defer func() {
_ = os.Remove("test-config.yaml")
}()
_, err = testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-template-url", ts.URL+"/404", "-config", "test-config.yaml") _, err = testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-template-url", ts.URL+"/404", "-config", "test-config.yaml")
if err == nil { if err == nil {
@ -200,7 +206,9 @@ func (h *nonExistentWorkflowList) Execute(nonExistingWorkflowList string) error
if err != nil { if err != nil {
return err return err
} }
defer os.Remove("test-config.yaml") defer func() {
_ = os.Remove("test-config.yaml")
}()
_, err = testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-workflow-url", ts.URL+"/404", "-config", "test-config.yaml") _, err = testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-workflow-url", ts.URL+"/404", "-config", "test-config.yaml")
if err == nil { if err == nil {
@ -215,7 +223,7 @@ type loadTemplateWithID struct{}
func (h *loadTemplateWithID) Execute(nooop string) error { func (h *loadTemplateWithID) Execute(nooop string) error {
results, err := testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", "scanme.sh", "-id", "self-signed-ssl") results, err := testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", "scanme.sh", "-id", "self-signed-ssl")
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id") return errkit.Wrap(err, "failed to load template with id")
} }
return expectResultsCount(results, 1) return expectResultsCount(results, 1)
} }


@ -33,7 +33,9 @@ func (h *networkBasic) Execute(filePath string) error {
var routerErr error var routerErr error
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) { ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second) data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil { if err != nil {
@ -50,11 +52,11 @@ func (h *networkBasic) Execute(filePath string) error {
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, ts.URL, debug) results, err := testutils.RunNucleiTemplateAndGetResults(filePath, ts.URL, debug)
if err != nil { if err != nil {
fmt.Fprintf(os.Stderr, "Could not run nuclei: %s\n", err) _, _ = fmt.Fprintf(os.Stderr, "Could not run nuclei: %s\n", err)
return err return err
} }
if routerErr != nil { if routerErr != nil {
fmt.Fprintf(os.Stderr, "routerErr: %s\n", routerErr) _, _ = fmt.Fprintf(os.Stderr, "routerErr: %s\n", routerErr)
return routerErr return routerErr
} }
@ -68,7 +70,9 @@ func (h *networkMultiStep) Execute(filePath string) error {
var routerErr error var routerErr error
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) { ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 5, time.Duration(5)*time.Second) data, err := reader.ConnReadNWithTimeout(conn, 5, time.Duration(5)*time.Second)
if err != nil { if err != nil {
@ -114,7 +118,9 @@ type networkRequestSelContained struct{}
// Execute executes a test case and returns an error if occurred // Execute executes a test case and returns an error if occurred
func (h *networkRequestSelContained) Execute(filePath string) error { func (h *networkRequestSelContained) Execute(filePath string) error {
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) { ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
_, _ = conn.Write([]byte("Authentication successful")) _, _ = conn.Write([]byte("Authentication successful"))
}) })
@ -134,7 +140,9 @@ func (h *networkVariables) Execute(filePath string) error {
var routerErr error var routerErr error
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) { ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second) data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil { if err != nil {
@ -162,7 +170,9 @@ type networkPort struct{}
func (n *networkPort) Execute(filePath string) error { func (n *networkPort) Execute(filePath string) error {
ts := testutils.NewTCPServer(nil, 23846, func(conn net.Conn) { ts := testutils.NewTCPServer(nil, 23846, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second) data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil { if err != nil {
@ -195,7 +205,9 @@ func (n *networkPort) Execute(filePath string) error {
// this is positive test case where we expect port to be overridden and 34567 to be used // this is positive test case where we expect port to be overridden and 34567 to be used
ts2 := testutils.NewTCPServer(nil, 34567, func(conn net.Conn) { ts2 := testutils.NewTCPServer(nil, 34567, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second) data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil { if err != nil {


@ -4,7 +4,7 @@ import (
"fmt" "fmt"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils" "github.com/projectdiscovery/nuclei/v3/pkg/testutils"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
) )
var profileLoaderTestcases = []TestCaseInfo{ var profileLoaderTestcases = []TestCaseInfo{
@ -16,9 +16,9 @@ var profileLoaderTestcases = []TestCaseInfo{
type profileLoaderByRelFile struct{} type profileLoaderByRelFile struct{}
func (h *profileLoaderByRelFile) Execute(testName string) error { func (h *profileLoaderByRelFile) Execute(testName string) error {
results, err := testutils.RunNucleiWithArgsAndGetResults(false, "-tl", "-tp", "cloud.yml") results, err := testutils.RunNucleiWithArgsAndGetResults(debug, "-tl", "-tp", "cloud.yml")
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id") return errkit.Wrap(err, "failed to load template with id")
} }
if len(results) <= 10 { if len(results) <= 10 {
return fmt.Errorf("incorrect result: expected more results than %d, got %v", 10, len(results)) return fmt.Errorf("incorrect result: expected more results than %d, got %v", 10, len(results))
@ -29,9 +29,9 @@ func (h *profileLoaderByRelFile) Execute(testName string) error {
type profileLoaderById struct{} type profileLoaderById struct{}
func (h *profileLoaderById) Execute(testName string) error { func (h *profileLoaderById) Execute(testName string) error {
results, err := testutils.RunNucleiWithArgsAndGetResults(false, "-tl", "-tp", "cloud") results, err := testutils.RunNucleiWithArgsAndGetResults(debug, "-tl", "-tp", "cloud")
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id") return errkit.Wrap(err, "failed to load template with id")
} }
if len(results) <= 10 { if len(results) <= 10 {
return fmt.Errorf("incorrect result: expected more results than %d, got %v", 10, len(results)) return fmt.Errorf("incorrect result: expected more results than %d, got %v", 10, len(results))
@ -43,9 +43,9 @@ func (h *profileLoaderById) Execute(testName string) error {
type customProfileLoader struct{} type customProfileLoader struct{}
func (h *customProfileLoader) Execute(filepath string) error { func (h *customProfileLoader) Execute(filepath string) error {
results, err := testutils.RunNucleiWithArgsAndGetResults(false, "-tl", "-tp", filepath) results, err := testutils.RunNucleiWithArgsAndGetResults(debug, "-tl", "-tp", filepath)
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id") return errkit.Wrap(err, "failed to load template with id")
} }
if len(results) < 1 { if len(results) < 1 {
return fmt.Errorf("incorrect result: expected more results than %d, got %v", 1, len(results)) return fmt.Errorf("incorrect result: expected more results than %d, got %v", 1, len(results))


@ -21,7 +21,9 @@ type sslBasic struct{}
// Execute executes a test case and returns an error if occurred // Execute executes a test case and returns an error if occurred
func (h *sslBasic) Execute(filePath string) error { func (h *sslBasic) Execute(filePath string) error {
ts := testutils.NewTCPServer(&tls.Config{}, defaultStaticPort, func(conn net.Conn) { ts := testutils.NewTCPServer(&tls.Config{}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data := make([]byte, 4) data := make([]byte, 4)
if _, err := conn.Read(data); err != nil { if _, err := conn.Read(data); err != nil {
return return
@ -42,7 +44,9 @@ type sslBasicZtls struct{}
// Execute executes a test case and returns an error if occurred // Execute executes a test case and returns an error if occurred
func (h *sslBasicZtls) Execute(filePath string) error { func (h *sslBasicZtls) Execute(filePath string) error {
ts := testutils.NewTCPServer(&tls.Config{}, defaultStaticPort, func(conn net.Conn) { ts := testutils.NewTCPServer(&tls.Config{}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data := make([]byte, 4) data := make([]byte, 4)
if _, err := conn.Read(data); err != nil { if _, err := conn.Read(data); err != nil {
return return
@ -63,7 +67,9 @@ type sslCustomCipher struct{}
// Execute executes a test case and returns an error if occurred // Execute executes a test case and returns an error if occurred
func (h *sslCustomCipher) Execute(filePath string) error { func (h *sslCustomCipher) Execute(filePath string) error {
ts := testutils.NewTCPServer(&tls.Config{CipherSuites: []uint16{tls.TLS_AES_128_GCM_SHA256}}, defaultStaticPort, func(conn net.Conn) { ts := testutils.NewTCPServer(&tls.Config{CipherSuites: []uint16{tls.TLS_AES_128_GCM_SHA256}}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data := make([]byte, 4) data := make([]byte, 4)
if _, err := conn.Read(data); err != nil { if _, err := conn.Read(data); err != nil {
return return
@ -84,7 +90,9 @@ type sslCustomVersion struct{}
// Execute executes a test case and returns an error if occurred // Execute executes a test case and returns an error if occurred
func (h *sslCustomVersion) Execute(filePath string) error { func (h *sslCustomVersion) Execute(filePath string) error {
ts := testutils.NewTCPServer(&tls.Config{MinVersion: tls.VersionTLS12, MaxVersion: tls.VersionTLS12}, defaultStaticPort, func(conn net.Conn) { ts := testutils.NewTCPServer(&tls.Config{MinVersion: tls.VersionTLS12, MaxVersion: tls.VersionTLS12}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data := make([]byte, 4) data := make([]byte, 4)
if _, err := conn.Read(data); err != nil { if _, err := conn.Read(data); err != nil {
return return
@ -104,7 +112,9 @@ type sslWithVars struct{}
func (h *sslWithVars) Execute(filePath string) error { func (h *sslWithVars) Execute(filePath string) error {
ts := testutils.NewTCPServer(&tls.Config{}, defaultStaticPort, func(conn net.Conn) { ts := testutils.NewTCPServer(&tls.Config{}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data := make([]byte, 4) data := make([]byte, 4)
if _, err := conn.Read(data); err != nil { if _, err := conn.Read(data); err != nil {
return return
@ -128,7 +138,9 @@ func (h *sslMultiReq) Execute(filePath string) error {
MinVersion: tls.VersionSSL30, MinVersion: tls.VersionSSL30,
MaxVersion: tls.VersionTLS11, MaxVersion: tls.VersionTLS11,
}, defaultStaticPort, func(conn net.Conn) { }, defaultStaticPort, func(conn net.Conn) {
defer conn.Close() defer func() {
_ = conn.Close()
}()
data := make([]byte, 4) data := make([]byte, 4)
if _, err := conn.Read(data); err != nil { if _, err := conn.Read(data); err != nil {
return return


@ -4,7 +4,7 @@ import (
"os" "os"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils" "github.com/projectdiscovery/nuclei/v3/pkg/testutils"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
) )
var templatesDirTestCases = []TestCaseInfo{ var templatesDirTestCases = []TestCaseInfo{
@ -17,9 +17,11 @@ type templateDirWithTargetTest struct{}
func (h *templateDirWithTargetTest) Execute(filePath string) error { func (h *templateDirWithTargetTest) Execute(filePath string) error {
tempdir, err := os.MkdirTemp("", "nuclei-update-dir-*") tempdir, err := os.MkdirTemp("", "nuclei-update-dir-*")
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to create temp dir") return errkit.Wrap(err, "failed to create temp dir")
} }
defer os.RemoveAll(tempdir) defer func() {
_ = os.RemoveAll(tempdir)
}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "8x8exch02.8x8.com", debug, "-ud", tempdir) results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "8x8exch02.8x8.com", debug, "-ud", tempdir)
if err != nil { if err != nil {


@ -62,7 +62,7 @@ type workflowBasic struct{}
func (h *workflowBasic) Execute(filePath string) error { func (h *workflowBasic) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -81,7 +81,7 @@ type workflowConditionMatched struct{}
func (h *workflowConditionMatched) Execute(filePath string) error { func (h *workflowConditionMatched) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -100,7 +100,7 @@ type workflowConditionUnmatch struct{}
func (h *workflowConditionUnmatch) Execute(filePath string) error { func (h *workflowConditionUnmatch) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -119,7 +119,7 @@ type workflowMatcherName struct{}
func (h *workflowMatcherName) Execute(filePath string) error { func (h *workflowMatcherName) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -138,7 +138,7 @@ type workflowComplexConditions struct{}
func (h *workflowComplexConditions) Execute(filePath string) error { func (h *workflowComplexConditions) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -162,11 +162,11 @@ type workflowHttpKeyValueShare struct{}
func (h *workflowHttpKeyValueShare) Execute(filePath string) error { func (h *workflowHttpKeyValueShare) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
router.GET("/path1", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/path1", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "href=\"test-value\"") _, _ = fmt.Fprintf(w, "href=\"test-value\"")
}) })
router.GET("/path2", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/path2", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
body, _ := io.ReadAll(r.Body) body, _ := io.ReadAll(r.Body)
fmt.Fprintf(w, "%s", body) _, _ = fmt.Fprintf(w, "%s", body)
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -214,11 +214,11 @@ func (h *workflowMultiProtocolKeyValueShare) Execute(filePath string) error {
router := httprouter.New() router := httprouter.New()
// the response of path1 contains a domain that will be extracted and shared with the second template // the response of path1 contains a domain that will be extracted and shared with the second template
router.GET("/path1", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/path1", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "href=\"blog.projectdiscovery.io\"") _, _ = fmt.Fprintf(w, "href=\"blog.projectdiscovery.io\"")
}) })
// path2 responds with the value of the "extracted" query parameter, e.g.: /path2?extracted=blog.projectdiscovery.io => blog.projectdiscovery.io // path2 responds with the value of the "extracted" query parameter, e.g.: /path2?extracted=blog.projectdiscovery.io => blog.projectdiscovery.io
router.GET("/path2", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/path2", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "%s", r.URL.Query().Get("extracted")) _, _ = fmt.Fprintf(w, "%s", r.URL.Query().Get("extracted"))
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()
@ -238,15 +238,15 @@ func (h *workflowMultiMatchKeyValueShare) Execute(filePath string) error {
var receivedData []string var receivedData []string
router := httprouter.New() router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text") _, _ = fmt.Fprintf(w, "This is test matcher text")
}) })
router.GET("/path1", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/path1", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "href=\"test-value-%s\"", r.URL.Query().Get("v")) _, _ = fmt.Fprintf(w, "href=\"test-value-%s\"", r.URL.Query().Get("v"))
}) })
router.GET("/path2", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { router.GET("/path2", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
body, _ := io.ReadAll(r.Body) body, _ := io.ReadAll(r.Body)
receivedData = append(receivedData, string(body)) receivedData = append(receivedData, string(body))
fmt.Fprintf(w, "test-value") _, _ = fmt.Fprintf(w, "test-value")
}) })
ts := httptest.NewServer(router) ts := httptest.NewServer(router)
defer ts.Close() defer ts.Close()


@ -13,14 +13,16 @@ import (
"strings" "strings"
"time" "time"
"github.com/projectdiscovery/gologger"
_pdcp "github.com/projectdiscovery/nuclei/v3/internal/pdcp" _pdcp "github.com/projectdiscovery/nuclei/v3/internal/pdcp"
"github.com/projectdiscovery/utils/auth/pdcp" "github.com/projectdiscovery/utils/auth/pdcp"
"github.com/projectdiscovery/utils/env" "github.com/projectdiscovery/utils/env"
_ "github.com/projectdiscovery/utils/pprof" _ "github.com/projectdiscovery/utils/pprof"
stringsutil "github.com/projectdiscovery/utils/strings" stringsutil "github.com/projectdiscovery/utils/strings"
"github.com/rs/xid"
"gopkg.in/yaml.v2"
"github.com/projectdiscovery/goflags" "github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/gologger/levels" "github.com/projectdiscovery/gologger/levels"
"github.com/projectdiscovery/interactsh/pkg/client" "github.com/projectdiscovery/interactsh/pkg/client"
"github.com/projectdiscovery/nuclei/v3/internal/runner" "github.com/projectdiscovery/nuclei/v3/internal/runner"
@ -38,7 +40,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/types/scanstrategy" "github.com/projectdiscovery/nuclei/v3/pkg/types/scanstrategy"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/monitor" "github.com/projectdiscovery/nuclei/v3/pkg/utils/monitor"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file" fileutil "github.com/projectdiscovery/utils/file"
unitutils "github.com/projectdiscovery/utils/unit" unitutils "github.com/projectdiscovery/utils/unit"
updateutils "github.com/projectdiscovery/utils/update" updateutils "github.com/projectdiscovery/utils/update"
@ -52,16 +54,18 @@ var (
) )
func main() { func main() {
options.Logger = gologger.DefaultLogger
// enables CLI specific configs mostly interactive behavior // enables CLI specific configs mostly interactive behavior
config.CurrentAppMode = config.AppModeCLI config.CurrentAppMode = config.AppModeCLI
if err := runner.ConfigureOptions(); err != nil { if err := runner.ConfigureOptions(); err != nil {
gologger.Fatal().Msgf("Could not initialize options: %s\n", err) options.Logger.Fatal().Msgf("Could not initialize options: %s\n", err)
} }
_ = readConfig() _ = readConfig()
if options.ListDslSignatures { if options.ListDslSignatures {
gologger.Info().Msgf("The available custom DSL functions are:") options.Logger.Info().Msgf("The available custom DSL functions are:")
fmt.Println(dsl.GetPrintableDslFunctionSignatures(options.NoColor)) fmt.Println(dsl.GetPrintableDslFunctionSignatures(options.NoColor))
return return
} }
@ -72,7 +76,7 @@ func main() {
templates.UseOptionsForSigner(options) templates.UseOptionsForSigner(options)
tsigner, err := signer.NewTemplateSigner(nil, nil) // will read from env , config or generate new keys tsigner, err := signer.NewTemplateSigner(nil, nil) // will read from env , config or generate new keys
if err != nil { if err != nil {
gologger.Fatal().Msgf("couldn't initialize signer crypto engine: %s\n", err) options.Logger.Fatal().Msgf("couldn't initialize signer crypto engine: %s\n", err)
} }
successCounter := 0 successCounter := 0
@ -88,7 +92,7 @@ func main() {
if err != templates.ErrNotATemplate { if err != templates.ErrNotATemplate {
// skip warnings and errors as given items are not templates // skip warnings and errors as given items are not templates
errorCounter++ errorCounter++
gologger.Error().Msgf("could not sign '%s': %s\n", iterItem, err) options.Logger.Error().Msgf("could not sign '%s': %s\n", iterItem, err)
} }
} else { } else {
successCounter++ successCounter++
@ -97,10 +101,10 @@ func main() {
return nil return nil
}) })
if err != nil { if err != nil {
gologger.Error().Msgf("%s\n", err) options.Logger.Error().Msgf("%s\n", err)
} }
} }
gologger.Info().Msgf("All templates signatures were elaborated success=%d failed=%d\n", successCounter, errorCounter) options.Logger.Info().Msgf("All templates signatures were elaborated success=%d failed=%d\n", successCounter, errorCounter)
return return
} }
@ -111,7 +115,7 @@ func main() {
createProfileFile := func(ext, profileType string) *os.File { createProfileFile := func(ext, profileType string) *os.File {
f, err := os.Create(memProfile + ext) f, err := os.Create(memProfile + ext)
if err != nil { if err != nil {
gologger.Fatal().Msgf("profile: could not create %s profile %q file: %v", profileType, f.Name(), err) options.Logger.Fatal().Msgf("profile: could not create %s profile %q file: %v", profileType, f.Name(), err)
} }
return f return f
} }
@ -125,45 +129,47 @@ func main() {
// Start tracing // Start tracing
if err := trace.Start(traceFile); err != nil { if err := trace.Start(traceFile); err != nil {
gologger.Fatal().Msgf("profile: could not start trace: %v", err) options.Logger.Fatal().Msgf("profile: could not start trace: %v", err)
} }
// Start CPU profiling // Start CPU profiling
if err := pprof.StartCPUProfile(cpuProfileFile); err != nil { if err := pprof.StartCPUProfile(cpuProfileFile); err != nil {
gologger.Fatal().Msgf("profile: could not start CPU profile: %v", err) options.Logger.Fatal().Msgf("profile: could not start CPU profile: %v", err)
} }
defer func() { defer func() {
// Start heap memory snapshot // Start heap memory snapshot
if err := pprof.WriteHeapProfile(memProfileFile); err != nil { if err := pprof.WriteHeapProfile(memProfileFile); err != nil {
gologger.Fatal().Msgf("profile: could not write memory profile: %v", err) options.Logger.Fatal().Msgf("profile: could not write memory profile: %v", err)
} }
pprof.StopCPUProfile() pprof.StopCPUProfile()
memProfileFile.Close() _ = memProfileFile.Close()
traceFile.Close() _ = traceFile.Close()
trace.Stop() trace.Stop()
runtime.MemProfileRate = oldMemProfileRate runtime.MemProfileRate = oldMemProfileRate
gologger.Info().Msgf("CPU profile saved at %q", cpuProfileFile.Name()) options.Logger.Info().Msgf("CPU profile saved at %q", cpuProfileFile.Name())
gologger.Info().Msgf("Memory usage snapshot saved at %q", memProfileFile.Name()) options.Logger.Info().Msgf("Memory usage snapshot saved at %q", memProfileFile.Name())
gologger.Info().Msgf("Traced at %q", traceFile.Name()) options.Logger.Info().Msgf("Traced at %q", traceFile.Name())
}() }()
} }
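The files written by this block are standard Go artifacts, so they can be examined afterwards with the usual tooling: `go tool pprof` for the CPU and heap profiles and `go tool trace` for the execution trace.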
options.ExecutionId = xid.New().String()
runner.ParseOptions(options) runner.ParseOptions(options)
if options.ScanUploadFile != "" { if options.ScanUploadFile != "" {
if err := runner.UploadResultsToCloud(options); err != nil { if err := runner.UploadResultsToCloud(options); err != nil {
gologger.Fatal().Msgf("could not upload scan results to cloud dashboard: %s\n", err) options.Logger.Fatal().Msgf("could not upload scan results to cloud dashboard: %s\n", err)
} }
return return
} }
nucleiRunner, err := runner.New(options) nucleiRunner, err := runner.New(options)
if err != nil { if err != nil {
gologger.Fatal().Msgf("Could not create runner: %s\n", err) options.Logger.Fatal().Msgf("Could not create runner: %s\n", err)
} }
if nucleiRunner == nil { if nucleiRunner == nil {
return return
@ -176,13 +182,13 @@ func main() {
stackMonitor.RegisterCallback(func(dumpID string) error { stackMonitor.RegisterCallback(func(dumpID string) error {
resumeFileName := fmt.Sprintf("crash-resume-file-%s.dump", dumpID) resumeFileName := fmt.Sprintf("crash-resume-file-%s.dump", dumpID)
if options.EnableCloudUpload { if options.EnableCloudUpload {
gologger.Info().Msgf("Uploading scan results to cloud...") options.Logger.Info().Msgf("Uploading scan results to cloud...")
} }
nucleiRunner.Close() nucleiRunner.Close()
gologger.Info().Msgf("Creating resume file: %s\n", resumeFileName) options.Logger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
err := nucleiRunner.SaveResumeConfig(resumeFileName) err := nucleiRunner.SaveResumeConfig(resumeFileName)
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("couldn't create crash resume file") return errkit.Wrap(err, "couldn't create crash resume file")
} }
return nil return nil
}) })
@ -191,43 +197,41 @@ func main() {
// Setup graceful exits // Setup graceful exits
resumeFileName := types.DefaultResumeFilePath() resumeFileName := types.DefaultResumeFilePath()
c := make(chan os.Signal, 1) c := make(chan os.Signal, 1)
defer close(c)
signal.Notify(c, os.Interrupt) signal.Notify(c, os.Interrupt)
go func() { go func() {
for range c {
gologger.Info().Msgf("CTRL+C pressed: Exiting\n")
if options.DASTServer {
nucleiRunner.Close()
os.Exit(1)
}
gologger.Info().Msgf("Attempting graceful shutdown...")
if options.EnableCloudUpload {
gologger.Info().Msgf("Uploading scan results to cloud...")
}
nucleiRunner.Close()
if options.ShouldSaveResume() {
gologger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
err := nucleiRunner.SaveResumeConfig(resumeFileName)
if err != nil {
gologger.Error().Msgf("Couldn't create resume file: %s\n", err)
}
}
os.Exit(1)
}
<-c
options.Logger.Info().Msgf("CTRL+C pressed: Exiting\n")
if options.DASTServer {
nucleiRunner.Close()
os.Exit(1)
}
options.Logger.Info().Msgf("Attempting graceful shutdown...")
if options.EnableCloudUpload {
options.Logger.Info().Msgf("Uploading scan results to cloud...")
}
nucleiRunner.Close()
if options.ShouldSaveResume() {
options.Logger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
err := nucleiRunner.SaveResumeConfig(resumeFileName)
if err != nil {
options.Logger.Error().Msgf("Couldn't create resume file: %s\n", err)
}
}
os.Exit(1)
}() }()
if err := nucleiRunner.RunEnumeration(); err != nil { if err := nucleiRunner.RunEnumeration(); err != nil {
if options.Validate { if options.Validate {
gologger.Fatal().Msgf("Could not validate templates: %s\n", err) options.Logger.Fatal().Msgf("Could not validate templates: %s\n", err)
} else { } else {
gologger.Fatal().Msgf("Could not run nuclei: %s\n", err) options.Logger.Fatal().Msgf("Could not run nuclei: %s\n", err)
} }
} }
nucleiRunner.Close() nucleiRunner.Close()
// on successful execution remove the resume file in case it exists // on successful execution remove the resume file in case it exists
if fileutil.FileExists(resumeFileName) { if fileutil.FileExists(resumeFileName) {
os.Remove(resumeFileName) _ = os.Remove(resumeFileName)
} }
} }
@ -260,6 +264,8 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.StringVarP(&options.InputFileMode, "input-mode", "im", "list", fmt.Sprintf("mode of input file (%v)", provider.SupportedInputFormats())), flagSet.StringVarP(&options.InputFileMode, "input-mode", "im", "list", fmt.Sprintf("mode of input file (%v)", provider.SupportedInputFormats())),
flagSet.BoolVarP(&options.FormatUseRequiredOnly, "required-only", "ro", false, "use only required fields in input format when generating requests"), flagSet.BoolVarP(&options.FormatUseRequiredOnly, "required-only", "ro", false, "use only required fields in input format when generating requests"),
flagSet.BoolVarP(&options.SkipFormatValidation, "skip-format-validation", "sfv", false, "skip format validation (like missing vars) when parsing input file"), flagSet.BoolVarP(&options.SkipFormatValidation, "skip-format-validation", "sfv", false, "skip format validation (like missing vars) when parsing input file"),
flagSet.BoolVarP(&options.VarsTextTemplating, "vars-text-templating", "vtt", false, "enable text templating for vars in input file (only for yaml input mode)"),
flagSet.StringSliceVarP(&options.VarsFilePaths, "var-file-paths", "vfp", nil, "list of yaml file contained vars to inject into yaml input", goflags.CommaSeparatedStringSliceOptions),
) )
flagSet.CreateGroup("templates", "Templates", flagSet.CreateGroup("templates", "Templates",
@ -542,11 +548,11 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
h := &pdcp.PDCPCredHandler{} h := &pdcp.PDCPCredHandler{}
_, err := h.GetCreds() _, err := h.GetCreds()
if err != nil { if err != nil {
gologger.Fatal().Msg("To utilize the `-ai` flag, please configure your API key with the `-auth` flag or set the `PDCP_API_KEY` environment variable") options.Logger.Fatal().Msg("To utilize the `-ai` flag, please configure your API key with the `-auth` flag or set the `PDCP_API_KEY` environment variable")
} }
} }
gologger.DefaultLogger.SetTimestamp(options.Timestamp, levels.LevelDebug) options.Logger.SetTimestamp(options.Timestamp, levels.LevelDebug)
if options.VerboseVerbose { if options.VerboseVerbose {
// hide release notes if silent mode is enabled // hide release notes if silent mode is enabled
@ -568,13 +574,49 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
config.DefaultConfig.SetConfigDir(customConfigDir) config.DefaultConfig.SetConfigDir(customConfigDir)
readFlagsConfig(flagSet) readFlagsConfig(flagSet)
} }
if cfgFile != "" { if cfgFile != "" {
if !fileutil.FileExists(cfgFile) { if !fileutil.FileExists(cfgFile) {
gologger.Fatal().Msgf("given config file '%s' does not exist", cfgFile) options.Logger.Fatal().Msgf("given config file '%s' does not exist", cfgFile)
} }
// merge config file with flags // merge config file with flags
if err := flagSet.MergeConfigFile(cfgFile); err != nil { if err := flagSet.MergeConfigFile(cfgFile); err != nil {
gologger.Fatal().Msgf("Could not read config: %s\n", err) options.Logger.Fatal().Msgf("Could not read config: %s\n", err)
}
if !options.Vars.IsEmpty() {
// Maybe we should add vars to the config file as well even if they are set via flags?
file, err := os.Open(cfgFile)
if err != nil {
gologger.Fatal().Msgf("Could not open config file: %s\n", err)
}
defer func() {
_ = file.Close()
}()
data := make(map[string]interface{})
err = yaml.NewDecoder(file).Decode(&data)
if err != nil {
gologger.Fatal().Msgf("Could not decode config file: %s\n", err)
}
variables := data["var"]
if variables != nil {
if varSlice, ok := variables.([]interface{}); ok {
for _, value := range varSlice {
if strVal, ok := value.(string); ok {
err = options.Vars.Set(strVal)
if err != nil {
gologger.Warning().Msgf("Could not set variable from config file: %s\n", err)
}
} else {
gologger.Warning().Msgf("Skipping non-string variable in config: %#v", value)
}
}
} else {
gologger.Warning().Msgf("No 'var' section found in config file: %s", cfgFile)
}
}
} }
} }
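For reference, a minimal standalone sketch of the config-file shape the block above expects: a top-level `var` key holding "name=value" strings, mirroring the decode logic in the diff. The file contents and variable names below are made up for illustration and are not part of this change.
package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v3"
)

func main() {
	// Hypothetical excerpt of a nuclei config file with a `var` list.
	cfg := `
var:
  - api_key=abc123
  - host=internal.example.com
`
	data := make(map[string]interface{})
	if err := yaml.NewDecoder(strings.NewReader(cfg)).Decode(&data); err != nil {
		panic(err)
	}
	if varSlice, ok := data["var"].([]interface{}); ok {
		for _, value := range varSlice {
			if strVal, ok := value.(string); ok {
				// In nuclei itself this is the point where options.Vars.Set(strVal) is called.
				name, val, _ := strings.Cut(strVal, "=")
				fmt.Printf("parsed var %s=%s\n", name, val)
			}
		}
	}
}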
if options.NewTemplatesDirectory != "" { if options.NewTemplatesDirectory != "" {
@ -587,7 +629,7 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
if tp := findProfilePathById(templateProfile, defaultProfilesPath); tp != "" { if tp := findProfilePathById(templateProfile, defaultProfilesPath); tp != "" {
templateProfile = tp templateProfile = tp
} else { } else {
gologger.Fatal().Msgf("'%s' is not a profile-id or profile path", templateProfile) options.Logger.Fatal().Msgf("'%s' is not a profile-id or profile path", templateProfile)
} }
} }
if !filepath.IsAbs(templateProfile) { if !filepath.IsAbs(templateProfile) {
@ -602,17 +644,17 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
} }
} }
if !fileutil.FileExists(templateProfile) { if !fileutil.FileExists(templateProfile) {
gologger.Fatal().Msgf("given template profile file '%s' does not exist", templateProfile) options.Logger.Fatal().Msgf("given template profile file '%s' does not exist", templateProfile)
} }
if err := flagSet.MergeConfigFile(templateProfile); err != nil { if err := flagSet.MergeConfigFile(templateProfile); err != nil {
gologger.Fatal().Msgf("Could not read template profile: %s\n", err) options.Logger.Fatal().Msgf("Could not read template profile: %s\n", err)
} }
} }
if len(options.SecretsFile) > 0 { if len(options.SecretsFile) > 0 {
for _, secretFile := range options.SecretsFile { for _, secretFile := range options.SecretsFile {
if !fileutil.FileExists(secretFile) { if !fileutil.FileExists(secretFile) {
gologger.Fatal().Msgf("given secrets file '%s' does not exist", options.SecretsFile) options.Logger.Fatal().Msgf("given secrets file '%s' does not exist", secretFile)
} }
} }
} }
@ -638,25 +680,25 @@ func readFlagsConfig(flagset *goflags.FlagSet) {
if err != nil { if err != nil {
// something went wrong either dir is not readable or something else went wrong upstream in `goflags` // something went wrong either dir is not readable or something else went wrong upstream in `goflags`
// warn and exit in this case // warn and exit in this case
gologger.Warning().Msgf("Could not read config file: %s\n", err) options.Logger.Warning().Msgf("Could not read config file: %s\n", err)
return return
} }
cfgFile := config.DefaultConfig.GetFlagsConfigFilePath() cfgFile := config.DefaultConfig.GetFlagsConfigFilePath()
if !fileutil.FileExists(cfgFile) { if !fileutil.FileExists(cfgFile) {
if !fileutil.FileExists(defaultCfgFile) { if !fileutil.FileExists(defaultCfgFile) {
// if default config does not exist, warn and exit // if default config does not exist, warn and exit
gologger.Warning().Msgf("missing default config file : %s", defaultCfgFile) options.Logger.Warning().Msgf("missing default config file : %s", defaultCfgFile)
return return
} }
// if does not exist copy it from the default config // if does not exist copy it from the default config
if err = fileutil.CopyFile(defaultCfgFile, cfgFile); err != nil { if err = fileutil.CopyFile(defaultCfgFile, cfgFile); err != nil {
gologger.Warning().Msgf("Could not copy config file: %s\n", err) options.Logger.Warning().Msgf("Could not copy config file: %s\n", err)
} }
return return
} }
// if config file exists, merge it with the default config // if config file exists, merge it with the default config
if err = flagset.MergeConfigFile(cfgFile); err != nil { if err = flagset.MergeConfigFile(cfgFile); err != nil {
gologger.Warning().Msgf("failed to merge configfile with flags got: %s\n", err) options.Logger.Warning().Msgf("failed to merge configfile with flags got: %s\n", err)
} }
} }
@ -667,29 +709,29 @@ func disableUpdatesCallback() {
// printVersion prints the nuclei version and exits. // printVersion prints the nuclei version and exits.
func printVersion() { func printVersion() {
gologger.Info().Msgf("Nuclei Engine Version: %s", config.Version) options.Logger.Info().Msgf("Nuclei Engine Version: %s", config.Version)
gologger.Info().Msgf("Nuclei Config Directory: %s", config.DefaultConfig.GetConfigDir()) options.Logger.Info().Msgf("Nuclei Config Directory: %s", config.DefaultConfig.GetConfigDir())
gologger.Info().Msgf("Nuclei Cache Directory: %s", config.DefaultConfig.GetCacheDir()) // cache dir contains resume files options.Logger.Info().Msgf("Nuclei Cache Directory: %s", config.DefaultConfig.GetCacheDir()) // cache dir contains resume files
gologger.Info().Msgf("PDCP Directory: %s", pdcp.PDCPDir) options.Logger.Info().Msgf("PDCP Directory: %s", pdcp.PDCPDir)
os.Exit(0) os.Exit(0)
} }
// printTemplateVersion prints the nuclei template version and exits. // printTemplateVersion prints the nuclei template version and exits.
func printTemplateVersion() { func printTemplateVersion() {
cfg := config.DefaultConfig cfg := config.DefaultConfig
gologger.Info().Msgf("Public nuclei-templates version: %s (%s)\n", cfg.TemplateVersion, cfg.TemplatesDirectory) options.Logger.Info().Msgf("Public nuclei-templates version: %s (%s)\n", cfg.TemplateVersion, cfg.TemplatesDirectory)
if fileutil.FolderExists(cfg.CustomS3TemplatesDirectory) { if fileutil.FolderExists(cfg.CustomS3TemplatesDirectory) {
gologger.Info().Msgf("Custom S3 templates location: %s\n", cfg.CustomS3TemplatesDirectory) options.Logger.Info().Msgf("Custom S3 templates location: %s\n", cfg.CustomS3TemplatesDirectory)
} }
if fileutil.FolderExists(cfg.CustomGitHubTemplatesDirectory) { if fileutil.FolderExists(cfg.CustomGitHubTemplatesDirectory) {
gologger.Info().Msgf("Custom GitHub templates location: %s ", cfg.CustomGitHubTemplatesDirectory) options.Logger.Info().Msgf("Custom GitHub templates location: %s ", cfg.CustomGitHubTemplatesDirectory)
} }
if fileutil.FolderExists(cfg.CustomGitLabTemplatesDirectory) { if fileutil.FolderExists(cfg.CustomGitLabTemplatesDirectory) {
gologger.Info().Msgf("Custom GitLab templates location: %s ", cfg.CustomGitLabTemplatesDirectory) options.Logger.Info().Msgf("Custom GitLab templates location: %s ", cfg.CustomGitLabTemplatesDirectory)
} }
if fileutil.FolderExists(cfg.CustomAzureTemplatesDirectory) { if fileutil.FolderExists(cfg.CustomAzureTemplatesDirectory) {
gologger.Info().Msgf("Custom Azure templates location: %s ", cfg.CustomAzureTemplatesDirectory) options.Logger.Info().Msgf("Custom Azure templates location: %s ", cfg.CustomAzureTemplatesDirectory)
} }
os.Exit(0) os.Exit(0)
} }
@ -705,13 +747,13 @@ Following files will be deleted:
Note: Make sure you have backup of your custom nuclei-templates before proceeding Note: Make sure you have backup of your custom nuclei-templates before proceeding
`, config.DefaultConfig.GetConfigDir(), config.DefaultConfig.TemplatesDirectory) `, config.DefaultConfig.GetConfigDir(), config.DefaultConfig.TemplatesDirectory)
gologger.Print().Msg(warning) options.Logger.Print().Msg(warning)
reader := bufio.NewReader(os.Stdin) reader := bufio.NewReader(os.Stdin)
for { for {
fmt.Print("Are you sure you want to continue? [y/n]: ") fmt.Print("Are you sure you want to continue? [y/n]: ")
resp, err := reader.ReadString('\n') resp, err := reader.ReadString('\n')
if err != nil { if err != nil {
gologger.Fatal().Msgf("could not read response: %s", err) options.Logger.Fatal().Msgf("could not read response: %s", err)
} }
resp = strings.TrimSpace(resp) resp = strings.TrimSpace(resp)
if stringsutil.EqualFoldAny(resp, "y", "yes") { if stringsutil.EqualFoldAny(resp, "y", "yes") {
@ -724,13 +766,13 @@ Note: Make sure you have backup of your custom nuclei-templates before proceedin
} }
err := os.RemoveAll(config.DefaultConfig.GetConfigDir()) err := os.RemoveAll(config.DefaultConfig.GetConfigDir())
if err != nil { if err != nil {
gologger.Fatal().Msgf("could not delete config dir: %s", err) options.Logger.Fatal().Msgf("could not delete config dir: %s", err)
} }
err = os.RemoveAll(config.DefaultConfig.TemplatesDirectory) err = os.RemoveAll(config.DefaultConfig.TemplatesDirectory)
if err != nil { if err != nil {
gologger.Fatal().Msgf("could not delete templates dir: %s", err) options.Logger.Fatal().Msgf("could not delete templates dir: %s", err)
} }
gologger.Info().Msgf("Successfully deleted all nuclei configurations files and nuclei-templates") options.Logger.Info().Msgf("Successfully deleted all nuclei configurations files and nuclei-templates")
os.Exit(0) os.Exit(0)
} }
@ -750,14 +792,7 @@ func findProfilePathById(profileId, templatesDir string) string {
return nil return nil
}) })
if err != nil && err.Error() != "FOUND" { if err != nil && err.Error() != "FOUND" {
gologger.Error().Msgf("%s\n", err) options.Logger.Error().Msgf("%s\n", err)
} }
return profilePath return profilePath
} }
func init() {
// print stacktrace of errors in debug mode
if strings.EqualFold(os.Getenv("DEBUG"), "true") {
errorutil.ShowStackTrace = true
}
}

View File

@ -3,28 +3,55 @@ package main_test
import ( import (
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
"os"
"testing" "testing"
"time" "time"
"github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/gologger/levels" "github.com/projectdiscovery/gologger/levels"
"github.com/projectdiscovery/nuclei/v3/internal/runner" "github.com/projectdiscovery/nuclei/v3/internal/runner"
"github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/types"
) )
func BenchmarkRunEnumeration(b *testing.B) {
dummyServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
}))
defer dummyServer.Close()
options := &types.Options{
RemoteTemplateDomainList: goflags.StringSlice{
"cloud.projectdiscovery.io",
},
ProjectPath: "/tmp",
Targets: goflags.StringSlice{dummyServer.URL},
var (
projectPath string
targetURL string
)
func TestMain(m *testing.M) {
// Set up
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
_ = os.Setenv("DISABLE_STDOUT", "true")
var err error
projectPath, err = os.MkdirTemp("", "nuclei-benchmark-")
if err != nil {
panic(err)
}
dummyServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
}))
targetURL = dummyServer.URL
// Execute tests
exitCode := m.Run()
// Tear down
dummyServer.Close()
_ = os.RemoveAll(projectPath)
_ = os.Unsetenv("DISABLE_STDOUT")
os.Exit(exitCode)
}
func getDefaultOptions() *types.Options {
return &types.Options{
RemoteTemplateDomainList: []string{"cloud.projectdiscovery.io"},
ProjectPath: projectPath,
StatsInterval: 5, StatsInterval: 5,
MetricsPort: 9092, MetricsPort: 9092,
MaxHostError: 30, MaxHostError: 30,
@ -65,23 +92,45 @@ func BenchmarkRunEnumeration(b *testing.B) {
LoadHelperFileFunction: types.DefaultOptions().LoadHelperFileFunction, LoadHelperFileFunction: types.DefaultOptions().LoadHelperFileFunction,
// DialerKeepAlive: time.Duration(0), // DialerKeepAlive: time.Duration(0),
// DASTServerAddress: "localhost:9055", // DASTServerAddress: "localhost:9055",
ExecutionId: "test",
Logger: gologger.DefaultLogger,
} }
}
func runEnumBenchmark(b *testing.B, options *types.Options) {
runner.ParseOptions(options) runner.ParseOptions(options)
// Disable logging to reduce benchmark noise.
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
nucleiRunner, err := runner.New(options) nucleiRunner, err := runner.New(options)
if err != nil { if err != nil {
b.Fatalf("failed to create runner: %s", err) b.Fatalf("failed to create runner: %s", err)
} }
defer nucleiRunner.Close()
b.ResetTimer() b.ResetTimer()
b.ReportAllocs()
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
if err := nucleiRunner.RunEnumeration(); err != nil { if err := nucleiRunner.RunEnumeration(); err != nil {
b.Fatalf("RunEnumeration failed: %s", err) b.Fatalf("%s failed: %s", b.Name(), err)
} }
} }
} }
func BenchmarkRunEnumeration(b *testing.B) {
// Default case: run enumeration with default options == all nuclei-templates
// b.Run("Default", func(b *testing.B) {
// options := getDefaultOptions()
// options.Targets = []string{targetURL}
// runEnumBenchmark(b, options)
// })
// Case: https://github.com/projectdiscovery/nuclei/pull/6258
b.Run("Multiproto", func(b *testing.B) {
options := getDefaultOptions()
options.Targets = []string{targetURL}
options.Templates = []string{"./cmd/nuclei/testdata/benchmark/multiproto/"}
runEnumBenchmark(b, options)
})
}
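For local runs, these benchmarks would typically be driven by the standard Go test runner, along the lines of `go test -run=^$ -bench=BenchmarkRunEnumeration` against the cmd/nuclei package; the exact invocation and working directory are assumptions based on common Go conventions rather than something specified in this change.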

View File

@ -0,0 +1,239 @@
id: basic-template-multiproto-mixed
info:
name: Test Template Multiple Protocols (Mixed)
author: pdteam
severity: info
http:
- method: GET
id: first_iter_http
path:
- '{{BaseURL}}/1'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/2'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/3'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/4'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/5'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/6'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/7'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/8'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/9'
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /10 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /11 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /12 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /13 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /14 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET / HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /15 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /16 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /17 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /18 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"

View File

@ -0,0 +1,292 @@
id: basic-template-multiproto-raw
info:
name: Test Template Multiple Protocols RAW
author: pdteam
severity: info
http:
- raw:
- |
GET /1 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /2 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /3 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /4 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /5 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /6 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /7 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /8 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /9 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /10 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /11 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /12 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /13 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /14 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET / HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /15 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /16 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /17 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /18 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"

View File

@ -0,0 +1,170 @@
id: basic-template-multiproto
info:
name: Test Template Multiple Protocols
author: pdteam
severity: info
http:
- method: GET
id: first_iter_http
path:
- '{{BaseURL}}/1'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/2'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/3'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/4'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/5'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/6'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/7'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/8'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/9'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/10'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/11'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/12'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/13'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/14'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/15'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/16'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/17'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/18'
matchers:
- type: word
words:
- "Test is test matcher text"

View File

@ -23,7 +23,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json" "github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
"github.com/projectdiscovery/retryablehttp-go" "github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v3"
) )
@ -135,7 +135,9 @@ func process(opts options) error {
if err != nil { if err != nil {
return err return err
} }
defer os.RemoveAll(tempDir) defer func() {
_ = os.RemoveAll(tempDir)
}()
var errFile *os.File var errFile *os.File
if opts.errorLogFile != "" { if opts.errorLogFile != "" {
@ -143,7 +145,9 @@ func process(opts options) error {
if err != nil { if err != nil {
gologger.Fatal().Msgf("could not open error log file: %s\n", err) gologger.Fatal().Msgf("could not open error log file: %s\n", err)
} }
defer errFile.Close() defer func() {
_ = errFile.Close()
}()
} }
templateCatalog := disk.NewCatalog(filepath.Dir(opts.input)) templateCatalog := disk.NewCatalog(filepath.Dir(opts.input))
@ -226,7 +230,7 @@ func logErrMsg(path string, err error, debug bool, errFile *os.File) string {
msg = fmt.Sprintf("❌ template: %s err: %s\n", path, err) msg = fmt.Sprintf("❌ template: %s err: %s\n", path, err)
} }
if errFile != nil { if errFile != nil {
_, _ = errFile.WriteString(fmt.Sprintf("❌ template: %s err: %s\n", path, err)) _, _ = fmt.Fprintf(errFile, "❌ template: %s err: %s\n", path, err)
} }
return msg return msg
} }
@ -239,7 +243,7 @@ func enhanceTemplate(data string) (string, bool, error) {
return data, false, err return data, false, err
} }
if resp.StatusCode != 200 { if resp.StatusCode != 200 {
return data, false, errorutil.New("unexpected status code: %v", resp.Status) return data, false, errkit.New("unexpected status code: %v", resp.Status)
} }
var templateResp TemplateResp var templateResp TemplateResp
if err := json.NewDecoder(resp.Body).Decode(&templateResp); err != nil { if err := json.NewDecoder(resp.Body).Decode(&templateResp); err != nil {
@ -250,20 +254,20 @@ func enhanceTemplate(data string) (string, bool, error) {
} }
if templateResp.ValidateErrorCount > 0 { if templateResp.ValidateErrorCount > 0 {
if len(templateResp.ValidateError) > 0 { if len(templateResp.ValidateError) > 0 {
return data, false, errorutil.NewWithTag("validate", templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line) return data, false, errkit.New(templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line, "tag", "validate")
} }
return data, false, errorutil.New("validation failed").WithTag("validate") return data, false, errkit.New("validation failed", "tag", "validate")
} }
if templateResp.Error.Name != "" { if templateResp.Error.Name != "" {
return data, false, errorutil.New("%s", templateResp.Error.Name) return data, false, errkit.New("%s", templateResp.Error.Name)
} }
if strings.TrimSpace(templateResp.Enhanced) == "" && !templateResp.Lint { if strings.TrimSpace(templateResp.Enhanced) == "" && !templateResp.Lint {
if templateResp.LintError.Reason != "" { if templateResp.LintError.Reason != "" {
return data, false, errorutil.NewWithTag("lint", templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line) return data, false, errkit.New(templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line, "tag", "lint")
} }
return data, false, errorutil.NewWithTag("lint", "at line: %v", templateResp.LintError.Mark.Line) return data, false, errkit.New("at line: %v", templateResp.LintError.Mark.Line, "tag", "lint")
} }
return data, false, errorutil.New("template enhance failed") return data, false, errkit.New("template enhance failed")
} }
// formatTemplate formats template data using templateman format api // formatTemplate formats template data using templateman format api
@ -273,7 +277,7 @@ func formatTemplate(data string) (string, bool, error) {
return data, false, err return data, false, err
} }
if resp.StatusCode != 200 { if resp.StatusCode != 200 {
return data, false, errorutil.New("unexpected status code: %v", resp.Status) return data, false, errkit.New("unexpected status code: %v", resp.Status)
} }
var templateResp TemplateResp var templateResp TemplateResp
if err := json.NewDecoder(resp.Body).Decode(&templateResp); err != nil { if err := json.NewDecoder(resp.Body).Decode(&templateResp); err != nil {
@ -284,20 +288,20 @@ func formatTemplate(data string) (string, bool, error) {
} }
if templateResp.ValidateErrorCount > 0 { if templateResp.ValidateErrorCount > 0 {
if len(templateResp.ValidateError) > 0 { if len(templateResp.ValidateError) > 0 {
return data, false, errorutil.NewWithTag("validate", templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line) return data, false, errkit.New(templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line, "tag", "validate")
} }
return data, false, errorutil.New("validation failed").WithTag("validate") return data, false, errkit.New("validation failed", "tag", "validate")
} }
if templateResp.Error.Name != "" { if templateResp.Error.Name != "" {
return data, false, errorutil.New("%s", templateResp.Error.Name) return data, false, errkit.New("%s", templateResp.Error.Name)
} }
if strings.TrimSpace(templateResp.Updated) == "" && !templateResp.Lint { if strings.TrimSpace(templateResp.Updated) == "" && !templateResp.Lint {
if templateResp.LintError.Reason != "" { if templateResp.LintError.Reason != "" {
return data, false, errorutil.NewWithTag("lint", templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line) return data, false, errkit.New(templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line, "tag", "lint")
} }
return data, false, errorutil.NewWithTag("lint", "at line: %v", templateResp.LintError.Mark.Line) return data, false, errkit.New("at line: %v", templateResp.LintError.Mark.Line, "tag", "lint")
} }
return data, false, errorutil.New("template format failed") return data, false, errkit.New("template format failed")
} }
// lintTemplate lints template data using templateman lint api // lintTemplate lints template data using templateman lint api
@ -307,7 +311,7 @@ func lintTemplate(data string) (bool, error) {
return false, err return false, err
} }
if resp.StatusCode != 200 { if resp.StatusCode != 200 {
return false, errorutil.New("unexpected status code: %v", resp.Status) return false, errkit.New("unexpected status code: %v", resp.Status)
} }
var lintResp TemplateLintResp var lintResp TemplateLintResp
if err := json.NewDecoder(resp.Body).Decode(&lintResp); err != nil { if err := json.NewDecoder(resp.Body).Decode(&lintResp); err != nil {
@ -317,9 +321,9 @@ func lintTemplate(data string) (bool, error) {
return true, nil return true, nil
} }
if lintResp.LintError.Reason != "" { if lintResp.LintError.Reason != "" {
return false, errorutil.NewWithTag("lint", lintResp.LintError.Reason+" : at line %v", lintResp.LintError.Mark.Line) return false, errkit.New(lintResp.LintError.Reason+" : at line %v", lintResp.LintError.Mark.Line, "tag", "lint")
} }
return false, errorutil.NewWithTag("lint", "at line: %v", lintResp.LintError.Mark.Line) return false, errkit.New("at line: %v", lintResp.LintError.Mark.Line, "tag", "lint")
} }
// validateTemplate validates template data using templateman validate api // validateTemplate validates template data using templateman validate api
@ -329,7 +333,7 @@ func validateTemplate(data string) (bool, error) {
return false, err return false, err
} }
if resp.StatusCode != 200 { if resp.StatusCode != 200 {
return false, errorutil.New("unexpected status code: %v", resp.Status) return false, errkit.New("unexpected status code: %v", resp.Status)
} }
var validateResp TemplateResp var validateResp TemplateResp
if err := json.NewDecoder(resp.Body).Decode(&validateResp); err != nil { if err := json.NewDecoder(resp.Body).Decode(&validateResp); err != nil {
@ -340,14 +344,14 @@ func validateTemplate(data string) (bool, error) {
} }
if validateResp.ValidateErrorCount > 0 { if validateResp.ValidateErrorCount > 0 {
if len(validateResp.ValidateError) > 0 { if len(validateResp.ValidateError) > 0 {
return false, errorutil.NewWithTag("validate", validateResp.ValidateError[0].Message+": at line %v", validateResp.ValidateError[0].Mark.Line) return false, errkit.New(validateResp.ValidateError[0].Message+": at line %v", validateResp.ValidateError[0].Mark.Line, "tag", "validate")
} }
return false, errorutil.New("validation failed").WithTag("validate") return false, errkit.New("validation failed", "tag", "validate")
} }
if validateResp.Error.Name != "" { if validateResp.Error.Name != "" {
return false, errorutil.New("%s", validateResp.Error.Name) return false, errkit.New("%s", validateResp.Error.Name)
} }
return false, errorutil.New("template validation failed") return false, errkit.New("template validation failed")
} }
// parseAndAddMaxRequests parses and adds max requests to templates // parseAndAddMaxRequests parses and adds max requests to templates
@ -397,7 +401,7 @@ func parseAndAddMaxRequests(catalog catalog.Catalog, path, data string) (string,
// parseTemplate parses a template and returns the template object // parseTemplate parses a template and returns the template object
func parseTemplate(catalog catalog.Catalog, templatePath string) (*templates.Template, error) { func parseTemplate(catalog catalog.Catalog, templatePath string) (*templates.Template, error) {
executorOpts := protocols.ExecutorOptions{ executorOpts := &protocols.ExecutorOptions{
Catalog: catalog, Catalog: catalog,
Options: defaultOpts, Options: defaultOpts,
} }

View File

@ -18,7 +18,9 @@ func main() {
defer fuzzplayground.Cleanup() defer fuzzplayground.Cleanup()
server := fuzzplayground.GetPlaygroundServer() server := fuzzplayground.GetPlaygroundServer()
defer server.Close() defer func() {
_ = server.Close()
}()
// Start the server // Start the server
if err := server.Start(addr); err != nil { if err := server.Start(addr); err != nil {

View File

@ -99,12 +99,12 @@ func main() {
gologger.Info().Msgf("✓ Template signed & verified successfully") gologger.Info().Msgf("✓ Template signed & verified successfully")
} }
func defaultExecutorOpts(templatePath string) protocols.ExecutorOptions { func defaultExecutorOpts(templatePath string) *protocols.ExecutorOptions {
// use parsed options when initializing signer instead of default options // use parsed options when initializing signer instead of default options
options := types.DefaultOptions() options := types.DefaultOptions()
templates.UseOptionsForSigner(options) templates.UseOptionsForSigner(options)
catalog := disk.NewCatalog(filepath.Dir(templatePath)) catalog := disk.NewCatalog(filepath.Dir(templatePath))
executerOpts := protocols.ExecutorOptions{ executerOpts := &protocols.ExecutorOptions{
Catalog: catalog, Catalog: catalog,
Options: options, Options: options,
TemplatePath: templatePath, TemplatePath: templatePath,

View File

@ -1,6 +1,7 @@
package main package main
import ( import (
"context"
"log" "log"
"sync" "sync"
"time" "time"
@ -34,7 +35,7 @@ func main() {
} }
func initializeNucleiEngine() (*nuclei.NucleiEngine, error) { func initializeNucleiEngine() (*nuclei.NucleiEngine, error) {
return nuclei.NewNucleiEngine( return nuclei.NewNucleiEngineCtx(context.TODO(),
nuclei.WithTemplateFilters(nuclei.TemplateFilters{Tags: []string{"oast"}}), nuclei.WithTemplateFilters(nuclei.TemplateFilters{Tags: []string{"oast"}}),
nuclei.EnableStatsWithOpts(nuclei.StatsOptions{MetricServerPort: 6064}), nuclei.EnableStatsWithOpts(nuclei.StatsOptions{MetricServerPort: 6064}),
nuclei.WithGlobalRateLimit(1, time.Second), nuclei.WithGlobalRateLimit(1, time.Second),
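A minimal sketch of the same SDK call with a cancellable context instead of context.TODO(), so the caller can stop the engine. It assumes NewNucleiEngineCtx accepts any context.Context (as the call above suggests) and that the SDK package lives at github.com/projectdiscovery/nuclei/v3/lib; it is illustrative only and not part of this change.
package main

import (
	"context"
	"log"

	nuclei "github.com/projectdiscovery/nuclei/v3/lib"
)

func main() {
	// Cancel the context to stop the engine from the caller's side.
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	ne, err := nuclei.NewNucleiEngineCtx(ctx,
		nuclei.WithTemplateFilters(nuclei.TemplateFilters{Tags: []string{"oast"}}),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer ne.Close()

	// Template loading and execution (e.g. ExecuteWithCallback) would follow here.
}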

go.mod (427 changed lines)
View File

@ -1,141 +1,173 @@
module github.com/projectdiscovery/nuclei/v3 module github.com/projectdiscovery/nuclei/v3
go 1.23.0 go 1.24.2
toolchain go1.24.1 toolchain go1.24.4
require ( require (
github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible
github.com/andygrunwald/go-jira v1.16.0 github.com/andygrunwald/go-jira v1.16.1
github.com/antchfx/htmlquery v1.3.0 github.com/antchfx/htmlquery v1.3.4
github.com/bluele/gcache v0.0.2 github.com/bluele/gcache v0.0.2
github.com/go-playground/validator/v10 v10.14.1 github.com/go-playground/validator/v10 v10.26.0
github.com/go-rod/rod v0.116.2 github.com/go-rod/rod v0.116.2
github.com/gobwas/ws v1.2.1 github.com/gobwas/ws v1.4.0
github.com/google/go-github v17.0.0+incompatible github.com/google/go-github v17.0.0+incompatible
github.com/invopop/jsonschema v0.12.0 github.com/invopop/jsonschema v0.13.0
github.com/itchyny/gojq v0.12.13 github.com/itchyny/gojq v0.12.17
github.com/json-iterator/go v1.1.12 github.com/json-iterator/go v1.1.12
github.com/julienschmidt/httprouter v1.3.0 github.com/julienschmidt/httprouter v1.3.0
github.com/logrusorgru/aurora v2.0.3+incompatible github.com/logrusorgru/aurora v2.0.3+incompatible
github.com/miekg/dns v1.1.62 github.com/miekg/dns v1.1.66
github.com/olekukonko/tablewriter v0.0.5 github.com/olekukonko/tablewriter v1.0.8
github.com/pkg/errors v0.9.1 github.com/pkg/errors v0.9.1
github.com/projectdiscovery/clistats v0.1.1 github.com/projectdiscovery/clistats v0.1.1
github.com/projectdiscovery/fastdialer v0.4.0 github.com/projectdiscovery/fastdialer v0.4.11
github.com/projectdiscovery/hmap v0.0.88 github.com/projectdiscovery/hmap v0.0.94
github.com/projectdiscovery/interactsh v1.2.4 github.com/projectdiscovery/interactsh v1.2.4
github.com/projectdiscovery/rawhttp v0.1.90 github.com/projectdiscovery/rawhttp v0.1.90
github.com/projectdiscovery/retryabledns v1.0.99 github.com/projectdiscovery/retryabledns v1.0.107
github.com/projectdiscovery/retryablehttp-go v1.0.110 github.com/projectdiscovery/retryablehttp-go v1.0.125
github.com/projectdiscovery/yamldoc-go v1.0.6 github.com/projectdiscovery/yamldoc-go v1.0.6
github.com/remeh/sizedwaitgroup v1.0.0 github.com/remeh/sizedwaitgroup v1.0.0
github.com/rs/xid v1.6.0 github.com/rs/xid v1.6.0
github.com/segmentio/ksuid v1.0.4 github.com/segmentio/ksuid v1.0.4
github.com/shirou/gopsutil/v3 v3.24.2 // indirect github.com/shirou/gopsutil/v3 v3.24.5 // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect github.com/spaolacci/murmur3 v1.1.0 // indirect
github.com/spf13/cast v1.5.1 github.com/spf13/cast v1.9.2
github.com/syndtr/goleveldb v1.0.0 github.com/syndtr/goleveldb v1.0.0
github.com/valyala/fasttemplate v1.2.2 github.com/valyala/fasttemplate v1.2.2
github.com/weppos/publicsuffix-go v0.40.2 github.com/weppos/publicsuffix-go v0.50.0
github.com/xanzy/go-gitlab v0.107.0
go.uber.org/multierr v1.11.0 go.uber.org/multierr v1.11.0
golang.org/x/net v0.39.0 golang.org/x/net v0.44.0
golang.org/x/oauth2 v0.22.0 golang.org/x/oauth2 v0.30.0
golang.org/x/text v0.24.0 golang.org/x/text v0.29.0
gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v2 v2.4.0
) )
require ( require (
carvel.dev/ytt v0.52.0
code.gitea.io/sdk/gitea v0.17.0 code.gitea.io/sdk/gitea v0.17.0
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.1.0 github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.1.0
github.com/DataDog/gostackparse v0.6.0 github.com/DataDog/gostackparse v0.7.0
github.com/Masterminds/semver/v3 v3.2.1 github.com/Masterminds/semver/v3 v3.2.1
github.com/Mzack9999/gcache v0.0.0-20230410081825-519e28eab057 github.com/Mzack9999/gcache v0.0.0-20230410081825-519e28eab057
github.com/Mzack9999/goja v0.0.0-20250507184235-e46100e9c697
github.com/Mzack9999/goja_nodejs v0.0.0-20250507184139-66bcbf65c883
github.com/alexsnet/go-vnc v0.1.0
github.com/alitto/pond v1.9.2 github.com/alitto/pond v1.9.2
github.com/antchfx/xmlquery v1.3.17 github.com/antchfx/xmlquery v1.4.4
github.com/antchfx/xpath v1.3.3
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
github.com/aws/aws-sdk-go-v2 v1.19.0 github.com/aws/aws-sdk-go-v2 v1.36.5
github.com/aws/aws-sdk-go-v2/config v1.18.28 github.com/aws/aws-sdk-go-v2/config v1.29.17
github.com/aws/aws-sdk-go-v2/credentials v1.13.27 github.com/aws/aws-sdk-go-v2/credentials v1.17.70
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.72 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.82
github.com/aws/aws-sdk-go-v2/service/s3 v1.37.0 github.com/aws/aws-sdk-go-v2/service/s3 v1.82.0
github.com/bytedance/sonic v1.12.8 github.com/bytedance/sonic v1.14.0
github.com/cespare/xxhash v1.1.0 github.com/cespare/xxhash v1.1.0
github.com/charmbracelet/glamour v0.8.0 github.com/charmbracelet/glamour v0.10.0
github.com/clbanning/mxj/v2 v2.7.0 github.com/clbanning/mxj/v2 v2.7.0
github.com/ditashi/jsbeautifier-go v0.0.0-20141206144643-2520a8026a9c github.com/ditashi/jsbeautifier-go v0.0.0-20141206144643-2520a8026a9c
github.com/docker/go-units v0.5.0 github.com/docker/go-units v0.5.0
github.com/dop251/goja v0.0.0-20240220182346-e401ed450204
github.com/fatih/structs v1.1.0 github.com/fatih/structs v1.1.0
github.com/getkin/kin-openapi v0.126.0 github.com/getkin/kin-openapi v0.132.0
github.com/go-git/go-git/v5 v5.13.0 github.com/go-git/go-git/v5 v5.16.2
github.com/go-ldap/ldap/v3 v3.4.5 github.com/go-ldap/ldap/v3 v3.4.11
github.com/go-pg/pg v8.0.7+incompatible github.com/go-pg/pg v8.0.7+incompatible
github.com/go-sql-driver/mysql v1.7.1 github.com/go-sql-driver/mysql v1.9.3
github.com/goccy/go-json v0.10.5 github.com/goccy/go-json v0.10.5
github.com/google/uuid v1.6.0
github.com/h2non/filetype v1.1.3 github.com/h2non/filetype v1.1.3
github.com/invopop/yaml v0.3.1 github.com/invopop/yaml v0.3.1
github.com/jcmturner/gokrb5/v8 v8.4.4
github.com/kitabisa/go-ci v1.0.3 github.com/kitabisa/go-ci v1.0.3
github.com/labstack/echo/v4 v4.13.3 github.com/labstack/echo/v4 v4.13.4
github.com/leslie-qiwa/flat v0.0.0-20230424180412-f9d1cf014baa github.com/leslie-qiwa/flat v0.0.0-20230424180412-f9d1cf014baa
github.com/lib/pq v1.10.9 github.com/lib/pq v1.10.9
github.com/mattn/go-sqlite3 v1.14.22 github.com/mattn/go-sqlite3 v1.14.28
github.com/mholt/archives v0.1.0 github.com/mholt/archives v0.1.3
github.com/microsoft/go-mssqldb v1.6.0 github.com/microsoft/go-mssqldb v1.9.2
github.com/ory/dockertest/v3 v3.10.0 github.com/ory/dockertest/v3 v3.12.0
github.com/praetorian-inc/fingerprintx v1.1.9 github.com/praetorian-inc/fingerprintx v1.1.15
github.com/projectdiscovery/dsl v0.4.2 github.com/projectdiscovery/dsl v0.7.0
github.com/projectdiscovery/fasttemplate v0.0.2 github.com/projectdiscovery/fasttemplate v0.0.2
github.com/projectdiscovery/gcache v0.0.0-20241015120333-12546c6e3f4c
github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb
github.com/projectdiscovery/goflags v0.1.74 github.com/projectdiscovery/goflags v0.1.74
github.com/projectdiscovery/gologger v1.1.53 github.com/projectdiscovery/gologger v1.1.55
github.com/projectdiscovery/gostruct v0.0.2 github.com/projectdiscovery/gostruct v0.0.2
github.com/projectdiscovery/gozero v0.0.3 github.com/projectdiscovery/gozero v0.1.0
github.com/projectdiscovery/httpx v1.7.0 github.com/projectdiscovery/httpx v1.7.2-0.20250911192144-fc425deb041a
github.com/projectdiscovery/mapcidr v1.1.34 github.com/projectdiscovery/mapcidr v1.1.34
github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5 github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5
github.com/projectdiscovery/ratelimit v0.0.80 github.com/projectdiscovery/networkpolicy v0.1.25
github.com/projectdiscovery/rdap v0.9.1-0.20221108103045-9865884d1917 github.com/projectdiscovery/ratelimit v0.0.82
github.com/projectdiscovery/rdap v0.9.0
github.com/projectdiscovery/sarif v0.0.1 github.com/projectdiscovery/sarif v0.0.1
github.com/projectdiscovery/tlsx v1.1.9 github.com/projectdiscovery/tlsx v1.2.1
github.com/projectdiscovery/uncover v1.0.10 github.com/projectdiscovery/uncover v1.1.0
github.com/projectdiscovery/useragent v0.0.100 github.com/projectdiscovery/useragent v0.0.101
github.com/projectdiscovery/utils v0.4.18 github.com/projectdiscovery/utils v0.5.0
github.com/projectdiscovery/wappalyzergo v0.2.25 github.com/projectdiscovery/wappalyzergo v0.2.47
github.com/redis/go-redis/v9 v9.1.0 github.com/redis/go-redis/v9 v9.11.0
github.com/seh-msft/burpxml v1.0.1 github.com/seh-msft/burpxml v1.0.1
github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466 github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466
github.com/stretchr/testify v1.10.0 github.com/sijms/go-ora/v2 v2.9.0
github.com/stretchr/testify v1.11.1
github.com/tarunKoyalwar/goleak v0.0.0-20240429141123-0efa90dbdcf9 github.com/tarunKoyalwar/goleak v0.0.0-20240429141123-0efa90dbdcf9
github.com/yassinebenaid/godump v0.10.0 github.com/testcontainers/testcontainers-go v0.38.0
github.com/zmap/zgrab2 v0.1.8-0.20230806160807-97ba87c0e706 github.com/testcontainers/testcontainers-go/modules/mongodb v0.37.0
go.mongodb.org/mongo-driver v1.17.0 github.com/yassinebenaid/godump v0.11.1
golang.org/x/term v0.31.0 github.com/zmap/zgrab2 v0.1.8
gitlab.com/gitlab-org/api/client-go v0.130.1
go.mongodb.org/mongo-driver v1.17.4
golang.org/x/term v0.35.0
gopkg.in/yaml.v3 v3.0.1 gopkg.in/yaml.v3 v3.0.1
moul.io/http2curl v1.0.0 moul.io/http2curl v1.0.0
) )
require ( require (
aead.dev/minisign v0.2.0 // indirect aead.dev/minisign v0.2.0 // indirect
dario.cat/mergo v1.0.0 // indirect dario.cat/mergo v1.0.2 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1 // indirect filippo.io/edwards25519 v1.1.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0 // indirect git.mills.io/prologic/smtpd v0.0.0-20210710122116-a525b76c287a // indirect
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/Mzack9999/go-http-digest-auth-client v0.6.1-0.20220414142836-eb8883508809 // indirect
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect
github.com/ProtonMail/go-crypto v1.1.6 // indirect
github.com/PuerkitoBio/goquery v1.10.3 // indirect
github.com/STARRY-S/zip v0.2.1 // indirect github.com/STARRY-S/zip v0.2.1 // indirect
github.com/VividCortex/ewma v1.2.0 // indirect github.com/VividCortex/ewma v1.2.0 // indirect
github.com/akrylysov/pogreb v0.10.2 // indirect
github.com/alecthomas/chroma/v2 v2.14.0 // indirect github.com/alecthomas/chroma/v2 v2.14.0 // indirect
github.com/andybalholm/brotli v1.1.1 // indirect github.com/alecthomas/kingpin/v2 v2.4.0 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.27 // indirect github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11 // indirect github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.30 // indirect github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.4 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.32 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.36 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.36 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.36 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.17 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.17 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.25.5 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.3 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.34.0 // indirect
github.com/aws/smithy-go v1.22.4 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/bits-and-blooms/bitset v1.13.0 // indirect github.com/bits-and-blooms/bitset v1.13.0 // indirect
github.com/bits-and-blooms/bloom/v3 v3.5.0 // indirect github.com/bits-and-blooms/bloom/v3 v3.5.0 // indirect
@ -143,96 +175,162 @@ require (
github.com/bodgit/sevenzip v1.6.0 // indirect github.com/bodgit/sevenzip v1.6.0 // indirect
github.com/bodgit/windows v1.0.1 // indirect github.com/bodgit/windows v1.0.1 // indirect
github.com/buger/jsonparser v1.1.1 // indirect github.com/buger/jsonparser v1.1.1 // indirect
github.com/bytedance/sonic/loader v0.2.2 // indirect github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/cenkalti/backoff/v4 v4.2.1 // indirect github.com/caddyserver/certmagic v0.19.2 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/charmbracelet/lipgloss v0.13.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/charmbracelet/x/ansi v0.3.2 // indirect github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/cheggaaa/pb/v3 v3.1.4 // indirect github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 // indirect
github.com/charmbracelet/x/ansi v0.8.0 // indirect
github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/cheggaaa/pb/v3 v3.1.6 // indirect
github.com/cloudflare/cfssl v1.6.4 // indirect github.com/cloudflare/cfssl v1.6.4 // indirect
github.com/cloudflare/circl v1.3.8 // indirect github.com/cloudflare/circl v1.6.1 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect github.com/cloudwego/base64x v0.1.5 // indirect
github.com/containerd/continuity v0.4.2 // indirect github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 // indirect
github.com/cyphar/filepath-securejoin v0.2.5 // indirect github.com/containerd/continuity v0.4.5 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
github.com/containerd/errdefs/pkg v0.3.0 // indirect
github.com/containerd/log v0.1.0 // indirect
github.com/containerd/platforms v0.2.1 // indirect
github.com/cpuguy83/dockercfg v0.3.2 // indirect
github.com/cyphar/filepath-securejoin v0.4.1 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/davidmz/go-pageant v1.0.2 // indirect github.com/davidmz/go-pageant v1.0.2 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/dlclark/regexp2 v1.11.4 // indirect github.com/distribution/reference v0.6.0 // indirect
github.com/docker/cli v24.0.5+incompatible // indirect github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/docker/docker v24.0.9+incompatible // indirect github.com/docker/cli v27.4.1+incompatible // indirect
github.com/docker/go-connections v0.4.0 // indirect github.com/docker/docker v28.3.3+incompatible // indirect
github.com/fatih/color v1.16.0 // indirect github.com/docker/go-connections v0.6.0 // indirect
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect
github.com/ebitengine/purego v0.8.4 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/felixge/fgprof v0.9.5 // indirect github.com/felixge/fgprof v0.9.5 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/free5gc/util v1.0.5-0.20230511064842-2e120956883b // indirect github.com/free5gc/util v1.0.5-0.20230511064842-2e120956883b // indirect
github.com/gabriel-vasile/mimetype v1.4.2 // indirect github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/gaissmai/bart v0.17.10 // indirect github.com/gaissmai/bart v0.25.0 // indirect
github.com/geoffgarside/ber v1.1.0 // indirect github.com/geoffgarside/ber v1.1.0 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect github.com/gin-contrib/sse v0.1.0 // indirect
github.com/gin-gonic/gin v1.9.1 // indirect github.com/gin-gonic/gin v1.9.1 // indirect
github.com/go-asn1-ber/asn1-ber v1.5.4 // indirect github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 // indirect
github.com/go-fed/httpsig v1.1.0 // indirect github.com/go-fed/httpsig v1.1.0 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.6.2 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect github.com/go-openapi/jsonpointer v0.21.0 // indirect
github.com/go-openapi/swag v0.23.0 // indirect github.com/go-openapi/swag v0.23.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/gogo/protobuf v1.3.2 // indirect github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v5 v5.2.1 // indirect github.com/golang-jwt/jwt/v5 v5.2.2 // indirect
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
github.com/golang-sql/sqlexp v0.1.0 // indirect github.com/golang-sql/sqlexp v0.1.0 // indirect
github.com/google/certificate-transparency-go v1.1.4 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/certificate-transparency-go v1.3.2 // indirect
github.com/google/go-github/v30 v30.1.0 // indirect github.com/google/go-github/v30 v30.1.0 // indirect
github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7 // indirect github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 // indirect
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/hashicorp/go-version v1.6.0 // indirect github.com/hashicorp/go-version v1.7.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hbakhtiyor/strsim v0.0.0-20190107154042-4d2bbb273edf // indirect github.com/hbakhtiyor/strsim v0.0.0-20190107154042-4d2bbb273edf // indirect
github.com/hdm/jarm-go v0.0.7 // indirect
github.com/iangcarroll/cookiemonster v1.6.0 // indirect
github.com/imdario/mergo v0.3.16 // indirect
github.com/itchyny/timefmt-go v0.1.6 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jcmturner/aescts/v2 v2.0.0 // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect
github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect
github.com/jcmturner/gofork v1.7.6 // indirect github.com/jcmturner/gofork v1.7.6 // indirect
github.com/jcmturner/rpc/v2 v2.0.3 // indirect github.com/jcmturner/rpc/v2 v2.0.3 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/josharian/intern v1.0.0 // indirect github.com/josharian/intern v1.0.0 // indirect
github.com/k14s/starlark-go v0.0.0-20200720175618-3a5c849cc368 // indirect
github.com/kataras/jwt v0.1.10 // indirect github.com/kataras/jwt v0.1.10 // indirect
github.com/klauspost/compress v1.17.11 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect github.com/klauspost/pgzip v1.2.6 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect github.com/kylelemons/godebug v1.1.0 // indirect
github.com/labstack/gommon v0.4.2 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/libdns/libdns v0.2.1 // indirect
github.com/logrusorgru/aurora/v4 v4.0.0 // indirect github.com/logrusorgru/aurora/v4 v4.0.0 // indirect
github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/lufia/plan9stats v0.0.0-20250821153705-5981dea3221d // indirect
github.com/mackerelio/go-osstat v0.2.4 // indirect github.com/mackerelio/go-osstat v0.2.4 // indirect
github.com/magiconair/properties v1.8.10 // indirect
github.com/mailru/easyjson v0.7.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect
github.com/mholt/archiver/v3 v3.5.1 // indirect github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mholt/acmez v1.2.0 // indirect
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
github.com/mikelolasagasti/xz v1.0.1 // indirect
github.com/minio/minlz v1.0.0 // indirect
github.com/minio/selfupdate v0.6.1-0.20230907112617-f11e74f84ca7 // indirect github.com/minio/selfupdate v0.6.1-0.20230907112617-f11e74f84ca7 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/moby/docker-image-spec v1.3.1 // indirect
github.com/moby/term v0.5.0 // indirect github.com/moby/go-archive v0.1.0 // indirect
github.com/moby/patternmatcher v0.6.0 // indirect
github.com/moby/sys/sequential v0.6.0 // indirect
github.com/moby/sys/user v0.4.0 // indirect
github.com/moby/sys/userns v0.1.0 // indirect
github.com/moby/term v0.5.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect github.com/montanaflynn/stats v0.7.1 // indirect
github.com/morikuni/aec v1.0.0 // indirect
github.com/muesli/reflow v0.3.0 // indirect github.com/muesli/reflow v0.3.0 // indirect
github.com/muesli/termenv v0.15.3-0.20240618155329-98d742f6907a // indirect github.com/muesli/termenv v0.16.0 // indirect
github.com/nwaples/rardecode/v2 v2.0.1 // indirect github.com/nwaples/rardecode/v2 v2.1.0 // indirect
github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect
github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect
github.com/olekukonko/errors v1.1.0 // indirect
github.com/olekukonko/ll v0.0.9 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.0.2 // indirect github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/opencontainers/runc v1.1.14 // indirect github.com/opencontainers/runc v1.2.3 // indirect
github.com/openrdap/rdap v0.9.1 // indirect
github.com/pelletier/go-toml/v2 v2.0.8 // indirect github.com/pelletier/go-toml/v2 v2.0.8 // indirect
github.com/perimeterx/marshmallow v1.1.5 // indirect github.com/perimeterx/marshmallow v1.1.5 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/pjbgf/sha1cd v0.3.2 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
github.com/projectdiscovery/asnmap v1.1.1 // indirect github.com/projectdiscovery/asnmap v1.1.1 // indirect
github.com/projectdiscovery/cdncheck v1.1.15 // indirect github.com/projectdiscovery/blackrock v0.0.1 // indirect
github.com/projectdiscovery/cdncheck v1.2.0 // indirect
github.com/projectdiscovery/freeport v0.0.7 // indirect github.com/projectdiscovery/freeport v0.0.7 // indirect
github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb // indirect github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb // indirect
github.com/projectdiscovery/machineid v0.0.0-20240226150047-2e2c51e35983 // indirect github.com/projectdiscovery/machineid v0.0.0-20240226150047-2e2c51e35983 // indirect
github.com/refraction-networking/utls v1.6.7 // indirect github.com/refraction-networking/utls v1.7.1 // indirect
github.com/sashabaranov/go-openai v1.37.0 // indirect github.com/sashabaranov/go-openai v1.37.0 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect github.com/shirou/gopsutil v3.21.11+incompatible // indirect
github.com/shirou/gopsutil/v4 v4.25.7 // indirect
github.com/shoenig/go-m1cpu v0.1.6 // indirect github.com/shoenig/go-m1cpu v0.1.6 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.3.0 // indirect github.com/skeema/knownhosts v1.3.1 // indirect
github.com/sorairolake/lzip-go v0.3.5 // indirect github.com/sorairolake/lzip-go v0.3.5 // indirect
github.com/therootcompany/xz v1.0.1 // indirect
github.com/tidwall/btree v1.7.0 // indirect github.com/tidwall/btree v1.7.0 // indirect
github.com/tidwall/buntdb v1.3.1 // indirect github.com/tidwall/buntdb v1.3.1 // indirect
github.com/tidwall/gjson v1.18.0 // indirect github.com/tidwall/gjson v1.18.0 // indirect
@ -242,133 +340,82 @@ require (
github.com/tidwall/rtred v0.1.2 // indirect github.com/tidwall/rtred v0.1.2 // indirect
github.com/tidwall/tinyqueue v0.1.1 // indirect github.com/tidwall/tinyqueue v0.1.1 // indirect
github.com/tim-ywliu/nested-logrus-formatter v1.3.2 // indirect github.com/tim-ywliu/nested-logrus-formatter v1.3.2 // indirect
github.com/tklauser/go-sysconf v0.3.15 // indirect
github.com/tklauser/numcpus v0.10.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect github.com/ugorji/go/codec v1.2.11 // indirect
github.com/ulikunitz/xz v0.5.15 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
github.com/xdg-go/pbkdf2 v1.0.0 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect
github.com/xdg-go/scram v1.1.2 // indirect github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xeipuuv/gojsonschema v1.2.0 // indirect github.com/xeipuuv/gojsonschema v1.2.0 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
github.com/ysmood/fetchup v0.2.3 // indirect github.com/ysmood/fetchup v0.2.3 // indirect
github.com/ysmood/got v0.40.0 // indirect github.com/ysmood/got v0.40.0 // indirect
github.com/yuin/goldmark v1.7.4 // indirect github.com/yuin/goldmark v1.7.13 // indirect
github.com/yuin/goldmark-emoji v1.0.3 // indirect github.com/yuin/goldmark-emoji v1.0.5 // indirect
github.com/zcalusic/sysinfo v1.0.2 // indirect github.com/zcalusic/sysinfo v1.0.2 // indirect
github.com/zeebo/blake3 v0.2.3 // indirect github.com/zeebo/blake3 v0.2.3 // indirect
go.uber.org/goleak v1.3.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 // indirect
go.opentelemetry.io/otel v1.37.0 // indirect
go.opentelemetry.io/otel/metric v1.37.0 // indirect
go.opentelemetry.io/otel/trace v1.37.0 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/arch v0.3.0 // indirect golang.org/x/arch v0.3.0 // indirect
golang.org/x/sync v0.13.0 // indirect golang.org/x/sync v0.17.0 // indirect
gopkg.in/djherbis/times.v1 v1.3.0 // indirect gopkg.in/djherbis/times.v1 v1.3.0 // indirect
mellium.im/sasl v0.3.1 // indirect mellium.im/sasl v0.3.2 // indirect
) )
require ( require (
git.mills.io/prologic/smtpd v0.0.0-20210710122116-a525b76c287a // indirect
github.com/Mzack9999/go-http-digest-auth-client v0.6.1-0.20220414142836-eb8883508809 // indirect
github.com/PuerkitoBio/goquery v1.10.3 // indirect
github.com/akrylysov/pogreb v0.10.2 // indirect
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect
github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/antchfx/xpath v1.2.4
github.com/aymerick/douceur v0.2.0 // indirect
github.com/caddyserver/certmagic v0.19.2 // indirect
github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dimchansky/utfbom v1.1.1 // indirect github.com/dimchansky/utfbom v1.1.1 // indirect
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/goburrow/cache v0.1.4 // indirect github.com/goburrow/cache v0.1.4 // indirect
github.com/gobwas/httphead v0.1.0 // indirect github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect github.com/gobwas/pool v0.2.1 // indirect
github.com/golang-jwt/jwt/v4 v4.5.1 // indirect github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/go-querystring v1.1.0 // indirect github.com/google/go-querystring v1.1.0 // indirect
github.com/google/uuid v1.6.0
github.com/gorilla/css v1.0.1 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
github.com/hdm/jarm-go v0.0.7 // indirect
github.com/itchyny/timefmt-go v0.1.5 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 // indirect
github.com/leodido/go-urn v1.2.4 // indirect
github.com/libdns/libdns v0.2.1 // indirect
github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mholt/acmez v1.2.0 // indirect
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
github.com/projectdiscovery/blackrock v0.0.1 // indirect
github.com/projectdiscovery/networkpolicy v0.1.13
github.com/rivo/uniseg v0.4.7 // indirect github.com/rivo/uniseg v0.4.7 // indirect
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d // indirect github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d // indirect
github.com/tklauser/go-sysconf v0.3.12 // indirect
github.com/tklauser/numcpus v0.6.1 // indirect
github.com/trivago/tgo v1.0.7 github.com/trivago/tgo v1.0.7
github.com/ulikunitz/xz v0.5.12 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/ysmood/goob v0.4.0 // indirect github.com/ysmood/goob v0.4.0 // indirect
github.com/ysmood/gson v0.7.3 // indirect github.com/ysmood/gson v0.7.3 // indirect
github.com/ysmood/leakless v0.9.0 // indirect github.com/ysmood/leakless v0.9.0 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/zmap/rc2 v0.0.0-20190804163417-abaa70531248 // indirect github.com/zmap/rc2 v0.0.0-20190804163417-abaa70531248 // indirect
github.com/zmap/zcrypto v0.0.0-20240512203510-0fef58d9a9db // indirect github.com/zmap/zcrypto v0.0.0-20240512203510-0fef58d9a9db // indirect
go.etcd.io/bbolt v1.3.10 // indirect go.etcd.io/bbolt v1.4.0 // indirect
go.uber.org/zap v1.25.0 // indirect go.uber.org/zap v1.27.0 // indirect
goftp.io/server/v2 v2.0.1 // indirect goftp.io/server/v2 v2.0.1 // indirect
golang.org/x/crypto v0.37.0 // indirect golang.org/x/crypto v0.42.0 // indirect
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
golang.org/x/mod v0.22.0 // indirect golang.org/x/mod v0.27.0 // indirect
golang.org/x/sys v0.32.0 // indirect golang.org/x/sys v0.36.0 // indirect
golang.org/x/time v0.8.0 // indirect golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.29.0 golang.org/x/tools v0.36.0
google.golang.org/protobuf v1.34.2 // indirect google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect
gopkg.in/corvus-ch/zbase32.v1 v1.0.0 // indirect gopkg.in/corvus-ch/zbase32.v1 v1.0.0 // indirect
) )
require ( require (
github.com/Microsoft/go-winio v0.6.1 // indirect
github.com/ProtonMail/go-crypto v1.1.3 // indirect
github.com/alecthomas/chroma v0.10.0 github.com/alecthomas/chroma v0.10.0
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.5 // indirect github.com/go-echarts/go-echarts/v2 v2.6.0
github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.3.36 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.12.13 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.13 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.19.3 // indirect
github.com/aws/smithy-go v1.13.5 // indirect
github.com/dop251/goja_nodejs v0.0.0-20230821135201-94e508132562
github.com/emirpasic/gods v1.18.1 // indirect
github.com/go-echarts/go-echarts/v2 v2.3.3
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.6.0 // indirect
github.com/imdario/mergo v0.3.16 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jcmturner/gokrb5/v8 v8.4.4
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/labstack/gommon v0.4.2 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/nwaples/rardecode v1.1.3 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect
) )
// https://go.dev/ref/mod#go-mod-file-retract // https://go.dev/ref/mod#go-mod-file-retract
retract v3.2.0 // retract due to broken js protocol issue retract v3.2.0 // retract due to broken js protocol issue
// Fix genproto version conflicts
replace (
google.golang.org/genproto => google.golang.org/genproto v0.0.0-20240814211410-ddb44dafa142
google.golang.org/genproto/googleapis/api => google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142
google.golang.org/genproto/googleapis/rpc => google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1
)

go.sum (1878 changed lines): diff suppressed because it is too large.

@ -1,6 +1,7 @@
{{- if .Values.interactsh.ingress.enabled -}} {{- if .Values.interactsh.ingress.enabled -}}
{{- $fullName := include "nuclei.fullname" . -}} {{- $fullName := include "nuclei.fullname" . -}}
{{- $svcPort := .Values.service.port -}} {{- $svcPort := .Values.interactsh.service.port -}}
{{- $svcName := .Values.interactsh.service.name -}}
{{- if and .Values.interactsh.ingress.className (not (semverCompare ">=1.20-0" .Capabilities.KubeVersion.GitVersion)) }} {{- if and .Values.interactsh.ingress.className (not (semverCompare ">=1.20-0" .Capabilities.KubeVersion.GitVersion)) }}
{{- if not (hasKey .Values.interactsh.ingress.annotations "kubernetes.io/ingress.class") }} {{- if not (hasKey .Values.interactsh.ingress.annotations "kubernetes.io/ingress.class") }}
{{- $_ := set .Values.interactsh.ingress.annotations "kubernetes.io/ingress.class" .Values.interactsh.ingress.className}} {{- $_ := set .Values.interactsh.ingress.annotations "kubernetes.io/ingress.class" .Values.interactsh.ingress.className}}
@ -49,11 +50,11 @@ spec:
backend: backend:
{{- if semverCompare ">=1.20-0" $.Capabilities.KubeVersion.GitVersion }} {{- if semverCompare ">=1.20-0" $.Capabilities.KubeVersion.GitVersion }}
service: service:
name: {{ $fullName }} name: {{ $svcName }}
port: port:
number: {{ $svcPort }} number: {{ $svcPort }}
{{- else }} {{- else }}
serviceName: {{ $fullName }} serviceName: {{ $svcName }}
servicePort: {{ $svcPort }} servicePort: {{ $svcPort }}
{{- end }} {{- end }}
{{- end }} {{- end }}


@ -0,0 +1,38 @@
id: fuzz-body

info:
  name: fuzzing error sqli payloads in http req body
  author: pdteam
  severity: info
  description: |
    This template attempts to find SQL injection vulnerabilities by fuzzing the http body.
    It automatically handles and parses json, xml, multipart form and x-www-form-urlencoded data
    and performs fuzzing on the value of every key.

http:
  - pre-condition:
      - type: dsl
        dsl:
          - method != "GET"
          - method != "HEAD"
        condition: and

    payloads:
      injection:
        - "'"
        - "\""
        - ";"

    fuzzing:
      - part: body
        type: postfix
        mode: single
        fuzz:
          - '{{injection}}'

    stop-at-first-match: true
    matchers:
      - type: word
        words:
          - "unrecognized token:"
          - "null"
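A note on the fuzzing section above: with part: body, type: postfix and mode: single, each payload is appended to one body value at a time. The standalone Go sketch below illustrates that idea for a JSON body only; it is not nuclei's implementation, and the postfixFuzz helper is purely illustrative.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// postfixFuzz appends payload to one string value of a parsed JSON body at a
// time ("type: postfix", "mode: single"), returning one mutated body per key.
// Illustrative only: it handles JSON, not XML/multipart/form-urlencoded.
func postfixFuzz(body []byte, payload string) ([][]byte, error) {
	var data map[string]interface{}
	if err := json.Unmarshal(body, &data); err != nil {
		return nil, err
	}
	var variants [][]byte
	for key := range data {
		str, ok := data[key].(string)
		if !ok {
			continue
		}
		mutated := make(map[string]interface{}, len(data))
		for k, v := range data {
			mutated[k] = v
		}
		mutated[key] = str + payload // postfix: original value + payload
		out, err := json.Marshal(mutated)
		if err != nil {
			return nil, err
		}
		variants = append(variants, out)
	}
	return variants, nil
}

func main() {
	variants, _ := postfixFuzz([]byte(`{"user":"alice","id":"42"}`), "'")
	for _, v := range variants {
		fmt.Println(string(v)) // each variant fuzzes exactly one key
	}
}
```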


@ -0,0 +1,38 @@
id: vnc-password-test

info:
  name: VNC Password Authentication Test
  author: pdteam
  severity: high
  description: |
    Tests VNC authentication with correct and incorrect passwords.
  metadata:
    shodan-query: product:"vnc"
  tags: js,network,vnc,authentication

javascript:
  - pre-condition: |
      isPortOpen(Host,Port)

    code: |
      let vnc = require('nuclei/vnc');
      let client = new vnc.VNCClient();
      client.Connect(Host, Port, Password);

    args:
      Host: "{{Host}}"
      Port: "5900"
      Password: "{{passwords}}"

    payloads:
      passwords:
        - ""
        - root
        - password
        - admin
        - mysecret

    stop-at-first-match: true
    matchers:
      - type: dsl
        dsl:
          - "success == true"
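The payloads block above is expanded into one connection attempt per password, and stop-at-first-match halts the iteration once authentication succeeds. A minimal Go sketch of that control flow, assuming a hypothetical tryVNCAuth helper in place of the nuclei/vnc JavaScript module:

```go
package main

import "fmt"

// tryVNCAuth is a hypothetical stand-in for the nuclei/vnc JavaScript module;
// a real version would speak the RFB protocol and report auth success.
func tryVNCAuth(host string, port int, password string) bool {
	return false // always fails in this sketch
}

func main() {
	// Same payload list as the template above.
	passwords := []string{"", "root", "password", "admin", "mysecret"}
	for _, p := range passwords {
		if tryVNCAuth("127.0.0.1", 5900, p) {
			fmt.Printf("valid VNC password found: %q\n", p)
			return // stop-at-first-match: no further payloads are tried
		}
	}
	fmt.Println("no valid password in payload list")
}
```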


@ -19,7 +19,7 @@ import (
"github.com/projectdiscovery/retryablehttp-go" "github.com/projectdiscovery/retryablehttp-go"
pdcpauth "github.com/projectdiscovery/utils/auth/pdcp" pdcpauth "github.com/projectdiscovery/utils/auth/pdcp"
"github.com/projectdiscovery/utils/env" "github.com/projectdiscovery/utils/env"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
unitutils "github.com/projectdiscovery/utils/unit" unitutils "github.com/projectdiscovery/utils/unit"
updateutils "github.com/projectdiscovery/utils/update" updateutils "github.com/projectdiscovery/utils/update"
urlutil "github.com/projectdiscovery/utils/url" urlutil "github.com/projectdiscovery/utils/url"
@ -55,10 +55,11 @@ type UploadWriter struct {
scanName string scanName string
counter atomic.Int32 counter atomic.Int32
TeamID string TeamID string
Logger *gologger.Logger
} }
// NewUploadWriter creates a new upload writer // NewUploadWriter creates a new upload writer
func NewUploadWriter(ctx context.Context, creds *pdcpauth.PDCPCredentials) (*UploadWriter, error) { func NewUploadWriter(ctx context.Context, logger *gologger.Logger, creds *pdcpauth.PDCPCredentials) (*UploadWriter, error) {
if creds == nil { if creds == nil {
return nil, fmt.Errorf("no credentials provided") return nil, fmt.Errorf("no credentials provided")
} }
@ -66,6 +67,7 @@ func NewUploadWriter(ctx context.Context, creds *pdcpauth.PDCPCredentials) (*Upl
creds: creds, creds: creds,
done: make(chan struct{}, 1), done: make(chan struct{}, 1),
TeamID: NoneTeamID, TeamID: NoneTeamID,
Logger: logger,
} }
var err error var err error
reader, writer := io.Pipe() reader, writer := io.Pipe()
@ -75,11 +77,11 @@ func NewUploadWriter(ctx context.Context, creds *pdcpauth.PDCPCredentials) (*Upl
output.WithJson(true, true), output.WithJson(true, true),
) )
if err != nil { if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not create output writer") return nil, errkit.Wrap(err, "could not create output writer")
} }
tmp, err := urlutil.Parse(creds.Server) tmp, err := urlutil.Parse(creds.Server)
if err != nil { if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not parse server url") return nil, errkit.Wrap(err, "could not parse server url")
} }
tmp.Path = uploadEndpoint tmp.Path = uploadEndpoint
tmp.Update() tmp.Update()
@ -127,7 +129,9 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// continuously read from the reader and send to channel // continuously read from the reader and send to channel
go func() { go func() {
defer r.Close() defer func() {
_ = r.Close()
}()
defer close(ch) defer close(ch)
for { for {
data, err := reader.ReadString('\n') data, err := reader.ReadString('\n')
@ -145,9 +149,9 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
close(u.done) close(u.done)
// if no scanid is generated no results were uploaded // if no scanid is generated no results were uploaded
if u.scanID == "" { if u.scanID == "" {
gologger.Verbose().Msgf("Scan results upload to cloud skipped, no results found to upload") u.Logger.Verbose().Msgf("Scan results upload to cloud skipped, no results found to upload")
} else { } else {
gologger.Info().Msgf("%v Scan results uploaded to cloud, you can view scan results at %v", u.counter.Load(), getScanDashBoardURL(u.scanID, u.TeamID)) u.Logger.Info().Msgf("%v Scan results uploaded to cloud, you can view scan results at %v", u.counter.Load(), getScanDashBoardURL(u.scanID, u.TeamID))
} }
}() }()
// temporary buffer to store the results // temporary buffer to store the results
@ -160,7 +164,7 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// flush before exit // flush before exit
if buff.Len() > 0 { if buff.Len() > 0 {
if err := u.uploadChunk(buff); err != nil { if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err) u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
} }
} }
return return
@ -168,14 +172,14 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// flush the buffer // flush the buffer
if buff.Len() > 0 { if buff.Len() > 0 {
if err := u.uploadChunk(buff); err != nil { if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err) u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
} }
} }
case line, ok := <-ch: case line, ok := <-ch:
if !ok { if !ok {
if buff.Len() > 0 { if buff.Len() > 0 {
if err := u.uploadChunk(buff); err != nil { if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err) u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
} }
} }
return return
@ -183,7 +187,7 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
if buff.Len()+len(line) > MaxChunkSize { if buff.Len()+len(line) > MaxChunkSize {
// flush existing buffer // flush existing buffer
if err := u.uploadChunk(buff); err != nil { if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err) u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
} }
} else { } else {
buff.WriteString(line) buff.WriteString(line)
@ -195,35 +199,37 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// uploadChunk uploads a chunk of data to the server // uploadChunk uploads a chunk of data to the server
func (u *UploadWriter) uploadChunk(buff *bytes.Buffer) error { func (u *UploadWriter) uploadChunk(buff *bytes.Buffer) error {
if err := u.upload(buff.Bytes()); err != nil { if err := u.upload(buff.Bytes()); err != nil {
return errorutil.NewWithErr(err).Msgf("could not upload chunk") return errkit.Wrap(err, "could not upload chunk")
} }
// if successful, reset the buffer // if successful, reset the buffer
buff.Reset() buff.Reset()
// log in verbose mode // log in verbose mode
gologger.Warning().Msgf("Uploaded results chunk, you can view scan results at %v", getScanDashBoardURL(u.scanID, u.TeamID)) u.Logger.Warning().Msgf("Uploaded results chunk, you can view scan results at %v", getScanDashBoardURL(u.scanID, u.TeamID))
return nil return nil
} }
func (u *UploadWriter) upload(data []byte) error { func (u *UploadWriter) upload(data []byte) error {
req, err := u.getRequest(data) req, err := u.getRequest(data)
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("could not create upload request") return errkit.Wrap(err, "could not create upload request")
} }
resp, err := u.client.Do(req) resp, err := u.client.Do(req)
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("could not upload results") return errkit.Wrap(err, "could not upload results")
} }
defer resp.Body.Close() defer func() {
_ = resp.Body.Close()
}()
bin, err := io.ReadAll(resp.Body) bin, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("could not get id from response") return errkit.Wrap(err, "could not get id from response")
} }
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
return fmt.Errorf("could not upload results got status code %v on %v", resp.StatusCode, resp.Request.URL.String()) return fmt.Errorf("could not upload results got status code %v on %v", resp.StatusCode, resp.Request.URL.String())
} }
var uploadResp uploadResponse var uploadResp uploadResponse
if err := json.Unmarshal(bin, &uploadResp); err != nil { if err := json.Unmarshal(bin, &uploadResp); err != nil {
return errorutil.NewWithErr(err).Msgf("could not unmarshal response got %v", string(bin)) return errkit.Wrap(err, fmt.Sprintf("could not unmarshal response got %v", string(bin)))
} }
if uploadResp.ID != "" && u.scanID == "" { if uploadResp.ID != "" && u.scanID == "" {
u.scanID = uploadResp.ID u.scanID = uploadResp.ID
@ -248,15 +254,15 @@ func (u *UploadWriter) getRequest(bin []byte) (*retryablehttp.Request, error) {
} }
req, err := retryablehttp.NewRequest(method, url, bytes.NewReader(bin)) req, err := retryablehttp.NewRequest(method, url, bytes.NewReader(bin))
if err != nil { if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not create cloud upload request") return nil, errkit.Wrap(err, "could not create cloud upload request")
} }
// add pdtm meta params // add pdtm meta params
req.URL.Params.Merge(updateutils.GetpdtmParams(config.Version)) req.Params.Merge(updateutils.GetpdtmParams(config.Version))
// if it is upload endpoint also include name if it exists // if it is upload endpoint also include name if it exists
if u.scanName != "" && req.URL.Path == uploadEndpoint { if u.scanName != "" && req.Path == uploadEndpoint {
req.URL.Params.Add("name", u.scanName) req.Params.Add("name", u.scanName)
} }
req.URL.Update() req.Update()
req.Header.Set(pdcpauth.ApiKeyHeaderName, u.creds.APIKey) req.Header.Set(pdcpauth.ApiKeyHeaderName, u.creds.APIKey)
if u.TeamID != NoneTeamID && u.TeamID != "" { if u.TeamID != NoneTeamID && u.TeamID != "" {


@ -47,7 +47,7 @@ func DoHealthCheck(options *types.Options) string {
} }
c4, err := net.Dial("tcp4", "scanme.sh:80") c4, err := net.Dial("tcp4", "scanme.sh:80")
if err == nil && c4 != nil { if err == nil && c4 != nil {
c4.Close() _ = c4.Close()
} }
testResult = "Ok" testResult = "Ok"
if err != nil { if err != nil {
@ -56,7 +56,7 @@ func DoHealthCheck(options *types.Options) string {
test.WriteString(fmt.Sprintf("IPv4 connectivity to scanme.sh:80 => %s\n", testResult)) test.WriteString(fmt.Sprintf("IPv4 connectivity to scanme.sh:80 => %s\n", testResult))
c6, err := net.Dial("tcp6", "scanme.sh:80") c6, err := net.Dial("tcp6", "scanme.sh:80")
if err == nil && c6 != nil { if err == nil && c6 != nil {
c6.Close() _ = c6.Close()
} }
testResult = "Ok" testResult = "Ok"
if err != nil { if err != nil {
@ -65,7 +65,7 @@ func DoHealthCheck(options *types.Options) string {
test.WriteString(fmt.Sprintf("IPv6 connectivity to scanme.sh:80 => %s\n", testResult)) test.WriteString(fmt.Sprintf("IPv6 connectivity to scanme.sh:80 => %s\n", testResult))
u4, err := net.Dial("udp4", "scanme.sh:53") u4, err := net.Dial("udp4", "scanme.sh:53")
if err == nil && u4 != nil { if err == nil && u4 != nil {
u4.Close() _ = u4.Close()
} }
testResult = "Ok" testResult = "Ok"
if err != nil { if err != nil {


@ -2,11 +2,11 @@ package runner
import ( import (
"context" "context"
"fmt"
"sync/atomic" "sync/atomic"
"time" "time"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/hmap/store/hybrid" "github.com/projectdiscovery/hmap/store/hybrid"
"github.com/projectdiscovery/httpx/common/httpx" "github.com/projectdiscovery/httpx/common/httpx"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider" "github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
@ -28,7 +28,7 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
// currently http probing for input mode types is not supported // currently http probing for input mode types is not supported
return hm, nil return hm, nil
} }
gologger.Info().Msgf("Running httpx on input host") r.Logger.Info().Msgf("Running httpx on input host")
httpxOptions := httpx.DefaultOptions httpxOptions := httpx.DefaultOptions
if r.options.AliveHttpProxy != "" { if r.options.AliveHttpProxy != "" {
@ -38,7 +38,13 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
} }
httpxOptions.RetryMax = r.options.Retries httpxOptions.RetryMax = r.options.Retries
httpxOptions.Timeout = time.Duration(r.options.Timeout) * time.Second httpxOptions.Timeout = time.Duration(r.options.Timeout) * time.Second
httpxOptions.NetworkPolicy = protocolstate.NetworkPolicy
dialers := protocolstate.GetDialersWithId(r.options.ExecutionId)
if dialers == nil {
return nil, fmt.Errorf("dialers not initialized for %s", r.options.ExecutionId)
}
httpxOptions.NetworkPolicy = dialers.NetworkPolicy
httpxClient, err := httpx.New(&httpxOptions) httpxClient, err := httpx.New(&httpxOptions)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "could not create httpx client") return nil, errors.Wrap(err, "could not create httpx client")
@ -57,7 +63,7 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
if r.options.ProbeConcurrency > 0 && swg.Size != r.options.ProbeConcurrency { if r.options.ProbeConcurrency > 0 && swg.Size != r.options.ProbeConcurrency {
if err := swg.Resize(context.Background(), r.options.ProbeConcurrency); err != nil { if err := swg.Resize(context.Background(), r.options.ProbeConcurrency); err != nil {
gologger.Error().Msgf("Could not resize workpool: %s\n", err) r.Logger.Error().Msgf("Could not resize workpool: %s\n", err)
} }
} }
@ -74,6 +80,6 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
}) })
swg.Wait() swg.Wait()
gologger.Info().Msgf("Found %d URL from httpx", count.Load()) r.Logger.Info().Msgf("Found %d URL from httpx", count.Load())
return hm, nil return hm, nil
} }
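The probing code now resolves its network policy through per-execution dialers (protocolstate.GetDialersWithId) instead of a process-wide global, so concurrent engine instances with different ExecutionIds no longer share state. A self-contained sketch of that lookup-and-fail-fast pattern; the registry below is illustrative and stands in for the real protocolstate package:

```go
package example

import (
	"fmt"
	"sync"
)

// NetworkPolicy is a local placeholder for networkpolicy.NetworkPolicy.
type NetworkPolicy struct{ AllowList []string }

// Dialers groups per-execution networking state, mirroring what
// protocolstate.GetDialersWithId returns in nuclei.
type Dialers struct {
	NetworkPolicy *NetworkPolicy
}

var (
	mu       sync.RWMutex
	registry = map[string]*Dialers{}
)

// getDialersWithID looks up the dialers registered for one execution ID.
func getDialersWithID(executionID string) *Dialers {
	mu.RLock()
	defer mu.RUnlock()
	return registry[executionID]
}

// networkPolicyFor fails fast when the execution was never initialized,
// mirroring the nil check added to initializeTemplatesHTTPInput above.
func networkPolicyFor(executionID string) (*NetworkPolicy, error) {
	dialers := getDialersWithID(executionID)
	if dialers == nil {
		return nil, fmt.Errorf("dialers not initialized for %s", executionID)
	}
	return dialers.NetworkPolicy, nil
}
```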


@ -17,22 +17,22 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/scan" "github.com/projectdiscovery/nuclei/v3/pkg/scan"
"github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/utils/env" "github.com/projectdiscovery/utils/env"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
) )
type AuthLazyFetchOptions struct { type AuthLazyFetchOptions struct {
TemplateStore *loader.Store TemplateStore *loader.Store
ExecOpts protocols.ExecutorOptions ExecOpts *protocols.ExecutorOptions
OnError func(error) OnError func(error)
} }
// GetAuthTmplStore create new loader for loading auth templates // GetAuthTmplStore create new loader for loading auth templates
func GetAuthTmplStore(opts types.Options, catalog catalog.Catalog, execOpts protocols.ExecutorOptions) (*loader.Store, error) { func GetAuthTmplStore(opts *types.Options, catalog catalog.Catalog, execOpts *protocols.ExecutorOptions) (*loader.Store, error) {
tmpls := []string{} tmpls := []string{}
for _, file := range opts.SecretsFile { for _, file := range opts.SecretsFile {
data, err := authx.GetTemplatePathsFromSecretFile(file) data, err := authx.GetTemplatePathsFromSecretFile(file)
if err != nil { if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("failed to get template paths from secrets file") return nil, errkit.Wrap(err, "failed to get template paths from secrets file")
} }
tmpls = append(tmpls, data...) tmpls = append(tmpls, data...)
} }
@ -54,11 +54,11 @@ func GetAuthTmplStore(opts types.Options, catalog catalog.Catalog, execOpts prot
opts.Protocols = nil opts.Protocols = nil
opts.ExcludeProtocols = nil opts.ExcludeProtocols = nil
opts.IncludeConditions = nil opts.IncludeConditions = nil
cfg := loader.NewConfig(&opts, catalog, execOpts) cfg := loader.NewConfig(opts, catalog, execOpts)
cfg.StoreId = loader.AuthStoreId cfg.StoreId = loader.AuthStoreId
store, err := loader.New(cfg) store, err := loader.New(cfg)
if err != nil { if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("failed to initialize dynamic auth templates store") return nil, errkit.Wrap(err, "failed to initialize dynamic auth templates store")
} }
return store, nil return store, nil
} }


@ -31,7 +31,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/utils/yaml" "github.com/projectdiscovery/nuclei/v3/pkg/utils/yaml"
fileutil "github.com/projectdiscovery/utils/file" fileutil "github.com/projectdiscovery/utils/file"
"github.com/projectdiscovery/utils/generic" "github.com/projectdiscovery/utils/generic"
logutil "github.com/projectdiscovery/utils/log"
stringsutil "github.com/projectdiscovery/utils/strings" stringsutil "github.com/projectdiscovery/utils/strings"
) )
@ -40,6 +39,8 @@ const (
DefaultDumpTrafficOutputFolder = "output" DefaultDumpTrafficOutputFolder = "output"
) )
var validateOptions = validator.New()
func ConfigureOptions() error { func ConfigureOptions() error {
// with FileStringSliceOptions, FileNormalizedStringSliceOptions, FileCommaSeparatedStringSliceOptions // with FileStringSliceOptions, FileNormalizedStringSliceOptions, FileCommaSeparatedStringSliceOptions
// if file has the extension `.yaml` or `.json` we consider those as strings and not files to be read // if file has the extension `.yaml` or `.json` we consider those as strings and not files to be read
@ -71,17 +72,17 @@ func ParseOptions(options *types.Options) {
vardump.Limit = options.VarDumpLimit vardump.Limit = options.VarDumpLimit
} }
if options.ShowActions { if options.ShowActions {
gologger.Info().Msgf("Showing available headless actions: ") options.Logger.Info().Msgf("Showing available headless actions: ")
for action := range engine.ActionStringToAction { for action := range engine.ActionStringToAction {
gologger.Print().Msgf("\t%s", action) options.Logger.Print().Msgf("\t%s", action)
} }
os.Exit(0) os.Exit(0)
} }
defaultProfilesPath := filepath.Join(config.DefaultConfig.GetTemplateDir(), "profiles") defaultProfilesPath := filepath.Join(config.DefaultConfig.GetTemplateDir(), "profiles")
if options.ListTemplateProfiles { if options.ListTemplateProfiles {
gologger.Print().Msgf( options.Logger.Print().Msgf(
"\nListing available %v nuclei template profiles for %v", "Listing available %v nuclei template profiles for %v",
config.DefaultConfig.TemplateVersion, config.DefaultConfig.TemplateVersion,
config.DefaultConfig.TemplatesDirectory, config.DefaultConfig.TemplatesDirectory,
) )
@ -93,23 +94,23 @@ func ParseOptions(options *types.Options) {
return nil return nil
} }
if profileRelPath, err := filepath.Rel(templatesRootDir, iterItem); err == nil { if profileRelPath, err := filepath.Rel(templatesRootDir, iterItem); err == nil {
gologger.Print().Msgf("%s (%s)\n", profileRelPath, strings.TrimSuffix(filepath.Base(iterItem), ext)) options.Logger.Print().Msgf("%s (%s)\n", profileRelPath, strings.TrimSuffix(filepath.Base(iterItem), ext))
} }
return nil return nil
}) })
if err != nil { if err != nil {
gologger.Error().Msgf("%s\n", err) options.Logger.Error().Msgf("%s\n", err)
} }
os.Exit(0) os.Exit(0)
} }
if options.StoreResponseDir != DefaultDumpTrafficOutputFolder && !options.StoreResponse { if options.StoreResponseDir != DefaultDumpTrafficOutputFolder && !options.StoreResponse {
gologger.Debug().Msgf("Store response directory specified, enabling \"store-resp\" flag automatically\n") options.Logger.Debug().Msgf("Store response directory specified, enabling \"store-resp\" flag automatically\n")
options.StoreResponse = true options.StoreResponse = true
} }
// Validate the options passed by the user and if any // Validate the options passed by the user and if any
// invalid options have been used, exit. // invalid options have been used, exit.
if err := ValidateOptions(options); err != nil { if err := ValidateOptions(options); err != nil {
gologger.Fatal().Msgf("Program exiting: %s\n", err) options.Logger.Fatal().Msgf("Program exiting: %s\n", err)
} }
// Load the resolvers if user asked for them // Load the resolvers if user asked for them
@ -117,12 +118,12 @@ func ParseOptions(options *types.Options) {
err := protocolinit.Init(options) err := protocolinit.Init(options)
if err != nil { if err != nil {
gologger.Fatal().Msgf("Could not initialize protocols: %s\n", err) options.Logger.Fatal().Msgf("Could not initialize protocols: %s\n", err)
} }
// Set GitHub token in env variable. runner.getGHClientWithToken() reads token from env // Set GitHub token in env variable. runner.getGHClientWithToken() reads token from env
if options.GitHubToken != "" && os.Getenv("GITHUB_TOKEN") != options.GitHubToken { if options.GitHubToken != "" && os.Getenv("GITHUB_TOKEN") != options.GitHubToken {
os.Setenv("GITHUB_TOKEN", options.GitHubToken) _ = os.Setenv("GITHUB_TOKEN", options.GitHubToken)
} }
if options.UncoverQuery != nil { if options.UncoverQuery != nil {
@ -139,8 +140,7 @@ func ParseOptions(options *types.Options) {
// validateOptions validates the configuration options passed // validateOptions validates the configuration options passed
func ValidateOptions(options *types.Options) error { func ValidateOptions(options *types.Options) error {
validate := validator.New() if err := validateOptions.Struct(options); err != nil {
if err := validate.Struct(options); err != nil {
if _, ok := err.(*validator.InvalidValidationError); ok { if _, ok := err.(*validator.InvalidValidationError); ok {
return err return err
} }
@ -169,7 +169,7 @@ func ValidateOptions(options *types.Options) error {
return err return err
} }
if options.Validate { if options.Validate {
validateTemplatePaths(config.DefaultConfig.TemplatesDirectory, options.Templates, options.Workflows) validateTemplatePaths(options.Logger, config.DefaultConfig.TemplatesDirectory, options.Templates, options.Workflows)
} }
if options.DAST { if options.DAST {
if err := validateDASTOptions(options); err != nil { if err := validateDASTOptions(options); err != nil {
@ -182,7 +182,7 @@ func ValidateOptions(options *types.Options) error {
if generic.EqualsAny("", options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile) { if generic.EqualsAny("", options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile) {
return errors.New("if a client certification option is provided, then all three must be provided") return errors.New("if a client certification option is provided, then all three must be provided")
} }
validateCertificatePaths(options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile) validateCertificatePaths(options.Logger, options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile)
} }
// Verify AWS secrets are passed if a S3 template bucket is passed // Verify AWS secrets are passed if a S3 template bucket is passed
if options.AwsBucketName != "" && options.UpdateTemplates && !options.AwsTemplateDisableDownload { if options.AwsBucketName != "" && options.UpdateTemplates && !options.AwsTemplateDisableDownload {
@ -304,7 +304,9 @@ func createReportingOptions(options *types.Options) (*reporting.Options, error)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "could not open reporting config file") return nil, errors.Wrap(err, "could not open reporting config file")
} }
defer file.Close() defer func() {
_ = file.Close()
}()
if err := yaml.DecodeAndValidate(file, reportingOptions); err != nil { if err := yaml.DecodeAndValidate(file, reportingOptions); err != nil {
return nil, errors.Wrap(err, "could not parse reporting config file") return nil, errors.Wrap(err, "could not parse reporting config file")
@ -342,32 +344,33 @@ func createReportingOptions(options *types.Options) (*reporting.Options, error)
} }
reportingOptions.OmitRaw = options.OmitRawRequests reportingOptions.OmitRaw = options.OmitRawRequests
reportingOptions.ExecutionId = options.ExecutionId
return reportingOptions, nil return reportingOptions, nil
} }
// configureOutput configures the output logging levels to be displayed on the screen // configureOutput configures the output logging levels to be displayed on the screen
func configureOutput(options *types.Options) { func configureOutput(options *types.Options) {
if options.NoColor { if options.NoColor {
gologger.DefaultLogger.SetFormatter(formatter.NewCLI(true)) options.Logger.SetFormatter(formatter.NewCLI(true))
} }
// If the user desires verbose output, show verbose output // If the user desires verbose output, show verbose output
if options.Debug || options.DebugRequests || options.DebugResponse { if options.Debug || options.DebugRequests || options.DebugResponse {
gologger.DefaultLogger.SetMaxLevel(levels.LevelDebug) options.Logger.SetMaxLevel(levels.LevelDebug)
} }
// Debug takes precedence before verbose // Debug takes precedence before verbose
// because debug is a lower logging level. // because debug is a lower logging level.
if options.Verbose || options.Validate { if options.Verbose || options.Validate {
gologger.DefaultLogger.SetMaxLevel(levels.LevelVerbose) options.Logger.SetMaxLevel(levels.LevelVerbose)
} }
if options.NoColor { if options.NoColor {
gologger.DefaultLogger.SetFormatter(formatter.NewCLI(true)) options.Logger.SetFormatter(formatter.NewCLI(true))
} }
if options.Silent { if options.Silent {
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent) options.Logger.SetMaxLevel(levels.LevelSilent)
} }
// disable standard logger (ref: https://github.com/golang/go/issues/19895) // disable standard logger (ref: https://github.com/golang/go/issues/19895)
logutil.DisableDefaultLogger() // logutil.DisableDefaultLogger()
} }
// loadResolvers loads resolvers from both user-provided flags and file // loadResolvers loads resolvers from both user-provided flags and file
@ -378,9 +381,11 @@ func loadResolvers(options *types.Options) {
file, err := os.Open(options.ResolversFile) file, err := os.Open(options.ResolversFile)
if err != nil { if err != nil {
gologger.Fatal().Msgf("Could not open resolvers file: %s\n", err) options.Logger.Fatal().Msgf("Could not open resolvers file: %s\n", err)
} }
defer file.Close() defer func() {
_ = file.Close()
}()
scanner := bufio.NewScanner(file) scanner := bufio.NewScanner(file)
for scanner.Scan() { for scanner.Scan() {
@ -396,7 +401,7 @@ func loadResolvers(options *types.Options) {
} }
} }
func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPaths []string) { func validateTemplatePaths(logger *gologger.Logger, templatesDirectory string, templatePaths, workflowPaths []string) {
allGivenTemplatePaths := append(templatePaths, workflowPaths...) allGivenTemplatePaths := append(templatePaths, workflowPaths...)
for _, templatePath := range allGivenTemplatePaths { for _, templatePath := range allGivenTemplatePaths {
if templatesDirectory != templatePath && filepath.IsAbs(templatePath) { if templatesDirectory != templatePath && filepath.IsAbs(templatePath) {
@ -404,7 +409,7 @@ func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPat
if err == nil && fileInfo.IsDir() { if err == nil && fileInfo.IsDir() {
relativizedPath, err2 := filepath.Rel(templatesDirectory, templatePath) relativizedPath, err2 := filepath.Rel(templatesDirectory, templatePath)
if err2 != nil || (len(relativizedPath) >= 2 && relativizedPath[:2] == "..") { if err2 != nil || (len(relativizedPath) >= 2 && relativizedPath[:2] == "..") {
gologger.Warning().Msgf("The given path (%s) is outside the default template directory path (%s)! "+ logger.Warning().Msgf("The given path (%s) is outside the default template directory path (%s)! "+
"Referenced sub-templates with relative paths in workflows will be resolved against the default template directory.", templatePath, templatesDirectory) "Referenced sub-templates with relative paths in workflows will be resolved against the default template directory.", templatePath, templatesDirectory)
break break
} }
@ -413,12 +418,12 @@ func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPat
} }
} }
func validateCertificatePaths(certificatePaths ...string) { func validateCertificatePaths(logger *gologger.Logger, certificatePaths ...string) {
for _, certificatePath := range certificatePaths { for _, certificatePath := range certificatePaths {
if !fileutil.FileExists(certificatePath) { if !fileutil.FileExists(certificatePath) {
// The provided path to the PEM certificate does not exist for the client authentication. As this is // The provided path to the PEM certificate does not exist for the client authentication. As this is
// required for successful authentication, log and return an error // required for successful authentication, log and return an error
gologger.Fatal().Msgf("The given path (%s) to the certificate does not exist!", certificatePath) logger.Fatal().Msgf("The given path (%s) to the certificate does not exist!", certificatePath)
break break
} }
} }
@ -445,7 +450,7 @@ func readEnvInputVars(options *types.Options) {
// Attempt to convert the repo ID to an integer // Attempt to convert the repo ID to an integer
repoIDInt, err := strconv.Atoi(repoID) repoIDInt, err := strconv.Atoi(repoID)
if err != nil { if err != nil {
gologger.Warning().Msgf("Invalid GitLab template repository ID: %s", repoID) options.Logger.Warning().Msgf("Invalid GitLab template repository ID: %s", repoID)
continue continue
} }
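For context on the repository-ID handling above: the loop warns and skips any value that fails `strconv.Atoi`, so only numeric GitLab repository IDs survive. A reduced, self-contained sketch of that validation; the comma-separated input format and function name here are assumptions for illustration, not taken from the diff:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseRepoIDs keeps only numeric IDs, mirroring the Atoi check in readEnvInputVars.
func parseRepoIDs(raw string) []int {
	var ids []int
	for _, repoID := range strings.Split(raw, ",") {
		repoID = strings.TrimSpace(repoID)
		if repoID == "" {
			continue
		}
		repoIDInt, err := strconv.Atoi(repoID)
		if err != nil {
			fmt.Printf("Invalid GitLab template repository ID: %s\n", repoID)
			continue
		}
		ids = append(ids, repoIDInt)
	}
	return ids
}

func main() {
	fmt.Println(parseRepoIDs("1234, 5678, not-a-number")) // [1234 5678]
}
```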


@@ -7,9 +7,8 @@ import (
"os"
"strings"
- "github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
- errorutil "github.com/projectdiscovery/utils/errors"
+ "github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file"
proxyutils "github.com/projectdiscovery/utils/proxy"
)
@@ -30,7 +29,9 @@ func loadProxyServers(options *types.Options) error {
if err != nil {
return fmt.Errorf("could not open proxy file: %w", err)
}
- defer file.Close()
+ defer func() {
+ 	_ = file.Close()
+ }()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
proxy := scanner.Text()
@@ -49,17 +50,18 @@ func loadProxyServers(options *types.Options) error {
}
proxyURL, err := url.Parse(aliveProxy)
if err != nil {
- return errorutil.WrapfWithNil(err, "failed to parse proxy got %v", err)
+ return errkit.Wrapf(err, "failed to parse proxy got %v", err)
}
if options.ProxyInternal {
- os.Setenv(HTTP_PROXY_ENV, proxyURL.String())
+ _ = os.Setenv(HTTP_PROXY_ENV, proxyURL.String())
}
- if proxyURL.Scheme == proxyutils.HTTP || proxyURL.Scheme == proxyutils.HTTPS {
- 	gologger.Verbose().Msgf("Using %s as proxy server", proxyURL.String())
+ switch proxyURL.Scheme {
+ case proxyutils.HTTP, proxyutils.HTTPS:
+ 	options.Logger.Verbose().Msgf("Using %s as proxy server", proxyURL.String())
options.AliveHttpProxy = proxyURL.String()
- } else if proxyURL.Scheme == proxyutils.SOCKS5 {
+ case proxyutils.SOCKS5:
options.AliveSocksProxy = proxyURL.String()
- 	gologger.Verbose().Msgf("Using %s as socket proxy server", proxyURL.String())
+ 	options.Logger.Verbose().Msgf("Using %s as socket proxy server", proxyURL.String())
}
return nil
}
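The proxy loader now selects the proxy slot with a `switch` on the parsed scheme instead of chained `if`/`else if`. A stripped-down sketch of the same branching; the scheme constants are the ones used in the diff, the rest is illustrative:

```go
package main

import (
	"fmt"
	"net/url"

	proxyutils "github.com/projectdiscovery/utils/proxy"
)

// classifyProxy reports which proxy slot a URL belongs to, mirroring the
// scheme switch in loadProxyServers.
func classifyProxy(raw string) (string, error) {
	proxyURL, err := url.Parse(raw)
	if err != nil {
		return "", fmt.Errorf("failed to parse proxy %q: %w", raw, err)
	}
	switch proxyURL.Scheme {
	case proxyutils.HTTP, proxyutils.HTTPS:
		return "http", nil
	case proxyutils.SOCKS5:
		return "socks", nil
	default:
		return "", fmt.Errorf("unsupported proxy scheme: %s", proxyURL.Scheme)
	}
}

func main() {
	for _, p := range []string{"http://127.0.0.1:8080", "socks5://127.0.0.1:1080"} {
		kind, err := classifyProxy(p)
		fmt.Println(p, "->", kind, err)
	}
}
```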


@ -10,6 +10,7 @@ import (
"sync/atomic" "sync/atomic"
"time" "time"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/pdcp" "github.com/projectdiscovery/nuclei/v3/internal/pdcp"
"github.com/projectdiscovery/nuclei/v3/internal/server" "github.com/projectdiscovery/nuclei/v3/internal/server"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider" "github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
@ -32,7 +33,6 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/projectdiscovery/ratelimit" "github.com/projectdiscovery/ratelimit"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/colorizer" "github.com/projectdiscovery/nuclei/v3/internal/colorizer"
"github.com/projectdiscovery/nuclei/v3/internal/httpapi" "github.com/projectdiscovery/nuclei/v3/internal/httpapi"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog" "github.com/projectdiscovery/nuclei/v3/pkg/catalog"
@ -95,6 +95,7 @@ type Runner struct {
inputProvider provider.InputProvider inputProvider provider.InputProvider
fuzzFrequencyCache *frequency.Tracker fuzzFrequencyCache *frequency.Tracker
httpStats *outputstats.Tracker httpStats *outputstats.Tracker
Logger *gologger.Logger
//general purpose temporary directory //general purpose temporary directory
tmpDir string tmpDir string
@ -108,10 +109,11 @@ type Runner struct {
func New(options *types.Options) (*Runner, error) { func New(options *types.Options) (*Runner, error) {
runner := &Runner{ runner := &Runner{
options: options, options: options,
Logger: options.Logger,
} }
if options.HealthCheck { if options.HealthCheck {
gologger.Print().Msgf("%s\n", DoHealthCheck(options)) runner.Logger.Print().Msgf("%s\n", DoHealthCheck(options))
os.Exit(0) os.Exit(0)
} }
@ -119,14 +121,22 @@ func New(options *types.Options) (*Runner, error) {
if config.DefaultConfig.CanCheckForUpdates() { if config.DefaultConfig.CanCheckForUpdates() {
if err := installer.NucleiVersionCheck(); err != nil { if err := installer.NucleiVersionCheck(); err != nil {
if options.Verbose || options.Debug { if options.Verbose || options.Debug {
gologger.Error().Msgf("nuclei version check failed got: %s\n", err) runner.Logger.Error().Msgf("nuclei version check failed got: %s\n", err)
} }
} }
// if template list or template display is enabled, enable all templates
if options.TemplateList || options.TemplateDisplay {
options.EnableCodeTemplates = true
options.EnableFileTemplates = true
options.EnableSelfContainedTemplates = true
options.EnableGlobalMatchersTemplates = true
}
// check for custom template updates and update if available // check for custom template updates and update if available
ctm, err := customtemplates.NewCustomTemplatesManager(options) ctm, err := customtemplates.NewCustomTemplatesManager(options)
if err != nil { if err != nil {
gologger.Error().Label("custom-templates").Msgf("Failed to create custom templates manager: %s\n", err) runner.Logger.Error().Label("custom-templates").Msgf("Failed to create custom templates manager: %s\n", err)
} }
// Check for template updates and update if available. // Check for template updates and update if available.
@ -136,15 +146,15 @@ func New(options *types.Options) (*Runner, error) {
DisablePublicTemplates: options.PublicTemplateDisableDownload, DisablePublicTemplates: options.PublicTemplateDisableDownload,
} }
if err := tm.FreshInstallIfNotExists(); err != nil { if err := tm.FreshInstallIfNotExists(); err != nil {
gologger.Warning().Msgf("failed to install nuclei templates: %s\n", err) runner.Logger.Warning().Msgf("failed to install nuclei templates: %s\n", err)
} }
if err := tm.UpdateIfOutdated(); err != nil { if err := tm.UpdateIfOutdated(); err != nil {
gologger.Warning().Msgf("failed to update nuclei templates: %s\n", err) runner.Logger.Warning().Msgf("failed to update nuclei templates: %s\n", err)
} }
if config.DefaultConfig.NeedsIgnoreFileUpdate() { if config.DefaultConfig.NeedsIgnoreFileUpdate() {
if err := installer.UpdateIgnoreFile(); err != nil { if err := installer.UpdateIgnoreFile(); err != nil {
gologger.Warning().Msgf("failed to update nuclei ignore file: %s\n", err) runner.Logger.Warning().Msgf("failed to update nuclei ignore file: %s\n", err)
} }
} }
@ -152,7 +162,7 @@ func New(options *types.Options) (*Runner, error) {
// we automatically check for updates unless explicitly disabled // we automatically check for updates unless explicitly disabled
// this print statement is only to inform the user that there are no updates // this print statement is only to inform the user that there are no updates
if !config.DefaultConfig.NeedsTemplateUpdate() { if !config.DefaultConfig.NeedsTemplateUpdate() {
gologger.Info().Msgf("No new updates found for nuclei templates") runner.Logger.Info().Msgf("No new updates found for nuclei templates")
} }
// manually trigger update of custom templates // manually trigger update of custom templates
if ctm != nil { if ctm != nil {
@@ -161,20 +171,25 @@ func New(options *types.Options) (*Runner, error) {
}
}
- parser := templates.NewParser()
- if options.Validate {
- 	parser.ShouldValidate = true
- }
- // TODO: refactor to pass options reference globally without cycles
- parser.NoStrictSyntax = options.NoStrictSyntax
- runner.parser = parser
+ if op, ok := options.Parser.(*templates.Parser); ok {
+ 	// Enable passing in an existing parser instance
+ 	// This uses a type assertion to avoid an import loop
+ 	runner.parser = op
+ } else {
+ 	parser := templates.NewParser()
+ 	if options.Validate {
+ 		parser.ShouldValidate = true
+ 	}
+ 	// TODO: refactor to pass options reference globally without cycles
+ 	parser.NoStrictSyntax = options.NoStrictSyntax
+ 	runner.parser = parser
+ }
yaml.StrictSyntax = !options.NoStrictSyntax
if options.Headless {
if engine.MustDisableSandbox() {
- 	gologger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox\n")
+ 	runner.Logger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox\n")
}
browser, err := engine.New(options)
if err != nil {
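Earlier in this hunk, `New` starts honouring a pre-built parser supplied through `options.Parser` (recovered with a type assertion to avoid an import cycle) and only falls back to `templates.NewParser()` when none is set. A hedged sketch of how a caller might take advantage of that; the field and constructor come from the diff, the wiring around them is an assumption:

```go
package main

import (
	"github.com/projectdiscovery/nuclei/v3/pkg/templates"
	"github.com/projectdiscovery/nuclei/v3/pkg/types"
)

func main() {
	// Build one parser up front; the runner will reuse it via the type
	// assertion shown in the diff instead of constructing a fresh one.
	parser := templates.NewParser()
	parser.NoStrictSyntax = false

	options := types.DefaultOptions()
	options.Parser = parser // assumed to be picked up by runner.New per the diff
	_ = options
}
```

Sharing one parser this way lets repeated runner instances reuse its internal template cache rather than re-parsing the same templates on every run.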
@ -226,11 +241,11 @@ func New(options *types.Options) (*Runner, error) {
if options.HttpApiEndpoint != "" { if options.HttpApiEndpoint != "" {
apiServer := httpapi.New(options.HttpApiEndpoint, options) apiServer := httpapi.New(options.HttpApiEndpoint, options)
gologger.Info().Msgf("Listening api endpoint on: %s", options.HttpApiEndpoint) runner.Logger.Info().Msgf("Listening api endpoint on: %s", options.HttpApiEndpoint)
runner.httpApiEndpoint = apiServer runner.httpApiEndpoint = apiServer
go func() { go func() {
if err := apiServer.Start(); err != nil { if err := apiServer.Start(); err != nil {
gologger.Error().Msgf("Failed to start API server: %s", err) runner.Logger.Error().Msgf("Failed to start API server: %s", err)
} }
}() }()
} }
@ -284,7 +299,7 @@ func New(options *types.Options) (*Runner, error) {
// create the resume configuration structure // create the resume configuration structure
resumeCfg := types.NewResumeCfg() resumeCfg := types.NewResumeCfg()
if runner.options.ShouldLoadResume() { if runner.options.ShouldLoadResume() {
gologger.Info().Msg("Resuming from save checkpoint") runner.Logger.Info().Msg("Resuming from save checkpoint")
file, err := os.ReadFile(runner.options.Resume) file, err := os.ReadFile(runner.options.Resume)
if err != nil { if err != nil {
return nil, err return nil, err
@ -326,6 +341,7 @@ func New(options *types.Options) (*Runner, error) {
} }
opts := interactsh.DefaultOptions(runner.output, runner.issuesClient, runner.progress) opts := interactsh.DefaultOptions(runner.output, runner.issuesClient, runner.progress)
opts.Logger = runner.Logger
opts.Debug = runner.options.Debug opts.Debug = runner.options.Debug
opts.NoColor = runner.options.NoColor opts.NoColor = runner.options.NoColor
if options.InteractshURL != "" { if options.InteractshURL != "" {
@@ -355,24 +371,20 @@ func New(options *types.Options) (*Runner, error) {
}
interactshClient, err := interactsh.New(opts)
if err != nil {
- gologger.Error().Msgf("Could not create interactsh client: %s", err)
+ runner.Logger.Error().Msgf("Could not create interactsh client: %s", err)
} else {
runner.interactsh = interactshClient
}
if options.RateLimitMinute > 0 {
- gologger.Print().Msgf("[%v] %v", aurora.BrightYellow("WRN"), "rate limit per minute is deprecated - use rate-limit-duration")
+ runner.Logger.Print().Msgf("[%v] %v", aurora.BrightYellow("WRN"), "rate limit per minute is deprecated - use rate-limit-duration")
options.RateLimit = options.RateLimitMinute
options.RateLimitDuration = time.Minute
}
if options.RateLimit > 0 && options.RateLimitDuration == 0 {
options.RateLimitDuration = time.Second
}
- if options.RateLimit == 0 && options.RateLimitDuration == 0 {
- 	runner.rateLimiter = ratelimit.NewUnlimited(context.Background())
- } else {
- 	runner.rateLimiter = ratelimit.New(context.Background(), uint(options.RateLimit), options.RateLimitDuration)
- }
+ runner.rateLimiter = utils.GetRateLimiter(context.Background(), options.RateLimit, options.RateLimitDuration)
if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err == nil {
runner.tmpDir = tmpDir
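The unlimited-versus-bounded rate limiter branching is now centralised in `utils.GetRateLimiter(ctx, rate, duration)`, which is also used by the SDK and the thread-safe engine later in this diff. Only the call sites are visible here, so the helper below is an assumed reconstruction of what it plausibly does, not the actual implementation:

```go
package main

import (
	"context"
	"time"

	"github.com/projectdiscovery/ratelimit"
)

// getRateLimiter mirrors how utils.GetRateLimiter is called in the diff:
// an unlimited limiter when no rate is configured, a bounded one otherwise.
// The body is an assumption; only the signature is visible at the call sites.
func getRateLimiter(ctx context.Context, rate int, duration time.Duration) *ratelimit.Limiter {
	if rate == 0 && duration == 0 {
		return ratelimit.NewUnlimited(ctx)
	}
	return ratelimit.New(ctx, uint(rate), duration)
}

func main() {
	limiter := getRateLimiter(context.Background(), 150, time.Second)
	defer limiter.Stop()
	limiter.Take() // blocks according to the configured rate
}
```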
@ -382,7 +394,7 @@ func New(options *types.Options) (*Runner, error) {
} }
// runStandardEnumeration runs standard enumeration // runStandardEnumeration runs standard enumeration
func (r *Runner) runStandardEnumeration(executerOpts protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) { func (r *Runner) runStandardEnumeration(executerOpts *protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
if r.options.AutomaticScan { if r.options.AutomaticScan {
return r.executeSmartWorkflowInput(executerOpts, store, engine) return r.executeSmartWorkflowInput(executerOpts, store, engine)
} }
@ -413,7 +425,7 @@ func (r *Runner) Close() {
if r.inputProvider != nil { if r.inputProvider != nil {
r.inputProvider.Close() r.inputProvider.Close()
} }
protocolinit.Close() protocolinit.Close(r.options.ExecutionId)
if r.pprofServer != nil { if r.pprofServer != nil {
r.pprofServer.Stop() r.pprofServer.Stop()
} }
@@ -439,23 +451,22 @@ func (r *Runner) setupPDCPUpload(writer output.Writer) output.Writer {
if r.options.ScanID != "" {
r.options.EnableCloudUpload = true
}
- if !(r.options.EnableCloudUpload || EnableCloudUpload) {
- 	r.pdcpUploadErrMsg = fmt.Sprintf("[%v] Scan results upload to cloud is disabled.", r.colorizer.BrightYellow("WRN"))
+ if !r.options.EnableCloudUpload && !EnableCloudUpload {
+ 	r.pdcpUploadErrMsg = "Scan results upload to cloud is disabled."
return writer
}
- color := aurora.NewAurora(!r.options.NoColor)
h := &pdcpauth.PDCPCredHandler{}
creds, err := h.GetCreds()
if err != nil {
if err != pdcpauth.ErrNoCreds && !HideAutoSaveMsg {
- 	gologger.Verbose().Msgf("Could not get credentials for cloud upload: %s\n", err)
+ 	r.Logger.Verbose().Msgf("Could not get credentials for cloud upload: %s\n", err)
}
- r.pdcpUploadErrMsg = fmt.Sprintf("[%v] To view results on Cloud Dashboard, Configure API key from %v", color.BrightYellow("WRN"), pdcpauth.DashBoardURL)
+ r.pdcpUploadErrMsg = fmt.Sprintf("To view results on Cloud Dashboard, configure API key from %v", pdcpauth.DashBoardURL)
return writer
}
- uploadWriter, err := pdcp.NewUploadWriter(context.Background(), creds)
+ uploadWriter, err := pdcp.NewUploadWriter(context.Background(), r.Logger, creds)
if err != nil {
- 	r.pdcpUploadErrMsg = fmt.Sprintf("[%v] PDCP (%v) Auto-Save Failed: %s\n", color.BrightYellow("WRN"), pdcpauth.DashBoardURL, err)
+ 	r.pdcpUploadErrMsg = fmt.Sprintf("PDCP (%v) Auto-Save Failed: %s\n", pdcpauth.DashBoardURL, err)
return writer
}
if r.options.ScanID != "" {
@ -491,6 +502,7 @@ func (r *Runner) RunEnumeration() error {
Parser: r.parser, Parser: r.parser,
TemporaryDirectory: r.tmpDir, TemporaryDirectory: r.tmpDir,
FuzzStatsDB: r.fuzzStats, FuzzStatsDB: r.fuzzStats,
Logger: r.Logger,
} }
dastServer, err := server.New(&server.Options{ dastServer, err := server.New(&server.Options{
Address: r.options.DASTServerAddress, Address: r.options.DASTServerAddress,
@ -532,7 +544,7 @@ func (r *Runner) RunEnumeration() error {
// Create the executor options which will be used throughout the execution // Create the executor options which will be used throughout the execution
// stage by the nuclei engine modules. // stage by the nuclei engine modules.
executorOpts := protocols.ExecutorOptions{ executorOpts := &protocols.ExecutorOptions{
Output: r.output, Output: r.output,
Options: r.options, Options: r.options,
Progress: r.progress, Progress: r.progress,
@ -550,6 +562,8 @@ func (r *Runner) RunEnumeration() error {
Parser: r.parser, Parser: r.parser,
FuzzParamsFrequency: fuzzFreqCache, FuzzParamsFrequency: fuzzFreqCache,
GlobalMatchers: globalmatchers.New(), GlobalMatchers: globalmatchers.New(),
DoNotCache: r.options.DoNotCacheTemplates,
Logger: r.Logger,
} }
if config.DefaultConfig.IsDebugArgEnabled(config.DebugExportURLPattern) { if config.DefaultConfig.IsDebugArgEnabled(config.DebugExportURLPattern) {
@ -558,7 +572,7 @@ func (r *Runner) RunEnumeration() error {
} }
if len(r.options.SecretsFile) > 0 && !r.options.Validate { if len(r.options.SecretsFile) > 0 && !r.options.Validate {
authTmplStore, err := GetAuthTmplStore(*r.options, r.catalog, executorOpts) authTmplStore, err := GetAuthTmplStore(r.options, r.catalog, executorOpts)
if err != nil { if err != nil {
return errors.Wrap(err, "failed to load dynamic auth templates") return errors.Wrap(err, "failed to load dynamic auth templates")
} }
@ -578,8 +592,8 @@ func (r *Runner) RunEnumeration() error {
if r.options.ShouldUseHostError() { if r.options.ShouldUseHostError() {
maxHostError := r.options.MaxHostError maxHostError := r.options.MaxHostError
if r.options.TemplateThreads > maxHostError { if r.options.TemplateThreads > maxHostError {
gologger.Print().Msgf("[%v] The concurrency value is higher than max-host-error", r.colorizer.BrightYellow("WRN")) r.Logger.Print().Msgf("[%v] The concurrency value is higher than max-host-error", r.colorizer.BrightYellow("WRN"))
gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", r.options.TemplateThreads) r.Logger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", r.options.TemplateThreads)
maxHostError = r.options.TemplateThreads maxHostError = r.options.TemplateThreads
} }
@ -594,7 +608,7 @@ func (r *Runner) RunEnumeration() error {
executorEngine := core.New(r.options) executorEngine := core.New(r.options)
executorEngine.SetExecuterOptions(executorOpts) executorEngine.SetExecuterOptions(executorOpts)
workflowLoader, err := parsers.NewLoader(&executorOpts) workflowLoader, err := parsers.NewLoader(executorOpts)
if err != nil { if err != nil {
return errors.Wrap(err, "Could not create loader.") return errors.Wrap(err, "Could not create loader.")
} }
@ -633,7 +647,7 @@ func (r *Runner) RunEnumeration() error {
return err return err
} }
if stats.GetValue(templates.SyntaxErrorStats) == 0 && stats.GetValue(templates.SyntaxWarningStats) == 0 && stats.GetValue(templates.RuntimeWarningsStats) == 0 { if stats.GetValue(templates.SyntaxErrorStats) == 0 && stats.GetValue(templates.SyntaxWarningStats) == 0 && stats.GetValue(templates.RuntimeWarningsStats) == 0 {
gologger.Info().Msgf("All templates validated successfully\n") r.Logger.Info().Msgf("All templates validated successfully")
} else { } else {
return errors.New("encountered errors while performing template validation") return errors.New("encountered errors while performing template validation")
} }
@ -655,7 +669,7 @@ func (r *Runner) RunEnumeration() error {
} }
ret := uncover.GetUncoverTargetsFromMetadata(context.TODO(), store.Templates(), r.options.UncoverField, uncoverOpts) ret := uncover.GetUncoverTargetsFromMetadata(context.TODO(), store.Templates(), r.options.UncoverField, uncoverOpts)
for host := range ret { for host := range ret {
_ = r.inputProvider.SetWithExclusions(host) _ = r.inputProvider.SetWithExclusions(r.options.ExecutionId, host)
} }
} }
// display execution info like version , templates used etc // display execution info like version , templates used etc
@ -663,7 +677,7 @@ func (r *Runner) RunEnumeration() error {
// prefetch secrets if enabled // prefetch secrets if enabled
if executorOpts.AuthProvider != nil && r.options.PreFetchSecrets { if executorOpts.AuthProvider != nil && r.options.PreFetchSecrets {
gologger.Info().Msgf("Pre-fetching secrets from authprovider[s]") r.Logger.Info().Msgf("Pre-fetching secrets from authprovider[s]")
if err := executorOpts.AuthProvider.PreFetchSecrets(); err != nil { if err := executorOpts.AuthProvider.PreFetchSecrets(); err != nil {
return errors.Wrap(err, "could not pre-fetch secrets") return errors.Wrap(err, "could not pre-fetch secrets")
} }
@ -697,11 +711,12 @@ func (r *Runner) RunEnumeration() error {
if r.dastServer != nil { if r.dastServer != nil {
go func() { go func() {
if err := r.dastServer.Start(); err != nil { if err := r.dastServer.Start(); err != nil {
gologger.Error().Msgf("could not start dast server: %v", err) r.Logger.Error().Msgf("could not start dast server: %v", err)
} }
}() }()
} }
+ now := time.Now()
enumeration := false
var results *atomic.Bool
results, err = r.runStandardEnumeration(executorOpts, store, executorEngine)
@@ -725,11 +740,17 @@ func (r *Runner) RunEnumeration() error {
}
r.fuzzFrequencyCache.Close()
+ r.progress.Stop()
+ timeTaken := time.Since(now)
// todo: error propagation without canonical straight error check is required by cloud?
// use safe dereferencing to avoid potential panics in case of previous unchecked errors
if v := ptrutil.Safe(results); !v.Load() {
- gologger.Info().Msgf("No results found. Better luck next time!")
+ r.Logger.Info().Msgf("Scan completed in %s. No results found.", shortDur(timeTaken))
+ } else {
+ 	matchCount := r.output.ResultCount()
+ 	r.Logger.Info().Msgf("Scan completed in %s. %d matches found.", shortDur(timeTaken), matchCount)
}
// check if a passive scan was requested but no target was provided // check if a passive scan was requested but no target was provided
if r.options.OfflineHTTP && len(r.options.Targets) == 0 && r.options.TargetsFilePath == "" { if r.options.OfflineHTTP && len(r.options.Targets) == 0 && r.options.TargetsFilePath == "" {
return errors.Wrap(err, "missing required input (http response) to run passive templates") return errors.Wrap(err, "missing required input (http response) to run passive templates")
@ -738,6 +759,24 @@ func (r *Runner) RunEnumeration() error {
return err return err
} }
func shortDur(d time.Duration) string {
if d < time.Minute {
return d.String()
}
// Truncate to the nearest minute
d = d.Truncate(time.Minute)
s := d.String()
if strings.HasSuffix(s, "m0s") {
s = s[:len(s)-2]
}
if strings.HasSuffix(s, "h0m") {
s = s[:len(s)-2]
}
return s
}
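For reference, a few sample durations and the strings the new `shortDur` helper produces (the function is copied verbatim from the diff so the example stays self-contained):

```go
package main

import (
	"fmt"
	"strings"
	"time"
)

// Copy of shortDur from the diff, reproduced here only to make the example runnable.
func shortDur(d time.Duration) string {
	if d < time.Minute {
		return d.String()
	}
	// Truncate to the nearest minute
	d = d.Truncate(time.Minute)
	s := d.String()
	if strings.HasSuffix(s, "m0s") {
		s = s[:len(s)-2]
	}
	if strings.HasSuffix(s, "h0m") {
		s = s[:len(s)-2]
	}
	return s
}

func main() {
	fmt.Println(shortDur(42 * time.Second))                // 42s
	fmt.Println(shortDur(3*time.Minute + 20*time.Second))  // 3m
	fmt.Println(shortDur(2 * time.Hour))                   // 2h
}
```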
func (r *Runner) isInputNonHTTP() bool { func (r *Runner) isInputNonHTTP() bool {
var nonURLInput bool var nonURLInput bool
r.inputProvider.Iterate(func(value *contextargs.MetaInput) bool { r.inputProvider.Iterate(func(value *contextargs.MetaInput) bool {
@ -750,7 +789,7 @@ func (r *Runner) isInputNonHTTP() bool {
return nonURLInput return nonURLInput
} }
func (r *Runner) executeSmartWorkflowInput(executorOpts protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) { func (r *Runner) executeSmartWorkflowInput(executorOpts *protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
r.progress.Init(r.inputProvider.Count(), 0, 0) r.progress.Init(r.inputProvider.Count(), 0, 0)
service, err := automaticscan.New(automaticscan.Options{ service, err := automaticscan.New(automaticscan.Options{
@ -818,7 +857,7 @@ func (r *Runner) displayExecutionInfo(store *loader.Store) {
if tmplCount == 0 && workflowCount == 0 { if tmplCount == 0 && workflowCount == 0 {
// if dast flag is used print explicit warning // if dast flag is used print explicit warning
if r.options.DAST { if r.options.DAST {
gologger.DefaultLogger.Print().Msgf("[%v] No DAST templates found", aurora.BrightYellow("WRN")) r.Logger.Print().Msgf("[%v] No DAST templates found", aurora.BrightYellow("WRN"))
} }
stats.ForceDisplayWarning(templates.SkippedCodeTmplTamperedStats) stats.ForceDisplayWarning(templates.SkippedCodeTmplTamperedStats)
} else { } else {
@ -838,38 +877,38 @@ func (r *Runner) displayExecutionInfo(store *loader.Store) {
return fmt.Sprintf("Current %s version: %v %v", versionType, version, updateutils.GetVersionDescription(version, latestVersion)) return fmt.Sprintf("Current %s version: %v %v", versionType, version, updateutils.GetVersionDescription(version, latestVersion))
} }
- gologger.Info().Msgf(versionInfo(config.Version, cfg.LatestNucleiVersion, "nuclei"))
+ gologger.Info().Msg(versionInfo(config.Version, cfg.LatestNucleiVersion, "nuclei"))
- gologger.Info().Msgf(versionInfo(cfg.TemplateVersion, cfg.LatestNucleiTemplatesVersion, "nuclei-templates"))
+ gologger.Info().Msg(versionInfo(cfg.TemplateVersion, cfg.LatestNucleiTemplatesVersion, "nuclei-templates"))
if !HideAutoSaveMsg {
if r.pdcpUploadErrMsg != "" {
- gologger.Print().Msgf("%s", r.pdcpUploadErrMsg)
+ r.Logger.Warning().Msgf("%s", r.pdcpUploadErrMsg)
} else {
- gologger.Info().Msgf("To view results on cloud dashboard, visit %v/scans upon scan completion.", pdcpauth.DashBoardURL)
+ r.Logger.Info().Msgf("To view results on cloud dashboard, visit %v/scans upon scan completion.", pdcpauth.DashBoardURL)
}
} }
if tmplCount > 0 || workflowCount > 0 { if tmplCount > 0 || workflowCount > 0 {
if len(store.Templates()) > 0 { if len(store.Templates()) > 0 {
gologger.Info().Msgf("New templates added in latest release: %d", len(config.DefaultConfig.GetNewAdditions())) r.Logger.Info().Msgf("New templates added in latest release: %d", len(config.DefaultConfig.GetNewAdditions()))
gologger.Info().Msgf("Templates loaded for current scan: %d", len(store.Templates())) r.Logger.Info().Msgf("Templates loaded for current scan: %d", len(store.Templates()))
} }
if len(store.Workflows()) > 0 { if len(store.Workflows()) > 0 {
gologger.Info().Msgf("Workflows loaded for current scan: %d", len(store.Workflows())) r.Logger.Info().Msgf("Workflows loaded for current scan: %d", len(store.Workflows()))
} }
for k, v := range templates.SignatureStats { for k, v := range templates.SignatureStats {
value := v.Load() value := v.Load()
if value > 0 { if value > 0 {
if k == templates.Unsigned && !r.options.Silent && !config.DefaultConfig.HideTemplateSigWarning { if k == templates.Unsigned && !r.options.Silent && !config.DefaultConfig.HideTemplateSigWarning {
gologger.Print().Msgf("[%v] Loading %d unsigned templates for scan. Use with caution.", r.colorizer.BrightYellow("WRN"), value) r.Logger.Print().Msgf("[%v] Loading %d unsigned templates for scan. Use with caution.", r.colorizer.BrightYellow("WRN"), value)
} else { } else {
gologger.Info().Msgf("Executing %d signed templates from %s", value, k) r.Logger.Info().Msgf("Executing %d signed templates from %s", value, k)
} }
} }
} }
} }
if r.inputProvider.Count() > 0 { if r.inputProvider.Count() > 0 {
gologger.Info().Msgf("Targets loaded for current scan: %d", r.inputProvider.Count()) r.Logger.Info().Msgf("Targets loaded for current scan: %d", r.inputProvider.Count())
} }
} }
@ -896,7 +935,7 @@ func UploadResultsToCloud(options *types.Options) error {
return errors.Wrap(err, "could not get credentials for cloud upload") return errors.Wrap(err, "could not get credentials for cloud upload")
} }
ctx := context.TODO() ctx := context.TODO()
uploadWriter, err := pdcp.NewUploadWriter(ctx, creds) uploadWriter, err := pdcp.NewUploadWriter(ctx, options.Logger, creds)
if err != nil { if err != nil {
return errors.Wrap(err, "could not create upload writer") return errors.Wrap(err, "could not create upload writer")
} }
@ -915,19 +954,21 @@ func UploadResultsToCloud(options *types.Options) error {
if err != nil { if err != nil {
return errors.Wrap(err, "could not open scan upload file") return errors.Wrap(err, "could not open scan upload file")
} }
defer file.Close() defer func() {
_ = file.Close()
}()
gologger.Info().Msgf("Uploading scan results to cloud dashboard from %s", options.ScanUploadFile) options.Logger.Info().Msgf("Uploading scan results to cloud dashboard from %s", options.ScanUploadFile)
dec := json.NewDecoder(file) dec := json.NewDecoder(file)
for dec.More() { for dec.More() {
var r output.ResultEvent var r output.ResultEvent
err := dec.Decode(&r) err := dec.Decode(&r)
if err != nil { if err != nil {
gologger.Warning().Msgf("Could not decode jsonl: %s\n", err) options.Logger.Warning().Msgf("Could not decode jsonl: %s\n", err)
continue continue
} }
if err = uploadWriter.Write(&r); err != nil { if err = uploadWriter.Write(&r); err != nil {
gologger.Warning().Msgf("[%s] failed to upload: %s\n", r.TemplateID, err) options.Logger.Warning().Msgf("[%s] failed to upload: %s\n", r.TemplateID, err)
} }
} }
uploadWriter.Close() uploadWriter.Close()


@ -64,8 +64,8 @@ func TestWalkReflectStructAssignsEnvVars(t *testing.T) {
B: "$VAR_TWO", B: "$VAR_TWO",
}, },
} }
os.Setenv("VAR_EXAMPLE", "value") _ = os.Setenv("VAR_EXAMPLE", "value")
os.Setenv("VAR_TWO", "value2") _ = os.Setenv("VAR_TWO", "value2")
Walk(testStruct, expandEndVars) Walk(testStruct, expandEndVars)
@ -79,9 +79,9 @@ func TestWalkReflectStructHandlesDifferentTypes(t *testing.T) {
B: "$VAR_TWO", B: "$VAR_TWO",
C: "$VAR_THREE", C: "$VAR_THREE",
} }
os.Setenv("VAR_EXAMPLE", "value") _ = os.Setenv("VAR_EXAMPLE", "value")
os.Setenv("VAR_TWO", "2") _ = os.Setenv("VAR_TWO", "2")
os.Setenv("VAR_THREE", "true") _ = os.Setenv("VAR_THREE", "true")
Walk(testStruct, expandEndVars) Walk(testStruct, expandEndVars)
@ -96,9 +96,9 @@ func TestWalkReflectStructEmpty(t *testing.T) {
B: "", B: "",
C: "$VAR_THREE", C: "$VAR_THREE",
} }
os.Setenv("VAR_EXAMPLE", "value") _ = os.Setenv("VAR_EXAMPLE", "value")
os.Setenv("VAR_TWO", "2") _ = os.Setenv("VAR_TWO", "2")
os.Setenv("VAR_THREE", "true") _ = os.Setenv("VAR_THREE", "true")
Walk(testStruct, expandEndVars) Walk(testStruct, expandEndVars)
@ -116,7 +116,7 @@ func TestWalkReflectStructWithNoYamlTag(t *testing.T) {
C: "$GITHUB_USER", C: "$GITHUB_USER",
} }
os.Setenv("GITHUB_USER", "testuser") _ = os.Setenv("GITHUB_USER", "testuser")
Walk(test, expandEndVars) Walk(test, expandEndVars)
require.Equal(t, "testuser", test.A) require.Equal(t, "testuser", test.A)
@ -132,9 +132,9 @@ func TestWalkReflectStructHandlesNestedStructs(t *testing.T) {
C: "$VAR_THREE", C: "$VAR_THREE",
}, },
} }
os.Setenv("VAR_EXAMPLE", "value") _ = os.Setenv("VAR_EXAMPLE", "value")
os.Setenv("VAR_TWO", "2") _ = os.Setenv("VAR_TWO", "2")
os.Setenv("VAR_THREE", "true") _ = os.Setenv("VAR_THREE", "true")
Walk(testStruct, expandEndVars) Walk(testStruct, expandEndVars)


@ -12,7 +12,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/templates" "github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/types"
) )
@ -25,7 +24,7 @@ func (r *Runner) logAvailableTemplate(tplPath string) {
panic("not a template") panic("not a template")
} }
if err != nil { if err != nil {
gologger.Error().Msgf("Could not parse file '%s': %s\n", tplPath, err) r.Logger.Error().Msgf("Could not parse file '%s': %s\n", tplPath, err)
} else { } else {
r.verboseTemplate(tpl) r.verboseTemplate(tpl)
} }
@ -33,14 +32,14 @@ func (r *Runner) logAvailableTemplate(tplPath string) {
// log available templates for verbose (-vv) // log available templates for verbose (-vv)
func (r *Runner) verboseTemplate(tpl *templates.Template) { func (r *Runner) verboseTemplate(tpl *templates.Template) {
gologger.Print().Msgf("%s\n", templates.TemplateLogMessage(tpl.ID, r.Logger.Print().Msgf("%s\n", templates.TemplateLogMessage(tpl.ID,
types.ToString(tpl.Info.Name), types.ToString(tpl.Info.Name),
tpl.Info.Authors.ToSlice(), tpl.Info.Authors.ToSlice(),
tpl.Info.SeverityHolder.Severity)) tpl.Info.SeverityHolder.Severity))
} }
func (r *Runner) listAvailableStoreTemplates(store *loader.Store) { func (r *Runner) listAvailableStoreTemplates(store *loader.Store) {
gologger.Print().Msgf( r.Logger.Print().Msgf(
"\nListing available %v nuclei templates for %v", "\nListing available %v nuclei templates for %v",
config.DefaultConfig.TemplateVersion, config.DefaultConfig.TemplateVersion,
config.DefaultConfig.TemplatesDirectory, config.DefaultConfig.TemplatesDirectory,
@ -52,20 +51,20 @@ func (r *Runner) listAvailableStoreTemplates(store *loader.Store) {
path := tpl.Path path := tpl.Path
tplBody, err := store.ReadTemplateFromURI(path, true) tplBody, err := store.ReadTemplateFromURI(path, true)
if err != nil { if err != nil {
gologger.Error().Msgf("Could not read the template %s: %s", path, err) r.Logger.Error().Msgf("Could not read the template %s: %s", path, err)
continue continue
} }
if colorize { if colorize {
path = aurora.Cyan(tpl.Path).String() path = aurora.Cyan(tpl.Path).String()
tplBody, err = r.highlightTemplate(&tplBody) tplBody, err = r.highlightTemplate(&tplBody)
if err != nil { if err != nil {
gologger.Error().Msgf("Could not highlight the template %s: %s", tpl.Path, err) r.Logger.Error().Msgf("Could not highlight the template %s: %s", tpl.Path, err)
continue continue
} }
} }
gologger.Silent().Msgf("Template: %s\n\n%s", path, tplBody) r.Logger.Print().Msgf("Template: %s\n\n%s", path, tplBody)
} else { } else {
gologger.Silent().Msgf("%s\n", strings.TrimPrefix(tpl.Path, config.DefaultConfig.TemplatesDirectory+string(filepath.Separator))) r.Logger.Print().Msgf("%s\n", strings.TrimPrefix(tpl.Path, config.DefaultConfig.TemplatesDirectory+string(filepath.Separator)))
} }
} else { } else {
r.verboseTemplate(tpl) r.verboseTemplate(tpl)
@ -74,7 +73,7 @@ func (r *Runner) listAvailableStoreTemplates(store *loader.Store) {
} }
func (r *Runner) listAvailableStoreTags(store *loader.Store) { func (r *Runner) listAvailableStoreTags(store *loader.Store) {
gologger.Print().Msgf( r.Logger.Print().Msgf(
"\nListing available %v nuclei tags for %v", "\nListing available %v nuclei tags for %v",
config.DefaultConfig.TemplateVersion, config.DefaultConfig.TemplateVersion,
config.DefaultConfig.TemplatesDirectory, config.DefaultConfig.TemplatesDirectory,
@ -100,9 +99,9 @@ func (r *Runner) listAvailableStoreTags(store *loader.Store) {
for _, tag := range tagsList { for _, tag := range tagsList {
if r.options.JSONL { if r.options.JSONL {
marshalled, _ := jsoniter.Marshal(tag) marshalled, _ := jsoniter.Marshal(tag)
gologger.Silent().Msgf("%s\n", string(marshalled)) r.Logger.Debug().Msgf("%s", string(marshalled))
} else { } else {
gologger.Silent().Msgf("%s (%d)\n", tag.Key, tag.Value) r.Logger.Debug().Msgf("%s (%d)", tag.Key, tag.Value)
} }
} }
} }


@ -41,7 +41,7 @@ type nucleiExecutor struct {
engine *core.Engine engine *core.Engine
store *loader.Store store *loader.Store
options *NucleiExecutorOptions options *NucleiExecutorOptions
executorOpts protocols.ExecutorOptions executorOpts *protocols.ExecutorOptions
} }
type NucleiExecutorOptions struct { type NucleiExecutorOptions struct {
@ -58,6 +58,7 @@ type NucleiExecutorOptions struct {
Colorizer aurora.Aurora Colorizer aurora.Aurora
Parser parser.Parser Parser parser.Parser
TemporaryDirectory string TemporaryDirectory string
Logger *gologger.Logger
} }
func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) { func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
@ -66,7 +67,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
// Create the executor options which will be used throughout the execution // Create the executor options which will be used throughout the execution
// stage by the nuclei engine modules. // stage by the nuclei engine modules.
executorOpts := protocols.ExecutorOptions{ executorOpts := &protocols.ExecutorOptions{
Output: opts.Output, Output: opts.Output,
Options: opts.Options, Options: opts.Options,
Progress: opts.Progress, Progress: opts.Progress,
@ -85,6 +86,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
FuzzParamsFrequency: fuzzFreqCache, FuzzParamsFrequency: fuzzFreqCache,
GlobalMatchers: globalmatchers.New(), GlobalMatchers: globalmatchers.New(),
FuzzStatsDB: opts.FuzzStatsDB, FuzzStatsDB: opts.FuzzStatsDB,
Logger: opts.Logger,
} }
if opts.Options.ShouldUseHostError() { if opts.Options.ShouldUseHostError() {
@ -93,7 +95,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
maxHostError = 100 // auto adjust for fuzzings maxHostError = 100 // auto adjust for fuzzings
} }
if opts.Options.TemplateThreads > maxHostError { if opts.Options.TemplateThreads > maxHostError {
gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", opts.Options.TemplateThreads) opts.Logger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", opts.Options.TemplateThreads)
maxHostError = opts.Options.TemplateThreads maxHostError = opts.Options.TemplateThreads
} }
@ -107,7 +109,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
executorEngine := core.New(opts.Options) executorEngine := core.New(opts.Options)
executorEngine.SetExecuterOptions(executorOpts) executorEngine.SetExecuterOptions(executorOpts)
workflowLoader, err := parsers.NewLoader(&executorOpts) workflowLoader, err := parsers.NewLoader(executorOpts)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "Could not create loader options.") return nil, errors.Wrap(err, "Could not create loader options.")
} }


@ -112,7 +112,7 @@ func New(options *Options) (*DASTServer, error) {
func NewStatsServer(fuzzStatsDB *stats.Tracker) (*DASTServer, error) { func NewStatsServer(fuzzStatsDB *stats.Tracker) (*DASTServer, error) {
server := &DASTServer{ server := &DASTServer{
nucleiExecutor: &nucleiExecutor{ nucleiExecutor: &nucleiExecutor{
executorOpts: protocols.ExecutorOptions{ executorOpts: &protocols.ExecutorOptions{
FuzzStatsDB: fuzzStatsDB, FuzzStatsDB: fuzzStatsDB,
}, },
}, },
@ -125,7 +125,7 @@ func NewStatsServer(fuzzStatsDB *stats.Tracker) (*DASTServer, error) {
func (s *DASTServer) Close() { func (s *DASTServer) Close() {
s.nucleiExecutor.Close() s.nucleiExecutor.Close()
s.echo.Close() _ = s.echo.Close()
s.tasksPool.StopAndWaitFor(1 * time.Minute) s.tasksPool.StopAndWaitFor(1 * time.Minute)
} }


@ -7,7 +7,8 @@ import (
"github.com/projectdiscovery/goflags" "github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/ratelimit" "github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/projectdiscovery/utils/errkit"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider" "github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog" "github.com/projectdiscovery/nuclei/v3/pkg/catalog"
@ -19,6 +20,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/utils/vardump" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/utils/vardump"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless/engine" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless/engine"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/types" "github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
pkgtypes "github.com/projectdiscovery/nuclei/v3/pkg/types"
) )
// TemplateSources contains template sources // TemplateSources contains template sources
@ -101,7 +103,7 @@ type InteractshOpts interactsh.Options
func WithInteractshOptions(opts InteractshOpts) NucleiSDKOptions { func WithInteractshOptions(opts InteractshOpts) NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
if e.mode == threadSafe { if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithInteractshOptions") return errkit.Wrap(ErrOptionsNotSupported, "WithInteractshOptions")
} }
optsPtr := &opts optsPtr := &opts
e.interactshOpts = (*interactsh.Options)(optsPtr) e.interactshOpts = (*interactsh.Options)(optsPtr)
@ -179,7 +181,7 @@ func WithGlobalRateLimitCtx(ctx context.Context, maxTokens int, duration time.Du
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
e.opts.RateLimit = maxTokens e.opts.RateLimit = maxTokens
e.opts.RateLimitDuration = duration e.opts.RateLimitDuration = duration
e.rateLimiter = ratelimit.New(ctx, uint(e.opts.RateLimit), e.opts.RateLimitDuration) e.rateLimiter = utils.GetRateLimiter(ctx, e.opts.RateLimit, e.opts.RateLimitDuration)
return nil return nil
} }
} }
@ -205,7 +207,7 @@ func EnableHeadlessWithOpts(hopts *HeadlessOpts) NucleiSDKOptions {
e.opts.UseInstalledChrome = hopts.UseChrome e.opts.UseInstalledChrome = hopts.UseChrome
} }
if engine.MustDisableSandbox() { if engine.MustDisableSandbox() {
gologger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox\n") e.Logger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox")
} }
browser, err := engine.New(e.opts) browser, err := engine.New(e.opts)
if err != nil { if err != nil {
@ -228,7 +230,7 @@ type StatsOptions struct {
func EnableStatsWithOpts(opts StatsOptions) NucleiSDKOptions { func EnableStatsWithOpts(opts StatsOptions) NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
if e.mode == threadSafe { if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("EnableStatsWithOpts") return errkit.Wrap(ErrOptionsNotSupported, "EnableStatsWithOpts")
} }
if opts.Interval == 0 { if opts.Interval == 0 {
opts.Interval = 5 //sec opts.Interval = 5 //sec
@ -256,7 +258,7 @@ type VerbosityOptions struct {
func WithVerbosity(opts VerbosityOptions) NucleiSDKOptions { func WithVerbosity(opts VerbosityOptions) NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
if e.mode == threadSafe { if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithVerbosity") return errkit.Wrap(ErrOptionsNotSupported, "WithVerbosity")
} }
e.opts.Verbose = opts.Verbose e.opts.Verbose = opts.Verbose
e.opts.Silent = opts.Silent e.opts.Silent = opts.Silent
@ -289,15 +291,15 @@ type NetworkConfig struct {
func WithNetworkConfig(opts NetworkConfig) NucleiSDKOptions { func WithNetworkConfig(opts NetworkConfig) NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
if e.mode == threadSafe { if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithNetworkConfig") return errkit.Wrap(ErrOptionsNotSupported, "WithNetworkConfig")
} }
e.opts.NoHostErrors = opts.DisableMaxHostErr e.opts.NoHostErrors = opts.DisableMaxHostErr
e.opts.MaxHostError = opts.MaxHostError e.opts.MaxHostError = opts.MaxHostError
if e.opts.ShouldUseHostError() { if e.opts.ShouldUseHostError() {
maxHostError := opts.MaxHostError maxHostError := opts.MaxHostError
if e.opts.TemplateThreads > maxHostError { if e.opts.TemplateThreads > maxHostError {
gologger.Print().Msgf("[%v] The concurrency value is higher than max-host-error", e.executerOpts.Colorizer.BrightYellow("WRN")) e.Logger.Warning().Msg("The concurrency value is higher than max-host-error")
gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", e.opts.TemplateThreads) e.Logger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", e.opts.TemplateThreads)
maxHostError = e.opts.TemplateThreads maxHostError = e.opts.TemplateThreads
e.opts.MaxHostError = maxHostError e.opts.MaxHostError = maxHostError
} }
@ -320,7 +322,7 @@ func WithNetworkConfig(opts NetworkConfig) NucleiSDKOptions {
func WithProxy(proxy []string, proxyInternalRequests bool) NucleiSDKOptions { func WithProxy(proxy []string, proxyInternalRequests bool) NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
if e.mode == threadSafe { if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithProxy") return errkit.Wrap(ErrOptionsNotSupported, "WithProxy")
} }
e.opts.Proxy = proxy e.opts.Proxy = proxy
e.opts.ProxyInternal = proxyInternalRequests e.opts.ProxyInternal = proxyInternalRequests
@ -345,7 +347,7 @@ type OutputWriter output.Writer
func UseOutputWriter(writer OutputWriter) NucleiSDKOptions { func UseOutputWriter(writer OutputWriter) NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
if e.mode == threadSafe { if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("UseOutputWriter") return errkit.Wrap(ErrOptionsNotSupported, "UseOutputWriter")
} }
e.customWriter = writer e.customWriter = writer
return nil return nil
@ -360,7 +362,7 @@ type StatsWriter progress.Progress
func UseStatsWriter(writer StatsWriter) NucleiSDKOptions { func UseStatsWriter(writer StatsWriter) NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
if e.mode == threadSafe { if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("UseStatsWriter") return errkit.Wrap(ErrOptionsNotSupported, "UseStatsWriter")
} }
e.customProgress = writer e.customProgress = writer
return nil return nil
@ -374,7 +376,7 @@ func UseStatsWriter(writer StatsWriter) NucleiSDKOptions {
func WithTemplateUpdateCallback(disableTemplatesAutoUpgrade bool, callback func(newVersion string)) NucleiSDKOptions { func WithTemplateUpdateCallback(disableTemplatesAutoUpgrade bool, callback func(newVersion string)) NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
if e.mode == threadSafe { if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithTemplateUpdateCallback") return errkit.Wrap(ErrOptionsNotSupported, "WithTemplateUpdateCallback")
} }
e.disableTemplatesAutoUpgrade = disableTemplatesAutoUpgrade e.disableTemplatesAutoUpgrade = disableTemplatesAutoUpgrade
e.onUpdateAvailableCallback = callback e.onUpdateAvailableCallback = callback
@ -386,7 +388,7 @@ func WithTemplateUpdateCallback(disableTemplatesAutoUpgrade bool, callback func(
func WithSandboxOptions(allowLocalFileAccess bool, restrictLocalNetworkAccess bool) NucleiSDKOptions { func WithSandboxOptions(allowLocalFileAccess bool, restrictLocalNetworkAccess bool) NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
if e.mode == threadSafe { if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithSandboxOptions") return errkit.Wrap(ErrOptionsNotSupported, "WithSandboxOptions")
} }
e.opts.AllowLocalFileAccess = allowLocalFileAccess e.opts.AllowLocalFileAccess = allowLocalFileAccess
e.opts.RestrictLocalNetworkAccess = restrictLocalNetworkAccess e.opts.RestrictLocalNetworkAccess = restrictLocalNetworkAccess
@ -419,6 +421,14 @@ func EnableGlobalMatchersTemplates() NucleiSDKOptions {
} }
} }
// DisableTemplateCache disables template caching
func DisableTemplateCache() NucleiSDKOptions {
return func(e *NucleiEngine) error {
e.opts.DoNotCacheTemplates = true
return nil
}
}
// EnableFileTemplates allows loading/executing file protocol templates // EnableFileTemplates allows loading/executing file protocol templates
func EnableFileTemplates() NucleiSDKOptions { func EnableFileTemplates() NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
@ -463,6 +473,14 @@ func EnablePassiveMode() NucleiSDKOptions {
} }
} }
// EnableMatcherStatus allows enabling matcher status
func EnableMatcherStatus() NucleiSDKOptions {
return func(e *NucleiEngine) error {
e.opts.MatcherStatus = true
return nil
}
}
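The two SDK toggles added in this file, `DisableTemplateCache` and `EnableMatcherStatus`, follow the existing functional-option pattern. A hedged usage sketch; `NewNucleiEngine` and `Close` are the established SDK entry points, the particular combination of options is illustrative:

```go
package main

import (
	"log"

	nuclei "github.com/projectdiscovery/nuclei/v3/lib"
)

func main() {
	engine, err := nuclei.NewNucleiEngine(
		nuclei.DisableTemplateCache(), // do not keep parsed templates cached between runs
		nuclei.EnableMatcherStatus(),  // also report matcher status for non-matches
	)
	if err != nil {
		log.Fatal(err)
	}
	defer engine.Close()
}
```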
// WithAuthProvider allows setting a custom authprovider implementation // WithAuthProvider allows setting a custom authprovider implementation
func WithAuthProvider(provider authprovider.AuthProvider) NucleiSDKOptions { func WithAuthProvider(provider authprovider.AuthProvider) NucleiSDKOptions {
return func(e *NucleiEngine) error { return func(e *NucleiEngine) error {
@ -519,3 +537,25 @@ func WithResumeFile(file string) NucleiSDKOptions {
return nil return nil
} }
} }
// WithLogger allows setting a shared gologger instance
func WithLogger(logger *gologger.Logger) NucleiSDKOptions {
return func(e *NucleiEngine) error {
e.Logger = logger
if e.opts != nil {
e.opts.Logger = logger
}
if e.executerOpts != nil {
e.executerOpts.Logger = logger
}
return nil
}
}
// WithOptions sets all options at once
func WithOptions(opts *pkgtypes.Options) NucleiSDKOptions {
return func(e *NucleiEngine) error {
e.opts = opts
return nil
}
}
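Similarly, a sketch combining the new `WithLogger` and `WithOptions` options. Since `WithOptions` replaces the whole options struct, it presumably needs to come before finer-grained options such as `WithLogger` (an ordering assumption, not stated in the diff):

```go
package main

import (
	"log"

	"github.com/projectdiscovery/gologger"
	nuclei "github.com/projectdiscovery/nuclei/v3/lib"
	"github.com/projectdiscovery/nuclei/v3/pkg/types"
)

func main() {
	opts := types.DefaultOptions()
	opts.Verbose = true // illustrative tweak on the replaced option set

	engine, err := nuclei.NewNucleiEngine(
		nuclei.WithOptions(opts),                  // replace the default option set wholesale
		nuclei.WithLogger(gologger.DefaultLogger), // share one logger between SDK and host app
	)
	if err != nil {
		log.Fatal(err)
	}
	defer engine.Close()
}
```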


@ -12,8 +12,9 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/output" "github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols" "github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/ratelimit" "github.com/projectdiscovery/nuclei/v3/pkg/utils"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
"github.com/rs/xid"
) )
// unsafeOptions are those nuclei objects/instances/types // unsafeOptions are those nuclei objects/instances/types
@ -21,14 +22,14 @@ import (
// hence they are ephemeral and are created on every ExecuteNucleiWithOpts invocation // hence they are ephemeral and are created on every ExecuteNucleiWithOpts invocation
// in ThreadSafeNucleiEngine // in ThreadSafeNucleiEngine
type unsafeOptions struct { type unsafeOptions struct {
executerOpts protocols.ExecutorOptions executerOpts *protocols.ExecutorOptions
engine *core.Engine engine *core.Engine
} }
// createEphemeralObjects creates ephemeral nuclei objects/instances/types // createEphemeralObjects creates ephemeral nuclei objects/instances/types
func createEphemeralObjects(ctx context.Context, base *NucleiEngine, opts *types.Options) (*unsafeOptions, error) { func createEphemeralObjects(ctx context.Context, base *NucleiEngine, opts *types.Options) (*unsafeOptions, error) {
u := &unsafeOptions{} u := &unsafeOptions{}
u.executerOpts = protocols.ExecutorOptions{ u.executerOpts = &protocols.ExecutorOptions{
Output: base.customWriter, Output: base.customWriter,
Options: opts, Options: opts,
Progress: base.customProgress, Progress: base.customProgress,
@ -52,11 +53,7 @@ func createEphemeralObjects(ctx context.Context, base *NucleiEngine, opts *types
if opts.RateLimit > 0 && opts.RateLimitDuration == 0 {
opts.RateLimitDuration = time.Second
}
- if opts.RateLimit == 0 && opts.RateLimitDuration == 0 {
- 	u.executerOpts.RateLimiter = ratelimit.NewUnlimited(ctx)
- } else {
- 	u.executerOpts.RateLimiter = ratelimit.New(ctx, uint(opts.RateLimit), opts.RateLimitDuration)
- }
+ u.executerOpts.RateLimiter = utils.GetRateLimiter(ctx, opts.RateLimit, opts.RateLimitDuration)
u.engine = core.New(opts)
u.engine.SetExecuterOptions(u.executerOpts)
return u, nil
@ -88,9 +85,11 @@ type ThreadSafeNucleiEngine struct {
// whose methods are thread-safe and can be used concurrently // whose methods are thread-safe and can be used concurrently
// Note: Non-thread-safe methods start with Global prefix // Note: Non-thread-safe methods start with Global prefix
func NewThreadSafeNucleiEngineCtx(ctx context.Context, opts ...NucleiSDKOptions) (*ThreadSafeNucleiEngine, error) { func NewThreadSafeNucleiEngineCtx(ctx context.Context, opts ...NucleiSDKOptions) (*ThreadSafeNucleiEngine, error) {
defaultOptions := types.DefaultOptions()
defaultOptions.ExecutionId = xid.New().String()
// default options // default options
e := &NucleiEngine{ e := &NucleiEngine{
opts: types.DefaultOptions(), opts: defaultOptions,
mode: threadSafe, mode: threadSafe,
} }
for _, option := range opts { for _, option := range opts {
@ -125,8 +124,8 @@ func (e *ThreadSafeNucleiEngine) GlobalResultCallback(callback func(event *outpu
// by invoking this method with different options and targets // by invoking this method with different options and targets
// Note: Not all options are thread-safe. this method will throw error if you try to use non-thread-safe options // Note: Not all options are thread-safe. this method will throw error if you try to use non-thread-safe options
func (e *ThreadSafeNucleiEngine) ExecuteNucleiWithOptsCtx(ctx context.Context, targets []string, opts ...NucleiSDKOptions) error { func (e *ThreadSafeNucleiEngine) ExecuteNucleiWithOptsCtx(ctx context.Context, targets []string, opts ...NucleiSDKOptions) error {
baseOpts := *e.eng.opts baseOpts := e.eng.opts.Copy()
tmpEngine := &NucleiEngine{opts: &baseOpts, mode: threadSafe} tmpEngine := &NucleiEngine{opts: baseOpts, mode: threadSafe}
for _, option := range opts { for _, option := range opts {
if err := option(tmpEngine); err != nil { if err := option(tmpEngine); err != nil {
return err return err
@ -142,19 +141,19 @@ func (e *ThreadSafeNucleiEngine) ExecuteNucleiWithOptsCtx(ctx context.Context, t
defer closeEphemeralObjects(unsafeOpts) defer closeEphemeralObjects(unsafeOpts)
// load templates // load templates
workflowLoader, err := workflow.NewLoader(&unsafeOpts.executerOpts) workflowLoader, err := workflow.NewLoader(unsafeOpts.executerOpts)
if err != nil { if err != nil {
return errorutil.New("Could not create workflow loader: %s\n", err) return errkit.Wrapf(err, "Could not create workflow loader: %s", err)
} }
unsafeOpts.executerOpts.WorkflowLoader = workflowLoader unsafeOpts.executerOpts.WorkflowLoader = workflowLoader
store, err := loader.New(loader.NewConfig(tmpEngine.opts, e.eng.catalog, unsafeOpts.executerOpts)) store, err := loader.New(loader.NewConfig(tmpEngine.opts, e.eng.catalog, unsafeOpts.executerOpts))
if err != nil { if err != nil {
return errorutil.New("Could not create loader client: %s\n", err) return errkit.Wrapf(err, "Could not create loader client: %s", err)
} }
store.Load() store.Load()
inputProvider := provider.NewSimpleInputProviderWithUrls(targets...) inputProvider := provider.NewSimpleInputProviderWithUrls(e.eng.opts.ExecutionId, targets...)
if len(store.Templates()) == 0 && len(store.Workflows()) == 0 { if len(store.Templates()) == 0 && len(store.Workflows()) == 0 {
return ErrNoTemplatesAvailable return ErrNoTemplatesAvailable


@ -5,7 +5,9 @@ import (
"bytes" "bytes"
"context" "context"
"io" "io"
"sync"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider" "github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog" "github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
@ -26,7 +28,8 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/ratelimit" "github.com/projectdiscovery/ratelimit"
"github.com/projectdiscovery/retryablehttp-go" "github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
"github.com/rs/xid"
) )
// NucleiSDKOptions contains options for nuclei SDK // NucleiSDKOptions contains options for nuclei SDK
@ -34,13 +37,13 @@ type NucleiSDKOptions func(e *NucleiEngine) error
var ( var (
// ErrNotImplemented is returned when a feature is not implemented // ErrNotImplemented is returned when a feature is not implemented
ErrNotImplemented = errorutil.New("Not implemented") ErrNotImplemented = errkit.New("Not implemented")
// ErrNoTemplatesAvailable is returned when no templates are available to execute // ErrNoTemplatesAvailable is returned when no templates are available to execute
ErrNoTemplatesAvailable = errorutil.New("No templates available") ErrNoTemplatesAvailable = errkit.New("No templates available")
// ErrNoTargetsAvailable is returned when no targets are available to scan // ErrNoTargetsAvailable is returned when no targets are available to scan
ErrNoTargetsAvailable = errorutil.New("No targets available") ErrNoTargetsAvailable = errkit.New("No targets available")
// ErrOptionsNotSupported is returned when an option is not supported in thread safe mode // ErrOptionsNotSupported is returned when an option is not supported in thread safe mode
ErrOptionsNotSupported = errorutil.NewWithFmt("Option %v not supported in thread safe mode") ErrOptionsNotSupported = errkit.New("Option not supported in thread safe mode")
) )
type engineMode uint type engineMode uint
@ -64,6 +67,7 @@ type NucleiEngine struct {
templatesLoaded bool templatesLoaded bool
// unexported core fields // unexported core fields
ctx context.Context
interactshClient *interactsh.Client interactshClient *interactsh.Client
catalog catalog.Catalog catalog catalog.Catalog
rateLimiter *ratelimit.Limiter rateLimiter *ratelimit.Limiter
@ -84,20 +88,23 @@ type NucleiEngine struct {
customWriter output.Writer customWriter output.Writer
customProgress progress.Progress customProgress progress.Progress
rc reporting.Client rc reporting.Client
executerOpts protocols.ExecutorOptions executerOpts *protocols.ExecutorOptions
// Logger instance for the engine
Logger *gologger.Logger
} }
// LoadAllTemplates loads all nuclei template based on given options // LoadAllTemplates loads all nuclei template based on given options
func (e *NucleiEngine) LoadAllTemplates() error { func (e *NucleiEngine) LoadAllTemplates() error {
workflowLoader, err := workflow.NewLoader(&e.executerOpts) workflowLoader, err := workflow.NewLoader(e.executerOpts)
if err != nil { if err != nil {
return errorutil.New("Could not create workflow loader: %s\n", err) return errkit.Wrapf(err, "Could not create workflow loader: %s", err)
} }
e.executerOpts.WorkflowLoader = workflowLoader e.executerOpts.WorkflowLoader = workflowLoader
e.store, err = loader.New(loader.NewConfig(e.opts, e.catalog, e.executerOpts)) e.store, err = loader.New(loader.NewConfig(e.opts, e.catalog, e.executerOpts))
if err != nil { if err != nil {
return errorutil.New("Could not create loader client: %s\n", err) return errkit.Wrapf(err, "Could not create loader client: %s", err)
} }
e.store.Load() e.store.Load()
e.templatesLoaded = true e.templatesLoaded = true
@ -124,9 +131,9 @@ func (e *NucleiEngine) GetWorkflows() []*templates.Template {
func (e *NucleiEngine) LoadTargets(targets []string, probeNonHttp bool) { func (e *NucleiEngine) LoadTargets(targets []string, probeNonHttp bool) {
for _, target := range targets { for _, target := range targets {
if probeNonHttp { if probeNonHttp {
_ = e.inputProvider.SetWithProbe(target, e.httpxClient) _ = e.inputProvider.SetWithProbe(e.opts.ExecutionId, target, e.httpxClient)
} else { } else {
e.inputProvider.Set(target) e.inputProvider.Set(e.opts.ExecutionId, target)
} }
} }
} }
@ -136,9 +143,9 @@ func (e *NucleiEngine) LoadTargetsFromReader(reader io.Reader, probeNonHttp bool
buff := bufio.NewScanner(reader) buff := bufio.NewScanner(reader)
for buff.Scan() { for buff.Scan() {
if probeNonHttp { if probeNonHttp {
_ = e.inputProvider.SetWithProbe(buff.Text(), e.httpxClient) _ = e.inputProvider.SetWithProbe(e.opts.ExecutionId, buff.Text(), e.httpxClient)
} else { } else {
e.inputProvider.Set(buff.Text()) e.inputProvider.Set(e.opts.ExecutionId, buff.Text())
} }
} }
} }
@ -161,7 +168,7 @@ func (e *NucleiEngine) LoadTargetsWithHttpData(filePath string, filemode string)
// GetExecuterOptions returns the nuclei executor options // GetExecuterOptions returns the nuclei executor options
func (e *NucleiEngine) GetExecuterOptions() *protocols.ExecutorOptions { func (e *NucleiEngine) GetExecuterOptions() *protocols.ExecutorOptions {
return &e.executerOpts return e.executerOpts
} }
// ParseTemplate parses a template from given data // ParseTemplate parses a template from given data
@ -229,7 +236,7 @@ func (e *NucleiEngine) closeInternal() {
// Close all resources used by nuclei engine // Close all resources used by nuclei engine
func (e *NucleiEngine) Close() { func (e *NucleiEngine) Close() {
e.closeInternal() e.closeInternal()
protocolinit.Close() protocolinit.Close(e.opts.ExecutionId)
} }
// ExecuteCallbackWithCtx executes templates on targets and calls callback on each result(only if results are found) // ExecuteCallbackWithCtx executes templates on targets and calls callback on each result(only if results are found)
@ -246,9 +253,9 @@ func (e *NucleiEngine) ExecuteCallbackWithCtx(ctx context.Context, callback ...f
} }
filtered := []func(event *output.ResultEvent){} filtered := []func(event *output.ResultEvent){}
for _, callback := range callback { for _, cb := range callback {
if callback != nil { if cb != nil {
filtered = append(filtered, callback) filtered = append(filtered, cb)
} }
} }
e.resultCallbacks = append(e.resultCallbacks, filtered...) e.resultCallbacks = append(e.resultCallbacks, filtered...)
@ -258,15 +265,32 @@ func (e *NucleiEngine) ExecuteCallbackWithCtx(ctx context.Context, callback ...f
return ErrNoTemplatesAvailable return ErrNoTemplatesAvailable
} }
_ = e.engine.ExecuteScanWithOpts(ctx, templatesAndWorkflows, e.inputProvider, false) var wg sync.WaitGroup
defer e.engine.WorkPool().Wait() wg.Add(1)
go func() {
defer wg.Done()
_ = e.engine.ExecuteScanWithOpts(ctx, templatesAndWorkflows, e.inputProvider, false)
}()
// wait for context cancellation or scan completion // wait for context cancellation or scan completion
select {
case <-ctx.Done():
<-wait(&wg) // wait for scan to finish
return ctx.Err()
case <-wait(&wg):
// scan finished
}
return nil return nil
} }
// ExecuteWithCallback is same as ExecuteCallbackWithCtx but with default context // ExecuteWithCallback is same as ExecuteCallbackWithCtx but with default context
// Note this is deprecated and will be removed in future major release // Note this is deprecated and will be removed in future major release
func (e *NucleiEngine) ExecuteWithCallback(callback ...func(event *output.ResultEvent)) error { func (e *NucleiEngine) ExecuteWithCallback(callback ...func(event *output.ResultEvent)) error {
return e.ExecuteCallbackWithCtx(context.Background(), callback...) ctx := context.Background()
if e.ctx != nil {
ctx = e.ctx
}
return e.ExecuteCallbackWithCtx(ctx, callback...)
} }
// Options return nuclei Type Options // Options return nuclei Type Options
@ -287,9 +311,12 @@ func (e *NucleiEngine) Store() *loader.Store {
// NewNucleiEngineCtx creates a new nuclei engine instance with given context // NewNucleiEngineCtx creates a new nuclei engine instance with given context
func NewNucleiEngineCtx(ctx context.Context, options ...NucleiSDKOptions) (*NucleiEngine, error) { func NewNucleiEngineCtx(ctx context.Context, options ...NucleiSDKOptions) (*NucleiEngine, error) {
// default options // default options
defaultOptions := types.DefaultOptions()
defaultOptions.ExecutionId = xid.New().String()
e := &NucleiEngine{ e := &NucleiEngine{
opts: types.DefaultOptions(), opts: defaultOptions,
mode: singleInstance, mode: singleInstance,
ctx: ctx,
} }
for _, option := range options { for _, option := range options {
if err := option(e); err != nil { if err := option(e); err != nil {
@ -306,3 +333,18 @@ func NewNucleiEngineCtx(ctx context.Context, options ...NucleiSDKOptions) (*Nucl
func NewNucleiEngine(options ...NucleiSDKOptions) (*NucleiEngine, error) { func NewNucleiEngine(options ...NucleiSDKOptions) (*NucleiEngine, error) {
return NewNucleiEngineCtx(context.Background(), options...) return NewNucleiEngineCtx(context.Background(), options...)
} }
// GetParser returns the template parser with cache
func (e *NucleiEngine) GetParser() *templates.Parser {
return e.parser
}
// wait for a waitgroup to finish
func wait(wg *sync.WaitGroup) <-chan struct{} {
ch := make(chan struct{})
go func() {
defer close(ch)
wg.Wait()
}()
return ch
}
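The wait helper above turns a WaitGroup into a channel so the running scan can be raced against ctx.Done() in a select. A standalone, slightly simplified sketch of the same pattern (one shared done channel, and an illustrative run function instead of the SDK's real API):

package main

import (
	"context"
	"fmt"
	"sync"
	"time"
)

// wait exposes WaitGroup completion as a channel usable in a select.
func wait(wg *sync.WaitGroup) <-chan struct{} {
	ch := make(chan struct{})
	go func() {
		defer close(ch)
		wg.Wait()
	}()
	return ch
}

// run starts job in the background and returns either when the job finishes
// or, after cancellation, once the in-flight job has wound down.
func run(ctx context.Context, job func()) error {
	var wg sync.WaitGroup
	wg.Add(1)
	go func() {
		defer wg.Done()
		job()
	}()
	done := wait(&wg)
	select {
	case <-ctx.Done():
		<-done // let the in-flight job finish before returning
		return ctx.Err()
	case <-done:
		return nil // job finished before cancellation
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
	defer cancel()
	err := run(ctx, func() { time.Sleep(200 * time.Millisecond) })
	fmt.Println(err) // context deadline exceeded
}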


@ -8,6 +8,7 @@ import (
"time" "time"
"github.com/projectdiscovery/nuclei/v3/pkg/input" "github.com/projectdiscovery/nuclei/v3/pkg/input"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/logrusorgru/aurora" "github.com/logrusorgru/aurora"
"github.com/pkg/errors" "github.com/pkg/errors"
@ -29,7 +30,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolinit" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolinit"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/http/httpclientpool" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/http/httpclientpool"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/projectdiscovery/nuclei/v3/pkg/templates" "github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils" "github.com/projectdiscovery/nuclei/v3/pkg/testutils"
"github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/types"
@ -37,8 +37,6 @@ import (
"github.com/projectdiscovery/ratelimit" "github.com/projectdiscovery/ratelimit"
) )
var sharedInit *sync.Once
// applyRequiredDefaults to options // applyRequiredDefaults to options
func (e *NucleiEngine) applyRequiredDefaults(ctx context.Context) { func (e *NucleiEngine) applyRequiredDefaults(ctx context.Context) {
mockoutput := testutils.NewMockOutputWriter(e.opts.OmitTemplate) mockoutput := testutils.NewMockOutputWriter(e.opts.OmitTemplate)
@ -98,27 +96,39 @@ func (e *NucleiEngine) applyRequiredDefaults(ctx context.Context) {
// init // init
func (e *NucleiEngine) init(ctx context.Context) error { func (e *NucleiEngine) init(ctx context.Context) error {
// Set a default logger if one isn't provided in the options
if e.opts.Logger != nil {
e.Logger = e.opts.Logger
} else {
e.opts.Logger = &gologger.Logger{}
}
e.Logger = e.opts.Logger
if e.opts.Verbose { if e.opts.Verbose {
gologger.DefaultLogger.SetMaxLevel(levels.LevelVerbose) e.Logger.SetMaxLevel(levels.LevelVerbose)
} else if e.opts.Debug { } else if e.opts.Debug {
gologger.DefaultLogger.SetMaxLevel(levels.LevelDebug) e.Logger.SetMaxLevel(levels.LevelDebug)
} else if e.opts.Silent { } else if e.opts.Silent {
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent) e.Logger.SetMaxLevel(levels.LevelSilent)
} }
if err := runner.ValidateOptions(e.opts); err != nil { if err := runner.ValidateOptions(e.opts); err != nil {
return err return err
} }
e.parser = templates.NewParser() if e.opts.Parser != nil {
if op, ok := e.opts.Parser.(*templates.Parser); ok {
if sharedInit == nil || protocolstate.ShouldInit() { e.parser = op
sharedInit = &sync.Once{} }
} }
sharedInit.Do(func() { if e.parser == nil {
e.parser = templates.NewParser()
}
if protocolstate.ShouldInit(e.opts.ExecutionId) {
_ = protocolinit.Init(e.opts) _ = protocolinit.Init(e.opts)
}) }
if e.opts.ProxyInternal && e.opts.AliveHttpProxy != "" || e.opts.AliveSocksProxy != "" { if e.opts.ProxyInternal && e.opts.AliveHttpProxy != "" || e.opts.AliveSocksProxy != "" {
httpclient, err := httpclientpool.Get(e.opts, &httpclientpool.Configuration{}) httpclient, err := httpclientpool.Get(e.opts, &httpclientpool.Configuration{})
@ -160,7 +170,7 @@ func (e *NucleiEngine) init(ctx context.Context) error {
e.catalog = disk.NewCatalog(config.DefaultConfig.TemplatesDirectory) e.catalog = disk.NewCatalog(config.DefaultConfig.TemplatesDirectory)
} }
e.executerOpts = protocols.ExecutorOptions{ e.executerOpts = &protocols.ExecutorOptions{
Output: e.customWriter, Output: e.customWriter,
Options: e.opts, Options: e.opts,
Progress: e.customProgress, Progress: e.customProgress,
@ -173,12 +183,13 @@ func (e *NucleiEngine) init(ctx context.Context) error {
Browser: e.browserInstance, Browser: e.browserInstance,
Parser: e.parser, Parser: e.parser,
InputHelper: input.NewHelper(), InputHelper: input.NewHelper(),
Logger: e.opts.Logger,
} }
if e.opts.ShouldUseHostError() && e.hostErrCache != nil { if e.opts.ShouldUseHostError() && e.hostErrCache != nil {
e.executerOpts.HostErrorsCache = e.hostErrCache e.executerOpts.HostErrorsCache = e.hostErrCache
} }
if len(e.opts.SecretsFile) > 0 { if len(e.opts.SecretsFile) > 0 {
authTmplStore, err := runner.GetAuthTmplStore(*e.opts, e.catalog, e.executerOpts) authTmplStore, err := runner.GetAuthTmplStore(e.opts, e.catalog, e.executerOpts)
if err != nil { if err != nil {
return errors.Wrap(err, "failed to load dynamic auth templates") return errors.Wrap(err, "failed to load dynamic auth templates")
} }
@ -220,6 +231,25 @@ func (e *NucleiEngine) init(ctx context.Context) error {
} }
} }
// Handle the case where the user passed an existing parser that we can use as a cache
if e.opts.Parser != nil {
if cachedParser, ok := e.opts.Parser.(*templates.Parser); ok {
e.parser = cachedParser
e.opts.Parser = cachedParser
e.executerOpts.Parser = cachedParser
e.executerOpts.Options.Parser = cachedParser
}
}
// Create a new parser if necessary
if e.parser == nil {
op := templates.NewParser()
e.parser = op
e.opts.Parser = op
e.executerOpts.Parser = op
e.executerOpts.Options.Parser = op
}
e.engine = core.New(e.opts) e.engine = core.New(e.opts)
e.engine.SetExecuterOptions(e.executerOpts) e.engine.SetExecuterOptions(e.executerOpts)

lib/sdk_test.go (new file, 37 lines)

@ -0,0 +1,37 @@
package nuclei_test
import (
"context"
"log"
"testing"
"time"
nuclei "github.com/projectdiscovery/nuclei/v3/lib"
"github.com/stretchr/testify/require"
)
func TestContextCancelNucleiEngine(t *testing.T) {
// create nuclei engine with options
ctx, cancel := context.WithCancel(context.Background())
ne, err := nuclei.NewNucleiEngineCtx(ctx,
nuclei.WithTemplateFilters(nuclei.TemplateFilters{Tags: []string{"oast"}}),
nuclei.EnableStatsWithOpts(nuclei.StatsOptions{MetricServerPort: 0}),
)
require.NoError(t, err, "could not create nuclei engine")
go func() {
time.Sleep(time.Second * 2)
cancel()
log.Println("Test: context cancelled")
}()
// load targets and optionally probe non http/https targets
ne.LoadTargets([]string{"http://honey.scanme.sh"}, false)
// when callback is nil, nuclei will print JSON output to stdout
err = ne.ExecuteWithCallback(nil)
if err != nil {
// we expect a context cancellation error
require.ErrorIs(t, err, context.Canceled, "was expecting context cancellation error")
}
defer ne.Close()
}


@ -3,12 +3,12 @@ package authx
import ( import (
"fmt" "fmt"
"strings" "strings"
"sync" "sync/atomic"
"github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/replacer" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/replacer"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json" "github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
sliceutil "github.com/projectdiscovery/utils/slice" sliceutil "github.com/projectdiscovery/utils/slice"
) )
@ -30,8 +30,8 @@ type Dynamic struct {
Input string `json:"input" yaml:"input"` // (optional) target for the dynamic secret Input string `json:"input" yaml:"input"` // (optional) target for the dynamic secret
Extracted map[string]interface{} `json:"-" yaml:"-"` // extracted values from the dynamic secret Extracted map[string]interface{} `json:"-" yaml:"-"` // extracted values from the dynamic secret
fetchCallback LazyFetchSecret `json:"-" yaml:"-"` fetchCallback LazyFetchSecret `json:"-" yaml:"-"`
m *sync.Mutex `json:"-" yaml:"-"` // mutex for lazy fetch fetched *atomic.Bool `json:"-" yaml:"-"` // atomic flag to check if the secret has been fetched
fetched bool `json:"-" yaml:"-"` // flag to check if the secret has been fetched fetching *atomic.Bool `json:"-" yaml:"-"` // atomic flag to prevent recursive fetch calls
error error `json:"-" yaml:"-"` // error if any error error `json:"-" yaml:"-"` // error if any
} }
@ -43,8 +43,8 @@ func (d *Dynamic) GetDomainAndDomainRegex() ([]string, []string) {
domainRegex = append(domainRegex, secret.DomainsRegex...) domainRegex = append(domainRegex, secret.DomainsRegex...)
} }
if d.Secret != nil { if d.Secret != nil {
domains = append(domains, d.Secret.Domains...) domains = append(domains, d.Domains...)
domainRegex = append(domainRegex, d.Secret.DomainsRegex...) domainRegex = append(domainRegex, d.DomainsRegex...)
} }
uniqueDomains := sliceutil.Dedupe(domains) uniqueDomains := sliceutil.Dedupe(domains)
uniqueDomainRegex := sliceutil.Dedupe(domainRegex) uniqueDomainRegex := sliceutil.Dedupe(domainRegex)
@ -52,29 +52,35 @@ func (d *Dynamic) GetDomainAndDomainRegex() ([]string, []string) {
} }
func (d *Dynamic) UnmarshalJSON(data []byte) error { func (d *Dynamic) UnmarshalJSON(data []byte) error {
if err := json.Unmarshal(data, &d); err != nil { if d == nil {
return errkit.New("cannot unmarshal into nil Dynamic struct")
}
// Use an alias type (auxiliary) to avoid a recursive call in this method.
type Alias Dynamic
// If d.Secret was nil, json.Unmarshal will allocate a new Secret object
// and populate it from the top level JSON fields.
if err := json.Unmarshal(data, (*Alias)(d)); err != nil {
return err return err
} }
var s Secret
if err := json.Unmarshal(data, &s); err != nil {
return err
}
d.Secret = &s
return nil return nil
} }
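The alias-type trick above lets UnmarshalJSON fill the embedded Secret from top-level JSON fields without recursing into itself. A trimmed-down, standalone sketch of the pattern (hypothetical field set, not the full structs):

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical, reduced versions of the real types.
type Secret struct {
	Type     string `json:"type"`
	Username string `json:"username"`
}

type Dynamic struct {
	*Secret
	TemplatePath string `json:"template"`
}

func (d *Dynamic) UnmarshalJSON(data []byte) error {
	// Alias shares Dynamic's fields but not its methods, so this call does
	// not re-enter UnmarshalJSON. If d.Secret is nil, the decoder allocates
	// it while filling the promoted top-level fields.
	type Alias Dynamic
	return json.Unmarshal(data, (*Alias)(d))
}

func main() {
	var d Dynamic
	if err := json.Unmarshal([]byte(`{"template":"t.yaml","type":"BasicAuth","username":"u"}`), &d); err != nil {
		panic(err)
	}
	fmt.Println(d.TemplatePath, d.Secret != nil, d.Username) // t.yaml true u
}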
// Validate validates the dynamic secret // Validate validates the dynamic secret
func (d *Dynamic) Validate() error { func (d *Dynamic) Validate() error {
d.m = &sync.Mutex{} d.fetched = &atomic.Bool{}
d.fetching = &atomic.Bool{}
if d.TemplatePath == "" { if d.TemplatePath == "" {
return errorutil.New(" template-path is required for dynamic secret") return errkit.New(" template-path is required for dynamic secret")
} }
if len(d.Variables) == 0 { if len(d.Variables) == 0 {
return errorutil.New("variables are required for dynamic secret") return errkit.New("variables are required for dynamic secret")
} }
if d.Secret != nil { if d.Secret != nil {
d.Secret.skipCookieParse = true // skip cookie parsing in dynamic secrets during validation d.skipCookieParse = true // skip cookie parsing in dynamic secrets during validation
if err := d.Secret.Validate(); err != nil { if err := d.Secret.Validate(); err != nil {
return err return err
} }
@ -92,9 +98,7 @@ func (d *Dynamic) Validate() error {
func (d *Dynamic) SetLazyFetchCallback(callback LazyFetchSecret) { func (d *Dynamic) SetLazyFetchCallback(callback LazyFetchSecret) {
d.fetchCallback = func(d *Dynamic) error { d.fetchCallback = func(d *Dynamic) error {
err := callback(d) err := callback(d)
d.fetched = true
if err != nil { if err != nil {
d.error = err
return err return err
} }
if len(d.Extracted) == 0 { if len(d.Extracted) == 0 {
@ -179,15 +183,21 @@ func (d *Dynamic) applyValuesToSecret(secret *Secret) error {
// GetStrategy returns the auth strategies for the dynamic secret // GetStrategy returns the auth strategies for the dynamic secret
func (d *Dynamic) GetStrategies() []AuthStrategy { func (d *Dynamic) GetStrategies() []AuthStrategy {
if !d.fetched { if d.fetched.Load() {
if d.error != nil {
return nil
}
} else {
// Try to fetch if not already fetched
_ = d.Fetch(true) _ = d.Fetch(true)
} }
if d.error != nil { if d.error != nil {
return nil return nil
} }
var strategies []AuthStrategy var strategies []AuthStrategy
if d.Secret != nil { if d.Secret != nil {
strategies = append(strategies, d.Secret.GetStrategy()) strategies = append(strategies, d.GetStrategy())
} }
for _, secret := range d.Secrets { for _, secret := range d.Secrets {
strategies = append(strategies, secret.GetStrategy()) strategies = append(strategies, secret.GetStrategy())
@ -198,12 +208,23 @@ func (d *Dynamic) GetStrategies() []AuthStrategy {
// Fetch fetches the dynamic secret // Fetch fetches the dynamic secret
// if isFatal is true, it will stop the execution if the secret could not be fetched // if isFatal is true, it will stop the execution if the secret could not be fetched
func (d *Dynamic) Fetch(isFatal bool) error { func (d *Dynamic) Fetch(isFatal bool) error {
d.m.Lock() if d.fetched.Load() {
defer d.m.Unlock() return d.error
if d.fetched {
return nil
} }
// Try to set fetching flag atomically
if !d.fetching.CompareAndSwap(false, true) {
// Already fetching, return current error
return d.error
}
// We're the only one fetching, call the callback
d.error = d.fetchCallback(d) d.error = d.fetchCallback(d)
// Mark as fetched and clear fetching flag
d.fetched.Store(true)
d.fetching.Store(false)
if d.error != nil && isFatal { if d.error != nil && isFatal {
gologger.Fatal().Msgf("Could not fetch dynamic secret: %s\n", d.error) gologger.Fatal().Msgf("Could not fetch dynamic secret: %s\n", d.error)
} }
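The fetched/fetching pair above replaces the old mutex: CompareAndSwap lets exactly one caller run the fetch callback, while later callers reuse the recorded result. A standalone sketch of that lazy, fetch-once pattern with a hypothetical type:

package main

import (
	"fmt"
	"sync/atomic"
)

// lazyValue fetches a value at most once; a stand-in for the dynamic secret.
type lazyValue struct {
	fetched  atomic.Bool // set after the fetch callback has completed
	fetching atomic.Bool // won via CompareAndSwap by the single fetcher
	value    string
	err      error
}

func (l *lazyValue) get(fetch func() (string, error)) error {
	if l.fetched.Load() {
		return l.err // already fetched: reuse the recorded outcome
	}
	// Only the caller that flips fetching from false to true runs fetch;
	// a concurrent loser returns immediately with the last recorded error,
	// mirroring the non-blocking behaviour in the diff above.
	if !l.fetching.CompareAndSwap(false, true) {
		return l.err
	}
	l.value, l.err = fetch()
	l.fetched.Store(true)
	l.fetching.Store(false)
	return l.err
}

func main() {
	var v lazyValue
	calls := 0
	fetch := func() (string, error) { calls++; return "token", nil }
	_ = v.get(fetch)
	_ = v.get(fetch) // cached: fetch is not invoked again
	fmt.Println(v.value, calls) // token 1
}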


@ -0,0 +1,125 @@
package authx
import (
"testing"
"github.com/stretchr/testify/require"
)
func TestDynamicUnmarshalJSON(t *testing.T) {
t.Run("basic-unmarshal", func(t *testing.T) {
data := []byte(`{
"template": "test-template.yaml",
"variables": [
{
"key": "username",
"value": "testuser"
}
],
"secrets": [
{
"type": "BasicAuth",
"domains": ["example.com"],
"username": "user1",
"password": "pass1"
}
],
"type": "BasicAuth",
"domains": ["test.com"],
"username": "testuser",
"password": "testpass"
}`)
var d Dynamic
err := d.UnmarshalJSON(data)
require.NoError(t, err)
// Secret
require.NotNil(t, d.Secret)
require.Equal(t, "BasicAuth", d.Type)
require.Equal(t, []string{"test.com"}, d.Domains)
require.Equal(t, "testuser", d.Username)
require.Equal(t, "testpass", d.Password)
// Dynamic fields
require.Equal(t, "test-template.yaml", d.TemplatePath)
require.Len(t, d.Variables, 1)
require.Equal(t, "username", d.Variables[0].Key)
require.Equal(t, "testuser", d.Variables[0].Value)
require.Len(t, d.Secrets, 1)
require.Equal(t, "BasicAuth", d.Secrets[0].Type)
require.Equal(t, []string{"example.com"}, d.Secrets[0].Domains)
require.Equal(t, "user1", d.Secrets[0].Username)
require.Equal(t, "pass1", d.Secrets[0].Password)
})
t.Run("complex-unmarshal", func(t *testing.T) {
data := []byte(`{
"template": "test-template.yaml",
"variables": [
{
"key": "token",
"value": "Bearer xyz"
}
],
"secrets": [
{
"type": "CookiesAuth",
"domains": ["example.com"],
"cookies": [
{
"key": "session",
"value": "abc123"
}
]
}
],
"type": "HeadersAuth",
"domains": ["api.test.com"],
"headers": [
{
"key": "X-API-Key",
"value": "secret-key"
}
]
}`)
var d Dynamic
err := d.UnmarshalJSON(data)
require.NoError(t, err)
// Secret
require.NotNil(t, d.Secret)
require.Equal(t, "HeadersAuth", d.Type)
require.Equal(t, []string{"api.test.com"}, d.Domains)
require.Len(t, d.Headers, 1)
require.Equal(t, "X-API-Key", d.Secret.Headers[0].Key)
require.Equal(t, "secret-key", d.Secret.Headers[0].Value)
// Dynamic fields
require.Equal(t, "test-template.yaml", d.TemplatePath)
require.Len(t, d.Variables, 1)
require.Equal(t, "token", d.Variables[0].Key)
require.Equal(t, "Bearer xyz", d.Variables[0].Value)
require.Len(t, d.Secrets, 1)
require.Equal(t, "CookiesAuth", d.Secrets[0].Type)
require.Equal(t, []string{"example.com"}, d.Secrets[0].Domains)
require.Len(t, d.Secrets[0].Cookies, 1)
require.Equal(t, "session", d.Secrets[0].Cookies[0].Key)
require.Equal(t, "abc123", d.Secrets[0].Cookies[0].Value)
})
t.Run("invalid-json", func(t *testing.T) {
data := []byte(`{invalid json}`)
var d Dynamic
err := d.UnmarshalJSON(data)
require.Error(t, err)
})
t.Run("empty-json", func(t *testing.T) {
data := []byte(`{}`)
var d Dynamic
err := d.UnmarshalJSON(data)
require.NoError(t, err)
})
}


@ -8,7 +8,7 @@ import (
"strings" "strings"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json" "github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
"github.com/projectdiscovery/utils/generic" "github.com/projectdiscovery/utils/generic"
stringsutil "github.com/projectdiscovery/utils/strings" stringsutil "github.com/projectdiscovery/utils/strings"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v3"
@ -55,7 +55,7 @@ type Secret struct {
Type string `json:"type" yaml:"type"` Type string `json:"type" yaml:"type"`
Domains []string `json:"domains" yaml:"domains"` Domains []string `json:"domains" yaml:"domains"`
DomainsRegex []string `json:"domains-regex" yaml:"domains-regex"` DomainsRegex []string `json:"domains-regex" yaml:"domains-regex"`
Headers []KV `json:"headers" yaml:"headers"` Headers []KV `json:"headers" yaml:"headers"` // Headers preserve exact casing (useful for case-sensitive APIs)
Cookies []Cookie `json:"cookies" yaml:"cookies"` Cookies []Cookie `json:"cookies" yaml:"cookies"`
Params []KV `json:"params" yaml:"params"` Params []KV `json:"params" yaml:"params"`
Username string `json:"username" yaml:"username"` // can be either email or username Username string `json:"username" yaml:"username"` // can be either email or username
@ -148,7 +148,7 @@ func (s *Secret) Validate() error {
} }
type KV struct { type KV struct {
Key string `json:"key" yaml:"key"` Key string `json:"key" yaml:"key"` // Header key (preserves exact casing)
Value string `json:"value" yaml:"value"` Value string `json:"value" yaml:"value"`
} }
@ -237,7 +237,9 @@ func GetAuthDataFromYAML(data []byte) (*Authx, error) {
var auth Authx var auth Authx
err := yaml.Unmarshal(data, &auth) err := yaml.Unmarshal(data, &auth)
if err != nil { if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not unmarshal yaml") errorErr := errkit.FromError(err)
errorErr.Msgf("could not unmarshal yaml")
return nil, errorErr
} }
return &auth, nil return &auth, nil
} }
@ -247,7 +249,9 @@ func GetAuthDataFromJSON(data []byte) (*Authx, error) {
var auth Authx var auth Authx
err := json.Unmarshal(data, &auth) err := json.Unmarshal(data, &auth)
if err != nil { if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not unmarshal json") errorErr := errkit.FromError(err)
errorErr.Msgf("could not unmarshal json")
return nil, errorErr
} }
return &auth, nil return &auth, nil
} }


@ -21,15 +21,19 @@ func NewHeadersAuthStrategy(data *Secret) *HeadersAuthStrategy {
} }
// Apply applies the headers auth strategy to the request // Apply applies the headers auth strategy to the request
// NOTE: This preserves exact header casing (e.g., barAuthToken stays as barAuthToken)
// This is useful for APIs that require case-sensitive header names
func (s *HeadersAuthStrategy) Apply(req *http.Request) { func (s *HeadersAuthStrategy) Apply(req *http.Request) {
for _, header := range s.Data.Headers { for _, header := range s.Data.Headers {
req.Header.Set(header.Key, header.Value) req.Header[header.Key] = []string{header.Value}
} }
} }
// ApplyOnRR applies the headers auth strategy to the retryable request // ApplyOnRR applies the headers auth strategy to the retryable request
// NOTE: This preserves exact header casing (e.g., barAuthToken stays as barAuthToken)
// This is useful for APIs that require case-sensitive header names
func (s *HeadersAuthStrategy) ApplyOnRR(req *retryablehttp.Request) { func (s *HeadersAuthStrategy) ApplyOnRR(req *retryablehttp.Request) {
for _, header := range s.Data.Headers { for _, header := range s.Data.Headers {
req.Header.Set(header.Key, header.Value) req.Header[header.Key] = []string{header.Value}
} }
} }
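The switch from req.Header.Set to direct map assignment matters because net/http canonicalizes header names passed to Set (via textproto.CanonicalMIMEHeaderKey), while writing the map key directly keeps the caller's exact casing. A small standalone illustration:

package main

import (
	"fmt"
	"net/http"
)

func main() {
	h := http.Header{}

	// Set canonicalizes the key, so the exact casing is lost.
	h.Set("barAuthToken", "abc")

	// Direct map assignment preserves the exact casing supplied by the caller.
	h["x-pdcp-key"] = []string{"xyz"}

	for k := range h {
		fmt.Println(k)
	}
	// Output (order may vary):
	// Barauthtoken
	// x-pdcp-key
}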


@ -12,6 +12,8 @@ info:
# static secrets # static secrets
static: static:
# for header based auth session # for header based auth session
# NOTE: Headers preserve exact casing (e.g., x-pdcp-key stays as x-pdcp-key)
# This is useful for APIs that require case-sensitive header names
- type: header - type: header
domains: domains:
- api.projectdiscovery.io - api.projectdiscovery.io
@ -20,6 +22,8 @@ static:
headers: headers:
- key: x-pdcp-key - key: x-pdcp-key
value: <api-key-here> value: <api-key-here>
- key: barAuthToken
value: <auth-token-here>
# for query based auth session # for query based auth session
- type: Query - type: Query


@ -7,7 +7,7 @@ import (
"strings" "strings"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider/authx" "github.com/projectdiscovery/nuclei/v3/pkg/authprovider/authx"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
urlutil "github.com/projectdiscovery/utils/url" urlutil "github.com/projectdiscovery/utils/url"
) )
@ -30,16 +30,20 @@ func NewFileAuthProvider(path string, callback authx.LazyFetchSecret) (AuthProvi
return nil, ErrNoSecrets return nil, ErrNoSecrets
} }
if len(store.Dynamic) > 0 && callback == nil { if len(store.Dynamic) > 0 && callback == nil {
return nil, errorutil.New("lazy fetch callback is required for dynamic secrets") return nil, errkit.New("lazy fetch callback is required for dynamic secrets")
} }
for _, secret := range store.Secrets { for _, secret := range store.Secrets {
if err := secret.Validate(); err != nil { if err := secret.Validate(); err != nil {
return nil, errorutil.NewWithErr(err).Msgf("invalid secret in file: %s", path) errorErr := errkit.FromError(err)
errorErr.Msgf("invalid secret in file: %s", path)
return nil, errorErr
} }
} }
for i, dynamic := range store.Dynamic { for i, dynamic := range store.Dynamic {
if err := dynamic.Validate(); err != nil { if err := dynamic.Validate(); err != nil {
return nil, errorutil.NewWithErr(err).Msgf("invalid dynamic in file: %s", path) errorErr := errkit.FromError(err)
errorErr.Msgf("invalid dynamic in file: %s", path)
return nil, errorErr
} }
dynamic.SetLazyFetchCallback(callback) dynamic.SetLazyFetchCallback(callback)
store.Dynamic[i] = dynamic store.Dynamic[i] = dynamic


@ -7,6 +7,7 @@ import (
"fmt" "fmt"
"io" "io"
"path" "path"
"slices"
"strings" "strings"
"github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/aws"
@ -140,10 +141,8 @@ func (c Catalog) ResolvePath(templateName, second string) (string, error) {
} }
// check if templateName is already an absolute path to c key // check if templateName is already an absolute path to c key
for _, key := range keys { if slices.Contains(keys, templateName) {
if key == templateName { return templateName, nil
return templateName, nil
}
} }
return "", fmt.Errorf("no such path found: %s%s for keys: %v", second, templateName, keys) return "", fmt.Errorf("no such path found: %s%s for keys: %v", second, templateName, keys)


@ -3,6 +3,7 @@ package aws
import ( import (
"io" "io"
"reflect" "reflect"
"slices"
"strings" "strings"
"testing" "testing"
@ -250,13 +251,7 @@ func (m mocks3svc) getAllKeys() ([]string, error) {
} }
func (m mocks3svc) downloadKey(name string) (io.ReadCloser, error) { func (m mocks3svc) downloadKey(name string) (io.ReadCloser, error) {
found := false found := slices.Contains(m.keys, name)
for _, key := range m.keys {
if key == name {
found = true
break
}
}
if !found { if !found {
return nil, errors.New("key not found") return nil, errors.New("key not found")
} }


@ -31,7 +31,7 @@ const (
CLIConfigFileName = "config.yaml" CLIConfigFileName = "config.yaml"
ReportingConfigFilename = "reporting-config.yaml" ReportingConfigFilename = "reporting-config.yaml"
// Version is the current version of nuclei // Version is the current version of nuclei
Version = `v3.4.2` Version = `v3.4.10`
// Directory Names of custom templates // Directory Names of custom templates
CustomS3TemplatesDirName = "s3" CustomS3TemplatesDirName = "s3"
CustomGitHubTemplatesDirName = "github" CustomGitHubTemplatesDirName = "github"
@ -46,18 +46,21 @@ const (
// if the current version is outdated // if the current version is outdated
func IsOutdatedVersion(current, latest string) bool { func IsOutdatedVersion(current, latest string) bool {
if latest == "" { if latest == "" {
// if pdtm api call failed it's assumed that the current version is outdated // NOTE(dwisiswant0): if PDTM API call failed or returned empty, we
// and it will be confirmed while updating from GitHub // cannot determine if templates are outdated w/o additional checks
// this fixes `version string empty` errors // return false to avoid unnecessary updates.
return true return false
} }
current = trimDevIfExists(current) current = trimDevIfExists(current)
currentVer, _ := semver.NewVersion(current) currentVer, _ := semver.NewVersion(current)
newVer, _ := semver.NewVersion(latest) newVer, _ := semver.NewVersion(latest)
if currentVer == nil || newVer == nil { if currentVer == nil || newVer == nil {
// fallback to naive comparison // fallback to naive comparison - return true only if they are different
return current == latest return current != latest
} }
return newVer.GreaterThan(currentVer) return newVer.GreaterThan(currentVer)
} }
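The revised IsOutdatedVersion treats an empty latest version as "not outdated" and falls back to plain string inequality when either version fails to parse. A standalone sketch of that decision flow; the import path github.com/Masterminds/semver/v3 and the "-dev" suffix handling are assumptions based on the surrounding code, not verified here:

package main

import (
	"fmt"
	"strings"

	"github.com/Masterminds/semver/v3"
)

// isOutdated mirrors the comparison logic described above: an unknown latest
// version means "not outdated", semver comparison is used when both versions
// parse, and plain string inequality is the fallback.
func isOutdated(current, latest string) bool {
	if latest == "" {
		return false // cannot decide without a latest version
	}
	current = strings.TrimSuffix(current, "-dev") // rough stand-in for trimDevIfExists
	currentVer, _ := semver.NewVersion(current)
	latestVer, _ := semver.NewVersion(latest)
	if currentVer == nil || latestVer == nil {
		return current != latest
	}
	return latestVer.GreaterThan(currentVer)
}

func main() {
	fmt.Println(isOutdated("v3.4.2", "v3.4.10")) // true
	fmt.Println(isOutdated("v3.4.10", ""))       // false
}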


@ -20,7 +20,9 @@ func ReadIgnoreFile() IgnoreFile {
gologger.Error().Msgf("Could not read nuclei-ignore file: %s\n", err) gologger.Error().Msgf("Could not read nuclei-ignore file: %s\n", err)
return IgnoreFile{} return IgnoreFile{}
} }
defer file.Close() defer func() {
_ = file.Close()
}()
ignore := IgnoreFile{} ignore := IgnoreFile{}
if err := yaml.NewDecoder(file).Decode(&ignore); err != nil { if err := yaml.NewDecoder(file).Decode(&ignore); err != nil {


@ -4,16 +4,16 @@ import (
"bytes" "bytes"
"crypto/md5" "crypto/md5"
"fmt" "fmt"
"log"
"os" "os"
"path/filepath" "path/filepath"
"slices"
"strings" "strings"
"sync"
"github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger" "github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json" "github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
"github.com/projectdiscovery/utils/env" "github.com/projectdiscovery/utils/env"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file" fileutil "github.com/projectdiscovery/utils/file"
folderutil "github.com/projectdiscovery/utils/folder" folderutil "github.com/projectdiscovery/utils/folder"
) )
@ -41,15 +41,18 @@ type Config struct {
// local cache of nuclei version check endpoint // local cache of nuclei version check endpoint
// these fields are only update during nuclei version check // these fields are only update during nuclei version check
// TODO: move these fields to a separate unexported struct as they are not meant to be used directly // TODO: move these fields to a separate unexported struct as they are not meant to be used directly
LatestNucleiVersion string `json:"nuclei-latest-version"` LatestNucleiVersion string `json:"nuclei-latest-version"`
LatestNucleiTemplatesVersion string `json:"nuclei-templates-latest-version"` LatestNucleiTemplatesVersion string `json:"nuclei-templates-latest-version"`
LatestNucleiIgnoreHash string `json:"nuclei-latest-ignore-hash,omitempty"` LatestNucleiIgnoreHash string `json:"nuclei-latest-ignore-hash,omitempty"`
Logger *gologger.Logger `json:"-"` // logger
// internal / unexported fields // internal / unexported fields
disableUpdates bool `json:"-"` // disable updates both version check and template updates disableUpdates bool `json:"-"` // disable updates both version check and template updates
homeDir string `json:"-"` // User Home Directory homeDir string `json:"-"` // User Home Directory
configDir string `json:"-"` // Nuclei Global Config Directory configDir string `json:"-"` // Nuclei Global Config Directory
debugArgs []string `json:"-"` // debug args debugArgs []string `json:"-"` // debug args
m sync.Mutex
} }
// IsCustomTemplate determines whether a given template is custom-built or part of the official Nuclei templates. // IsCustomTemplate determines whether a given template is custom-built or part of the official Nuclei templates.
@ -104,21 +107,29 @@ func (c *Config) GetTemplateDir() string {
// DisableUpdateCheck disables update check and template updates // DisableUpdateCheck disables update check and template updates
func (c *Config) DisableUpdateCheck() { func (c *Config) DisableUpdateCheck() {
c.m.Lock()
defer c.m.Unlock()
c.disableUpdates = true c.disableUpdates = true
} }
// CanCheckForUpdates returns true if update check is enabled // CanCheckForUpdates returns true if update check is enabled
func (c *Config) CanCheckForUpdates() bool { func (c *Config) CanCheckForUpdates() bool {
c.m.Lock()
defer c.m.Unlock()
return !c.disableUpdates return !c.disableUpdates
} }
// NeedsTemplateUpdate returns true if template installation/update is required // NeedsTemplateUpdate returns true if template installation/update is required
func (c *Config) NeedsTemplateUpdate() bool { func (c *Config) NeedsTemplateUpdate() bool {
c.m.Lock()
defer c.m.Unlock()
return !c.disableUpdates && (c.TemplateVersion == "" || IsOutdatedVersion(c.TemplateVersion, c.LatestNucleiTemplatesVersion) || !fileutil.FolderExists(c.TemplatesDirectory)) return !c.disableUpdates && (c.TemplateVersion == "" || IsOutdatedVersion(c.TemplateVersion, c.LatestNucleiTemplatesVersion) || !fileutil.FolderExists(c.TemplatesDirectory))
} }
// NeedsIgnoreFileUpdate returns true if Ignore file hash is different (aka ignore file is outdated) // NeedsIgnoreFileUpdate returns true if Ignore file hash is different (aka ignore file is outdated)
func (c *Config) NeedsIgnoreFileUpdate() bool { func (c *Config) NeedsIgnoreFileUpdate() bool {
c.m.Lock()
defer c.m.Unlock()
return c.NucleiIgnoreHash == "" || c.NucleiIgnoreHash != c.LatestNucleiIgnoreHash return c.NucleiIgnoreHash == "" || c.NucleiIgnoreHash != c.LatestNucleiIgnoreHash
} }
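The sync.Mutex added to Config serializes reads and writes of the update flags, so DisableUpdateCheck and CanCheckForUpdates can be called from different goroutines without a data race. A minimal standalone sketch of the guarded-flag pattern with a hypothetical type:

package main

import (
	"fmt"
	"sync"
)

// settings is a hypothetical stand-in for the config struct above.
type settings struct {
	m              sync.Mutex
	disableUpdates bool
}

func (s *settings) DisableUpdateCheck() {
	s.m.Lock()
	defer s.m.Unlock()
	s.disableUpdates = true
}

func (s *settings) CanCheckForUpdates() bool {
	s.m.Lock()
	defer s.m.Unlock()
	return !s.disableUpdates
}

func main() {
	s := &settings{}
	var wg sync.WaitGroup
	wg.Add(2)
	go func() { defer wg.Done(); s.DisableUpdateCheck() }()
	go func() { defer wg.Done(); _ = s.CanCheckForUpdates() }()
	wg.Wait()
	fmt.Println(s.CanCheckForUpdates()) // false
}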
@ -129,13 +140,13 @@ func (c *Config) UpdateNucleiIgnoreHash() error {
if fileutil.FileExists(ignoreFilePath) { if fileutil.FileExists(ignoreFilePath) {
bin, err := os.ReadFile(ignoreFilePath) bin, err := os.ReadFile(ignoreFilePath)
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("could not read nuclei ignore file") return errkit.Newf("could not read nuclei ignore file: %v", err)
} }
c.NucleiIgnoreHash = fmt.Sprintf("%x", md5.Sum(bin)) c.NucleiIgnoreHash = fmt.Sprintf("%x", md5.Sum(bin))
// write config to disk // write config to disk
return c.WriteTemplatesConfig() return c.WriteTemplatesConfig()
} }
return errorutil.NewWithTag("config", "ignore file not found: could not update nuclei ignore hash") return errkit.New("ignore file not found: could not update nuclei ignore hash")
} }
// GetConfigDir returns the nuclei configuration directory // GetConfigDir returns the nuclei configuration directory
@ -210,7 +221,7 @@ func (c *Config) GetCacheDir() string {
func (c *Config) SetConfigDir(dir string) { func (c *Config) SetConfigDir(dir string) {
c.configDir = dir c.configDir = dir
if err := c.createConfigDirIfNotExists(); err != nil { if err := c.createConfigDirIfNotExists(); err != nil {
gologger.Fatal().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err) c.Logger.Fatal().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err)
} }
// if folder already exists read config or create new // if folder already exists read config or create new
@ -218,7 +229,7 @@ func (c *Config) SetConfigDir(dir string) {
// create new config // create new config
applyDefaultConfig() applyDefaultConfig()
if err2 := c.WriteTemplatesConfig(); err2 != nil { if err2 := c.WriteTemplatesConfig(); err2 != nil {
gologger.Fatal().Msgf("Could not create nuclei config file at %s: %s", c.getTemplatesConfigFilePath(), err2) c.Logger.Fatal().Msgf("Could not create nuclei config file at %s: %s", c.getTemplatesConfigFilePath(), err2)
} }
} }
@ -246,7 +257,7 @@ func (c *Config) SetTemplatesVersion(version string) error {
c.TemplateVersion = version c.TemplateVersion = version
// write config to disk // write config to disk
if err := c.WriteTemplatesConfig(); err != nil { if err := c.WriteTemplatesConfig(); err != nil {
return errorutil.NewWithErr(err).Msgf("could not write nuclei config file at %s", c.getTemplatesConfigFilePath()) return errkit.Newf("could not write nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
} }
return nil return nil
} }
@ -254,15 +265,15 @@ func (c *Config) SetTemplatesVersion(version string) error {
// ReadTemplatesConfig reads the nuclei templates config file // ReadTemplatesConfig reads the nuclei templates config file
func (c *Config) ReadTemplatesConfig() error { func (c *Config) ReadTemplatesConfig() error {
if !fileutil.FileExists(c.getTemplatesConfigFilePath()) { if !fileutil.FileExists(c.getTemplatesConfigFilePath()) {
return errorutil.NewWithTag("config", "nuclei config file at %s does not exist", c.getTemplatesConfigFilePath()) return errkit.Newf("nuclei config file at %s does not exist", c.getTemplatesConfigFilePath())
} }
var cfg *Config var cfg *Config
bin, err := os.ReadFile(c.getTemplatesConfigFilePath()) bin, err := os.ReadFile(c.getTemplatesConfigFilePath())
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("could not read nuclei config file at %s", c.getTemplatesConfigFilePath()) return errkit.Newf("could not read nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
} }
if err := json.Unmarshal(bin, &cfg); err != nil { if err := json.Unmarshal(bin, &cfg); err != nil {
return errorutil.NewWithErr(err).Msgf("could not unmarshal nuclei config file at %s", c.getTemplatesConfigFilePath()) return errkit.Newf("could not unmarshal nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
} }
// apply config // apply config
c.TemplatesDirectory = cfg.TemplatesDirectory c.TemplatesDirectory = cfg.TemplatesDirectory
@ -281,10 +292,10 @@ func (c *Config) WriteTemplatesConfig() error {
} }
bin, err := json.Marshal(c) bin, err := json.Marshal(c)
if err != nil { if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to marshal nuclei config") return errkit.Newf("failed to marshal nuclei config: %v", err)
} }
if err = os.WriteFile(c.getTemplatesConfigFilePath(), bin, 0600); err != nil { if err = os.WriteFile(c.getTemplatesConfigFilePath(), bin, 0600); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to write nuclei config file at %s", c.getTemplatesConfigFilePath()) return errkit.Newf("failed to write nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
} }
return nil return nil
} }
@ -308,7 +319,7 @@ func (c *Config) getTemplatesConfigFilePath() string {
func (c *Config) createConfigDirIfNotExists() error { func (c *Config) createConfigDirIfNotExists() error {
if !fileutil.FolderExists(c.configDir) { if !fileutil.FolderExists(c.configDir) {
if err := fileutil.CreateFolder(c.configDir); err != nil { if err := fileutil.CreateFolder(c.configDir); err != nil {
return errorutil.NewWithErr(err).Msgf("could not create nuclei config directory at %s", c.configDir) return errkit.Newf("could not create nuclei config directory at %s: %v", c.configDir, err)
} }
} }
return nil return nil
@ -318,14 +329,14 @@ func (c *Config) createConfigDirIfNotExists() error {
// to the current config directory // to the current config directory
func (c *Config) copyIgnoreFile() { func (c *Config) copyIgnoreFile() {
if err := c.createConfigDirIfNotExists(); err != nil { if err := c.createConfigDirIfNotExists(); err != nil {
gologger.Error().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err) c.Logger.Error().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err)
return return
} }
ignoreFilePath := c.GetIgnoreFilePath() ignoreFilePath := c.GetIgnoreFilePath()
if !fileutil.FileExists(ignoreFilePath) { if !fileutil.FileExists(ignoreFilePath) {
// copy ignore file from default config directory // copy ignore file from default config directory
if err := fileutil.CopyFile(filepath.Join(folderutil.AppConfigDirOrDefault(FallbackConfigFolderName, BinaryName), NucleiIgnoreFileName), ignoreFilePath); err != nil { if err := fileutil.CopyFile(filepath.Join(folderutil.AppConfigDirOrDefault(FallbackConfigFolderName, BinaryName), NucleiIgnoreFileName), ignoreFilePath); err != nil {
gologger.Error().Msgf("Could not copy nuclei ignore file at %s: %s", ignoreFilePath, err) c.Logger.Error().Msgf("Could not copy nuclei ignore file at %s: %s", ignoreFilePath, err)
} }
} }
} }
@ -334,12 +345,7 @@ func (c *Config) copyIgnoreFile() {
// this could be a feature specific to debugging like PPROF or printing stats // this could be a feature specific to debugging like PPROF or printing stats
// of max host error etc // of max host error etc
func (c *Config) IsDebugArgEnabled(arg string) bool { func (c *Config) IsDebugArgEnabled(arg string) bool {
for _, v := range c.debugArgs { return slices.Contains(c.debugArgs, arg)
if v == arg {
return true
}
}
return false
} }
// parseDebugArgs from string // parseDebugArgs from string
@ -371,9 +377,6 @@ func (c *Config) parseDebugArgs(data string) {
} }
func init() { func init() {
// first attempt to migrate all files from old config directory to new config directory
goflags.AttemptConfigMigration() // regardless how many times this is called it will only migrate once based on condition
ConfigDir := folderutil.AppConfigDirOrDefault(FallbackConfigFolderName, BinaryName) ConfigDir := folderutil.AppConfigDirOrDefault(FallbackConfigFolderName, BinaryName)
if cfgDir := os.Getenv(NucleiConfigDirEnv); cfgDir != "" { if cfgDir := os.Getenv(NucleiConfigDirEnv); cfgDir != "" {
@ -389,6 +392,7 @@ func init() {
DefaultConfig = &Config{ DefaultConfig = &Config{
homeDir: folderutil.HomeDirOrDefault(""), homeDir: folderutil.HomeDirOrDefault(""),
configDir: ConfigDir, configDir: ConfigDir,
Logger: gologger.DefaultLogger,
} }
// when enabled will log events in more verbosity than -v or -debug // when enabled will log events in more verbosity than -v or -debug
@ -410,9 +414,7 @@ func init() {
gologger.Error().Msgf("failed to write config file at %s got: %s", DefaultConfig.getTemplatesConfigFilePath(), err) gologger.Error().Msgf("failed to write config file at %s got: %s", DefaultConfig.getTemplatesConfigFilePath(), err)
} }
} }
// attempt to migrate resume files
// this also happens once regardless of how many times this is called
migrateResumeFiles()
// Loads/updates paths of custom templates // Loads/updates paths of custom templates
// Note: custom templates paths should not be updated in config file // Note: custom templates paths should not be updated in config file
// and even if it is changed we don't follow it since it is not expected behavior // and even if it is changed we don't follow it since it is not expected behavior
@ -427,61 +429,3 @@ func applyDefaultConfig() {
// updates all necessary paths // updates all necessary paths
DefaultConfig.SetTemplatesDir(DefaultConfig.TemplatesDirectory) DefaultConfig.SetTemplatesDir(DefaultConfig.TemplatesDirectory)
} }
func migrateResumeFiles() {
// attempt to migrate old resume files to new directory structure
// after migration has been done in goflags
oldResumeDir := DefaultConfig.GetConfigDir()
// migrate old resume file to new directory structure
if !fileutil.FileOrFolderExists(DefaultConfig.GetCacheDir()) && fileutil.FileOrFolderExists(oldResumeDir) {
// this means new cache dir doesn't exist, so we need to migrate
// first check if old resume file exists if not then no need to migrate
exists := false
files, err := os.ReadDir(oldResumeDir)
if err != nil {
// log silently
log.Printf("could not read old resume dir: %s\n", err)
return
}
for _, file := range files {
if strings.HasSuffix(file.Name(), ".cfg") {
exists = true
break
}
}
if !exists {
// no need to migrate
return
}
// create new cache dir
err = os.MkdirAll(DefaultConfig.GetCacheDir(), os.ModePerm)
if err != nil {
// log silently
log.Printf("could not create new cache dir: %s\n", err)
return
}
err = filepath.WalkDir(oldResumeDir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
if !strings.HasSuffix(path, ".cfg") {
return nil
}
err = os.Rename(path, filepath.Join(DefaultConfig.GetCacheDir(), filepath.Base(path)))
if err != nil {
return err
}
return nil
})
if err != nil {
// log silently
log.Printf("could not migrate old resume files: %s\n", err)
return
}
}
}


@ -7,7 +7,6 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/extensions" "github.com/projectdiscovery/nuclei/v3/pkg/templates/extensions"
fileutil "github.com/projectdiscovery/utils/file" fileutil "github.com/projectdiscovery/utils/file"
stringsutil "github.com/projectdiscovery/utils/strings" stringsutil "github.com/projectdiscovery/utils/strings"
@ -74,7 +73,9 @@ func getTemplateID(filePath string) (string, error) {
return "", err return "", err
} }
defer file.Close() defer func() {
_ = file.Close()
}()
return GetTemplateIDFromReader(file, filePath) return GetTemplateIDFromReader(file, filePath)
} }
@ -96,7 +97,7 @@ func GetNucleiTemplatesIndex() (map[string]string, error) {
return index, nil return index, nil
} }
} }
gologger.Error().Msgf("failed to read index file creating new one: %v", err) DefaultConfig.Logger.Error().Msgf("failed to read index file creating new one: %v", err)
} }
ignoreDirs := DefaultConfig.GetAllCustomTemplateDirs() ignoreDirs := DefaultConfig.GetAllCustomTemplateDirs()
@ -107,7 +108,7 @@ func GetNucleiTemplatesIndex() (map[string]string, error) {
} }
err := filepath.WalkDir(DefaultConfig.TemplatesDirectory, func(path string, d os.DirEntry, err error) error { err := filepath.WalkDir(DefaultConfig.TemplatesDirectory, func(path string, d os.DirEntry, err error) error {
if err != nil { if err != nil {
gologger.Verbose().Msgf("failed to walk path=%v err=%v", path, err) DefaultConfig.Logger.Verbose().Msgf("failed to walk path=%v err=%v", path, err)
return nil return nil
} }
if d.IsDir() || !IsTemplate(path) || stringsutil.ContainsAny(path, ignoreDirs...) { if d.IsDir() || !IsTemplate(path) || stringsutil.ContainsAny(path, ignoreDirs...) {
@ -116,7 +117,7 @@ func GetNucleiTemplatesIndex() (map[string]string, error) {
// get template id from file // get template id from file
id, err := getTemplateID(path) id, err := getTemplateID(path)
if err != nil || id == "" { if err != nil || id == "" {
gologger.Verbose().Msgf("failed to get template id from file=%v got id=%v err=%v", path, id, err) DefaultConfig.Logger.Verbose().Msgf("failed to get template id from file=%v got id=%v err=%v", path, id, err)
return nil return nil
} }
index[id] = path index[id] = path


@ -8,7 +8,6 @@ import (
"github.com/logrusorgru/aurora" "github.com/logrusorgru/aurora"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
stringsutil "github.com/projectdiscovery/utils/strings" stringsutil "github.com/projectdiscovery/utils/strings"
updateutils "github.com/projectdiscovery/utils/update" updateutils "github.com/projectdiscovery/utils/update"
@ -84,7 +83,7 @@ func (c *DiskCatalog) GetTemplatePath(target string) ([]string, error) {
absPath = BackwardsCompatiblePaths(c.templatesDirectory, target) absPath = BackwardsCompatiblePaths(c.templatesDirectory, target)
if absPath != target && strings.TrimPrefix(absPath, c.templatesDirectory+string(filepath.Separator)) != target { if absPath != target && strings.TrimPrefix(absPath, c.templatesDirectory+string(filepath.Separator)) != target {
if config.DefaultConfig.LogAllEvents { if config.DefaultConfig.LogAllEvents {
gologger.DefaultLogger.Print().Msgf("[%v] requested Template path %s is deprecated, please update to %s\n", aurora.Yellow("WRN").String(), target, absPath) config.DefaultConfig.Logger.Print().Msgf("[%v] requested Template path %s is deprecated, please update to %s\n", aurora.Yellow("WRN").String(), target, absPath)
} }
deprecatedPathsCounter++ deprecatedPathsCounter++
} }
@ -302,6 +301,6 @@ func PrintDeprecatedPathsMsgIfApplicable(isSilent bool) {
return return
} }
if deprecatedPathsCounter > 0 && !isSilent { if deprecatedPathsCounter > 0 && !isSilent {
gologger.Print().Msgf("[%v] Found %v template[s] loaded with deprecated paths, update before v3 for continued support.\n", aurora.Yellow("WRN").String(), deprecatedPathsCounter) config.DefaultConfig.Logger.Print().Msgf("[%v] Found %v template[s] loaded with deprecated paths, update before v3 for continued support.\n", aurora.Yellow("WRN").String(), deprecatedPathsCounter)
} }
} }


@ -10,12 +10,11 @@ import (
"strings" "strings"
"github.com/alecthomas/chroma/quick" "github.com/alecthomas/chroma/quick"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/retryablehttp-go" "github.com/projectdiscovery/retryablehttp-go"
pdcpauth "github.com/projectdiscovery/utils/auth/pdcp" pdcpauth "github.com/projectdiscovery/utils/auth/pdcp"
errorutil "github.com/projectdiscovery/utils/errors" "github.com/projectdiscovery/utils/errkit"
) )
const ( const (
@ -34,31 +33,31 @@ type AITemplateResponse struct {
func getAIGeneratedTemplates(prompt string, options *types.Options) ([]string, error) { func getAIGeneratedTemplates(prompt string, options *types.Options) ([]string, error) {
prompt = strings.TrimSpace(prompt) prompt = strings.TrimSpace(prompt)
if len(prompt) < 5 { if len(prompt) < 5 {
return nil, errorutil.New("Prompt is too short. Please provide a more descriptive prompt") return nil, errkit.Newf("Prompt is too short. Please provide a more descriptive prompt")
} }
if len(prompt) > 3000 { if len(prompt) > 3000 {
return nil, errorutil.New("Prompt is too long. Please limit to 3000 characters") return nil, errkit.Newf("Prompt is too long. Please limit to 3000 characters")
} }
template, templateID, err := generateAITemplate(prompt) template, templateID, err := generateAITemplate(prompt)
if err != nil { if err != nil {
return nil, errorutil.New("Failed to generate template: %v", err) return nil, errkit.Newf("Failed to generate template: %v", err)
} }
pdcpTemplateDir := filepath.Join(config.DefaultConfig.GetTemplateDir(), "pdcp") pdcpTemplateDir := filepath.Join(config.DefaultConfig.GetTemplateDir(), "pdcp")
if err := os.MkdirAll(pdcpTemplateDir, 0755); err != nil { if err := os.MkdirAll(pdcpTemplateDir, 0755); err != nil {
return nil, errorutil.New("Failed to create pdcp template directory: %v", err) return nil, errkit.Newf("Failed to create pdcp template directory: %v", err)
} }
templateFile := filepath.Join(pdcpTemplateDir, templateID+".yaml") templateFile := filepath.Join(pdcpTemplateDir, templateID+".yaml")
err = os.WriteFile(templateFile, []byte(template), 0644) err = os.WriteFile(templateFile, []byte(template), 0644)
if err != nil { if err != nil {
return nil, errorutil.New("Failed to generate template: %v", err) return nil, errkit.Newf("Failed to generate template: %v", err)
} }
gologger.Info().Msgf("Generated template available at: https://cloud.projectdiscovery.io/templates/%s", templateID) options.Logger.Info().Msgf("Generated template available at: https://cloud.projectdiscovery.io/templates/%s", templateID)
gologger.Info().Msgf("Generated template path: %s", templateFile) options.Logger.Info().Msgf("Generated template path: %s", templateFile)
// Check if we should display the template // Check if we should display the template
// This happens when: // This happens when:
@ -76,7 +75,7 @@ func getAIGeneratedTemplates(prompt string, options *types.Options) ([]string, e
template = buf.String() template = buf.String()
} }
} }
gologger.Silent().Msgf("\n%s", template) options.Logger.Debug().Msgf("\n%s", template)
// FIXME: // FIXME:
// we should not be exiting the program here // we should not be exiting the program here
// but we need to find a better way to handle this // but we need to find a better way to handle this
@ -92,22 +91,22 @@ func generateAITemplate(prompt string) (string, string, error) {
} }
jsonBody, err := json.Marshal(reqBody) jsonBody, err := json.Marshal(reqBody)
if err != nil { if err != nil {
return "", "", errorutil.New("Failed to marshal request body: %v", err) return "", "", errkit.Newf("Failed to marshal request body: %v", err)
} }
req, err := http.NewRequest(http.MethodPost, aiTemplateGeneratorAPIEndpoint, bytes.NewBuffer(jsonBody)) req, err := http.NewRequest(http.MethodPost, aiTemplateGeneratorAPIEndpoint, bytes.NewBuffer(jsonBody))
if err != nil { if err != nil {
return "", "", errorutil.New("Failed to create HTTP request: %v", err) return "", "", errkit.Newf("Failed to create HTTP request: %v", err)
} }
ph := pdcpauth.PDCPCredHandler{} ph := pdcpauth.PDCPCredHandler{}
creds, err := ph.GetCreds() creds, err := ph.GetCreds()
if err != nil { if err != nil {
return "", "", errorutil.New("Failed to get PDCP credentials: %v", err) return "", "", errkit.Newf("Failed to get PDCP credentials: %v", err)
} }
if creds == nil { if creds == nil {
return "", "", errorutil.New("PDCP API Key not configured, Create one for free at https://cloud.projectdiscovery.io/") return "", "", errkit.Newf("PDCP API Key not configured, Create one for free at https://cloud.projectdiscovery.io/")
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
@ -115,26 +114,28 @@ func generateAITemplate(prompt string) (string, string, error) {
resp, err := retryablehttp.DefaultClient().Do(req) resp, err := retryablehttp.DefaultClient().Do(req)
if err != nil { if err != nil {
return "", "", errorutil.New("Failed to send HTTP request: %v", err) return "", "", errkit.Newf("Failed to send HTTP request: %v", err)
} }
defer resp.Body.Close() defer func() {
_ = resp.Body.Close()
}()
if resp.StatusCode == http.StatusUnauthorized { if resp.StatusCode == http.StatusUnauthorized {
return "", "", errorutil.New("Invalid API Key or API Key not configured, Create one for free at https://cloud.projectdiscovery.io/") return "", "", errkit.Newf("Invalid API Key or API Key not configured, Create one for free at https://cloud.projectdiscovery.io/")
} }
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body) body, _ := io.ReadAll(resp.Body)
return "", "", errorutil.New("API returned status code %d: %s", resp.StatusCode, string(body)) return "", "", errkit.Newf("API returned status code %d: %s", resp.StatusCode, string(body))
} }
var result AITemplateResponse var result AITemplateResponse
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
return "", "", errorutil.New("Failed to decode API response: %v", err) return "", "", errkit.Newf("Failed to decode API response: %v", err)
} }
if result.TemplateID == "" || result.Completion == "" { if result.TemplateID == "" || result.Completion == "" {
return "", "", errorutil.New("Failed to generate template") return "", "", errkit.Newf("Failed to generate template")
} }
return result.Completion, result.TemplateID, nil return result.Completion, result.TemplateID, nil
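The two recurring patterns in this file are the deferred body-close wrapped in a closure (so the discarded error is explicit and linters stay quiet) and returning formatted errors to the caller instead of logging them. The following is an illustrative, standalone sketch of both patterns, not part of the commit; it uses only the standard library (fmt.Errorf stands in for errkit.Newf, net/http for retryablehttp):

package main

import (
	"fmt"
	"io"
	"net/http"
)

// fetchBody returns the response body or a formatted error.
func fetchBody(url string) ([]byte, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, fmt.Errorf("failed to send HTTP request: %w", err)
	}
	// close in a deferred closure so the ignored error is explicit
	defer func() {
		_ = resp.Body.Close()
	}()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("API returned status code %d: %s", resp.StatusCode, string(body))
	}
	return io.ReadAll(resp.Body)
}

func main() {
	if body, err := fetchBody("https://example.com"); err == nil {
		fmt.Printf("read %d bytes\n", len(body))
	}
}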

View File

@@ -7,7 +7,6 @@ import (
	"os"
	"sort"
	"strings"
-	"sync"

	"github.com/logrusorgru/aurora"
	"github.com/pkg/errors"

@@ -18,16 +17,20 @@ import (
	"github.com/projectdiscovery/nuclei/v3/pkg/keys"
	"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
	"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
+	"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate"
	"github.com/projectdiscovery/nuclei/v3/pkg/templates"
	templateTypes "github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
	"github.com/projectdiscovery/nuclei/v3/pkg/types"
	"github.com/projectdiscovery/nuclei/v3/pkg/utils/stats"
	"github.com/projectdiscovery/nuclei/v3/pkg/workflows"
	"github.com/projectdiscovery/retryablehttp-go"
-	errorutil "github.com/projectdiscovery/utils/errors"
+	"github.com/projectdiscovery/utils/errkit"
+	mapsutil "github.com/projectdiscovery/utils/maps"
	sliceutil "github.com/projectdiscovery/utils/slice"
	stringsutil "github.com/projectdiscovery/utils/strings"
+	syncutil "github.com/projectdiscovery/utils/sync"
	urlutil "github.com/projectdiscovery/utils/url"
+	"github.com/rs/xid"
)

const (
@@ -65,7 +68,8 @@ type Config struct {
	IncludeConditions []string

	Catalog         catalog.Catalog
-	ExecutorOptions protocols.ExecutorOptions
+	ExecutorOptions *protocols.ExecutorOptions
+	Logger          *gologger.Logger
}

// Store is a storage for loaded nuclei templates

@@ -82,13 +86,15 @@ type Store struct {
	preprocessor templates.Preprocessor

+	logger *gologger.Logger
+
	// NotFoundCallback is called for each not found template
	// This overrides error handling for not found templates
	NotFoundCallback func(template string) bool
}

// NewConfig returns a new loader config
-func NewConfig(options *types.Options, catalog catalog.Catalog, executerOpts protocols.ExecutorOptions) *Config {
+func NewConfig(options *types.Options, catalog catalog.Catalog, executerOpts *protocols.ExecutorOptions) *Config {
	loaderConfig := Config{
		Templates: options.Templates,
		Workflows: options.Workflows,

@@ -111,6 +117,7 @@ func NewConfig(options *types.Options, catalog catalog.Catalog, executerOpts pro
		Catalog:          catalog,
		ExecutorOptions:  executerOpts,
		AITemplatePrompt: options.AITemplatePrompt,
+		Logger:           options.Logger,
	}
	loaderConfig.RemoteTemplateDomainList = append(loaderConfig.RemoteTemplateDomainList, TrustedTemplateDomains...)
	return &loaderConfig

@@ -145,6 +152,7 @@ func New(cfg *Config) (*Store, error) {
		}, cfg.Catalog),
		finalTemplates: cfg.Templates,
		finalWorkflows: cfg.Workflows,
+		logger:         cfg.Logger,
	}

	// Do a check to see if we have URLs in templates flag, if so
@@ -231,13 +239,15 @@ func (store *Store) ReadTemplateFromURI(uri string, remote bool) ([]byte, error)
		uri = handleTemplatesEditorURLs(uri)
		remoteTemplates, _, err := getRemoteTemplatesAndWorkflows([]string{uri}, nil, store.config.RemoteTemplateDomainList)
		if err != nil || len(remoteTemplates) == 0 {
-			return nil, errorutil.NewWithErr(err).Msgf("Could not load template %s: got %v", uri, remoteTemplates)
+			return nil, errkit.Wrapf(err, "Could not load template %s: got %v", uri, remoteTemplates)
		}
		resp, err := retryablehttp.Get(remoteTemplates[0])
		if err != nil {
			return nil, err
		}
-		defer resp.Body.Close()
+		defer func() {
+			_ = resp.Body.Close()
+		}()
		return io.ReadAll(resp.Body)
	} else {
		return os.ReadFile(uri)

@@ -293,11 +303,11 @@ func (store *Store) LoadTemplatesOnlyMetadata() error {
			if strings.Contains(err.Error(), templates.ErrExcluded.Error()) {
				stats.Increment(templates.TemplatesExcludedStats)
				if config.DefaultConfig.LogAllEvents {
-					gologger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
+					store.logger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
				}
				continue
			}
-			gologger.Warning().Msg(err.Error())
+			store.logger.Warning().Msg(err.Error())
		}
	}

	parserItem, ok := store.config.ExecutorOptions.Parser.(*templates.Parser)

@@ -306,6 +316,8 @@ func (store *Store) LoadTemplatesOnlyMetadata() error {
	}
	templatesCache := parserItem.Cache()

+	loadedTemplateIDs := mapsutil.NewSyncLockMap[string, struct{}]()
+
	for templatePath := range validPaths {
		template, _, _ := templatesCache.Has(templatePath)

@@ -330,6 +342,12 @@ func (store *Store) LoadTemplatesOnlyMetadata() error {
		}

		if template != nil {
+			if loadedTemplateIDs.Has(template.ID) {
+				store.logger.Debug().Msgf("Skipping duplicate template ID '%s' from path '%s'", template.ID, templatePath)
+				continue
+			}
+			_ = loadedTemplateIDs.Set(template.ID, struct{}{})
+
			template.Path = templatePath
			store.templates = append(store.templates, template)
		}
@@ -356,15 +374,13 @@ func (store *Store) ValidateTemplates() error {
func (store *Store) areWorkflowsValid(filteredWorkflowPaths map[string]struct{}) bool {
	return store.areWorkflowOrTemplatesValid(filteredWorkflowPaths, true, func(templatePath string, tagFilter *templates.TagFilter) (bool, error) {
-		return false, nil
-		// return store.config.ExecutorOptions.Parser.LoadWorkflow(templatePath, store.config.Catalog)
+		return store.config.ExecutorOptions.Parser.LoadWorkflow(templatePath, store.config.Catalog)
	})
}

func (store *Store) areTemplatesValid(filteredTemplatePaths map[string]struct{}) bool {
	return store.areWorkflowOrTemplatesValid(filteredTemplatePaths, false, func(templatePath string, tagFilter *templates.TagFilter) (bool, error) {
-		return false, nil
-		// return store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog)
+		return store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog)
	})
}

@@ -373,7 +389,7 @@ func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string
	for templatePath := range filteredTemplatePaths {
		if _, err := load(templatePath, store.tagFilter); err != nil {
-			if isParsingError("Error occurred loading template %s: %s\n", templatePath, err) {
+			if isParsingError(store, "Error occurred loading template %s: %s\n", templatePath, err) {
				areTemplatesValid = false
				continue
			}

@@ -381,7 +397,7 @@ func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string
		template, err := templates.Parse(templatePath, store.preprocessor, store.config.ExecutorOptions)
		if err != nil {
-			if isParsingError("Error occurred parsing template %s: %s\n", templatePath, err) {
+			if isParsingError(store, "Error occurred parsing template %s: %s\n", templatePath, err) {
				areTemplatesValid = false
				continue
			}

@@ -406,7 +422,7 @@ func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string
			// TODO: until https://github.com/projectdiscovery/nuclei-templates/issues/11324 is deployed
			// disable strict validation to allow GH actions to run
			// areTemplatesValid = false
-			gologger.Warning().Msgf("Found duplicate template ID during validation '%s' => '%s': %s\n", templatePath, existingTemplatePath, template.ID)
+			store.logger.Warning().Msgf("Found duplicate template ID during validation '%s' => '%s': %s\n", templatePath, existingTemplatePath, template.ID)
		}
		if !isWorkflow && len(template.Workflows) > 0 {
			continue

@@ -429,7 +445,7 @@ func areWorkflowTemplatesValid(store *Store, workflows []*workflows.WorkflowTemp
		}
		_, err := store.config.Catalog.GetTemplatePath(workflow.Template)
		if err != nil {
-			if isParsingError("Error occurred loading template %s: %s\n", workflow.Template, err) {
+			if isParsingError(store, "Error occurred loading template %s: %s\n", workflow.Template, err) {
				return false
			}
		}

@@ -437,14 +453,14 @@ func areWorkflowTemplatesValid(store *Store, workflows []*workflows.WorkflowTemp
	return true
}

-func isParsingError(message string, template string, err error) bool {
+func isParsingError(store *Store, message string, template string, err error) bool {
	if errors.Is(err, templates.ErrExcluded) {
		return false
	}
	if errors.Is(err, templates.ErrCreateTemplateExecutor) {
		return false
	}
-	gologger.Error().Msgf(message, template, err)
+	store.logger.Error().Msgf(message, template, err)
	return true
}

@@ -463,12 +479,12 @@ func (store *Store) LoadWorkflows(workflowsList []string) []*templates.Template
	for workflowPath := range workflowPathMap {
		loaded, err := store.config.ExecutorOptions.Parser.LoadWorkflow(workflowPath, store.config.Catalog)
		if err != nil {
-			gologger.Warning().Msgf("Could not load workflow %s: %s\n", workflowPath, err)
+			store.logger.Warning().Msgf("Could not load workflow %s: %s\n", workflowPath, err)
		}

		if loaded {
			parsed, err := templates.Parse(workflowPath, store.preprocessor, store.config.ExecutorOptions)
			if err != nil {
-				gologger.Warning().Msgf("Could not parse workflow %s: %s\n", workflowPath, err)
+				store.logger.Warning().Msgf("Could not parse workflow %s: %s\n", workflowPath, err)
			} else if parsed != nil {
				loadedWorkflows = append(loadedWorkflows, parsed)
			}
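A recurring change in this file is swapping the package-global gologger calls for a logger stored on the Store itself (injected through Config.Logger). As a rough illustration of that dependency-injection shape, not the project's actual API, here is a minimal sketch using the standard library logger; the type and method names are hypothetical:

package main

import (
	"log"
	"os"
)

// store keeps the logger it was configured with instead of reaching for a
// package-global, so embedders and tests can route or silence output per run.
type store struct {
	logger *log.Logger
}

func newStore(logger *log.Logger) *store {
	if logger == nil {
		logger = log.New(os.Stderr, "", log.LstdFlags) // sensible default
	}
	return &store{logger: logger}
}

func (s *store) loadTemplate(path string) {
	s.logger.Printf("could not parse template %s: (example only)", path)
}

func main() {
	quiet := log.New(os.Stdout, "[loader] ", 0)
	newStore(quiet).loadTemplate("cves/example.yaml")
}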
@@ -485,8 +501,16 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
	templatePathMap := store.pathFilter.Match(includedTemplates)

	loadedTemplates := sliceutil.NewSyncSlice[*templates.Template]()
+	loadedTemplateIDs := mapsutil.NewSyncLockMap[string, struct{}]()

	loadTemplate := func(tmpl *templates.Template) {
+		if loadedTemplateIDs.Has(tmpl.ID) {
+			store.logger.Debug().Msgf("Skipping duplicate template ID '%s' from path '%s'", tmpl.ID, tmpl.Path)
+			return
+		}
+		_ = loadedTemplateIDs.Set(tmpl.ID, struct{}{})
+
		loadedTemplates.Append(tmpl)
		// increment signed/unsigned counters
		if tmpl.Verified {

@@ -500,10 +524,22 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
		}
	}

-	var wgLoadTemplates sync.WaitGroup
+	wgLoadTemplates, errWg := syncutil.New(syncutil.WithSize(50))
+	if errWg != nil {
+		panic("could not create wait group")
+	}
+
+	if store.config.ExecutorOptions.Options.ExecutionId == "" {
+		store.config.ExecutorOptions.Options.ExecutionId = xid.New().String()
+	}
+	dialers := protocolstate.GetDialersWithId(store.config.ExecutorOptions.Options.ExecutionId)
+	if dialers == nil {
+		panic("dialers with executionId " + store.config.ExecutorOptions.Options.ExecutionId + " not found")
+	}

	for templatePath := range templatePathMap {
-		wgLoadTemplates.Add(1)
+		wgLoadTemplates.Add()
		go func(templatePath string) {
			defer wgLoadTemplates.Done()

@@ -515,7 +551,7 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
				if !errors.Is(err, templates.ErrIncompatibleWithOfflineMatching) {
					stats.Increment(templates.RuntimeWarningsStats)
				}
-				gologger.Warning().Msgf("Could not parse template %s: %s\n", templatePath, err)
+				store.logger.Warning().Msgf("Could not parse template %s: %s\n", templatePath, err)
			} else if parsed != nil {
				if !parsed.Verified && store.config.ExecutorOptions.Options.DisableUnsignedTemplates {
					// skip unverified templates when prompted to

@@ -544,19 +580,26 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
					// check if the template is a DAST template
					// also allow global matchers template to be loaded
					if parsed.IsFuzzing() || parsed.Options.GlobalMatchers != nil && parsed.Options.GlobalMatchers.HasMatchers() {
-						loadTemplate(parsed)
+						if len(parsed.RequestsHeadless) > 0 && !store.config.ExecutorOptions.Options.Headless {
+							stats.Increment(templates.ExcludedHeadlessTmplStats)
+							if config.DefaultConfig.LogAllEvents {
+								store.logger.Print().Msgf("[%v] Headless flag is required for headless template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
+							}
+						} else {
+							loadTemplate(parsed)
+						}
					}
				} else if len(parsed.RequestsHeadless) > 0 && !store.config.ExecutorOptions.Options.Headless {
					// donot include headless template in final list if headless flag is not set
					stats.Increment(templates.ExcludedHeadlessTmplStats)
					if config.DefaultConfig.LogAllEvents {
-						gologger.Print().Msgf("[%v] Headless flag is required for headless template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
+						store.logger.Print().Msgf("[%v] Headless flag is required for headless template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
					}
				} else if len(parsed.RequestsCode) > 0 && !store.config.ExecutorOptions.Options.EnableCodeTemplates {
					// donot include 'Code' protocol custom template in final list if code flag is not set
					stats.Increment(templates.ExcludedCodeTmplStats)
					if config.DefaultConfig.LogAllEvents {
-						gologger.Print().Msgf("[%v] Code flag is required for code protocol template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
+						store.logger.Print().Msgf("[%v] Code flag is required for code protocol template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
					}
				} else if len(parsed.RequestsCode) > 0 && !parsed.Verified && len(parsed.Workflows) == 0 {
					// donot include unverified 'Code' protocol custom template in final list

@@ -564,12 +607,12 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
					// these will be skipped so increment skip counter
					stats.Increment(templates.SkippedUnsignedStats)
					if config.DefaultConfig.LogAllEvents {
-						gologger.Print().Msgf("[%v] Tampered/Unsigned template at %v.\n", aurora.Yellow("WRN").String(), templatePath)
+						store.logger.Print().Msgf("[%v] Tampered/Unsigned template at %v.\n", aurora.Yellow("WRN").String(), templatePath)
					}
				} else if parsed.IsFuzzing() && !store.config.ExecutorOptions.Options.DAST {
					stats.Increment(templates.ExludedDastTmplStats)
					if config.DefaultConfig.LogAllEvents {
-						gologger.Print().Msgf("[%v] -dast flag is required for DAST template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
+						store.logger.Print().Msgf("[%v] -dast flag is required for DAST template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
					}
				} else {
					loadTemplate(parsed)

@@ -580,11 +623,11 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
				if strings.Contains(err.Error(), templates.ErrExcluded.Error()) {
					stats.Increment(templates.TemplatesExcludedStats)
					if config.DefaultConfig.LogAllEvents {
-						gologger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
+						store.logger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
					}
					return
				}
-				gologger.Warning().Msg(err.Error())
+				store.logger.Warning().Msg(err.Error())
			}
		}(templatePath)
	}

@@ -640,7 +683,7 @@ func workflowContainsProtocol(workflow []*workflows.WorkflowTemplate) bool {
func (s *Store) logErroredTemplates(erred map[string]error) {
	for template, err := range erred {
		if s.NotFoundCallback == nil || !s.NotFoundCallback(template) {
-			gologger.Error().Msgf("Could not find template '%s': %s", template, err)
+			s.logger.Error().Msgf("Could not find template '%s': %s", template, err)
		}
	}
}
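The duplicate-ID guard added to both loading paths above is just a concurrency-safe "seen" set keyed by template ID. A minimal standard-library sketch of the same idea follows; it is illustrative only, the commit itself uses mapsutil.NewSyncLockMap for this:

package main

import (
	"fmt"
	"sync"
)

// seenSet is a tiny concurrency-safe set of already-loaded IDs.
type seenSet struct {
	mu   sync.Mutex
	keys map[string]struct{}
}

func newSeenSet() *seenSet {
	return &seenSet{keys: make(map[string]struct{})}
}

// markFirst returns true only the first time an ID is observed, so callers
// can skip duplicate template IDs coming from different paths.
func (s *seenSet) markFirst(id string) bool {
	s.mu.Lock()
	defer s.mu.Unlock()
	if _, ok := s.keys[id]; ok {
		return false
	}
	s.keys[id] = struct{}{}
	return true
}

func main() {
	ids := []string{"CVE-2024-0001", "tech-detect", "CVE-2024-0001"}
	set := newSeenSet()
	for _, id := range ids {
		if !set.markFirst(id) {
			fmt.Printf("skipping duplicate template ID %q\n", id)
			continue
		}
		fmt.Printf("loading %q\n", id)
	}
}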

View File

@@ -5,13 +5,16 @@ import (
	"fmt"
	"net/url"
	"strings"
+	"sync"

	"github.com/pkg/errors"
	"github.com/projectdiscovery/nuclei/v3/pkg/templates/extensions"
	"github.com/projectdiscovery/nuclei/v3/pkg/utils"
	"github.com/projectdiscovery/retryablehttp-go"
+	sliceutil "github.com/projectdiscovery/utils/slice"
	stringsutil "github.com/projectdiscovery/utils/strings"
+	syncutil "github.com/projectdiscovery/utils/sync"
)

type ContentType string

@@ -28,64 +31,73 @@ type RemoteContent struct {
}

func getRemoteTemplatesAndWorkflows(templateURLs, workflowURLs, remoteTemplateDomainList []string) ([]string, []string, error) {
-	remoteContentChannel := make(chan RemoteContent)
+	var (
+		err   error
+		muErr sync.Mutex
+	)
+	remoteTemplateList := sliceutil.NewSyncSlice[string]()
+	remoteWorkFlowList := sliceutil.NewSyncSlice[string]()

-	for _, templateURL := range templateURLs {
-		go getRemoteContent(templateURL, remoteTemplateDomainList, remoteContentChannel, Template)
-	}
-	for _, workflowURL := range workflowURLs {
-		go getRemoteContent(workflowURL, remoteTemplateDomainList, remoteContentChannel, Workflow)
-	}
+	awg, errAwg := syncutil.New(syncutil.WithSize(50))
+	if errAwg != nil {
+		return nil, nil, errAwg
+	}

-	var remoteTemplateList []string
-	var remoteWorkFlowList []string
-	var err error
-	for i := 0; i < (len(templateURLs) + len(workflowURLs)); i++ {
-		remoteContent := <-remoteContentChannel
+	loadItem := func(URL string, contentType ContentType) {
+		defer awg.Done()
+
+		remoteContent := getRemoteContent(URL, remoteTemplateDomainList, contentType)
		if remoteContent.Error != nil {
+			muErr.Lock()
			if err != nil {
				err = errors.New(remoteContent.Error.Error() + ": " + err.Error())
			} else {
				err = remoteContent.Error
			}
+			muErr.Unlock()
		} else {
-			if remoteContent.Type == Template {
-				remoteTemplateList = append(remoteTemplateList, remoteContent.Content...)
-			} else if remoteContent.Type == Workflow {
-				remoteWorkFlowList = append(remoteWorkFlowList, remoteContent.Content...)
+			switch remoteContent.Type {
+			case Template:
+				remoteTemplateList.Append(remoteContent.Content...)
+			case Workflow:
+				remoteWorkFlowList.Append(remoteContent.Content...)
			}
		}
	}
-	return remoteTemplateList, remoteWorkFlowList, err
+
+	for _, templateURL := range templateURLs {
+		awg.Add()
+		go loadItem(templateURL, Template)
+	}
+	for _, workflowURL := range workflowURLs {
+		awg.Add()
+		go loadItem(workflowURL, Workflow)
+	}
+	awg.Wait()
+
+	return remoteTemplateList.Slice, remoteWorkFlowList.Slice, err
}

-func getRemoteContent(URL string, remoteTemplateDomainList []string, remoteContentChannel chan<- RemoteContent, contentType ContentType) {
+func getRemoteContent(URL string, remoteTemplateDomainList []string, contentType ContentType) RemoteContent {
	if err := validateRemoteTemplateURL(URL, remoteTemplateDomainList); err != nil {
-		remoteContentChannel <- RemoteContent{
-			Error: err,
-		}
-		return
+		return RemoteContent{Error: err}
	}
	if strings.HasPrefix(URL, "http") && stringsutil.HasSuffixAny(URL, extensions.YAML) {
-		remoteContentChannel <- RemoteContent{
+		return RemoteContent{
			Content: []string{URL},
			Type:    contentType,
		}
-		return
	}

	response, err := retryablehttp.DefaultClient().Get(URL)
	if err != nil {
-		remoteContentChannel <- RemoteContent{
-			Error: err,
-		}
-		return
+		return RemoteContent{Error: err}
	}
-	defer response.Body.Close()
+	defer func() {
+		_ = response.Body.Close()
+	}()
	if response.StatusCode < 200 || response.StatusCode > 299 {
-		remoteContentChannel <- RemoteContent{
-			Error: fmt.Errorf("get \"%s\": unexpect status %d", URL, response.StatusCode),
-		}
-		return
+		return RemoteContent{Error: fmt.Errorf("get \"%s\": unexpect status %d", URL, response.StatusCode)}
	}

	scanner := bufio.NewScanner(response.Body)

@@ -97,23 +109,17 @@ func getRemoteContent(URL string, remoteTemplateDomainList []string, remoteConte
		}
		if utils.IsURL(text) {
			if err := validateRemoteTemplateURL(text, remoteTemplateDomainList); err != nil {
-				remoteContentChannel <- RemoteContent{
-					Error: err,
-				}
-				return
+				return RemoteContent{Error: err}
			}
		}
		templateList = append(templateList, text)
	}

	if err := scanner.Err(); err != nil {
-		remoteContentChannel <- RemoteContent{
-			Error: errors.Wrap(err, "get \"%s\""),
-		}
-		return
+		return RemoteContent{Error: errors.Wrap(err, "get \"%s\"")}
	}

-	remoteContentChannel <- RemoteContent{
+	return RemoteContent{
		Content: templateList,
		Type:    contentType,
	}
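The refactor above drops the unbuffered result channel in favor of a size-bounded wait group (syncutil.New(syncutil.WithSize(50))) plus thread-safe slices, so fetches run with capped concurrency and results are collected without a dedicated consumer loop. A rough standard-library equivalent of that bounded fan-out is sketched below; fetchAll and its fetch callback are hypothetical stand-ins, not code from the commit:

package main

import (
	"fmt"
	"sync"
)

// fetchAll fans work out to goroutines while a buffered channel caps how many
// run at once, collecting results and the first error under a mutex.
func fetchAll(urls []string, limit int, fetch func(string) (string, error)) ([]string, error) {
	var (
		wg       sync.WaitGroup
		mu       sync.Mutex
		results  []string
		firstErr error
	)
	sem := make(chan struct{}, limit) // bounded concurrency

	for _, u := range urls {
		wg.Add(1)
		sem <- struct{}{} // acquire a slot before launching
		go func(u string) {
			defer wg.Done()
			defer func() { <-sem }() // release the slot

			content, err := fetch(u)
			mu.Lock()
			defer mu.Unlock()
			if err != nil {
				if firstErr == nil {
					firstErr = err
				}
				return
			}
			results = append(results, content)
		}(u)
	}
	wg.Wait()
	return results, firstErr
}

func main() {
	urls := []string{"https://example.com/a.yaml", "https://example.com/b.yaml"}
	out, err := fetchAll(urls, 2, func(u string) (string, error) { return u, nil })
	fmt.Println(out, err)
}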

View File

@@ -1,6 +1,7 @@
package core

import (
+	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/nuclei/v3/pkg/output"
	"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v3/pkg/types"

@@ -17,14 +18,16 @@ import (
type Engine struct {
	workPool     *WorkPool
	options      *types.Options
-	executerOpts protocols.ExecutorOptions
+	executerOpts *protocols.ExecutorOptions
	Callback     func(*output.ResultEvent) // Executed on results
+	Logger       *gologger.Logger
}

// New returns a new Engine instance
func New(options *types.Options) *Engine {
	engine := &Engine{
		options: options,
+		Logger:  options.Logger,
	}
	engine.workPool = engine.GetWorkPool()
	return engine

@@ -47,12 +50,12 @@ func (e *Engine) GetWorkPool() *WorkPool {
// SetExecuterOptions sets the executer options for the engine. This is required
// before using the engine to perform any execution.
-func (e *Engine) SetExecuterOptions(options protocols.ExecutorOptions) {
+func (e *Engine) SetExecuterOptions(options *protocols.ExecutorOptions) {
	e.executerOpts = options
}

// ExecuterOptions returns protocols.ExecutorOptions for nuclei engine.
-func (e *Engine) ExecuterOptions() protocols.ExecutorOptions {
+func (e *Engine) ExecuterOptions() *protocols.ExecutorOptions {
	return e.executerOpts
}
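Switching ExecutorOptions from a value to a pointer means every component holding it now shares one struct, so later mutations (such as the ExecutionId assigned in the loader) are visible everywhere instead of being lost in a copy. A tiny illustrative sketch of the difference, with a hypothetical Options type standing in for protocols.ExecutorOptions:

package main

import "fmt"

// Options is a stand-in for protocols.ExecutorOptions.
type Options struct {
	ExecutionId string
}

func main() {
	byValue := Options{}
	copyOfValue := byValue // value semantics: independent copy
	copyOfValue.ExecutionId = "abc123"
	fmt.Println("value copy shares the update:", byValue.ExecutionId == copyOfValue.ExecutionId) // false

	shared := &Options{}
	alias := shared // pointer semantics: both names refer to the same struct
	alias.ExecutionId = "abc123"
	fmt.Println("pointer alias shares the update:", shared.ExecutionId == alias.ExecutionId) // true
}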

View File

@@ -5,7 +5,6 @@ import (
	"sync"
	"sync/atomic"

-	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
	"github.com/projectdiscovery/nuclei/v3/pkg/output"
	"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"

@@ -50,7 +49,7 @@ func (e *Engine) ExecuteScanWithOpts(ctx context.Context, templatesList []*templ
	totalReqAfterClustering := getRequestCount(finalTemplates) * int(target.Count())

	if !noCluster && totalReqAfterClustering < totalReqBeforeCluster {
-		gologger.Info().Msgf("Templates clustered: %d (Reduced %d Requests)", clusterCount, totalReqBeforeCluster-totalReqAfterClustering)
+		e.Logger.Info().Msgf("Templates clustered: %d (Reduced %d Requests)", clusterCount, totalReqBeforeCluster-totalReqAfterClustering)
	}

	// 0 matches means no templates were found in the directory

@@ -110,6 +109,8 @@ func (e *Engine) executeTemplateSpray(ctx context.Context, templatesList []*temp
	defer wp.Wait()

	for _, template := range templatesList {
+		template := template
+
		select {
		case <-ctx.Done():
			return results
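The added `template := template` rebinds the loop variable before a goroutine captures it; before Go 1.22 the range variable was shared across iterations, so goroutines launched in the loop could all observe the last template. A minimal standalone sketch of the pattern (illustrative names only):

package main

import (
	"fmt"
	"sync"
)

func main() {
	items := []string{"tpl-a", "tpl-b", "tpl-c"}
	var wg sync.WaitGroup

	for _, item := range items {
		item := item // rebind per iteration so each goroutine sees its own value (required before Go 1.22)
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println("processing", item)
		}()
	}
	wg.Wait()
}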

View File

@@ -4,9 +4,10 @@ import (
	"context"
	"sync"
	"sync/atomic"
+	"time"

-	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
+	"github.com/projectdiscovery/nuclei/v3/pkg/output"
	"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
	"github.com/projectdiscovery/nuclei/v3/pkg/scan"
	"github.com/projectdiscovery/nuclei/v3/pkg/templates"

@@ -38,7 +39,7 @@ func (e *Engine) executeAllSelfContained(ctx context.Context, alltemplates []*te
				match, err = template.Executer.Execute(ctx)
			}
			if err != nil {
-				gologger.Warning().Msgf("[%s] Could not execute step: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
+				e.options.Logger.Warning().Msgf("[%s] Could not execute step (self-contained): %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
			}
			results.CompareAndSwap(false, match)
		}(v)

@@ -47,8 +48,15 @@ func (e *Engine) executeAllSelfContained(ctx context.Context, alltemplates []*te

// executeTemplateWithTargets executes a given template on x targets (with a internal targetpool(i.e concurrency))
func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templates.Template, target provider.InputProvider, results *atomic.Bool) {
-	// this is target pool i.e max target to execute
-	wg := e.workPool.InputPool(template.Type())
+	if e.workPool == nil {
+		e.workPool = e.GetWorkPool()
+	}
+	// Bounded worker pool using input concurrency
+	pool := e.workPool.InputPool(template.Type())
+	workerCount := 1
+	if pool != nil && pool.Size > 0 {
+		workerCount = pool.Size
+	}

	var (
		index uint32
@@ -77,6 +85,41 @@ func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templ
		currentInfo.Unlock()
	}

+	// task represents a single target execution unit
+	type task struct {
+		index uint32
+		skip  bool
+		value *contextargs.MetaInput
+	}
+
+	tasks := make(chan task)
+
+	var workersWg sync.WaitGroup
+	workersWg.Add(workerCount)
+	for i := 0; i < workerCount; i++ {
+		go func() {
+			defer workersWg.Done()
+			for t := range tasks {
+				func() {
+					defer cleanupInFlight(t.index)
+
+					select {
+					case <-ctx.Done():
+						return
+					default:
+					}
+
+					if t.skip {
+						return
+					}
+
+					match, err := e.executeTemplateOnInput(ctx, template, t.value)
+					if err != nil {
+						e.options.Logger.Warning().Msgf("[%s] Could not execute step on %s: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), t.value.Input, err)
+					}
+					results.CompareAndSwap(false, match)
+				}()
+			}
+		}()
+	}
+
	target.Iterate(func(scannedValue *contextargs.MetaInput) bool {
		select {
		case <-ctx.Done():
@@ -88,13 +131,13 @@ func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templ
			// skips indexes lower than the minimum in-flight at interruption time
			var skip bool
			if resumeFromInfo.Completed { // the template was completed
-				gologger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Template already completed\n", template.ID, scannedValue.Input)
+				e.options.Logger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Template already completed", template.ID, scannedValue.Input)
				skip = true
			} else if index < resumeFromInfo.SkipUnder { // index lower than the sliding window (bulk-size)
-				gologger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Target already processed\n", template.ID, scannedValue.Input)
+				e.options.Logger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Target already processed", template.ID, scannedValue.Input)
				skip = true
			} else if _, isInFlight := resumeFromInfo.InFlight[index]; isInFlight { // the target wasn't completed successfully
-				gologger.Debug().Msgf("[%s] Repeating \"%s\": Resume - Target wasn't completed\n", template.ID, scannedValue.Input)
+				e.options.Logger.Debug().Msgf("[%s] Repeating \"%s\": Resume - Target wasn't completed", template.ID, scannedValue.Input)
				// skip is already false, but leaving it here for clarity
				skip = false
			} else if index > resumeFromInfo.DoAbove { // index above the sliding window (bulk-size)

@@ -108,46 +151,32 @@ func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templ
		// Skip if the host has had errors
		if e.executerOpts.HostErrorsCache != nil && e.executerOpts.HostErrorsCache.Check(e.executerOpts.ProtocolType.String(), contextargs.NewWithMetaInput(ctx, scannedValue)) {
+			skipEvent := &output.ResultEvent{
+				TemplateID:    template.ID,
+				TemplatePath:  template.Path,
+				Info:          template.Info,
+				Type:          e.executerOpts.ProtocolType.String(),
+				Host:          scannedValue.Input,
+				MatcherStatus: false,
+				Error:         "host was skipped as it was found unresponsive",
+				Timestamp:     time.Now(),
+			}
+			if e.Callback != nil {
+				e.Callback(skipEvent)
+			} else if e.executerOpts.Output != nil {
+				_ = e.executerOpts.Output.Write(skipEvent)
+			}
			return true
		}

-		wg.Add()
-		go func(index uint32, skip bool, value *contextargs.MetaInput) {
-			defer wg.Done()
-			defer cleanupInFlight(index)
-			if skip {
-				return
-			}
-
-			var match bool
-			var err error
-			ctxArgs := contextargs.New(ctx)
-			ctxArgs.MetaInput = value
-			ctx := scan.NewScanContext(ctx, ctxArgs)
-			switch template.Type() {
-			case types.WorkflowProtocol:
-				match = e.executeWorkflow(ctx, template.CompiledWorkflow)
-			default:
-				if e.Callback != nil {
-					if results, err := template.Executer.ExecuteWithResults(ctx); err == nil {
-						for _, result := range results {
-							e.Callback(result)
-						}
-					}
-					match = true
-				} else {
-					match, err = template.Executer.Execute(ctx)
-				}
-			}
-			if err != nil {
-				gologger.Warning().Msgf("[%s] Could not execute step: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
-			}
-			results.CompareAndSwap(false, match)
-		}(index, skip, scannedValue)
+		tasks <- task{index: index, skip: skip, value: scannedValue}
		index++
		return true
	})
-	wg.Wait()
+	close(tasks)
+	workersWg.Wait()

	// on completion marks the template as completed
	currentInfo.Lock()
@@ -185,30 +214,35 @@ func (e *Engine) executeTemplatesOnTarget(ctx context.Context, alltemplates []*t
		go func(template *templates.Template, value *contextargs.MetaInput, wg *syncutil.AdaptiveWaitGroup) {
			defer wg.Done()

-			var match bool
-			var err error
-			ctxArgs := contextargs.New(ctx)
-			ctxArgs.MetaInput = value
-			ctx := scan.NewScanContext(ctx, ctxArgs)
-			switch template.Type() {
-			case types.WorkflowProtocol:
-				match = e.executeWorkflow(ctx, template.CompiledWorkflow)
-			default:
-				if e.Callback != nil {
-					if results, err := template.Executer.ExecuteWithResults(ctx); err == nil {
-						for _, result := range results {
-							e.Callback(result)
-						}
-					}
-					match = true
-				} else {
-					match, err = template.Executer.Execute(ctx)
-				}
-			}
+			match, err := e.executeTemplateOnInput(ctx, template, value)
			if err != nil {
-				gologger.Warning().Msgf("[%s] Could not execute step: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
+				e.options.Logger.Warning().Msgf("[%s] Could not execute step on %s: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), value.Input, err)
			}
			results.CompareAndSwap(false, match)
		}(tpl, target, sg)
	}
}

+// executeTemplateOnInput performs template execution for a single input and returns match status and error
+func (e *Engine) executeTemplateOnInput(ctx context.Context, template *templates.Template, value *contextargs.MetaInput) (bool, error) {
+	ctxArgs := contextargs.New(ctx)
+	ctxArgs.MetaInput = value
+	scanCtx := scan.NewScanContext(ctx, ctxArgs)
+
+	switch template.Type() {
+	case types.WorkflowProtocol:
+		return e.executeWorkflow(scanCtx, template.CompiledWorkflow), nil
+	default:
+		if e.Callback != nil {
+			results, err := template.Executer.ExecuteWithResults(scanCtx)
+			if err != nil {
+				return false, err
+			}
+			for _, result := range results {
+				e.Callback(result)
+			}
+			return len(results) > 0, nil
+		}
+		return template.Executer.Execute(scanCtx)
+	}
+}
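The change above replaces the goroutine-per-target model with a fixed pool of workers draining a tasks channel: the producer enqueues targets, closes the channel when iteration ends, and the workers exit once the channel is drained. Below is a self-contained sketch of that pattern, not the engine's actual code; runBoundedWorkers and processTarget are hypothetical stand-ins for executeTemplateWithTargets and executeTemplateOnInput:

package main

import (
	"context"
	"fmt"
	"sync"
	"sync/atomic"
)

// runBoundedWorkers starts workerCount goroutines that drain a tasks channel;
// context cancellation makes workers skip remaining work while still draining
// so the producer never blocks on a full channel.
func runBoundedWorkers(ctx context.Context, targets []string, workerCount int, processTarget func(string) bool) bool {
	tasks := make(chan string)
	var anyMatch atomic.Bool

	var wg sync.WaitGroup
	wg.Add(workerCount)
	for i := 0; i < workerCount; i++ {
		go func() {
			defer wg.Done()
			for t := range tasks {
				select {
				case <-ctx.Done():
					continue // cancelled: keep draining without doing work
				default:
				}
				if processTarget(t) {
					anyMatch.CompareAndSwap(false, true)
				}
			}
		}()
	}

	for _, t := range targets {
		tasks <- t
	}
	close(tasks) // signal workers that no more tasks are coming
	wg.Wait()

	return anyMatch.Load()
}

func main() {
	targets := []string{"https://a.example", "https://b.example", "https://c.example"}
	matched := runBoundedWorkers(context.Background(), targets, 2, func(t string) bool {
		fmt.Println("scanning", t)
		return t == "https://b.example"
	})
	fmt.Println("any match:", matched)
}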

Some files were not shown because too many files have changed in this diff