Merge branch 'dev' into RDP-Enc-func

Mzack9999 2025-09-25 22:07:17 +02:00
commit 61bd0828dc
373 changed files with 10772 additions and 3223 deletions

View File

@ -0,0 +1,35 @@
{
"permissions": {
"allow": [
"Bash(find:*)",
"Bash(mkdir:*)",
"Bash(cp:*)",
"Bash(ls:*)",
"Bash(make:*)",
"Bash(go:*)",
"Bash(golangci-lint:*)",
"Bash(git merge:*)",
"Bash(git add:*)",
"Bash(git commit:*)",
"Bash(git push:*)",
"Bash(git pull:*)",
"Bash(git fetch:*)",
"Bash(git checkout:*)",
"WebFetch(*)",
"Write(*)",
"WebSearch(*)",
"MultiEdit(*)",
"Edit(*)",
"Bash(gh:*)",
"Bash(grep:*)",
"Bash(tree:*)",
"Bash(./nuclei:*)",
"WebFetch(domain:github.com)"
],
"deny": [
"Bash(make run:*)",
"Bash(./bin/nuclei:*)"
],
"defaultMode": "acceptEdits"
}
}

76
.github/DISCUSSION_TEMPLATE.md vendored Normal file
View File

@ -0,0 +1,76 @@
# Nuclei Discussion Guidelines
## Before Creating a Discussion
1. **Search existing discussions and issues** to avoid duplicates
2. **Check the documentation** and README first
3. **Browse the FAQ** and common questions
## Bug Reports in Discussions
When reporting a bug in [Q&A Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/q-a), please include:
### Required Information:
- **Clear title** with `[BUG]` prefix (e.g., "[BUG] Nuclei crashes when...")
- **Current behavior** - What's happening now?
- **Expected behavior** - What should happen instead?
- **Steps to reproduce** - Commands or actions that trigger the issue
- **Environment details**:
- OS and version
- Nuclei version (`nuclei -version`)
- Go version (if installed via `go install`)
- **Log output** - Run with `-verbose` or `-debug` for detailed logs
- **Redact sensitive information** - Remove target URLs, credentials, etc.
### After Discussion:
- Maintainers will review and validate the bug report
- Valid bugs will be converted to issues with proper labels and tracking
- Questions and misconfigurations will be resolved in the discussion
## Feature Requests in Discussions
When requesting a feature in [Ideas Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/ideas), please include:
### Required Information:
- **Clear title** with `[FEATURE]` prefix (e.g., "[FEATURE] Add support for...")
- **Feature description** - What do you want to be added?
- **Use case** - Why is this feature needed? What problem does it solve?
- **Implementation ideas** - If you have suggestions on how it could work
- **Alternatives considered** - What other solutions have you thought about?
### After Discussion:
- Community and maintainers will discuss the feasibility
- Popular and viable features will be converted to issues
- Similar features may be grouped together
- Rejected features will be explained in the discussion
## Getting Help
For general questions, troubleshooting, and "how-to" topics:
- Use [Q&A Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/q-a)
- Join the [Discord server](https://discord.gg/projectdiscovery) #nuclei channel
- Check existing discussions for similar questions
## Discussion to Issue Conversion Process
Only maintainers can convert discussions to issues. The process:
1. **Validation** - Maintainers review the discussion for completeness and validity
2. **Classification** - Determine if it's a bug, feature, enhancement, etc.
3. **Issue creation** - Create a properly formatted issue with appropriate labels
4. **Linking** - Link the issue back to the original discussion
5. **Resolution** - Mark the discussion as resolved or close it
This process ensures:
- High-quality issues that are actionable
- Proper triage and labeling
- Reduced noise in the issue tracker
- Community involvement in the validation process
## Why This Process?
- **Better organization** - Issues contain only validated, actionable items
- **Community input** - Discussions allow for community feedback before escalation
- **Quality control** - Maintainers ensure proper formatting and information
- **Reduced maintenance** - Fewer invalid or duplicate issues to manage
- **Clear separation** - Questions vs. actual bugs/features are clearly distinguished

View File

@ -2,14 +2,22 @@ blank_issues_enabled: false
contact_links:
- name: Ask a question / advice on using nuclei
url: https://github.com/projectdiscovery/nuclei/discussions/categories/q-a
about: Ask a question or request support for using nuclei
- name: 🐛 Report a Bug (Start with Discussion)
url: https://github.com/orgs/projectdiscovery/discussions/new?category=q-a
about: Start by reporting your issue in discussions for proper triage. Issues will be created after review to avoid duplicate/invalid reports.
- name: Share idea / feature to discuss for nuclei
url: https://github.com/projectdiscovery/nuclei/discussions/categories/ideas
about: Share idea / feature to discuss for nuclei
- name: 💡 Request a Feature (Start with Discussion)
url: https://github.com/orgs/projectdiscovery/discussions/new?category=ideas
about: Share your feature idea in discussions first. This helps validate and refine the request before creating an issue.
- name: Connect with PD Team (Discord)
- name: ❓ Ask Questions / Get Help
url: https://github.com/orgs/projectdiscovery/discussions
about: Get help and ask questions about using Nuclei. Many questions don't require issues.
- name: 🔍 Browse Existing Issues
url: https://github.com/projectdiscovery/nuclei/issues
about: Check existing issues to see if your problem has already been reported or is being worked on.
- name: 💬 Connect with PD Team (Discord)
url: https://discord.gg/projectdiscovery
about: Connect with PD Team for direct communication
about: Join our Discord for real-time discussions and community support on the #nuclei channel.

View File

@ -0,0 +1,45 @@
# Issue Template References
## Overview
This folder contains the preserved issue templates that are **not** directly accessible to users. These templates serve as references for maintainers when converting discussions to issues.
## New Workflow
### For Users:
1. **All reports start in Discussions** - Users cannot create issues directly
2. Bug reports go to [Q&A Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/q-a)
3. Feature requests go to [Ideas Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/ideas)
4. This helps filter out duplicate questions and invalid reports, and ensures proper triage
### For Maintainers:
1. **Review discussions** in both Q&A and Ideas categories
2. **Validate the reports** - ensure they're actual bugs/valid feature requests
3. **Use reference templates** when converting discussions to issues:
- Copy content from `bug-report-reference.yml` or `feature-request-reference.yml`
- Create a new issue manually with the appropriate template structure
- Link back to the original discussion
- Close the discussion or mark it as resolved
## Benefits
- **Better triage**: Avoid cluttering issues with questions and invalid reports
- **Community involvement**: Discussions allow for community input before creating issues
- **Quality control**: Maintainers can ensure issues follow proper format and contain necessary information
- **Reduced noise**: Only validated, actionable items become issues
## Reference Templates
- `bug-report-reference.yml` - Use when converting bug reports from discussions to issues
- `feature-request-reference.yml` - Use when converting feature requests from discussions to issues
## Converting a Discussion to Issue
1. Identify a valid discussion that needs to become an issue
2. Go to the main repository's Issues tab
3. Click "New Issue"
4. Manually create the issue using the reference template structure
5. Include all relevant information from the discussion
6. Add a comment linking back to the original discussion
7. Apply appropriate labels
8. Close or mark the discussion as resolved with a link to the created issue

View File

@ -2,6 +2,7 @@ addReviewers: true
reviewers:
- dogancanbakir
- dwisiswant0
- mzack9999
numberOfReviewers: 1
skipKeywords:

27
.github/stale.yml vendored
View File

@ -1,27 +0,0 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 7
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
# exemptLabels:
# - pinned
# - security
# Only issues or pull requests with all of these labels are checked if stale.
onlyLabels:
- "Status: Abandoned"
- "Type: Question"
# Label to use when marking as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

View File

@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
if: github.actor == 'dependabot[bot]'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
token: ${{ secrets.DEPENDABOT_PAT }}

View File

@ -13,7 +13,7 @@ jobs:
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go/compat-checks@v1
with:
release-test: true

View File

@ -11,7 +11,7 @@ jobs:
if: "${{ !endsWith(github.actor, '[bot]') }}"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/setup/git@v1
- run: make syntax-docs

View File

@ -28,7 +28,7 @@ jobs:
LIST_FILE: "/tmp/targets-${{ matrix.targets }}.txt"
PROFILE_MEM: "/tmp/nuclei-profile-${{ matrix.targets }}-targets"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/git@v1
- uses: projectdiscovery/actions/setup/go@v1
- name: Generate list

View File

@ -16,7 +16,7 @@ jobs:
env:
OUTPUT: "/tmp/results.sarif"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- run: go install golang.org/x/vuln/cmd/govulncheck@latest
- run: govulncheck -scan package -format sarif ./... > $OUTPUT

View File

@ -11,7 +11,7 @@ jobs:
env:
BENCH_OUT: "/tmp/bench.out"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- run: make build-test
- run: ./bin/nuclei.test -test.run - -test.bench=. -test.benchmem ./cmd/nuclei/ | tee $BENCH_OUT

View File

@ -16,7 +16,7 @@ jobs:
LIST_FILE: "/tmp/targets-${{ matrix.count }}.txt"
PROFILE_MEM: "/tmp/nuclei-perf-test-${{ matrix.count }}"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- run: make verify
- name: Generate list

View File

@ -10,7 +10,7 @@ jobs:
release:
runs-on: ubuntu-latest-16-cores
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0
- uses: projectdiscovery/actions/setup/go@v1

41
.github/workflows/stale.yaml vendored Normal file
View File

@ -0,0 +1,41 @@
name: 💤 Stale
on:
schedule:
- cron: '0 0 * * 0' # Weekly
jobs:
stale:
runs-on: ubuntu-latest
permissions:
actions: write
contents: write # only for delete-branch option
issues: write
pull-requests: write
steps:
- uses: actions/stale@v10
with:
days-before-stale: 90
days-before-close: 7
stale-issue-label: "Status: Stale"
stale-pr-label: "Status: Stale"
stale-issue-message: >
This issue has been automatically marked as stale because it has not
had recent activity. It will be closed in 7 days if no further
activity occurs. Thank you for your contributions!
stale-pr-message: >
This pull request has been automatically marked as stale due to
inactivity. It will be closed in 7 days if no further activity
occurs. Please update if you wish to keep it open.
close-issue-message: >
This issue has been automatically closed due to inactivity. If you
think this is a mistake or would like to continue the discussion,
please comment or feel free to reopen it.
close-pr-message: >
This pull request has been automatically closed due to inactivity.
If you think this is a mistake or would like to continue working on
it, please comment or feel free to reopen it.
close-issue-label: "Status: Abandoned"
close-pr-label: "Status: Abandoned"
exempt-issue-labels: "Status: Abandoned"
exempt-pr-labels: "Status: Abandoned"

View File

@ -22,9 +22,9 @@ jobs:
if: "${{ !endsWith(github.actor, '[bot]') }}"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/golangci-lint@v1
- uses: projectdiscovery/actions/golangci-lint/v2@v1
tests:
name: "Tests"
@ -35,7 +35,7 @@ jobs:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: "${{ matrix.os }}"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- run: make vet
- run: make build
@ -52,16 +52,18 @@ jobs:
needs: ["tests"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- name: "Simple"
run: go run .
working-directory: examples/simple/
# - run: go run . # Temporarily disabled: very flaky in GitHub Actions
# working-directory: examples/advanced/
- name: "with Speed Control"
run: go run .
working-directory: examples/with_speed_control/
# TODO: FIX with ExecutionID (ref: https://github.com/projectdiscovery/nuclei/pull/6296)
# - name: "with Speed Control"
# run: go run .
# working-directory: examples/with_speed_control/
integration:
name: "Integration tests"
@ -72,7 +74,7 @@ jobs:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/setup/python@v1
- run: bash run.sh "${{ matrix.os }}"
@ -91,10 +93,10 @@ jobs:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/setup/python@v1
- run: bash run.sh "${{ matrix.os }}"
- run: bash run.sh
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
working-directory: cmd/functional-test/
@ -104,7 +106,7 @@ jobs:
needs: ["tests"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- run: make template-validate
@ -117,7 +119,7 @@ jobs:
contents: read
security-events: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: github/codeql-action/init@v3
with:
languages: 'go'
@ -129,7 +131,7 @@ jobs:
needs: ["tests"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/goreleaser@v1
@ -141,7 +143,7 @@ jobs:
TARGET_URL: "http://scanme.sh/a/?b=c"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- run: make build
- name: "Setup environment (push)"
if: ${{ github.event_name == 'push' }}

2
.gitignore vendored
View File

@ -28,6 +28,8 @@
/scrapefunc
/scrapefuncs
/tsgen
/integration_tests/integration-test
/integration_tests/nuclei
# Templates
/*.yaml

View File

@ -38,9 +38,9 @@ builds:
# goarch: [amd64]
archives:
- format: zip
- formats: [zip]
id: nuclei
builds: [nuclei-cli]
ids: [nuclei-cli]
name_template: '{{ .ProjectName }}_{{ .Version }}_{{ if eq .Os "darwin" }}macOS{{ else }}{{ .Os }}{{ end }}_{{ .Arch }}'
checksum:

83
CLAUDE.md Normal file
View File

@ -0,0 +1,83 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
Nuclei is a modern, high-performance vulnerability scanner built in Go that leverages YAML-based templates for customizable vulnerability detection. It supports multiple protocols (HTTP, DNS, TCP, SSL, WebSocket, WHOIS, JavaScript, Code) and is designed for zero false positives through real-world condition simulation.
## Development Commands
### Building and Testing
- `make build` - Build the main nuclei binary to ./bin/nuclei
- `make test` - Run unit tests with race detection
- `make integration` - Run integration tests (builds and runs test suite)
- `make functional` - Run functional tests
- `make vet` - Run go vet for code analysis
- `make tidy` - Clean up go modules
### Validation and Linting
- `make template-validate` - Validate nuclei templates using the built binary
- `go fmt ./...` - Format Go code
- `go vet ./...` - Static analysis
### Development Tools
- `make devtools-all` - Build all development tools (bindgen, tsgen, scrapefuncs)
- `make jsupdate-all` - Update JavaScript bindings and TypeScript definitions
- `make docs` - Generate documentation
- `make memogen` - Generate memoization code for JavaScript libraries
### Testing Specific Components
- Run single test: `go test -v ./pkg/path/to/package -run TestName`
- Integration tests are in `integration_tests/` and can be run via `make integration`
## Architecture Overview
### Core Components
- **cmd/nuclei** - Main CLI entry point with flag parsing and configuration
- **internal/runner** - Core runner that orchestrates the entire scanning process
- **pkg/core** - Execution engine with work pools and template clustering
- **pkg/templates** - Template parsing, compilation, and management
- **pkg/protocols** - Protocol implementations (HTTP, DNS, Network, etc.)
- **pkg/operators** - Matching and extraction logic (matchers/extractors)
- **pkg/catalog** - Template discovery and loading from disk/remote sources
### Protocol Architecture
Each protocol (HTTP, DNS, Network, etc.) implements (see the sketch after this list):
- Request interface with Compile(), ExecuteWithResults(), Match(), Extract() methods
- Operators embedding for matching/extraction functionality
- Protocol-specific request building and execution logic
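The request contract above can be pictured as a small Go interface. This is an illustrative sketch only: the real definition lives in pkg/protocols and carries richer parameters (execution context, dynamic values, event wrappers), so every type name below is a placeholder rather than the actual API.
```go
package protocolsketch

// Placeholder types standing in for the real nuclei structures
// (pkg/protocols executor options, pkg/operators matchers/extractors,
// pkg/output result events). Names here are illustrative assumptions.
type (
	ExecutorOptions struct{}
	Matcher         struct{}
	Extractor       struct{}
	ResultEvent     struct{}
)

// Request sketches the contract each protocol implementation satisfies.
type Request interface {
	// Compile prepares the request and its operators for execution.
	Compile(options *ExecutorOptions) error
	// ExecuteWithResults runs the compiled request against one input
	// and streams result events to the callback.
	ExecuteWithResults(input string, callback func(*ResultEvent)) error
	// Match and Extract apply the template operators to response data.
	Match(data map[string]interface{}, matcher *Matcher) (bool, []string)
	Extract(data map[string]interface{}, extractor *Extractor) map[string]struct{}
}
```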
### Template System
- Templates are YAML files defining vulnerability detection logic
- Compiled into executable requests with operators (matchers/extractors)
- Support for workflows (multi-step template execution)
- Template clustering optimizes identical requests across multiple templates
### Key Execution Flow
1. Template loading and compilation via pkg/catalog/loader
2. Input provider setup for targets
3. Engine creation with work pools for concurrency
4. Template execution with result collection via operators
5. Output writing and reporting integration (see the SDK sketch below)
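For SDK consumers, the lib/ package drives this same flow end to end. A minimal sketch, assuming the surface used by examples/simple (NewNucleiEngine, TemplateFilters, LoadTargets, ExecuteWithCallback); option and field names are taken from those examples and may drift between releases.
```go
package main

import (
	"fmt"
	"log"

	nuclei "github.com/projectdiscovery/nuclei/v3/lib"
	"github.com/projectdiscovery/nuclei/v3/pkg/output"
)

func main() {
	// Steps 1 and 3: template loading/compilation and engine creation.
	ne, err := nuclei.NewNucleiEngine(
		nuclei.WithTemplateFilters(nuclei.TemplateFilters{Tags: []string{"tech"}}),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer ne.Close()

	// Step 2: input provider setup for targets.
	ne.LoadTargets([]string{"http://scanme.sh"}, false)

	// Steps 4-5: execute templates and collect results via a callback.
	if err := ne.ExecuteWithCallback(func(event *output.ResultEvent) {
		fmt.Println(event.TemplateID, event.Matched)
	}); err != nil {
		log.Fatal(err)
	}
}
```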
### JavaScript Integration
- Custom JavaScript runtime for code protocol templates
- Auto-generated bindings in pkg/js/generated/
- Library implementations in pkg/js/libs/
- Development tools for binding generation in pkg/js/devtools/
## Template Development
- Templates located in separate nuclei-templates repository
- YAML format with info, requests, and operators sections
- Support for multiple protocol types in a single template
- Built-in DSL functions for dynamic content generation
- Template validation available via `make template-validate`
## Key Directories
- **lib/** - SDK for embedding nuclei as a library
- **examples/** - Usage examples for different scenarios
- **integration_tests/** - Integration test suite with protocol-specific tests
- **pkg/fuzz/** - Fuzzing engine and DAST capabilities
- **pkg/input/** - Input processing for various formats (Burp, OpenAPI, etc.)
- **pkg/reporting/** - Result export and issue tracking integrations

View File

@ -1,5 +1,5 @@
# Build
FROM golang:1.22-alpine AS builder
FROM golang:1.24-alpine AS builder
RUN apk add build-base
WORKDIR /app

View File

@ -15,8 +15,8 @@ ifneq ($(shell go env GOOS),darwin)
endif
.PHONY: all build build-stats clean devtools-all devtools-bindgen devtools-scrapefuncs
.PHONY: devtools-tsgen docs docgen dsl-docs functional fuzzplayground go-build syntax-docs
.PHONY: integration jsupdate-all jsupdate-bindgen jsupdate-tsgen memogen scan-charts test
.PHONY: devtools-tsgen docs docgen dsl-docs functional fuzzplayground go-build lint lint-strict syntax-docs
.PHONY: integration jsupdate-all jsupdate-bindgen jsupdate-tsgen memogen scan-charts test test-with-lint
.PHONY: tidy ts verify download vet template-validate
all: build
@ -146,5 +146,18 @@ dsl-docs:
template-validate: build
template-validate:
./bin/nuclei -ut
./bin/nuclei -validate -et http/technologies
./bin/nuclei -validate -w workflows -et http/technologies
./bin/nuclei -validate \
-et .github/ \
-et helpers/payloads/ \
-et http/technologies \
-t dns \
-t ssl \
-t network \
-t http/exposures \
-ept code
./bin/nuclei -validate \
-w workflows \
-et .github/ \
-et helpers/payloads/ \
-et http/technologies \
-ept code

View File

@ -7,7 +7,7 @@
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">`Korean`</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">`Indonesia`</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">`Spanish`</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_JP.md">`日本語`</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_JP.md">`日本語`</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">`Portuguese`</a>
</div>
@ -111,7 +111,7 @@ Browse the full Nuclei [**`documentation here`**](https://docs.projectdiscovery.
### Installation
`nuclei` requires **go1.22** to install successfully. Run the following command to get the repo:
`nuclei` requires **go1.23** to install successfully. Run the following command to get the repo:
```sh
go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest
@ -356,6 +356,7 @@ CLOUD:
AUTHENTICATION:
-sf, -secret-file string[] path to config file containing secrets for nuclei authenticated scan
-ps, -prefetch-secrets prefetch secrets from the secrets file
# NOTE: Headers in secrets files preserve exact casing (useful for case-sensitive APIs)
EXAMPLES:

View File

@ -33,7 +33,7 @@
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中文</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">Korean</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">Indonesia</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">Portuguese</a>
</p>

View File

@ -31,7 +31,7 @@
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中文</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">Korean</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">Indonesia</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">Portuguese</a>
</p>

View File

@ -33,7 +33,7 @@
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中文</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">Korean</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">Indonesia</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">Portuguese</a>
</p>

View File

@ -30,7 +30,7 @@
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中国語</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">韓国語</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">インドネシア語</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">スペイン語</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">スペイン語</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">ポルトガル語</a>
</p>

View File

@ -31,7 +31,7 @@
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README.md">English</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中文</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">한국어</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">스페인어</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">스페인어</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">포르투갈어</a>
</p>
@ -341,7 +341,7 @@ Nuclei를 사용하면 자체 검사 모음으로 테스트 접근 방식을 사
- 몇 분 안에 수천 개의 호스트를 처리할 수 있음.
- 간단한 YAML DSL로 사용자 지정 테스트 접근 방식을 쉽게 자동화할 수 있음.
버그 바운티 워크플로에 맞는 다른 오픈 소스 프로젝트를 확인할 수 있습니다.: [github.com/projectdiscovery](http://github.com/projectdiscovery), 또한, 우리는 매일 [Chaos에서 DNS 데이터를 갱신해 호스팅합니다.](http://chaos.projectdiscovery.io).
버그 바운티 워크플로에 맞는 다른 오픈 소스 프로젝트를 확인할 수 있습니다.: [github.com/projectdiscovery](http://github.com/projectdiscovery), 또한, 우리는 매일 [Chaos에서 DNS 데이터를 갱신해 호스팅합니다](http://chaos.projectdiscovery.io).
</td>
</tr>

View File

@ -31,7 +31,7 @@
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_CN.md">中文</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_KR.md">Korean</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ID.md">Indonesia</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_ES.md">Spanish</a>
<a href="https://github.com/projectdiscovery/nuclei/blob/main/README_PT-BR.md">Portuguese</a>
</p>

View File

@ -19,7 +19,9 @@ func writeToFile(filename string, data []byte) {
if err != nil {
log.Fatalf("Could not create file %s: %s\n", filename, err)
}
defer file.Close()
defer func() {
_ = file.Close()
}()
_, err = file.Write(data)
if err != nil {

View File

@ -27,7 +27,7 @@ var (
func main() {
flag.Parse()
debug := os.Getenv("DEBUG") == "true"
debug := os.Getenv("DEBUG") == "true" || os.Getenv("RUNNER_DEBUG") == "1"
if err, errored := runFunctionalTests(debug); err != nil {
log.Fatalf("Could not run functional tests: %s\n", err)
@ -41,7 +41,9 @@ func runFunctionalTests(debug bool) (error, bool) {
if err != nil {
return errors.Wrap(err, "could not open test cases"), true
}
defer file.Close()
defer func() {
_ = file.Close()
}()
errored, failedTestCases := runTestCases(file, debug)

View File

@ -1,27 +1,43 @@
#!/bin/bash
# reading os type from arguments
CURRENT_OS=$1
if [ "${RUNNER_OS}" == "Windows" ]; then
EXT=".exe"
elif [ "${RUNNER_OS}" == "macOS" ]; then
if [ "${CI}" == "true" ]; then
sudo sysctl -w kern.maxfiles{,perproc}=524288
sudo launchctl limit maxfiles 65536 524288
fi
if [ "${CURRENT_OS}" == "windows-latest" ];then
extension=.exe
ORIGINAL_ULIMIT="$(ulimit -n)"
ulimit -n 65536 || true
fi
mkdir -p .nuclei-config/nuclei/
touch .nuclei-config/nuclei/.nuclei-ignore
echo "::group::Building functional-test binary"
go build -o functional-test$extension
go build -o "functional-test${EXT}"
echo "::endgroup::"
echo "::group::Building Nuclei binary from current branch"
go build -o nuclei_dev$extension ../nuclei
echo "::endgroup::"
echo "::group::Installing nuclei templates"
./nuclei_dev$extension -update-templates
go build -o "nuclei-dev${EXT}" ../nuclei
echo "::endgroup::"
echo "::group::Building latest release of nuclei"
go build -o nuclei$extension -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei
go build -o "nuclei${EXT}" -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei
echo "::endgroup::"
echo 'Starting Nuclei functional test'
./functional-test$extension -main ./nuclei$extension -dev ./nuclei_dev$extension -testcases testcases.txt
echo "::group::Installing nuclei templates"
eval "./nuclei-dev${EXT} -update-templates"
echo "::endgroup::"
echo "::group::Validating templates"
eval "./nuclei-dev${EXT} -validate"
echo "::endgroup::"
echo "Starting Nuclei functional test"
eval "./functional-test${EXT} -main ./nuclei${EXT} -dev ./nuclei-dev${EXT} -testcases testcases.txt"
if [ "${RUNNER_OS}" == "macOS" ]; then
ulimit -n "${ORIGINAL_ULIMIT}" || true
fi

View File

@ -23,7 +23,9 @@ func main() {
if err != nil {
log.Fatalf("Could not create file: %s\n", err)
}
defer file.Close()
defer func() {
_ = file.Close()
}()
err = filepath.WalkDir(templatesDirectory, func(path string, d fs.DirEntry, err error) error {
if err != nil || d.IsDir() {

View File

@ -18,7 +18,9 @@ func (h *customConfigDirTest) Execute(filePath string) error {
if err != nil {
return err
}
defer os.RemoveAll(customTempDirectory)
defer func() {
_ = os.RemoveAll(customTempDirectory)
}()
results, err := testutils.RunNucleiBareArgsAndGetResults(debug, []string{"NUCLEI_CONFIG_DIR=" + customTempDirectory}, "-t", filePath, "-u", "8x8exch02.8x8.com")
if err != nil {
return err

View File

@ -21,7 +21,7 @@ type dslVersionWarning struct{}
func (d *dslVersionWarning) Execute(templatePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "DSL version parsing warning test")
_, _ = fmt.Fprintf(w, "DSL version parsing warning test")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -37,7 +37,7 @@ type dslShowVersionWarning struct{}
func (d *dslShowVersionWarning) Execute(templatePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "DSL version parsing warning test")
_, _ = fmt.Fprintf(w, "DSL version parsing warning test")
})
ts := httptest.NewServer(router)
defer ts.Close()

View File

@ -0,0 +1,104 @@
package main
import (
"context"
"fmt"
"log"
"time"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/mongo"
"github.com/testcontainers/testcontainers-go"
mongocontainer "github.com/testcontainers/testcontainers-go/modules/mongodb"
osutil "github.com/projectdiscovery/utils/os"
mongoclient "go.mongodb.org/mongo-driver/mongo"
mongooptions "go.mongodb.org/mongo-driver/mongo/options"
)
const (
dbName = "test"
dbImage = "mongo:8"
)
var exportersTestCases = []TestCaseInfo{
{Path: "exporters/mongo", TestCase: &mongoExporter{}, DisableOn: func() bool {
return osutil.IsWindows() || osutil.IsOSX()
}},
}
type mongoExporter struct{}
func (m *mongoExporter) Execute(filepath string) error {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
defer cancel()
// Start a MongoDB container
mongodbContainer, err := mongocontainer.Run(ctx, dbImage)
defer func() {
if err := testcontainers.TerminateContainer(mongodbContainer); err != nil {
log.Printf("failed to terminate container: %s", err)
}
}()
if err != nil {
return fmt.Errorf("failed to start container: %w", err)
}
connString, err := mongodbContainer.ConnectionString(ctx)
if err != nil {
return fmt.Errorf("failed to get connection string for MongoDB container: %s", err)
}
connString = connString + dbName
// Create a MongoDB exporter and write a test result to the database
opts := mongo.Options{
ConnectionString: connString,
CollectionName: "test",
BatchSize: 1, // Ensure we write the result immediately
}
exporter, err := mongo.New(&opts)
if err != nil {
return fmt.Errorf("failed to create MongoDB exporter: %s", err)
}
defer func() {
if err := exporter.Close(); err != nil {
fmt.Printf("failed to close exporter: %s\n", err)
}
}()
res := &output.ResultEvent{
Request: "test request",
Response: "test response",
}
err = exporter.Export(res)
if err != nil {
return fmt.Errorf("failed to export result event to MongoDB: %s", err)
}
// Verify that the result was written to the database
clientOptions := mongooptions.Client().ApplyURI(connString)
client, err := mongoclient.Connect(ctx, clientOptions)
if err != nil {
return fmt.Errorf("error creating MongoDB client: %s", err)
}
defer func() {
if err := client.Disconnect(ctx); err != nil {
fmt.Printf("failed to disconnect from MongoDB: %s\n", err)
}
}()
collection := client.Database(dbName).Collection(opts.CollectionName)
var actualRes output.ResultEvent
err = collection.FindOne(ctx, map[string]interface{}{"request": res.Request}).Decode(&actualRes)
if err != nil {
return fmt.Errorf("failed to find document in MongoDB: %s", err)
}
if actualRes.Request != res.Request || actualRes.Response != res.Response {
return fmt.Errorf("exported result does not match expected result: got %v, want %v", actualRes, res)
}
return nil
}

View File

@ -49,7 +49,7 @@ func (t *iterateValuesFlow) Execute(filePath string) error {
}
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.WriteHeader(http.StatusOK)
_, _ = w.Write([]byte(fmt.Sprint(testemails)))
_, _ = fmt.Fprint(w, testemails)
})
router.GET("/user/"+getBase64(testemails[0]), func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.WriteHeader(http.StatusOK)

View File

@ -55,7 +55,7 @@ func (h *httpFuzzQuery) Execute(filePath string) error {
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.Header().Set("Content-Type", "text/html")
value := r.URL.Query().Get("id")
fmt.Fprintf(w, "This is test matcher text: %v", value)
_, _ = fmt.Fprintf(w, "This is test matcher text: %v", value)
})
ts := httptest.NewTLSServer(router)
defer ts.Close()
@ -75,7 +75,7 @@ func (h *fuzzModeOverride) Execute(filePath string) error {
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.Header().Set("Content-Type", "text/html")
value := r.URL.Query().Get("id")
fmt.Fprintf(w, "This is test matcher text: %v", value)
_, _ = fmt.Fprintf(w, "This is test matcher text: %v", value)
})
ts := httptest.NewTLSServer(router)
defer ts.Close()
@ -120,7 +120,7 @@ func (h *fuzzTypeOverride) Execute(filePath string) error {
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.Header().Set("Content-Type", "text/html")
value := r.URL.Query().Get("id")
fmt.Fprintf(w, "This is test matcher text: %v", value)
_, _ = fmt.Fprintf(w, "This is test matcher text: %v", value)
})
ts := httptest.NewTLSServer(router)
defer ts.Close()
@ -164,7 +164,7 @@ func (h *HeadlessFuzzingQuery) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
resp := fmt.Sprintf("<html><body>%s</body></html>", r.URL.Query().Get("url"))
fmt.Fprint(w, resp)
_, _ = fmt.Fprint(w, resp)
})
ts := httptest.NewTLSServer(router)
defer ts.Close()
@ -190,7 +190,7 @@ func (h *fuzzMultipleMode) Execute(filePath string) error {
}
w.Header().Set("Content-Type", "text/html")
resp := fmt.Sprintf("<html><body><h1>This is multi-mode fuzzing test: %v <h1></body></html>", xClientId)
fmt.Fprint(w, resp)
_, _ = fmt.Fprint(w, resp)
})
ts := httptest.NewTLSServer(router)
defer ts.Close()

View File

@ -82,14 +82,15 @@ func (h *clientCertificate) Execute(filePath string) error {
return
}
fmt.Fprintf(w, "Hello, %s!\n", r.TLS.PeerCertificates[0].Subject)
_, _ = fmt.Fprintf(w, "Hello, %s!\n", r.TLS.PeerCertificates[0].Subject)
})
_ = os.WriteFile("server.crt", []byte(serverCRT), permissionutil.ConfigFilePermission)
_ = os.WriteFile("server.key", []byte(serverKey), permissionutil.ConfigFilePermission)
defer os.Remove("server.crt")
defer os.Remove("server.key")
defer func() {
_ = os.Remove("server.crt")
_ = os.Remove("server.key")
}()
serverCert, _ := tls.LoadX509KeyPair("server.crt", "server.key")
certPool := x509.NewCertPool()

View File

@ -178,7 +178,9 @@ func (h *headlessFileUpload) Execute(filePath string) error {
return
}
defer file.Close()
defer func() {
_ = file.Close()
}()
content, err := io.ReadAll(file)
if err != nil {
@ -235,7 +237,9 @@ func (h *headlessFileUploadNegative) Execute(filePath string) error {
return
}
defer file.Close()
defer func() {
_ = file.Close()
}()
content, err := io.ReadAll(file)
if err != nil {

View File

@ -19,7 +19,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
logutil "github.com/projectdiscovery/utils/log"
sliceutil "github.com/projectdiscovery/utils/slice"
stringsutil "github.com/projectdiscovery/utils/strings"
@ -108,7 +108,7 @@ func (h *httpMatcherExtractorDynamicExtractor) Execute(filePath string) error {
<a href="/domains">Domains</a>
</body>
</html>`
fmt.Fprint(w, html)
_, _ = fmt.Fprint(w, html)
})
router.GET("/domains", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
html := `<!DOCTYPE html>
@ -121,7 +121,7 @@ func (h *httpMatcherExtractorDynamicExtractor) Execute(filePath string) error {
</body>
</html>
`
fmt.Fprint(w, html)
_, _ = fmt.Fprint(w, html)
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -143,7 +143,7 @@ func (h *httpInteractshRequest) Execute(filePath string) error {
value := r.Header.Get("url")
if value != "" {
if resp, _ := retryablehttp.DefaultClient().Get(value); resp != nil {
resp.Body.Close()
_ = resp.Body.Close()
}
}
})
@ -196,7 +196,7 @@ func (d *httpDefaultMatcherCondition) Execute(filePath string) error {
return err
}
if routerErr != nil {
return errorutil.NewWithErr(routerErr).Msgf("failed to send http request to interactsh server")
return errkit.Wrap(routerErr, "failed to send http request to interactsh server")
}
if err := expectResultsCount(results, 1); err != nil {
return err
@ -213,7 +213,7 @@ func (h *httpInteractshStopAtFirstMatchRequest) Execute(filePath string) error {
value := r.Header.Get("url")
if value != "" {
if resp, _ := retryablehttp.DefaultClient().Get(value); resp != nil {
resp.Body.Close()
_ = resp.Body.Close()
}
}
})
@ -235,7 +235,7 @@ func (h *httpGetHeaders) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text")
_, _ = fmt.Fprintf(w, "This is test headers matcher text")
}
})
ts := httptest.NewServer(router)
@ -256,7 +256,7 @@ func (h *httpGetQueryString) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
if strings.EqualFold(r.URL.Query().Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test querystring matcher text")
_, _ = fmt.Fprintf(w, "This is test querystring matcher text")
}
})
ts := httptest.NewServer(router)
@ -279,7 +279,7 @@ func (h *httpGetRedirects) Execute(filePath string) error {
http.Redirect(w, r, "/redirected", http.StatusFound)
})
router.GET("/redirected", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test redirects matcher text")
_, _ = fmt.Fprintf(w, "This is test redirects matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -329,7 +329,7 @@ func (h *httpDisableRedirects) Execute(filePath string) error {
http.Redirect(w, r, "/redirected", http.StatusMovedPermanently)
})
router.GET("/redirected", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test redirects matcher text")
_, _ = fmt.Fprintf(w, "This is test redirects matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -348,7 +348,7 @@ type httpGet struct{}
func (h *httpGet) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -367,7 +367,7 @@ type httpDSLVariable struct{}
func (h *httpDSLVariable) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -450,7 +450,7 @@ func (h *httpPostBody) Execute(filePath string) error {
return
}
if strings.EqualFold(r.Form.Get("username"), "test") && strings.EqualFold(r.Form.Get("password"), "nuclei") {
fmt.Fprintf(w, "This is test post-body matcher text")
_, _ = fmt.Fprintf(w, "This is test post-body matcher text")
}
})
ts := httptest.NewServer(router)
@ -485,7 +485,7 @@ func (h *httpPostJSONBody) Execute(filePath string) error {
return
}
if strings.EqualFold(obj.Username, "test") && strings.EqualFold(obj.Password, "nuclei") {
fmt.Fprintf(w, "This is test post-json-body matcher text")
_, _ = fmt.Fprintf(w, "This is test post-json-body matcher text")
}
})
ts := httptest.NewServer(router)
@ -525,7 +525,7 @@ func (h *httpPostMultipartBody) Execute(filePath string) error {
return
}
if strings.EqualFold(password[0], "nuclei") && strings.EqualFold(file[0].Filename, "username") {
fmt.Fprintf(w, "This is test post-multipart matcher text")
_, _ = fmt.Fprintf(w, "This is test post-multipart matcher text")
}
})
ts := httptest.NewServer(router)
@ -555,12 +555,12 @@ func (h *httpRawDynamicExtractor) Execute(filePath string) error {
return
}
if strings.EqualFold(r.Form.Get("testing"), "parameter") {
fmt.Fprintf(w, "Token: 'nuclei'")
_, _ = fmt.Fprintf(w, "Token: 'nuclei'")
}
})
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
if strings.EqualFold(r.URL.Query().Get("username"), "nuclei") {
fmt.Fprintf(w, "Test is test-dynamic-extractor-raw matcher text")
_, _ = fmt.Fprintf(w, "Test is test-dynamic-extractor-raw matcher text")
}
})
ts := httptest.NewServer(router)
@ -584,7 +584,7 @@ func (h *httpRawGetQuery) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
if strings.EqualFold(r.URL.Query().Get("test"), "nuclei") {
fmt.Fprintf(w, "Test is test raw-get-query-matcher text")
_, _ = fmt.Fprintf(w, "Test is test raw-get-query-matcher text")
}
})
ts := httptest.NewServer(router)
@ -604,7 +604,7 @@ type httpRawGet struct{}
func (h *httpRawGet) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "Test is test raw-get-matcher text")
_, _ = fmt.Fprintf(w, "Test is test raw-get-matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -628,12 +628,12 @@ func (h *httpRawWithParams) Execute(filePath string) error {
// we intentionally use params["test"] instead of params.Get("test") to test the case where
// there are multiple parameters with the same name
if !reflect.DeepEqual(params["key1"], []string{"value1"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"value1"}, params["key1"])
errx = errkit.Append(errx, errkit.New("key1 not found in params", "expected", []string{"value1"}, "got", params["key1"]))
}
if !reflect.DeepEqual(params["key2"], []string{"value2"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"value2"}, params["key2"])
errx = errkit.Append(errx, errkit.New("key2 not found in params", "expected", []string{"value2"}, "got", params["key2"]))
}
fmt.Fprintf(w, "Test is test raw-params-matcher text")
_, _ = fmt.Fprintf(w, "Test is test raw-params-matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -685,11 +685,11 @@ func (h *httpRawPayload) Execute(filePath string) error {
routerErr = err
return
}
if !(strings.EqualFold(r.Header.Get("another_header"), "bnVjbGVp") || strings.EqualFold(r.Header.Get("another_header"), "Z3Vlc3Q=")) {
if !strings.EqualFold(r.Header.Get("another_header"), "bnVjbGVp") && !strings.EqualFold(r.Header.Get("another_header"), "Z3Vlc3Q=") {
return
}
if strings.EqualFold(r.Form.Get("username"), "test") && (strings.EqualFold(r.Form.Get("password"), "nuclei") || strings.EqualFold(r.Form.Get("password"), "guest")) {
fmt.Fprintf(w, "Test is raw-payload matcher text")
_, _ = fmt.Fprintf(w, "Test is raw-payload matcher text")
}
})
ts := httptest.NewServer(router)
@ -719,7 +719,7 @@ func (h *httpRawPostBody) Execute(filePath string) error {
return
}
if strings.EqualFold(r.Form.Get("username"), "test") && strings.EqualFold(r.Form.Get("password"), "nuclei") {
fmt.Fprintf(w, "Test is test raw-post-body-matcher text")
_, _ = fmt.Fprintf(w, "Test is test raw-post-body-matcher text")
}
})
ts := httptest.NewServer(router)
@ -829,10 +829,7 @@ func (h *httpPaths) Execute(filepath string) error {
}
if len(expected) > len(actual) {
actualValuesIndex := len(actual) - 1
if actualValuesIndex < 0 {
actualValuesIndex = 0
}
actualValuesIndex := max(len(actual)-1, 0)
return fmt.Errorf("missing values : %v", expected[actualValuesIndex:])
} else if len(expected) < len(actual) {
return fmt.Errorf("unexpected values : %v", actual[len(expected)-1:])
@ -872,7 +869,7 @@ func (h *httpRawCookieReuse) Execute(filePath string) error {
}
if strings.EqualFold(cookie.Value, "test") {
fmt.Fprintf(w, "Test is test-cookie-reuse matcher text")
_, _ = fmt.Fprintf(w, "Test is test-cookie-reuse matcher text")
}
})
ts := httptest.NewServer(router)
@ -950,7 +947,9 @@ func (h *httpRequestSelfContained) Execute(filePath string) error {
go func() {
_ = server.ListenAndServe()
}()
defer server.Close()
defer func() {
_ = server.Close()
}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc")
if err != nil {
@ -972,10 +971,10 @@ func (h *httpRequestSelfContainedWithParams) Execute(filePath string) error {
// we intentionally use params["test"] instead of params.Get("test") to test the case where
// there are multiple parameters with the same name
if !reflect.DeepEqual(params["something"], []string{"here"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"here"}, params["something"])
errx = errkit.Append(errx, errkit.New("something not found in params", "expected", []string{"here"}, "got", params["something"]))
}
if !reflect.DeepEqual(params["key"], []string{"value"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"value"}, params["key"])
errx = errkit.Append(errx, errkit.New("key not found in params", "expected", []string{"value"}, "got", params["key"]))
}
_, _ = w.Write([]byte("This is self-contained response"))
})
@ -986,7 +985,9 @@ func (h *httpRequestSelfContainedWithParams) Execute(filePath string) error {
go func() {
_ = server.ListenAndServe()
}()
defer server.Close()
defer func() {
_ = server.Close()
}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc")
if err != nil {
@ -1019,17 +1020,21 @@ func (h *httpRequestSelfContainedFileInput) Execute(filePath string) error {
go func() {
_ = server.ListenAndServe()
}()
defer server.Close()
defer func() {
_ = server.Close()
}()
// create temp file
FileLoc, err := os.CreateTemp("", "self-contained-payload-*.txt")
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to create temp file")
return errkit.Wrap(err, "failed to create temp file")
}
if _, err := FileLoc.Write([]byte("one\ntwo\n")); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to write payload to temp file")
return errkit.Wrap(err, "failed to write payload to temp file")
}
defer FileLoc.Close()
defer func() {
_ = FileLoc.Close()
}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-V", "test="+FileLoc.Name(), "-esc")
if err != nil {
@ -1041,7 +1046,7 @@ func (h *httpRequestSelfContainedFileInput) Execute(filePath string) error {
}
if !sliceutil.ElementsMatch(gotReqToEndpoints, []string{"/one", "/two", "/one", "/two"}) {
return errorutil.NewWithTag(filePath, "expected requests to be sent to `/one` and `/two` endpoints but were sent to `%v`", gotReqToEndpoints)
return errkit.New("expected requests to be sent to `/one` and `/two` endpoints but were sent to `%v`", gotReqToEndpoints, "filePath", filePath)
}
return nil
}
@ -1052,7 +1057,7 @@ type httpGetCaseInsensitive struct{}
func (h *httpGetCaseInsensitive) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "THIS IS TEST MATCHER TEXT")
_, _ = fmt.Fprintf(w, "THIS IS TEST MATCHER TEXT")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -1071,7 +1076,7 @@ type httpGetCaseInsensitiveCluster struct{}
func (h *httpGetCaseInsensitiveCluster) Execute(filesPath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -1154,7 +1159,7 @@ type httpStopAtFirstMatch struct{}
func (h *httpStopAtFirstMatch) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test")
_, _ = fmt.Fprintf(w, "This is test")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -1173,7 +1178,7 @@ type httpStopAtFirstMatchWithExtractors struct{}
func (h *httpStopAtFirstMatchWithExtractors) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test")
_, _ = fmt.Fprintf(w, "This is test")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -1192,7 +1197,7 @@ type httpVariables struct{}
func (h *httpVariables) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "%s\n%s\n%s", r.Header.Get("Test"), r.Header.Get("Another"), r.Header.Get("Email"))
_, _ = fmt.Fprintf(w, "%s\n%s\n%s", r.Header.Get("Test"), r.Header.Get("Another"), r.Header.Get("Email"))
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -1294,7 +1299,7 @@ func (h *httpRedirectMatchURL) Execute(filePath string) error {
_, _ = w.Write([]byte("This is test redirects matcher text"))
})
router.GET("/redirected", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test redirects matcher text")
_, _ = fmt.Fprintf(w, "This is test redirects matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -1342,7 +1347,7 @@ func (h *annotationTimeout) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
time.Sleep(4 * time.Second)
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
})
ts := httptest.NewTLSServer(router)
defer ts.Close()
@ -1362,7 +1367,7 @@ func (h *customAttackType) Execute(filePath string) error {
got := []string{}
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
got = append(got, r.URL.RawQuery)
fmt.Fprintf(w, "This is test custom payload")
_, _ = fmt.Fprintf(w, "This is test custom payload")
})
ts := httptest.NewTLSServer(router)
defer ts.Close()
@ -1410,7 +1415,7 @@ func (h *httpCLBodyWithoutHeader) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.Header()["Content-Length"] = []string{"-1"}
fmt.Fprintf(w, "this is a test")
_, _ = fmt.Fprintf(w, "this is a test")
})
ts := httptest.NewTLSServer(router)
defer ts.Close()
@ -1430,7 +1435,7 @@ func (h *httpCLBodyWithHeader) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.Header()["Content-Length"] = []string{"50000"}
fmt.Fprintf(w, "this is a test")
_, _ = fmt.Fprintf(w, "this is a test")
})
ts := httptest.NewTLSServer(router)
defer ts.Close()
@ -1449,7 +1454,7 @@ type ConstantWithCliVar struct{}
func (h *ConstantWithCliVar) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprint(w, r.URL.Query().Get("p"))
_, _ = fmt.Fprint(w, r.URL.Query().Get("p"))
})
ts := httptest.NewTLSServer(router)
defer ts.Close()
@ -1486,10 +1491,10 @@ type httpDisablePathAutomerge struct{}
func (h *httpDisablePathAutomerge) Execute(filePath string) error {
router := httprouter.New()
router.GET("/api/v1/test", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprint(w, r.URL.Query().Get("id"))
_, _ = fmt.Fprint(w, r.URL.Query().Get("id"))
})
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprint(w, "empty path in raw request")
_, _ = fmt.Fprint(w, "empty path in raw request")
})
ts := httptest.NewServer(router)
@ -1523,10 +1528,10 @@ func (h *httpPreprocessor) Execute(filePath string) error {
value := r.URL.RequestURI()
if re.MatchString(value) {
w.WriteHeader(http.StatusOK)
fmt.Fprint(w, "ok")
_, _ = fmt.Fprint(w, "ok")
} else {
w.WriteHeader(http.StatusBadRequest)
fmt.Fprint(w, "not ok")
_, _ = fmt.Fprint(w, "not ok")
}
})
ts := httptest.NewServer(router)
@ -1547,11 +1552,11 @@ func (h *httpMultiRequest) Execute(filePath string) error {
router := httprouter.New()
router.GET("/ping", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.WriteHeader(http.StatusOK)
fmt.Fprint(w, "ping")
_, _ = fmt.Fprint(w, "ping")
})
router.GET("/pong", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
w.WriteHeader(http.StatusOK)
fmt.Fprint(w, "pong")
_, _ = fmt.Fprint(w, "pong")
})
ts := httptest.NewServer(router)
defer ts.Close()

View File

@ -4,6 +4,7 @@ import (
"flag"
"fmt"
"os"
"regexp"
"runtime"
"strings"
@ -56,6 +57,7 @@ var (
"flow": flowTestcases,
"javascript": jsTestcases,
"matcher-status": matcherStatusTestcases,
"exporters": exportersTestCases,
}
// flakyTests are run with a retry count of 3
flakyTests = map[string]bool{
@ -89,7 +91,9 @@ func main() {
// start fuzz playground server
defer fuzzplayground.Cleanup()
server := fuzzplayground.GetPlaygroundServer()
defer server.Close()
defer func() {
_ = server.Close()
}()
go func() {
if err := server.Start("localhost:8082"); err != nil {
if !strings.Contains(err.Error(), "Server closed") {
@ -208,7 +212,7 @@ func execute(testCase testutils.TestCase, templatePath string) (string, error) {
}
func expectResultsCount(results []string, expectedNumbers ...int) error {
results = filterHeadlessLogs(results)
results = filterLines(results)
match := sliceutil.Contains(expectedNumbers, len(results))
if !match {
return fmt.Errorf("incorrect number of results: %d (actual) vs %v (expected) \nResults:\n\t%s\n", len(results), expectedNumbers, strings.Join(results, "\n\t")) // nolint:all
@ -222,6 +226,13 @@ func normalizeSplit(str string) []string {
})
}
// filterLines applies all filtering functions to the results
func filterLines(results []string) []string {
results = filterHeadlessLogs(results)
results = filterUnsignedTemplatesWarnings(results)
return results
}
// if chromium is not installed go-rod installs it in .cache directory
// this function filters out the logs from download and installation
func filterHeadlessLogs(results []string) []string {
@ -235,3 +246,16 @@ func filterHeadlessLogs(results []string) []string {
}
return filtered
}
// filterUnsignedTemplatesWarnings filters out warning messages about unsigned templates
func filterUnsignedTemplatesWarnings(results []string) []string {
filtered := []string{}
unsignedTemplatesRegex := regexp.MustCompile(`Loading \d+ unsigned templates for scan\. Use with caution\.`)
for _, result := range results {
if unsignedTemplatesRegex.MatchString(result) {
continue
}
filtered = append(filtered, result)
}
return filtered
}

View File

@ -15,11 +15,15 @@ var jsTestcases = []TestCaseInfo{
{Path: "protocols/javascript/ssh-server-fingerprint.yaml", TestCase: &javascriptSSHServerFingerprint{}, DisableOn: func() bool { return osutils.IsWindows() || osutils.IsOSX() }},
{Path: "protocols/javascript/net-multi-step.yaml", TestCase: &networkMultiStep{}},
{Path: "protocols/javascript/net-https.yaml", TestCase: &javascriptNetHttps{}},
{Path: "protocols/javascript/oracle-auth-test.yaml", TestCase: &javascriptOracleAuthTest{}, DisableOn: func() bool { return osutils.IsWindows() || osutils.IsOSX() }},
{Path: "protocols/javascript/vnc-pass-brute.yaml", TestCase: &javascriptVncPassBrute{}},
}
var (
redisResource *dockertest.Resource
sshResource *dockertest.Resource
oracleResource *dockertest.Resource
vncResource *dockertest.Resource
pool *dockertest.Pool
defaultRetry = 3
)
@ -98,6 +102,71 @@ func (j *javascriptSSHServerFingerprint) Execute(filePath string) error {
return multierr.Combine(errs...)
}
type javascriptOracleAuthTest struct{}
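// Execute executes a test case and returns an error if occurred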
func (j *javascriptOracleAuthTest) Execute(filePath string) error {
if oracleResource == nil || pool == nil {
// skip test as oracle is not running
return nil
}
tempPort := oracleResource.GetPort("1521/tcp")
finalURL := "localhost:" + tempPort
defer purge(oracleResource)
errs := []error{}
for i := 0; i < defaultRetry; i++ {
results := []string{}
var err error
_ = pool.Retry(func() error {
// let the oracle server start
time.Sleep(3 * time.Second)
results, err = testutils.RunNucleiTemplateAndGetResults(filePath, finalURL, debug)
return nil
})
if err != nil {
return err
}
if err := expectResultsCount(results, 1); err == nil {
return nil
} else {
errs = append(errs, err)
}
}
return multierr.Combine(errs...)
}
type javascriptVncPassBrute struct{}
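// Execute executes a test case and returns an error if occurred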
func (j *javascriptVncPassBrute) Execute(filePath string) error {
if vncResource == nil || pool == nil {
// skip test as vnc is not running
return nil
}
tempPort := vncResource.GetPort("5900/tcp")
finalURL := "localhost:" + tempPort
defer purge(vncResource)
errs := []error{}
for i := 0; i < defaultRetry; i++ {
results := []string{}
var err error
_ = pool.Retry(func() error {
// let the vnc server start
time.Sleep(3 * time.Second)
results, err = testutils.RunNucleiTemplateAndGetResults(filePath, finalURL, debug)
return nil
})
if err != nil {
return err
}
if err := expectResultsCount(results, 1); err == nil {
return nil
} else {
errs = append(errs, err)
}
}
return multierr.Combine(errs...)
}
// purge any given resource if it is not nil
func purge(resource *dockertest.Resource) {
if resource != nil && pool != nil {
@ -163,4 +232,41 @@ func init() {
if err := sshResource.Expire(30); err != nil {
log.Printf("Could not expire resource: %s", err)
}
// setup a temporary oracle instance
oracleResource, err = pool.RunWithOptions(&dockertest.RunOptions{
Repository: "gvenzl/oracle-xe",
Tag: "latest",
Env: []string{
"ORACLE_PASSWORD=mysecret",
},
Platform: "linux/amd64",
})
if err != nil {
log.Printf("Could not start Oracle resource: %s", err)
return
}
// by default expire after 30 sec
if err := oracleResource.Expire(30); err != nil {
log.Printf("Could not expire Oracle resource: %s", err)
}
// setup a temporary vnc server
vncResource, err = pool.RunWithOptions(&dockertest.RunOptions{
Repository: "dorowu/ubuntu-desktop-lxde-vnc",
Tag: "latest",
Env: []string{
"VNC_PASSWORD=mysecret",
},
Platform: "linux/amd64",
})
if err != nil {
log.Printf("Could not start resource: %s", err)
return
}
// by default expire after 30 sec
if err := vncResource.Expire(30); err != nil {
log.Printf("Could not expire resource: %s", err)
}
}

View File

@ -48,9 +48,9 @@ func (h *goIntegrationTest) Execute(templatePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text")
_, _ = fmt.Fprintf(w, "This is test headers matcher text")
}
})
ts := httptest.NewServer(router)
@ -68,17 +68,21 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
cache := hosterrorscache.New(30, hosterrorscache.DefaultMaxHostsCount, nil)
defer cache.Close()
defaultOpts := types.DefaultOptions()
defaultOpts.ExecutionId = "test"
mockProgress := &testutils.MockProgressClient{}
reportingClient, err := reporting.New(&reporting.Options{}, "", false)
reportingClient, err := reporting.New(&reporting.Options{ExecutionId: defaultOpts.ExecutionId}, "", false)
if err != nil {
return nil, err
}
defer reportingClient.Close()
defaultOpts := types.DefaultOptions()
_ = protocolstate.Init(defaultOpts)
_ = protocolinit.Init(defaultOpts)
defer protocolstate.Close(defaultOpts.ExecutionId)
defaultOpts.Templates = goflags.StringSlice{templatePath}
defaultOpts.ExcludeTags = config.ReadIgnoreFile().Tags
@ -100,7 +104,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
ratelimiter := ratelimit.New(context.Background(), 150, time.Second)
defer ratelimiter.Stop()
executerOpts := protocols.ExecutorOptions{
executerOpts := &protocols.ExecutorOptions{
Output: outputWriter,
Options: defaultOpts,
Progress: mockProgress,
@ -116,7 +120,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
engine := core.New(defaultOpts)
engine.SetExecuterOptions(executerOpts)
workflowLoader, err := parsers.NewLoader(&executerOpts)
workflowLoader, err := parsers.NewLoader(executerOpts)
if err != nil {
log.Fatalf("Could not create workflow loader: %s\n", err)
}
@ -128,7 +132,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
}
store.Load()
_ = engine.Execute(context.Background(), store.Templates(), provider.NewSimpleInputProviderWithUrls(templateURL))
_ = engine.Execute(context.Background(), store.Templates(), provider.NewSimpleInputProviderWithUrls(defaultOpts.ExecutionId, templateURL))
engine.WorkPool().Wait() // Wait for the scan to finish
return results, nil

View File

@ -10,7 +10,7 @@ import (
"github.com/julienschmidt/httprouter"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
permissionutil "github.com/projectdiscovery/utils/permission"
)
@ -31,9 +31,9 @@ func (h *remoteTemplateList) Execute(templateList string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text")
_, _ = fmt.Fprintf(w, "This is test headers matcher text")
}
})
@ -55,7 +55,9 @@ func (h *remoteTemplateList) Execute(templateList string) error {
if err != nil {
return err
}
defer os.Remove("test-config.yaml")
defer func() {
_ = os.Remove("test-config.yaml")
}()
results, err := testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-template-url", ts.URL+"/template_list", "-config", "test-config.yaml")
if err != nil {
@ -72,9 +74,9 @@ func (h *excludedTemplate) Execute(templateList string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text")
_, _ = fmt.Fprintf(w, "This is test headers matcher text")
}
})
ts := httptest.NewServer(router)
@ -95,9 +97,9 @@ func (h *remoteTemplateListNotAllowed) Execute(templateList string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text")
_, _ = fmt.Fprintf(w, "This is test headers matcher text")
}
})
@ -130,9 +132,9 @@ func (h *remoteWorkflowList) Execute(workflowList string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
if strings.EqualFold(r.Header.Get("test"), "nuclei") {
fmt.Fprintf(w, "This is test headers matcher text")
_, _ = fmt.Fprintf(w, "This is test headers matcher text")
}
})
@ -154,7 +156,9 @@ func (h *remoteWorkflowList) Execute(workflowList string) error {
if err != nil {
return err
}
defer os.Remove("test-config.yaml")
defer func() {
_ = os.Remove("test-config.yaml")
}()
results, err := testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-workflow-url", ts.URL+"/workflow_list", "-config", "test-config.yaml")
if err != nil {
@ -177,7 +181,9 @@ func (h *nonExistentTemplateList) Execute(nonExistingTemplateList string) error
if err != nil {
return err
}
defer os.Remove("test-config.yaml")
defer func() {
_ = os.Remove("test-config.yaml")
}()
_, err = testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-template-url", ts.URL+"/404", "-config", "test-config.yaml")
if err == nil {
@ -200,7 +206,9 @@ func (h *nonExistentWorkflowList) Execute(nonExistingWorkflowList string) error
if err != nil {
return err
}
defer os.Remove("test-config.yaml")
defer func() {
_ = os.Remove("test-config.yaml")
}()
_, err = testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", ts.URL, "-workflow-url", ts.URL+"/404", "-config", "test-config.yaml")
if err == nil {
@ -215,7 +223,7 @@ type loadTemplateWithID struct{}
func (h *loadTemplateWithID) Execute(nooop string) error {
results, err := testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", "scanme.sh", "-id", "self-signed-ssl")
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id")
return errkit.Wrap(err, "failed to load template with id")
}
return expectResultsCount(results, 1)
}

View File

@ -33,7 +33,9 @@ func (h *networkBasic) Execute(filePath string) error {
var routerErr error
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil {
@ -50,11 +52,11 @@ func (h *networkBasic) Execute(filePath string) error {
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, ts.URL, debug)
if err != nil {
fmt.Fprintf(os.Stderr, "Could not run nuclei: %s\n", err)
_, _ = fmt.Fprintf(os.Stderr, "Could not run nuclei: %s\n", err)
return err
}
if routerErr != nil {
fmt.Fprintf(os.Stderr, "routerErr: %s\n", routerErr)
_, _ = fmt.Fprintf(os.Stderr, "routerErr: %s\n", routerErr)
return routerErr
}
@ -68,7 +70,9 @@ func (h *networkMultiStep) Execute(filePath string) error {
var routerErr error
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 5, time.Duration(5)*time.Second)
if err != nil {
@ -114,7 +118,9 @@ type networkRequestSelContained struct{}
// Execute executes a test case and returns an error if occurred
func (h *networkRequestSelContained) Execute(filePath string) error {
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
_, _ = conn.Write([]byte("Authentication successful"))
})
@ -134,7 +140,9 @@ func (h *networkVariables) Execute(filePath string) error {
var routerErr error
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil {
@ -162,7 +170,9 @@ type networkPort struct{}
func (n *networkPort) Execute(filePath string) error {
ts := testutils.NewTCPServer(nil, 23846, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil {
@ -195,7 +205,9 @@ func (n *networkPort) Execute(filePath string) error {
// this is a positive test case where we expect the port to be overridden and 34567 to be used
ts2 := testutils.NewTCPServer(nil, 34567, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil {

View File

@ -4,7 +4,7 @@ import (
"fmt"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
)
var profileLoaderTestcases = []TestCaseInfo{
@ -16,9 +16,9 @@ var profileLoaderTestcases = []TestCaseInfo{
type profileLoaderByRelFile struct{}
func (h *profileLoaderByRelFile) Execute(testName string) error {
results, err := testutils.RunNucleiWithArgsAndGetResults(false, "-tl", "-tp", "cloud.yml")
results, err := testutils.RunNucleiWithArgsAndGetResults(debug, "-tl", "-tp", "cloud.yml")
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id")
return errkit.Wrap(err, "failed to load template with id")
}
if len(results) <= 10 {
return fmt.Errorf("incorrect result: expected more results than %d, got %v", 10, len(results))
@ -29,9 +29,9 @@ func (h *profileLoaderByRelFile) Execute(testName string) error {
type profileLoaderById struct{}
func (h *profileLoaderById) Execute(testName string) error {
results, err := testutils.RunNucleiWithArgsAndGetResults(false, "-tl", "-tp", "cloud")
results, err := testutils.RunNucleiWithArgsAndGetResults(debug, "-tl", "-tp", "cloud")
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id")
return errkit.Wrap(err, "failed to load template with id")
}
if len(results) <= 10 {
return fmt.Errorf("incorrect result: expected more results than %d, got %v", 10, len(results))
@ -43,9 +43,9 @@ func (h *profileLoaderById) Execute(testName string) error {
type customProfileLoader struct{}
func (h *customProfileLoader) Execute(filepath string) error {
results, err := testutils.RunNucleiWithArgsAndGetResults(false, "-tl", "-tp", filepath)
results, err := testutils.RunNucleiWithArgsAndGetResults(debug, "-tl", "-tp", filepath)
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id")
return errkit.Wrap(err, "failed to load template with id")
}
if len(results) < 1 {
return fmt.Errorf("incorrect result: expected more results than %d, got %v", 1, len(results))

View File

@ -21,7 +21,9 @@ type sslBasic struct{}
// Execute executes a test case and returns an error if occurred
func (h *sslBasic) Execute(filePath string) error {
ts := testutils.NewTCPServer(&tls.Config{}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data := make([]byte, 4)
if _, err := conn.Read(data); err != nil {
return
@ -42,7 +44,9 @@ type sslBasicZtls struct{}
// Execute executes a test case and returns an error if occurred
func (h *sslBasicZtls) Execute(filePath string) error {
ts := testutils.NewTCPServer(&tls.Config{}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data := make([]byte, 4)
if _, err := conn.Read(data); err != nil {
return
@ -63,7 +67,9 @@ type sslCustomCipher struct{}
// Execute executes a test case and returns an error if occurred
func (h *sslCustomCipher) Execute(filePath string) error {
ts := testutils.NewTCPServer(&tls.Config{CipherSuites: []uint16{tls.TLS_AES_128_GCM_SHA256}}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data := make([]byte, 4)
if _, err := conn.Read(data); err != nil {
return
@ -84,7 +90,9 @@ type sslCustomVersion struct{}
// Execute executes a test case and returns an error if occurred
func (h *sslCustomVersion) Execute(filePath string) error {
ts := testutils.NewTCPServer(&tls.Config{MinVersion: tls.VersionTLS12, MaxVersion: tls.VersionTLS12}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data := make([]byte, 4)
if _, err := conn.Read(data); err != nil {
return
@ -104,7 +112,9 @@ type sslWithVars struct{}
func (h *sslWithVars) Execute(filePath string) error {
ts := testutils.NewTCPServer(&tls.Config{}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data := make([]byte, 4)
if _, err := conn.Read(data); err != nil {
return
@ -128,7 +138,9 @@ func (h *sslMultiReq) Execute(filePath string) error {
MinVersion: tls.VersionSSL30,
MaxVersion: tls.VersionTLS11,
}, defaultStaticPort, func(conn net.Conn) {
defer conn.Close()
defer func() {
_ = conn.Close()
}()
data := make([]byte, 4)
if _, err := conn.Read(data); err != nil {
return

View File

@ -4,7 +4,7 @@ import (
"os"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
)
var templatesDirTestCases = []TestCaseInfo{
@ -17,9 +17,11 @@ type templateDirWithTargetTest struct{}
func (h *templateDirWithTargetTest) Execute(filePath string) error {
tempdir, err := os.MkdirTemp("", "nuclei-update-dir-*")
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to create temp dir")
return errkit.Wrap(err, "failed to create temp dir")
}
defer os.RemoveAll(tempdir)
defer func() {
_ = os.RemoveAll(tempdir)
}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "8x8exch02.8x8.com", debug, "-ud", tempdir)
if err != nil {

View File

@ -62,7 +62,7 @@ type workflowBasic struct{}
func (h *workflowBasic) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -81,7 +81,7 @@ type workflowConditionMatched struct{}
func (h *workflowConditionMatched) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -100,7 +100,7 @@ type workflowConditionUnmatch struct{}
func (h *workflowConditionUnmatch) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -119,7 +119,7 @@ type workflowMatcherName struct{}
func (h *workflowMatcherName) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -138,7 +138,7 @@ type workflowComplexConditions struct{}
func (h *workflowComplexConditions) Execute(filePath string) error {
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -162,11 +162,11 @@ type workflowHttpKeyValueShare struct{}
func (h *workflowHttpKeyValueShare) Execute(filePath string) error {
router := httprouter.New()
router.GET("/path1", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "href=\"test-value\"")
_, _ = fmt.Fprintf(w, "href=\"test-value\"")
})
router.GET("/path2", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
body, _ := io.ReadAll(r.Body)
fmt.Fprintf(w, "%s", body)
_, _ = fmt.Fprintf(w, "%s", body)
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -214,11 +214,11 @@ func (h *workflowMultiProtocolKeyValueShare) Execute(filePath string) error {
router := httprouter.New()
// the response of path1 contains a domain that will be extracted and shared with the second template
router.GET("/path1", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "href=\"blog.projectdiscovery.io\"")
_, _ = fmt.Fprintf(w, "href=\"blog.projectdiscovery.io\"")
})
// path2 responds with the value of the "extracted" query parameter, e.g.: /path2?extracted=blog.projectdiscovery.io => blog.projectdiscovery.io
router.GET("/path2", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "%s", r.URL.Query().Get("extracted"))
_, _ = fmt.Fprintf(w, "%s", r.URL.Query().Get("extracted"))
})
ts := httptest.NewServer(router)
defer ts.Close()
@ -238,15 +238,15 @@ func (h *workflowMultiMatchKeyValueShare) Execute(filePath string) error {
var receivedData []string
router := httprouter.New()
router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "This is test matcher text")
_, _ = fmt.Fprintf(w, "This is test matcher text")
})
router.GET("/path1", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fmt.Fprintf(w, "href=\"test-value-%s\"", r.URL.Query().Get("v"))
_, _ = fmt.Fprintf(w, "href=\"test-value-%s\"", r.URL.Query().Get("v"))
})
router.GET("/path2", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
body, _ := io.ReadAll(r.Body)
receivedData = append(receivedData, string(body))
fmt.Fprintf(w, "test-value")
_, _ = fmt.Fprintf(w, "test-value")
})
ts := httptest.NewServer(router)
defer ts.Close()

View File

@ -13,14 +13,16 @@ import (
"strings"
"time"
"github.com/projectdiscovery/gologger"
_pdcp "github.com/projectdiscovery/nuclei/v3/internal/pdcp"
"github.com/projectdiscovery/utils/auth/pdcp"
"github.com/projectdiscovery/utils/env"
_ "github.com/projectdiscovery/utils/pprof"
stringsutil "github.com/projectdiscovery/utils/strings"
"github.com/rs/xid"
"gopkg.in/yaml.v2"
"github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/gologger/levels"
"github.com/projectdiscovery/interactsh/pkg/client"
"github.com/projectdiscovery/nuclei/v3/internal/runner"
@ -38,7 +40,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/types/scanstrategy"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/monitor"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file"
unitutils "github.com/projectdiscovery/utils/unit"
updateutils "github.com/projectdiscovery/utils/update"
@ -52,16 +54,18 @@ var (
)
func main() {
options.Logger = gologger.DefaultLogger
// enables CLI-specific configs, mostly interactive behavior
config.CurrentAppMode = config.AppModeCLI
if err := runner.ConfigureOptions(); err != nil {
gologger.Fatal().Msgf("Could not initialize options: %s\n", err)
options.Logger.Fatal().Msgf("Could not initialize options: %s\n", err)
}
_ = readConfig()
if options.ListDslSignatures {
gologger.Info().Msgf("The available custom DSL functions are:")
options.Logger.Info().Msgf("The available custom DSL functions are:")
fmt.Println(dsl.GetPrintableDslFunctionSignatures(options.NoColor))
return
}
@ -72,7 +76,7 @@ func main() {
templates.UseOptionsForSigner(options)
tsigner, err := signer.NewTemplateSigner(nil, nil) // will read from env , config or generate new keys
if err != nil {
gologger.Fatal().Msgf("couldn't initialize signer crypto engine: %s\n", err)
options.Logger.Fatal().Msgf("couldn't initialize signer crypto engine: %s\n", err)
}
successCounter := 0
@ -88,7 +92,7 @@ func main() {
if err != templates.ErrNotATemplate {
// skip warnings and errors as given items are not templates
errorCounter++
gologger.Error().Msgf("could not sign '%s': %s\n", iterItem, err)
options.Logger.Error().Msgf("could not sign '%s': %s\n", iterItem, err)
}
} else {
successCounter++
@ -97,10 +101,10 @@ func main() {
return nil
})
if err != nil {
gologger.Error().Msgf("%s\n", err)
options.Logger.Error().Msgf("%s\n", err)
}
}
gologger.Info().Msgf("All templates signatures were elaborated success=%d failed=%d\n", successCounter, errorCounter)
options.Logger.Info().Msgf("All templates signatures were elaborated success=%d failed=%d\n", successCounter, errorCounter)
return
}
@ -111,7 +115,7 @@ func main() {
createProfileFile := func(ext, profileType string) *os.File {
f, err := os.Create(memProfile + ext)
if err != nil {
gologger.Fatal().Msgf("profile: could not create %s profile %q file: %v", profileType, f.Name(), err)
options.Logger.Fatal().Msgf("profile: could not create %s profile %q file: %v", profileType, f.Name(), err)
}
return f
}
@ -125,45 +129,47 @@ func main() {
// Start tracing
if err := trace.Start(traceFile); err != nil {
gologger.Fatal().Msgf("profile: could not start trace: %v", err)
options.Logger.Fatal().Msgf("profile: could not start trace: %v", err)
}
// Start CPU profiling
if err := pprof.StartCPUProfile(cpuProfileFile); err != nil {
gologger.Fatal().Msgf("profile: could not start CPU profile: %v", err)
options.Logger.Fatal().Msgf("profile: could not start CPU profile: %v", err)
}
defer func() {
// Start heap memory snapshot
if err := pprof.WriteHeapProfile(memProfileFile); err != nil {
gologger.Fatal().Msgf("profile: could not write memory profile: %v", err)
options.Logger.Fatal().Msgf("profile: could not write memory profile: %v", err)
}
pprof.StopCPUProfile()
memProfileFile.Close()
traceFile.Close()
_ = memProfileFile.Close()
_ = traceFile.Close()
trace.Stop()
runtime.MemProfileRate = oldMemProfileRate
gologger.Info().Msgf("CPU profile saved at %q", cpuProfileFile.Name())
gologger.Info().Msgf("Memory usage snapshot saved at %q", memProfileFile.Name())
gologger.Info().Msgf("Traced at %q", traceFile.Name())
options.Logger.Info().Msgf("CPU profile saved at %q", cpuProfileFile.Name())
options.Logger.Info().Msgf("Memory usage snapshot saved at %q", memProfileFile.Name())
options.Logger.Info().Msgf("Traced at %q", traceFile.Name())
}()
}
options.ExecutionId = xid.New().String()
runner.ParseOptions(options)
if options.ScanUploadFile != "" {
if err := runner.UploadResultsToCloud(options); err != nil {
gologger.Fatal().Msgf("could not upload scan results to cloud dashboard: %s\n", err)
options.Logger.Fatal().Msgf("could not upload scan results to cloud dashboard: %s\n", err)
}
return
}
nucleiRunner, err := runner.New(options)
if err != nil {
gologger.Fatal().Msgf("Could not create runner: %s\n", err)
options.Logger.Fatal().Msgf("Could not create runner: %s\n", err)
}
if nucleiRunner == nil {
return
@ -176,13 +182,13 @@ func main() {
stackMonitor.RegisterCallback(func(dumpID string) error {
resumeFileName := fmt.Sprintf("crash-resume-file-%s.dump", dumpID)
if options.EnableCloudUpload {
gologger.Info().Msgf("Uploading scan results to cloud...")
options.Logger.Info().Msgf("Uploading scan results to cloud...")
}
nucleiRunner.Close()
gologger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
options.Logger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
err := nucleiRunner.SaveResumeConfig(resumeFileName)
if err != nil {
return errorutil.NewWithErr(err).Msgf("couldn't create crash resume file")
return errkit.Wrap(err, "couldn't create crash resume file")
}
return nil
})
@ -191,43 +197,41 @@ func main() {
// Setup graceful exits
resumeFileName := types.DefaultResumeFilePath()
c := make(chan os.Signal, 1)
defer close(c)
signal.Notify(c, os.Interrupt)
go func() {
for range c {
gologger.Info().Msgf("CTRL+C pressed: Exiting\n")
<-c
options.Logger.Info().Msgf("CTRL+C pressed: Exiting\n")
if options.DASTServer {
nucleiRunner.Close()
os.Exit(1)
}
gologger.Info().Msgf("Attempting graceful shutdown...")
options.Logger.Info().Msgf("Attempting graceful shutdown...")
if options.EnableCloudUpload {
gologger.Info().Msgf("Uploading scan results to cloud...")
options.Logger.Info().Msgf("Uploading scan results to cloud...")
}
nucleiRunner.Close()
if options.ShouldSaveResume() {
gologger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
options.Logger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
err := nucleiRunner.SaveResumeConfig(resumeFileName)
if err != nil {
gologger.Error().Msgf("Couldn't create resume file: %s\n", err)
options.Logger.Error().Msgf("Couldn't create resume file: %s\n", err)
}
}
os.Exit(1)
}
}()
if err := nucleiRunner.RunEnumeration(); err != nil {
if options.Validate {
gologger.Fatal().Msgf("Could not validate templates: %s\n", err)
options.Logger.Fatal().Msgf("Could not validate templates: %s\n", err)
} else {
gologger.Fatal().Msgf("Could not run nuclei: %s\n", err)
options.Logger.Fatal().Msgf("Could not run nuclei: %s\n", err)
}
}
nucleiRunner.Close()
// on successful execution remove the resume file in case it exists
if fileutil.FileExists(resumeFileName) {
os.Remove(resumeFileName)
_ = os.Remove(resumeFileName)
}
}
@ -260,6 +264,8 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.StringVarP(&options.InputFileMode, "input-mode", "im", "list", fmt.Sprintf("mode of input file (%v)", provider.SupportedInputFormats())),
flagSet.BoolVarP(&options.FormatUseRequiredOnly, "required-only", "ro", false, "use only required fields in input format when generating requests"),
flagSet.BoolVarP(&options.SkipFormatValidation, "skip-format-validation", "sfv", false, "skip format validation (like missing vars) when parsing input file"),
flagSet.BoolVarP(&options.VarsTextTemplating, "vars-text-templating", "vtt", false, "enable text templating for vars in input file (only for yaml input mode)"),
flagSet.StringSliceVarP(&options.VarsFilePaths, "var-file-paths", "vfp", nil, "list of yaml file contained vars to inject into yaml input", goflags.CommaSeparatedStringSliceOptions),
)
flagSet.CreateGroup("templates", "Templates",
@ -542,11 +548,11 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
h := &pdcp.PDCPCredHandler{}
_, err := h.GetCreds()
if err != nil {
gologger.Fatal().Msg("To utilize the `-ai` flag, please configure your API key with the `-auth` flag or set the `PDCP_API_KEY` environment variable")
options.Logger.Fatal().Msg("To utilize the `-ai` flag, please configure your API key with the `-auth` flag or set the `PDCP_API_KEY` environment variable")
}
}
gologger.DefaultLogger.SetTimestamp(options.Timestamp, levels.LevelDebug)
options.Logger.SetTimestamp(options.Timestamp, levels.LevelDebug)
if options.VerboseVerbose {
// hide release notes if silent mode is enabled
@ -568,13 +574,49 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
config.DefaultConfig.SetConfigDir(customConfigDir)
readFlagsConfig(flagSet)
}
if cfgFile != "" {
if !fileutil.FileExists(cfgFile) {
gologger.Fatal().Msgf("given config file '%s' does not exist", cfgFile)
options.Logger.Fatal().Msgf("given config file '%s' does not exist", cfgFile)
}
// merge config file with flags
if err := flagSet.MergeConfigFile(cfgFile); err != nil {
gologger.Fatal().Msgf("Could not read config: %s\n", err)
options.Logger.Fatal().Msgf("Could not read config: %s\n", err)
}
if !options.Vars.IsEmpty() {
// Maybe we should add vars to the config file as well even if they are set via flags?
file, err := os.Open(cfgFile)
if err != nil {
gologger.Fatal().Msgf("Could not open config file: %s\n", err)
}
defer func() {
_ = file.Close()
}()
data := make(map[string]interface{})
err = yaml.NewDecoder(file).Decode(&data)
if err != nil {
gologger.Fatal().Msgf("Could not decode config file: %s\n", err)
}
variables := data["var"]
if variables != nil {
if varSlice, ok := variables.([]interface{}); ok {
for _, value := range varSlice {
if strVal, ok := value.(string); ok {
err = options.Vars.Set(strVal)
if err != nil {
gologger.Warning().Msgf("Could not set variable from config file: %s\n", err)
}
} else {
gologger.Warning().Msgf("Skipping non-string variable in config: %#v", value)
}
}
} else {
gologger.Warning().Msgf("No 'var' section found in config file: %s", cfgFile)
}
}
}
}
if options.NewTemplatesDirectory != "" {
@ -587,7 +629,7 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
if tp := findProfilePathById(templateProfile, defaultProfilesPath); tp != "" {
templateProfile = tp
} else {
gologger.Fatal().Msgf("'%s' is not a profile-id or profile path", templateProfile)
options.Logger.Fatal().Msgf("'%s' is not a profile-id or profile path", templateProfile)
}
}
if !filepath.IsAbs(templateProfile) {
@ -602,17 +644,17 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
}
}
if !fileutil.FileExists(templateProfile) {
gologger.Fatal().Msgf("given template profile file '%s' does not exist", templateProfile)
options.Logger.Fatal().Msgf("given template profile file '%s' does not exist", templateProfile)
}
if err := flagSet.MergeConfigFile(templateProfile); err != nil {
gologger.Fatal().Msgf("Could not read template profile: %s\n", err)
options.Logger.Fatal().Msgf("Could not read template profile: %s\n", err)
}
}
if len(options.SecretsFile) > 0 {
for _, secretFile := range options.SecretsFile {
if !fileutil.FileExists(secretFile) {
gologger.Fatal().Msgf("given secrets file '%s' does not exist", options.SecretsFile)
options.Logger.Fatal().Msgf("given secrets file '%s' does not exist", secretFile)
}
}
}
@ -638,25 +680,25 @@ func readFlagsConfig(flagset *goflags.FlagSet) {
if err != nil {
// something went wrong: either the dir is not readable or something failed upstream in `goflags`
// warn and exit in this case
gologger.Warning().Msgf("Could not read config file: %s\n", err)
options.Logger.Warning().Msgf("Could not read config file: %s\n", err)
return
}
cfgFile := config.DefaultConfig.GetFlagsConfigFilePath()
if !fileutil.FileExists(cfgFile) {
if !fileutil.FileExists(defaultCfgFile) {
// if default config does not exist, warn and exit
gologger.Warning().Msgf("missing default config file : %s", defaultCfgFile)
options.Logger.Warning().Msgf("missing default config file : %s", defaultCfgFile)
return
}
// if it does not exist, copy it from the default config
if err = fileutil.CopyFile(defaultCfgFile, cfgFile); err != nil {
gologger.Warning().Msgf("Could not copy config file: %s\n", err)
options.Logger.Warning().Msgf("Could not copy config file: %s\n", err)
}
return
}
// if config file exists, merge it with the default config
if err = flagset.MergeConfigFile(cfgFile); err != nil {
gologger.Warning().Msgf("failed to merge configfile with flags got: %s\n", err)
options.Logger.Warning().Msgf("failed to merge configfile with flags got: %s\n", err)
}
}
@ -667,29 +709,29 @@ func disableUpdatesCallback() {
// printVersion prints the nuclei version and exits.
func printVersion() {
gologger.Info().Msgf("Nuclei Engine Version: %s", config.Version)
gologger.Info().Msgf("Nuclei Config Directory: %s", config.DefaultConfig.GetConfigDir())
gologger.Info().Msgf("Nuclei Cache Directory: %s", config.DefaultConfig.GetCacheDir()) // cache dir contains resume files
gologger.Info().Msgf("PDCP Directory: %s", pdcp.PDCPDir)
options.Logger.Info().Msgf("Nuclei Engine Version: %s", config.Version)
options.Logger.Info().Msgf("Nuclei Config Directory: %s", config.DefaultConfig.GetConfigDir())
options.Logger.Info().Msgf("Nuclei Cache Directory: %s", config.DefaultConfig.GetCacheDir()) // cache dir contains resume files
options.Logger.Info().Msgf("PDCP Directory: %s", pdcp.PDCPDir)
os.Exit(0)
}
// printTemplateVersion prints the nuclei template version and exits.
func printTemplateVersion() {
cfg := config.DefaultConfig
gologger.Info().Msgf("Public nuclei-templates version: %s (%s)\n", cfg.TemplateVersion, cfg.TemplatesDirectory)
options.Logger.Info().Msgf("Public nuclei-templates version: %s (%s)\n", cfg.TemplateVersion, cfg.TemplatesDirectory)
if fileutil.FolderExists(cfg.CustomS3TemplatesDirectory) {
gologger.Info().Msgf("Custom S3 templates location: %s\n", cfg.CustomS3TemplatesDirectory)
options.Logger.Info().Msgf("Custom S3 templates location: %s\n", cfg.CustomS3TemplatesDirectory)
}
if fileutil.FolderExists(cfg.CustomGitHubTemplatesDirectory) {
gologger.Info().Msgf("Custom GitHub templates location: %s ", cfg.CustomGitHubTemplatesDirectory)
options.Logger.Info().Msgf("Custom GitHub templates location: %s ", cfg.CustomGitHubTemplatesDirectory)
}
if fileutil.FolderExists(cfg.CustomGitLabTemplatesDirectory) {
gologger.Info().Msgf("Custom GitLab templates location: %s ", cfg.CustomGitLabTemplatesDirectory)
options.Logger.Info().Msgf("Custom GitLab templates location: %s ", cfg.CustomGitLabTemplatesDirectory)
}
if fileutil.FolderExists(cfg.CustomAzureTemplatesDirectory) {
gologger.Info().Msgf("Custom Azure templates location: %s ", cfg.CustomAzureTemplatesDirectory)
options.Logger.Info().Msgf("Custom Azure templates location: %s ", cfg.CustomAzureTemplatesDirectory)
}
os.Exit(0)
}
@ -705,13 +747,13 @@ Following files will be deleted:
Note: Make sure you have a backup of your custom nuclei-templates before proceeding
`, config.DefaultConfig.GetConfigDir(), config.DefaultConfig.TemplatesDirectory)
gologger.Print().Msg(warning)
options.Logger.Print().Msg(warning)
reader := bufio.NewReader(os.Stdin)
for {
fmt.Print("Are you sure you want to continue? [y/n]: ")
resp, err := reader.ReadString('\n')
if err != nil {
gologger.Fatal().Msgf("could not read response: %s", err)
options.Logger.Fatal().Msgf("could not read response: %s", err)
}
resp = strings.TrimSpace(resp)
if stringsutil.EqualFoldAny(resp, "y", "yes") {
@ -724,13 +766,13 @@ Note: Make sure you have backup of your custom nuclei-templates before proceedin
}
err := os.RemoveAll(config.DefaultConfig.GetConfigDir())
if err != nil {
gologger.Fatal().Msgf("could not delete config dir: %s", err)
options.Logger.Fatal().Msgf("could not delete config dir: %s", err)
}
err = os.RemoveAll(config.DefaultConfig.TemplatesDirectory)
if err != nil {
gologger.Fatal().Msgf("could not delete templates dir: %s", err)
options.Logger.Fatal().Msgf("could not delete templates dir: %s", err)
}
gologger.Info().Msgf("Successfully deleted all nuclei configurations files and nuclei-templates")
options.Logger.Info().Msgf("Successfully deleted all nuclei configurations files and nuclei-templates")
os.Exit(0)
}
@ -750,14 +792,7 @@ func findProfilePathById(profileId, templatesDir string) string {
return nil
})
if err != nil && err.Error() != "FOUND" {
gologger.Error().Msgf("%s\n", err)
options.Logger.Error().Msgf("%s\n", err)
}
return profilePath
}
func init() {
// print stacktrace of errors in debug mode
if strings.EqualFold(os.Getenv("DEBUG"), "true") {
errorutil.ShowStackTrace = true
}
}

View File

@ -3,28 +3,55 @@ package main_test
import (
"net/http"
"net/http/httptest"
"os"
"testing"
"time"
"github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/gologger/levels"
"github.com/projectdiscovery/nuclei/v3/internal/runner"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
)
func BenchmarkRunEnumeration(b *testing.B) {
var (
projectPath string
targetURL string
)
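// TestMain sets up a temporary project directory and a dummy HTTP target shared by all benchmarks, and tears them down once the run completes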
func TestMain(m *testing.M) {
// Set up
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
_ = os.Setenv("DISABLE_STDOUT", "true")
var err error
projectPath, err = os.MkdirTemp("", "nuclei-benchmark-")
if err != nil {
panic(err)
}
dummyServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
}))
defer dummyServer.Close()
targetURL = dummyServer.URL
options := &types.Options{
RemoteTemplateDomainList: goflags.StringSlice{
"cloud.projectdiscovery.io",
},
ProjectPath: "/tmp",
Targets: goflags.StringSlice{dummyServer.URL},
// Execute tests
exitCode := m.Run()
// Tear down
dummyServer.Close()
_ = os.RemoveAll(projectPath)
_ = os.Unsetenv("DISABLE_STDOUT")
os.Exit(exitCode)
}
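// getDefaultOptions returns the baseline scan options shared by every benchmark case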
func getDefaultOptions() *types.Options {
return &types.Options{
RemoteTemplateDomainList: []string{"cloud.projectdiscovery.io"},
ProjectPath: projectPath,
StatsInterval: 5,
MetricsPort: 9092,
MaxHostError: 30,
@ -65,23 +92,45 @@ func BenchmarkRunEnumeration(b *testing.B) {
LoadHelperFileFunction: types.DefaultOptions().LoadHelperFileFunction,
// DialerKeepAlive: time.Duration(0),
// DASTServerAddress: "localhost:9055",
ExecutionId: "test",
Logger: gologger.DefaultLogger,
}
}
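// runEnumBenchmark parses the given options, builds a runner, and measures repeated RunEnumeration calls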
func runEnumBenchmark(b *testing.B, options *types.Options) {
runner.ParseOptions(options)
// Disable logging to reduce benchmark noise.
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
nucleiRunner, err := runner.New(options)
if err != nil {
b.Fatalf("failed to create runner: %s", err)
}
defer nucleiRunner.Close()
b.ResetTimer()
b.ReportAllocs()
for i := 0; i < b.N; i++ {
if err := nucleiRunner.RunEnumeration(); err != nil {
b.Fatalf("RunEnumeration failed: %s", err)
b.Fatalf("%s failed: %s", b.Name(), err)
}
}
}
func BenchmarkRunEnumeration(b *testing.B) {
// Default case: run enumeration with default options == all nuclei-templates
// b.Run("Default", func(b *testing.B) {
// options := getDefaultOptions()
// options.Targets = []string{targetURL}
// runEnumBenchmark(b, options)
// })
// Case: https://github.com/projectdiscovery/nuclei/pull/6258
b.Run("Multiproto", func(b *testing.B) {
options := getDefaultOptions()
options.Targets = []string{targetURL}
options.Templates = []string{"./cmd/nuclei/testdata/benchmark/multiproto/"}
runEnumBenchmark(b, options)
})
}

View File

@ -0,0 +1,239 @@
id: basic-template-multiproto-mixed
info:
name: Test Template Multiple Protocols (Mixed)
author: pdteam
severity: info
http:
- method: GET
id: first_iter_http
path:
- '{{BaseURL}}/1'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/2'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/3'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/4'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/5'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/6'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/7'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/8'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/9'
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /10 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /11 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /12 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /13 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /14 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET / HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /15 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /16 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /17 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /18 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"

View File

@ -0,0 +1,292 @@
id: basic-template-multiproto-raw
info:
name: Test Template Multiple Protocols RAW
author: pdteam
severity: info
http:
- raw:
- |
GET /1 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /2 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /3 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /4 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /5 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /6 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /7 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /8 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /9 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /10 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /11 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /12 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /13 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /14 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET / HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /15 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /16 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /17 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"
- raw:
- |
GET /18 HTTP/1.1
Host: {{Hostname}}
Origin: {{BaseURL}}
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko)
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Language: en-US,en;q=0.9
matchers:
- type: word
words:
- "Test is test matcher text"

View File

@ -0,0 +1,170 @@
id: basic-template-multiproto
info:
name: Test Template Multiple Protocols
author: pdteam
severity: info
http:
- method: GET
id: first_iter_http
path:
- '{{BaseURL}}/1'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/2'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/3'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/4'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/5'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/6'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/7'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/8'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/9'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/10'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/11'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/12'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/13'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/14'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/15'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/16'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/17'
matchers:
- type: word
words:
- "Test is test matcher text"
- method: GET
path:
- '{{BaseURL}}/18'
matchers:
- type: word
words:
- "Test is test matcher text"

View File

@ -23,7 +23,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
"gopkg.in/yaml.v3"
)
@ -135,7 +135,9 @@ func process(opts options) error {
if err != nil {
return err
}
defer os.RemoveAll(tempDir)
defer func() {
_ = os.RemoveAll(tempDir)
}()
var errFile *os.File
if opts.errorLogFile != "" {
@ -143,7 +145,9 @@ func process(opts options) error {
if err != nil {
gologger.Fatal().Msgf("could not open error log file: %s\n", err)
}
defer errFile.Close()
defer func() {
_ = errFile.Close()
}()
}
templateCatalog := disk.NewCatalog(filepath.Dir(opts.input))
@ -226,7 +230,7 @@ func logErrMsg(path string, err error, debug bool, errFile *os.File) string {
msg = fmt.Sprintf("❌ template: %s err: %s\n", path, err)
}
if errFile != nil {
_, _ = errFile.WriteString(fmt.Sprintf("❌ template: %s err: %s\n", path, err))
_, _ = fmt.Fprintf(errFile, "❌ template: %s err: %s\n", path, err)
}
return msg
}
@ -239,7 +243,7 @@ func enhanceTemplate(data string) (string, bool, error) {
return data, false, err
}
if resp.StatusCode != 200 {
return data, false, errorutil.New("unexpected status code: %v", resp.Status)
return data, false, errkit.New("unexpected status code: %v", resp.Status)
}
var templateResp TemplateResp
if err := json.NewDecoder(resp.Body).Decode(&templateResp); err != nil {
@ -250,20 +254,20 @@ func enhanceTemplate(data string) (string, bool, error) {
}
if templateResp.ValidateErrorCount > 0 {
if len(templateResp.ValidateError) > 0 {
return data, false, errorutil.NewWithTag("validate", templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line)
return data, false, errkit.New(templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line, "tag", "validate")
}
return data, false, errorutil.New("validation failed").WithTag("validate")
return data, false, errkit.New("validation failed", "tag", "validate")
}
if templateResp.Error.Name != "" {
return data, false, errorutil.New("%s", templateResp.Error.Name)
return data, false, errkit.New("%s", templateResp.Error.Name)
}
if strings.TrimSpace(templateResp.Enhanced) == "" && !templateResp.Lint {
if templateResp.LintError.Reason != "" {
return data, false, errorutil.NewWithTag("lint", templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line)
return data, false, errkit.New(templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line, "tag", "lint")
}
return data, false, errorutil.NewWithTag("lint", "at line: %v", templateResp.LintError.Mark.Line)
return data, false, errkit.New("at line: %v", templateResp.LintError.Mark.Line, "tag", "lint")
}
return data, false, errorutil.New("template enhance failed")
return data, false, errkit.New("template enhance failed")
}
// formatTemplate formats template data using templateman format api
@ -273,7 +277,7 @@ func formatTemplate(data string) (string, bool, error) {
return data, false, err
}
if resp.StatusCode != 200 {
return data, false, errorutil.New("unexpected status code: %v", resp.Status)
return data, false, errkit.New("unexpected status code: %v", resp.Status)
}
var templateResp TemplateResp
if err := json.NewDecoder(resp.Body).Decode(&templateResp); err != nil {
@ -284,20 +288,20 @@ func formatTemplate(data string) (string, bool, error) {
}
if templateResp.ValidateErrorCount > 0 {
if len(templateResp.ValidateError) > 0 {
return data, false, errorutil.NewWithTag("validate", templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line)
return data, false, errkit.New(templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line, "tag", "validate")
}
return data, false, errorutil.New("validation failed").WithTag("validate")
return data, false, errkit.New("validation failed", "tag", "validate")
}
if templateResp.Error.Name != "" {
return data, false, errorutil.New("%s", templateResp.Error.Name)
return data, false, errkit.New("%s", templateResp.Error.Name)
}
if strings.TrimSpace(templateResp.Updated) == "" && !templateResp.Lint {
if templateResp.LintError.Reason != "" {
return data, false, errorutil.NewWithTag("lint", templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line)
return data, false, errkit.New(templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line, "tag", "lint")
}
return data, false, errorutil.NewWithTag("lint", "at line: %v", templateResp.LintError.Mark.Line)
return data, false, errkit.New("at line: %v", templateResp.LintError.Mark.Line, "tag", "lint")
}
return data, false, errorutil.New("template format failed")
return data, false, errkit.New("template format failed")
}
// lintTemplate lints template data using templateman lint api
@ -307,7 +311,7 @@ func lintTemplate(data string) (bool, error) {
return false, err
}
if resp.StatusCode != 200 {
return false, errorutil.New("unexpected status code: %v", resp.Status)
return false, errkit.New("unexpected status code: %v", resp.Status)
}
var lintResp TemplateLintResp
if err := json.NewDecoder(resp.Body).Decode(&lintResp); err != nil {
@ -317,9 +321,9 @@ func lintTemplate(data string) (bool, error) {
return true, nil
}
if lintResp.LintError.Reason != "" {
return false, errorutil.NewWithTag("lint", lintResp.LintError.Reason+" : at line %v", lintResp.LintError.Mark.Line)
return false, errkit.New(lintResp.LintError.Reason+" : at line %v", lintResp.LintError.Mark.Line, "tag", "lint")
}
return false, errorutil.NewWithTag("lint", "at line: %v", lintResp.LintError.Mark.Line)
return false, errkit.New("at line: %v", lintResp.LintError.Mark.Line, "tag", "lint")
}
// validateTemplate validates template data using templateman validate api
@ -329,7 +333,7 @@ func validateTemplate(data string) (bool, error) {
return false, err
}
if resp.StatusCode != 200 {
return false, errorutil.New("unexpected status code: %v", resp.Status)
return false, errkit.New("unexpected status code: %v", resp.Status)
}
var validateResp TemplateResp
if err := json.NewDecoder(resp.Body).Decode(&validateResp); err != nil {
@ -340,14 +344,14 @@ func validateTemplate(data string) (bool, error) {
}
if validateResp.ValidateErrorCount > 0 {
if len(validateResp.ValidateError) > 0 {
return false, errorutil.NewWithTag("validate", validateResp.ValidateError[0].Message+": at line %v", validateResp.ValidateError[0].Mark.Line)
return false, errkit.New(validateResp.ValidateError[0].Message+": at line %v", validateResp.ValidateError[0].Mark.Line, "tag", "validate")
}
return false, errorutil.New("validation failed").WithTag("validate")
return false, errkit.New("validation failed", "tag", "validate")
}
if validateResp.Error.Name != "" {
return false, errorutil.New("%s", validateResp.Error.Name)
return false, errkit.New("%s", validateResp.Error.Name)
}
return false, errorutil.New("template validation failed")
return false, errkit.New("template validation failed")
}
// parseAndAddMaxRequests parses and adds max requests to templates
@ -397,7 +401,7 @@ func parseAndAddMaxRequests(catalog catalog.Catalog, path, data string) (string,
// parseTemplate parses a template and returns the template object
func parseTemplate(catalog catalog.Catalog, templatePath string) (*templates.Template, error) {
executorOpts := protocols.ExecutorOptions{
executorOpts := &protocols.ExecutorOptions{
Catalog: catalog,
Options: defaultOpts,
}
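
The hunks above replace the errorutil helpers with errkit throughout this templateman tooling. A minimal sketch of the resulting error-construction pattern follows; it only reuses the calls that appear in this diff (errkit.New with printf-style arguments, an optional trailing "tag" key/value pair, and errkit.Wrap), and any other detail of the errkit API should be treated as an assumption.

// Sketch only: mirrors the errkit calls used in the hunks above.
package main

import (
	"fmt"

	"github.com/projectdiscovery/utils/errkit"
)

func checkStatus(status string, code int) error {
	if code != 200 {
		// was: errorutil.New("unexpected status code: %v", resp.Status)
		return errkit.New("unexpected status code: %v", status)
	}
	return nil
}

func validate(ok bool) error {
	if !ok {
		// was: errorutil.New("validation failed").WithTag("validate")
		return errkit.New("validation failed", "tag", "validate")
	}
	return nil
}

func wrap(err error) error {
	// was: errorutil.NewWithErr(err).Msgf("could not create output writer")
	return errkit.Wrap(err, "could not create output writer")
}

func main() {
	fmt.Println(checkStatus("502 Bad Gateway", 502))
	fmt.Println(validate(false))
	fmt.Println(wrap(fmt.Errorf("pipe closed")))
}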

View File

@ -18,7 +18,9 @@ func main() {
defer fuzzplayground.Cleanup()
server := fuzzplayground.GetPlaygroundServer()
defer server.Close()
defer func() {
_ = server.Close()
}()
// Start the server
if err := server.Start(addr); err != nil {

View File

@ -99,12 +99,12 @@ func main() {
gologger.Info().Msgf("✓ Template signed & verified successfully")
}
func defaultExecutorOpts(templatePath string) protocols.ExecutorOptions {
func defaultExecutorOpts(templatePath string) *protocols.ExecutorOptions {
// use parsed options when initializing signer instead of default options
options := types.DefaultOptions()
templates.UseOptionsForSigner(options)
catalog := disk.NewCatalog(filepath.Dir(templatePath))
executerOpts := protocols.ExecutorOptions{
executerOpts := &protocols.ExecutorOptions{
Catalog: catalog,
Options: options,
TemplatePath: templatePath,

View File

@ -1,6 +1,7 @@
package main
import (
"context"
"log"
"sync"
"time"
@ -34,7 +35,7 @@ func main() {
}
func initializeNucleiEngine() (*nuclei.NucleiEngine, error) {
return nuclei.NewNucleiEngine(
return nuclei.NewNucleiEngineCtx(context.TODO(),
nuclei.WithTemplateFilters(nuclei.TemplateFilters{Tags: []string{"oast"}}),
nuclei.EnableStatsWithOpts(nuclei.StatsOptions{MetricServerPort: 6064}),
nuclei.WithGlobalRateLimit(1, time.Second),
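
The SDK example above moves from nuclei.NewNucleiEngine to the context-aware nuclei.NewNucleiEngineCtx, passing context.TODO(). A hedged sketch of a caller that supplies a cancellable context instead is shown below; only the options already visible in this diff are used, and the SDK import path and the Close method are assumptions based on the public nuclei library rather than on this diff.

// Sketch only: passing a real, cancellable context to the constructor
// introduced above instead of context.TODO().
package main

import (
	"context"
	"log"
	"time"

	nuclei "github.com/projectdiscovery/nuclei/v3/lib"
)

func main() {
	// cancel the whole engine run after ten minutes (illustrative value)
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
	defer cancel()

	ne, err := nuclei.NewNucleiEngineCtx(ctx,
		nuclei.WithTemplateFilters(nuclei.TemplateFilters{Tags: []string{"oast"}}),
		nuclei.WithGlobalRateLimit(1, time.Second),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer ne.Close()
}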

427
go.mod
View File

@ -1,141 +1,173 @@
module github.com/projectdiscovery/nuclei/v3
go 1.23.0
go 1.24.2
toolchain go1.24.1
toolchain go1.24.4
require (
github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible
github.com/andygrunwald/go-jira v1.16.0
github.com/antchfx/htmlquery v1.3.0
github.com/andygrunwald/go-jira v1.16.1
github.com/antchfx/htmlquery v1.3.4
github.com/bluele/gcache v0.0.2
github.com/go-playground/validator/v10 v10.14.1
github.com/go-playground/validator/v10 v10.26.0
github.com/go-rod/rod v0.116.2
github.com/gobwas/ws v1.2.1
github.com/gobwas/ws v1.4.0
github.com/google/go-github v17.0.0+incompatible
github.com/invopop/jsonschema v0.12.0
github.com/itchyny/gojq v0.12.13
github.com/invopop/jsonschema v0.13.0
github.com/itchyny/gojq v0.12.17
github.com/json-iterator/go v1.1.12
github.com/julienschmidt/httprouter v1.3.0
github.com/logrusorgru/aurora v2.0.3+incompatible
github.com/miekg/dns v1.1.62
github.com/olekukonko/tablewriter v0.0.5
github.com/miekg/dns v1.1.66
github.com/olekukonko/tablewriter v1.0.8
github.com/pkg/errors v0.9.1
github.com/projectdiscovery/clistats v0.1.1
github.com/projectdiscovery/fastdialer v0.4.0
github.com/projectdiscovery/hmap v0.0.88
github.com/projectdiscovery/fastdialer v0.4.11
github.com/projectdiscovery/hmap v0.0.94
github.com/projectdiscovery/interactsh v1.2.4
github.com/projectdiscovery/rawhttp v0.1.90
github.com/projectdiscovery/retryabledns v1.0.99
github.com/projectdiscovery/retryablehttp-go v1.0.110
github.com/projectdiscovery/retryabledns v1.0.107
github.com/projectdiscovery/retryablehttp-go v1.0.125
github.com/projectdiscovery/yamldoc-go v1.0.6
github.com/remeh/sizedwaitgroup v1.0.0
github.com/rs/xid v1.6.0
github.com/segmentio/ksuid v1.0.4
github.com/shirou/gopsutil/v3 v3.24.2 // indirect
github.com/shirou/gopsutil/v3 v3.24.5 // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect
github.com/spf13/cast v1.5.1
github.com/spf13/cast v1.9.2
github.com/syndtr/goleveldb v1.0.0
github.com/valyala/fasttemplate v1.2.2
github.com/weppos/publicsuffix-go v0.40.2
github.com/xanzy/go-gitlab v0.107.0
github.com/weppos/publicsuffix-go v0.50.0
go.uber.org/multierr v1.11.0
golang.org/x/net v0.39.0
golang.org/x/oauth2 v0.22.0
golang.org/x/text v0.24.0
golang.org/x/net v0.44.0
golang.org/x/oauth2 v0.30.0
golang.org/x/text v0.29.0
gopkg.in/yaml.v2 v2.4.0
)
require (
carvel.dev/ytt v0.52.0
code.gitea.io/sdk/gitea v0.17.0
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.1.0
github.com/DataDog/gostackparse v0.6.0
github.com/DataDog/gostackparse v0.7.0
github.com/Masterminds/semver/v3 v3.2.1
github.com/Mzack9999/gcache v0.0.0-20230410081825-519e28eab057
github.com/Mzack9999/goja v0.0.0-20250507184235-e46100e9c697
github.com/Mzack9999/goja_nodejs v0.0.0-20250507184139-66bcbf65c883
github.com/alexsnet/go-vnc v0.1.0
github.com/alitto/pond v1.9.2
github.com/antchfx/xmlquery v1.3.17
github.com/antchfx/xmlquery v1.4.4
github.com/antchfx/xpath v1.3.3
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
github.com/aws/aws-sdk-go-v2 v1.19.0
github.com/aws/aws-sdk-go-v2/config v1.18.28
github.com/aws/aws-sdk-go-v2/credentials v1.13.27
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.72
github.com/aws/aws-sdk-go-v2/service/s3 v1.37.0
github.com/bytedance/sonic v1.12.8
github.com/aws/aws-sdk-go-v2 v1.36.5
github.com/aws/aws-sdk-go-v2/config v1.29.17
github.com/aws/aws-sdk-go-v2/credentials v1.17.70
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.82
github.com/aws/aws-sdk-go-v2/service/s3 v1.82.0
github.com/bytedance/sonic v1.14.0
github.com/cespare/xxhash v1.1.0
github.com/charmbracelet/glamour v0.8.0
github.com/charmbracelet/glamour v0.10.0
github.com/clbanning/mxj/v2 v2.7.0
github.com/ditashi/jsbeautifier-go v0.0.0-20141206144643-2520a8026a9c
github.com/docker/go-units v0.5.0
github.com/dop251/goja v0.0.0-20240220182346-e401ed450204
github.com/fatih/structs v1.1.0
github.com/getkin/kin-openapi v0.126.0
github.com/go-git/go-git/v5 v5.13.0
github.com/go-ldap/ldap/v3 v3.4.5
github.com/getkin/kin-openapi v0.132.0
github.com/go-git/go-git/v5 v5.16.2
github.com/go-ldap/ldap/v3 v3.4.11
github.com/go-pg/pg v8.0.7+incompatible
github.com/go-sql-driver/mysql v1.7.1
github.com/go-sql-driver/mysql v1.9.3
github.com/goccy/go-json v0.10.5
github.com/google/uuid v1.6.0
github.com/h2non/filetype v1.1.3
github.com/invopop/yaml v0.3.1
github.com/jcmturner/gokrb5/v8 v8.4.4
github.com/kitabisa/go-ci v1.0.3
github.com/labstack/echo/v4 v4.13.3
github.com/labstack/echo/v4 v4.13.4
github.com/leslie-qiwa/flat v0.0.0-20230424180412-f9d1cf014baa
github.com/lib/pq v1.10.9
github.com/mattn/go-sqlite3 v1.14.22
github.com/mholt/archives v0.1.0
github.com/microsoft/go-mssqldb v1.6.0
github.com/ory/dockertest/v3 v3.10.0
github.com/praetorian-inc/fingerprintx v1.1.9
github.com/projectdiscovery/dsl v0.4.2
github.com/mattn/go-sqlite3 v1.14.28
github.com/mholt/archives v0.1.3
github.com/microsoft/go-mssqldb v1.9.2
github.com/ory/dockertest/v3 v3.12.0
github.com/praetorian-inc/fingerprintx v1.1.15
github.com/projectdiscovery/dsl v0.7.0
github.com/projectdiscovery/fasttemplate v0.0.2
github.com/projectdiscovery/gcache v0.0.0-20241015120333-12546c6e3f4c
github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb
github.com/projectdiscovery/goflags v0.1.74
github.com/projectdiscovery/gologger v1.1.53
github.com/projectdiscovery/gologger v1.1.55
github.com/projectdiscovery/gostruct v0.0.2
github.com/projectdiscovery/gozero v0.0.3
github.com/projectdiscovery/httpx v1.7.0
github.com/projectdiscovery/gozero v0.1.0
github.com/projectdiscovery/httpx v1.7.2-0.20250911192144-fc425deb041a
github.com/projectdiscovery/mapcidr v1.1.34
github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5
github.com/projectdiscovery/ratelimit v0.0.80
github.com/projectdiscovery/rdap v0.9.1-0.20221108103045-9865884d1917
github.com/projectdiscovery/networkpolicy v0.1.25
github.com/projectdiscovery/ratelimit v0.0.82
github.com/projectdiscovery/rdap v0.9.0
github.com/projectdiscovery/sarif v0.0.1
github.com/projectdiscovery/tlsx v1.1.9
github.com/projectdiscovery/uncover v1.0.10
github.com/projectdiscovery/useragent v0.0.100
github.com/projectdiscovery/utils v0.4.18
github.com/projectdiscovery/wappalyzergo v0.2.25
github.com/redis/go-redis/v9 v9.1.0
github.com/projectdiscovery/tlsx v1.2.1
github.com/projectdiscovery/uncover v1.1.0
github.com/projectdiscovery/useragent v0.0.101
github.com/projectdiscovery/utils v0.5.0
github.com/projectdiscovery/wappalyzergo v0.2.47
github.com/redis/go-redis/v9 v9.11.0
github.com/seh-msft/burpxml v1.0.1
github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466
github.com/stretchr/testify v1.10.0
github.com/sijms/go-ora/v2 v2.9.0
github.com/stretchr/testify v1.11.1
github.com/tarunKoyalwar/goleak v0.0.0-20240429141123-0efa90dbdcf9
github.com/yassinebenaid/godump v0.10.0
github.com/zmap/zgrab2 v0.1.8-0.20230806160807-97ba87c0e706
go.mongodb.org/mongo-driver v1.17.0
golang.org/x/term v0.31.0
github.com/testcontainers/testcontainers-go v0.38.0
github.com/testcontainers/testcontainers-go/modules/mongodb v0.37.0
github.com/yassinebenaid/godump v0.11.1
github.com/zmap/zgrab2 v0.1.8
gitlab.com/gitlab-org/api/client-go v0.130.1
go.mongodb.org/mongo-driver v1.17.4
golang.org/x/term v0.35.0
gopkg.in/yaml.v3 v3.0.1
moul.io/http2curl v1.0.0
)
require (
aead.dev/minisign v0.2.0 // indirect
dario.cat/mergo v1.0.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0 // indirect
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect
dario.cat/mergo v1.0.2 // indirect
filippo.io/edwards25519 v1.1.0 // indirect
git.mills.io/prologic/smtpd v0.0.0-20210710122116-a525b76c287a // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/Mzack9999/go-http-digest-auth-client v0.6.1-0.20220414142836-eb8883508809 // indirect
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect
github.com/ProtonMail/go-crypto v1.1.6 // indirect
github.com/PuerkitoBio/goquery v1.10.3 // indirect
github.com/STARRY-S/zip v0.2.1 // indirect
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/akrylysov/pogreb v0.10.2 // indirect
github.com/alecthomas/chroma/v2 v2.14.0 // indirect
github.com/andybalholm/brotli v1.1.1 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.27 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.30 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.4 // indirect
github.com/alecthomas/kingpin/v2 v2.4.0 // indirect
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect
github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3 // indirect
github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.32 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.36 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.36 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.36 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.17 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.17 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.25.5 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.3 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.34.0 // indirect
github.com/aws/smithy-go v1.22.4 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/bits-and-blooms/bitset v1.13.0 // indirect
github.com/bits-and-blooms/bloom/v3 v3.5.0 // indirect
@ -143,96 +175,162 @@ require (
github.com/bodgit/sevenzip v1.6.0 // indirect
github.com/bodgit/windows v1.0.1 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/bytedance/sonic/loader v0.2.2 // indirect
github.com/cenkalti/backoff/v4 v4.2.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/charmbracelet/lipgloss v0.13.0 // indirect
github.com/charmbracelet/x/ansi v0.3.2 // indirect
github.com/cheggaaa/pb/v3 v3.1.4 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/caddyserver/certmagic v0.19.2 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 // indirect
github.com/charmbracelet/x/ansi v0.8.0 // indirect
github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/cheggaaa/pb/v3 v3.1.6 // indirect
github.com/cloudflare/cfssl v1.6.4 // indirect
github.com/cloudflare/circl v1.3.8 // indirect
github.com/cloudflare/circl v1.6.1 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect
github.com/containerd/continuity v0.4.2 // indirect
github.com/cyphar/filepath-securejoin v0.2.5 // indirect
github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 // indirect
github.com/containerd/continuity v0.4.5 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
github.com/containerd/errdefs/pkg v0.3.0 // indirect
github.com/containerd/log v0.1.0 // indirect
github.com/containerd/platforms v0.2.1 // indirect
github.com/cpuguy83/dockercfg v0.3.2 // indirect
github.com/cyphar/filepath-securejoin v0.4.1 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/davidmz/go-pageant v1.0.2 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/dlclark/regexp2 v1.11.4 // indirect
github.com/docker/cli v24.0.5+incompatible // indirect
github.com/docker/docker v24.0.9+incompatible // indirect
github.com/docker/go-connections v0.4.0 // indirect
github.com/fatih/color v1.16.0 // indirect
github.com/distribution/reference v0.6.0 // indirect
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/docker/cli v27.4.1+incompatible // indirect
github.com/docker/docker v28.3.3+incompatible // indirect
github.com/docker/go-connections v0.6.0 // indirect
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect
github.com/ebitengine/purego v0.8.4 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/felixge/fgprof v0.9.5 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/free5gc/util v1.0.5-0.20230511064842-2e120956883b // indirect
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
github.com/gaissmai/bart v0.17.10 // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/gaissmai/bart v0.25.0 // indirect
github.com/geoffgarside/ber v1.1.0 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/gin-gonic/gin v1.9.1 // indirect
github.com/go-asn1-ber/asn1-ber v1.5.4 // indirect
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 // indirect
github.com/go-fed/httpsig v1.1.0 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.6.2 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect
github.com/go-openapi/swag v0.23.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v5 v5.2.1 // indirect
github.com/golang-jwt/jwt/v5 v5.2.2 // indirect
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
github.com/golang-sql/sqlexp v0.1.0 // indirect
github.com/google/certificate-transparency-go v1.1.4 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/certificate-transparency-go v1.3.2 // indirect
github.com/google/go-github/v30 v30.1.0 // indirect
github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7 // indirect
github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 // indirect
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/hashicorp/go-version v1.6.0 // indirect
github.com/hashicorp/go-version v1.7.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hbakhtiyor/strsim v0.0.0-20190107154042-4d2bbb273edf // indirect
github.com/hdm/jarm-go v0.0.7 // indirect
github.com/iangcarroll/cookiemonster v1.6.0 // indirect
github.com/imdario/mergo v0.3.16 // indirect
github.com/itchyny/timefmt-go v0.1.6 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jcmturner/aescts/v2 v2.0.0 // indirect
github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect
github.com/jcmturner/gofork v1.7.6 // indirect
github.com/jcmturner/rpc/v2 v2.0.3 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/k14s/starlark-go v0.0.0-20200720175618-3a5c849cc368 // indirect
github.com/kataras/jwt v0.1.10 // indirect
github.com/klauspost/compress v1.17.11 // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/labstack/gommon v0.4.2 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/libdns/libdns v0.2.1 // indirect
github.com/logrusorgru/aurora/v4 v4.0.0 // indirect
github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/lufia/plan9stats v0.0.0-20250821153705-5981dea3221d // indirect
github.com/mackerelio/go-osstat v0.2.4 // indirect
github.com/magiconair/properties v1.8.10 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mholt/archiver/v3 v3.5.1 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mholt/acmez v1.2.0 // indirect
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
github.com/mikelolasagasti/xz v1.0.1 // indirect
github.com/minio/minlz v1.0.0 // indirect
github.com/minio/selfupdate v0.6.1-0.20230907112617-f11e74f84ca7 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/moby/term v0.5.0 // indirect
github.com/moby/docker-image-spec v1.3.1 // indirect
github.com/moby/go-archive v0.1.0 // indirect
github.com/moby/patternmatcher v0.6.0 // indirect
github.com/moby/sys/sequential v0.6.0 // indirect
github.com/moby/sys/user v0.4.0 // indirect
github.com/moby/sys/userns v0.1.0 // indirect
github.com/moby/term v0.5.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/morikuni/aec v1.0.0 // indirect
github.com/muesli/reflow v0.3.0 // indirect
github.com/muesli/termenv v0.15.3-0.20240618155329-98d742f6907a // indirect
github.com/nwaples/rardecode/v2 v2.0.1 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/nwaples/rardecode/v2 v2.1.0 // indirect
github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect
github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect
github.com/olekukonko/errors v1.1.0 // indirect
github.com/olekukonko/ll v0.0.9 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.0.2 // indirect
github.com/opencontainers/runc v1.1.14 // indirect
github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/opencontainers/runc v1.2.3 // indirect
github.com/openrdap/rdap v0.9.1 // indirect
github.com/pelletier/go-toml/v2 v2.0.8 // indirect
github.com/perimeterx/marshmallow v1.1.5 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pjbgf/sha1cd v0.3.0 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/pjbgf/sha1cd v0.3.2 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
github.com/projectdiscovery/asnmap v1.1.1 // indirect
github.com/projectdiscovery/cdncheck v1.1.15 // indirect
github.com/projectdiscovery/blackrock v0.0.1 // indirect
github.com/projectdiscovery/cdncheck v1.2.0 // indirect
github.com/projectdiscovery/freeport v0.0.7 // indirect
github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb // indirect
github.com/projectdiscovery/machineid v0.0.0-20240226150047-2e2c51e35983 // indirect
github.com/refraction-networking/utls v1.6.7 // indirect
github.com/refraction-networking/utls v1.7.1 // indirect
github.com/sashabaranov/go-openai v1.37.0 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect
github.com/shirou/gopsutil/v4 v4.25.7 // indirect
github.com/shoenig/go-m1cpu v0.1.6 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.3.0 // indirect
github.com/skeema/knownhosts v1.3.1 // indirect
github.com/sorairolake/lzip-go v0.3.5 // indirect
github.com/therootcompany/xz v1.0.1 // indirect
github.com/tidwall/btree v1.7.0 // indirect
github.com/tidwall/buntdb v1.3.1 // indirect
github.com/tidwall/gjson v1.18.0 // indirect
@ -242,133 +340,82 @@ require (
github.com/tidwall/rtred v0.1.2 // indirect
github.com/tidwall/tinyqueue v0.1.1 // indirect
github.com/tim-ywliu/nested-logrus-formatter v1.3.2 // indirect
github.com/tklauser/go-sysconf v0.3.15 // indirect
github.com/tklauser/numcpus v0.10.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect
github.com/ulikunitz/xz v0.5.15 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
github.com/ysmood/fetchup v0.2.3 // indirect
github.com/ysmood/got v0.40.0 // indirect
github.com/yuin/goldmark v1.7.4 // indirect
github.com/yuin/goldmark-emoji v1.0.3 // indirect
github.com/yuin/goldmark v1.7.13 // indirect
github.com/yuin/goldmark-emoji v1.0.5 // indirect
github.com/zcalusic/sysinfo v1.0.2 // indirect
github.com/zeebo/blake3 v0.2.3 // indirect
go.uber.org/goleak v1.3.0 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 // indirect
go.opentelemetry.io/otel v1.37.0 // indirect
go.opentelemetry.io/otel/metric v1.37.0 // indirect
go.opentelemetry.io/otel/trace v1.37.0 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/arch v0.3.0 // indirect
golang.org/x/sync v0.13.0 // indirect
golang.org/x/sync v0.17.0 // indirect
gopkg.in/djherbis/times.v1 v1.3.0 // indirect
mellium.im/sasl v0.3.1 // indirect
mellium.im/sasl v0.3.2 // indirect
)
require (
git.mills.io/prologic/smtpd v0.0.0-20210710122116-a525b76c287a // indirect
github.com/Mzack9999/go-http-digest-auth-client v0.6.1-0.20220414142836-eb8883508809 // indirect
github.com/PuerkitoBio/goquery v1.10.3 // indirect
github.com/akrylysov/pogreb v0.10.2 // indirect
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect
github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/antchfx/xpath v1.2.4
github.com/aymerick/douceur v0.2.0 // indirect
github.com/caddyserver/certmagic v0.19.2 // indirect
github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dimchansky/utfbom v1.1.1 // indirect
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/goburrow/cache v0.1.4 // indirect
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
github.com/golang-jwt/jwt/v4 v4.5.1 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/uuid v1.6.0
github.com/gorilla/css v1.0.1 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
github.com/hdm/jarm-go v0.0.7 // indirect
github.com/itchyny/timefmt-go v0.1.5 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 // indirect
github.com/leodido/go-urn v1.2.4 // indirect
github.com/libdns/libdns v0.2.1 // indirect
github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mholt/acmez v1.2.0 // indirect
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
github.com/projectdiscovery/blackrock v0.0.1 // indirect
github.com/projectdiscovery/networkpolicy v0.1.13
github.com/rivo/uniseg v0.4.7 // indirect
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d // indirect
github.com/tklauser/go-sysconf v0.3.12 // indirect
github.com/tklauser/numcpus v0.6.1 // indirect
github.com/trivago/tgo v1.0.7
github.com/ulikunitz/xz v0.5.12 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/ysmood/goob v0.4.0 // indirect
github.com/ysmood/gson v0.7.3 // indirect
github.com/ysmood/leakless v0.9.0 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/zmap/rc2 v0.0.0-20190804163417-abaa70531248 // indirect
github.com/zmap/zcrypto v0.0.0-20240512203510-0fef58d9a9db // indirect
go.etcd.io/bbolt v1.3.10 // indirect
go.uber.org/zap v1.25.0 // indirect
go.etcd.io/bbolt v1.4.0 // indirect
go.uber.org/zap v1.27.0 // indirect
goftp.io/server/v2 v2.0.1 // indirect
golang.org/x/crypto v0.37.0 // indirect
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8
golang.org/x/mod v0.22.0 // indirect
golang.org/x/sys v0.32.0 // indirect
golang.org/x/time v0.8.0 // indirect
golang.org/x/tools v0.29.0
google.golang.org/protobuf v1.34.2 // indirect
golang.org/x/crypto v0.42.0 // indirect
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
golang.org/x/mod v0.27.0 // indirect
golang.org/x/sys v0.36.0 // indirect
golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.36.0
google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect
gopkg.in/corvus-ch/zbase32.v1 v1.0.0 // indirect
)
require (
github.com/Microsoft/go-winio v0.6.1 // indirect
github.com/ProtonMail/go-crypto v1.1.3 // indirect
github.com/alecthomas/chroma v0.10.0
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.5 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.3.36 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.12.13 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.13 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.19.3 // indirect
github.com/aws/smithy-go v1.13.5 // indirect
github.com/dop251/goja_nodejs v0.0.0-20230821135201-94e508132562
github.com/emirpasic/gods v1.18.1 // indirect
github.com/go-echarts/go-echarts/v2 v2.3.3
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.6.0 // indirect
github.com/imdario/mergo v0.3.16 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jcmturner/gokrb5/v8 v8.4.4
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/labstack/gommon v0.4.2 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/nwaples/rardecode v1.1.3 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
github.com/go-echarts/go-echarts/v2 v2.6.0
gopkg.in/warnings.v0 v0.1.2 // indirect
)
// https://go.dev/ref/mod#go-mod-file-retract
retract v3.2.0 // retract due to broken js protocol issue
// Fix genproto version conflicts
replace (
google.golang.org/genproto => google.golang.org/genproto v0.0.0-20240814211410-ddb44dafa142
google.golang.org/genproto/googleapis/api => google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142
google.golang.org/genproto/googleapis/rpc => google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1
)

1878
go.sum

File diff suppressed because it is too large

View File

@ -1,6 +1,7 @@
{{- if .Values.interactsh.ingress.enabled -}}
{{- $fullName := include "nuclei.fullname" . -}}
{{- $svcPort := .Values.service.port -}}
{{- $svcPort := .Values.interactsh.service.port -}}
{{- $svcName := .Values.interactsh.service.name -}}
{{- if and .Values.interactsh.ingress.className (not (semverCompare ">=1.20-0" .Capabilities.KubeVersion.GitVersion)) }}
{{- if not (hasKey .Values.interactsh.ingress.annotations "kubernetes.io/ingress.class") }}
{{- $_ := set .Values.interactsh.ingress.annotations "kubernetes.io/ingress.class" .Values.interactsh.ingress.className}}
@ -49,11 +50,11 @@ spec:
backend:
{{- if semverCompare ">=1.20-0" $.Capabilities.KubeVersion.GitVersion }}
service:
name: {{ $fullName }}
name: {{ $svcName }}
port:
number: {{ $svcPort }}
{{- else }}
serviceName: {{ $fullName }}
serviceName: {{ $svcName }}
servicePort: {{ $svcPort }}
{{- end }}
{{- end }}

View File

@ -0,0 +1,38 @@
id: fuzz-body
info:
name: fuzzing error sqli payloads in http req body
author: pdteam
severity: info
description: |
This template attempts to find SQL injection vulnerabilities by fuzzing the HTTP request body.
It automatically handles and parses JSON, XML, multipart form and x-www-form-urlencoded data,
and performs fuzzing on the value of every key.
http:
- pre-condition:
- type: dsl
dsl:
- method != "GET"
- method != "HEAD"
condition: and
payloads:
injection:
- "'"
- "\""
- ";"
fuzzing:
- part: body
type: postfix
mode: single
fuzz:
- '{{injection}}'
stop-at-first-match: true
matchers:
- type: word
words:
- "unrecognized token:"
- "null"

View File

@ -0,0 +1,38 @@
id: vnc-password-test
info:
name: VNC Password Authentication Test
author: pdteam
severity: high
description: |
Tests VNC authentication with correct and incorrect passwords.
metadata:
shodan-query: product:"vnc"
tags: js,network,vnc,authentication
javascript:
- pre-condition: |
isPortOpen(Host,Port)
code: |
let vnc = require('nuclei/vnc');
let client = new vnc.VNCClient();
client.Connect(Host, Port, Password);
args:
Host: "{{Host}}"
Port: "5900"
Password: "{{passwords}}"
payloads:
passwords:
- ""
- root
- password
- admin
- mysecret
stop-at-first-match: true
matchers:
- type: dsl
dsl:
- "success == true"

View File

@ -19,7 +19,7 @@ import (
"github.com/projectdiscovery/retryablehttp-go"
pdcpauth "github.com/projectdiscovery/utils/auth/pdcp"
"github.com/projectdiscovery/utils/env"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
unitutils "github.com/projectdiscovery/utils/unit"
updateutils "github.com/projectdiscovery/utils/update"
urlutil "github.com/projectdiscovery/utils/url"
@ -55,10 +55,11 @@ type UploadWriter struct {
scanName string
counter atomic.Int32
TeamID string
Logger *gologger.Logger
}
// NewUploadWriter creates a new upload writer
func NewUploadWriter(ctx context.Context, creds *pdcpauth.PDCPCredentials) (*UploadWriter, error) {
func NewUploadWriter(ctx context.Context, logger *gologger.Logger, creds *pdcpauth.PDCPCredentials) (*UploadWriter, error) {
if creds == nil {
return nil, fmt.Errorf("no credentials provided")
}
@ -66,6 +67,7 @@ func NewUploadWriter(ctx context.Context, creds *pdcpauth.PDCPCredentials) (*Upl
creds: creds,
done: make(chan struct{}, 1),
TeamID: NoneTeamID,
Logger: logger,
}
var err error
reader, writer := io.Pipe()
@ -75,11 +77,11 @@ func NewUploadWriter(ctx context.Context, creds *pdcpauth.PDCPCredentials) (*Upl
output.WithJson(true, true),
)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not create output writer")
return nil, errkit.Wrap(err, "could not create output writer")
}
tmp, err := urlutil.Parse(creds.Server)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not parse server url")
return nil, errkit.Wrap(err, "could not parse server url")
}
tmp.Path = uploadEndpoint
tmp.Update()
@ -127,7 +129,9 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// continuously read from the reader and send to channel
go func() {
defer r.Close()
defer func() {
_ = r.Close()
}()
defer close(ch)
for {
data, err := reader.ReadString('\n')
@ -145,9 +149,9 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
close(u.done)
// if no scanid is generated no results were uploaded
if u.scanID == "" {
gologger.Verbose().Msgf("Scan results upload to cloud skipped, no results found to upload")
u.Logger.Verbose().Msgf("Scan results upload to cloud skipped, no results found to upload")
} else {
gologger.Info().Msgf("%v Scan results uploaded to cloud, you can view scan results at %v", u.counter.Load(), getScanDashBoardURL(u.scanID, u.TeamID))
u.Logger.Info().Msgf("%v Scan results uploaded to cloud, you can view scan results at %v", u.counter.Load(), getScanDashBoardURL(u.scanID, u.TeamID))
}
}()
// temporary buffer to store the results
@ -160,7 +164,7 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// flush before exit
if buff.Len() > 0 {
if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
}
}
return
@ -168,14 +172,14 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// flush the buffer
if buff.Len() > 0 {
if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
}
}
case line, ok := <-ch:
if !ok {
if buff.Len() > 0 {
if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
}
}
return
@ -183,7 +187,7 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
if buff.Len()+len(line) > MaxChunkSize {
// flush existing buffer
if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
}
} else {
buff.WriteString(line)
@ -195,35 +199,37 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// uploadChunk uploads a chunk of data to the server
func (u *UploadWriter) uploadChunk(buff *bytes.Buffer) error {
if err := u.upload(buff.Bytes()); err != nil {
return errorutil.NewWithErr(err).Msgf("could not upload chunk")
return errkit.Wrap(err, "could not upload chunk")
}
// if successful, reset the buffer
buff.Reset()
// log in verbose mode
gologger.Warning().Msgf("Uploaded results chunk, you can view scan results at %v", getScanDashBoardURL(u.scanID, u.TeamID))
u.Logger.Warning().Msgf("Uploaded results chunk, you can view scan results at %v", getScanDashBoardURL(u.scanID, u.TeamID))
return nil
}
func (u *UploadWriter) upload(data []byte) error {
req, err := u.getRequest(data)
if err != nil {
return errorutil.NewWithErr(err).Msgf("could not create upload request")
return errkit.Wrap(err, "could not create upload request")
}
resp, err := u.client.Do(req)
if err != nil {
return errorutil.NewWithErr(err).Msgf("could not upload results")
return errkit.Wrap(err, "could not upload results")
}
defer resp.Body.Close()
defer func() {
_ = resp.Body.Close()
}()
bin, err := io.ReadAll(resp.Body)
if err != nil {
return errorutil.NewWithErr(err).Msgf("could not get id from response")
return errkit.Wrap(err, "could not get id from response")
}
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("could not upload results got status code %v on %v", resp.StatusCode, resp.Request.URL.String())
}
var uploadResp uploadResponse
if err := json.Unmarshal(bin, &uploadResp); err != nil {
return errorutil.NewWithErr(err).Msgf("could not unmarshal response got %v", string(bin))
return errkit.Wrap(err, fmt.Sprintf("could not unmarshal response got %v", string(bin)))
}
if uploadResp.ID != "" && u.scanID == "" {
u.scanID = uploadResp.ID
@ -248,15 +254,15 @@ func (u *UploadWriter) getRequest(bin []byte) (*retryablehttp.Request, error) {
}
req, err := retryablehttp.NewRequest(method, url, bytes.NewReader(bin))
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not create cloud upload request")
return nil, errkit.Wrap(err, "could not create cloud upload request")
}
// add pdtm meta params
req.URL.Params.Merge(updateutils.GetpdtmParams(config.Version))
req.Params.Merge(updateutils.GetpdtmParams(config.Version))
// if it is upload endpoint also include name if it exists
if u.scanName != "" && req.URL.Path == uploadEndpoint {
req.URL.Params.Add("name", u.scanName)
if u.scanName != "" && req.Path == uploadEndpoint {
req.Params.Add("name", u.scanName)
}
req.URL.Update()
req.Update()
req.Header.Set(pdcpauth.ApiKeyHeaderName, u.creds.APIKey)
if u.TeamID != NoneTeamID && u.TeamID != "" {

View File

@ -47,7 +47,7 @@ func DoHealthCheck(options *types.Options) string {
}
c4, err := net.Dial("tcp4", "scanme.sh:80")
if err == nil && c4 != nil {
c4.Close()
_ = c4.Close()
}
testResult = "Ok"
if err != nil {
@ -56,7 +56,7 @@ func DoHealthCheck(options *types.Options) string {
test.WriteString(fmt.Sprintf("IPv4 connectivity to scanme.sh:80 => %s\n", testResult))
c6, err := net.Dial("tcp6", "scanme.sh:80")
if err == nil && c6 != nil {
c6.Close()
_ = c6.Close()
}
testResult = "Ok"
if err != nil {
@ -65,7 +65,7 @@ func DoHealthCheck(options *types.Options) string {
test.WriteString(fmt.Sprintf("IPv6 connectivity to scanme.sh:80 => %s\n", testResult))
u4, err := net.Dial("udp4", "scanme.sh:53")
if err == nil && u4 != nil {
u4.Close()
_ = u4.Close()
}
testResult = "Ok"
if err != nil {

View File

@ -2,11 +2,11 @@ package runner
import (
"context"
"fmt"
"sync/atomic"
"time"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/hmap/store/hybrid"
"github.com/projectdiscovery/httpx/common/httpx"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
@ -28,7 +28,7 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
// currently http probing for input mode types is not supported
return hm, nil
}
gologger.Info().Msgf("Running httpx on input host")
r.Logger.Info().Msgf("Running httpx on input host")
httpxOptions := httpx.DefaultOptions
if r.options.AliveHttpProxy != "" {
@ -38,7 +38,13 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
}
httpxOptions.RetryMax = r.options.Retries
httpxOptions.Timeout = time.Duration(r.options.Timeout) * time.Second
httpxOptions.NetworkPolicy = protocolstate.NetworkPolicy
dialers := protocolstate.GetDialersWithId(r.options.ExecutionId)
if dialers == nil {
return nil, fmt.Errorf("dialers not initialized for %s", r.options.ExecutionId)
}
httpxOptions.NetworkPolicy = dialers.NetworkPolicy
httpxClient, err := httpx.New(&httpxOptions)
if err != nil {
return nil, errors.Wrap(err, "could not create httpx client")
@ -57,7 +63,7 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
if r.options.ProbeConcurrency > 0 && swg.Size != r.options.ProbeConcurrency {
if err := swg.Resize(context.Background(), r.options.ProbeConcurrency); err != nil {
gologger.Error().Msgf("Could not resize workpool: %s\n", err)
r.Logger.Error().Msgf("Could not resize workpool: %s\n", err)
}
}
@ -74,6 +80,6 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
})
swg.Wait()
gologger.Info().Msgf("Found %d URL from httpx", count.Load())
r.Logger.Info().Msgf("Found %d URL from httpx", count.Load())
return hm, nil
}

View File

@ -17,22 +17,22 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/scan"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/utils/env"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
)
type AuthLazyFetchOptions struct {
TemplateStore *loader.Store
ExecOpts protocols.ExecutorOptions
ExecOpts *protocols.ExecutorOptions
OnError func(error)
}
// GetAuthTmplStore create new loader for loading auth templates
func GetAuthTmplStore(opts types.Options, catalog catalog.Catalog, execOpts protocols.ExecutorOptions) (*loader.Store, error) {
func GetAuthTmplStore(opts *types.Options, catalog catalog.Catalog, execOpts *protocols.ExecutorOptions) (*loader.Store, error) {
tmpls := []string{}
for _, file := range opts.SecretsFile {
data, err := authx.GetTemplatePathsFromSecretFile(file)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("failed to get template paths from secrets file")
return nil, errkit.Wrap(err, "failed to get template paths from secrets file")
}
tmpls = append(tmpls, data...)
}
@ -54,11 +54,11 @@ func GetAuthTmplStore(opts types.Options, catalog catalog.Catalog, execOpts prot
opts.Protocols = nil
opts.ExcludeProtocols = nil
opts.IncludeConditions = nil
cfg := loader.NewConfig(&opts, catalog, execOpts)
cfg := loader.NewConfig(opts, catalog, execOpts)
cfg.StoreId = loader.AuthStoreId
store, err := loader.New(cfg)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("failed to initialize dynamic auth templates store")
return nil, errkit.Wrap(err, "failed to initialize dynamic auth templates store")
}
return store, nil
}

View File

@ -31,7 +31,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/utils/yaml"
fileutil "github.com/projectdiscovery/utils/file"
"github.com/projectdiscovery/utils/generic"
logutil "github.com/projectdiscovery/utils/log"
stringsutil "github.com/projectdiscovery/utils/strings"
)
@ -40,6 +39,8 @@ const (
DefaultDumpTrafficOutputFolder = "output"
)
var validateOptions = validator.New()
func ConfigureOptions() error {
// with FileStringSliceOptions, FileNormalizedStringSliceOptions, FileCommaSeparatedStringSliceOptions
// if file has the extension `.yaml` or `.json` we consider those as strings and not files to be read
@ -71,17 +72,17 @@ func ParseOptions(options *types.Options) {
vardump.Limit = options.VarDumpLimit
}
if options.ShowActions {
gologger.Info().Msgf("Showing available headless actions: ")
options.Logger.Info().Msgf("Showing available headless actions: ")
for action := range engine.ActionStringToAction {
gologger.Print().Msgf("\t%s", action)
options.Logger.Print().Msgf("\t%s", action)
}
os.Exit(0)
}
defaultProfilesPath := filepath.Join(config.DefaultConfig.GetTemplateDir(), "profiles")
if options.ListTemplateProfiles {
gologger.Print().Msgf(
"\nListing available %v nuclei template profiles for %v",
options.Logger.Print().Msgf(
"Listing available %v nuclei template profiles for %v",
config.DefaultConfig.TemplateVersion,
config.DefaultConfig.TemplatesDirectory,
)
@ -93,23 +94,23 @@ func ParseOptions(options *types.Options) {
return nil
}
if profileRelPath, err := filepath.Rel(templatesRootDir, iterItem); err == nil {
gologger.Print().Msgf("%s (%s)\n", profileRelPath, strings.TrimSuffix(filepath.Base(iterItem), ext))
options.Logger.Print().Msgf("%s (%s)\n", profileRelPath, strings.TrimSuffix(filepath.Base(iterItem), ext))
}
return nil
})
if err != nil {
gologger.Error().Msgf("%s\n", err)
options.Logger.Error().Msgf("%s\n", err)
}
os.Exit(0)
}
if options.StoreResponseDir != DefaultDumpTrafficOutputFolder && !options.StoreResponse {
gologger.Debug().Msgf("Store response directory specified, enabling \"store-resp\" flag automatically\n")
options.Logger.Debug().Msgf("Store response directory specified, enabling \"store-resp\" flag automatically\n")
options.StoreResponse = true
}
// Validate the options passed by the user and if any
// invalid options have been used, exit.
if err := ValidateOptions(options); err != nil {
gologger.Fatal().Msgf("Program exiting: %s\n", err)
options.Logger.Fatal().Msgf("Program exiting: %s\n", err)
}
// Load the resolvers if user asked for them
@ -117,12 +118,12 @@ func ParseOptions(options *types.Options) {
err := protocolinit.Init(options)
if err != nil {
gologger.Fatal().Msgf("Could not initialize protocols: %s\n", err)
options.Logger.Fatal().Msgf("Could not initialize protocols: %s\n", err)
}
// Set GitHub token in env variable. runner.getGHClientWithToken() reads token from env
if options.GitHubToken != "" && os.Getenv("GITHUB_TOKEN") != options.GitHubToken {
os.Setenv("GITHUB_TOKEN", options.GitHubToken)
_ = os.Setenv("GITHUB_TOKEN", options.GitHubToken)
}
if options.UncoverQuery != nil {
@ -139,8 +140,7 @@ func ParseOptions(options *types.Options) {
// validateOptions validates the configuration options passed
func ValidateOptions(options *types.Options) error {
validate := validator.New()
if err := validate.Struct(options); err != nil {
if err := validateOptions.Struct(options); err != nil {
if _, ok := err.(*validator.InvalidValidationError); ok {
return err
}
@ -169,7 +169,7 @@ func ValidateOptions(options *types.Options) error {
return err
}
if options.Validate {
validateTemplatePaths(config.DefaultConfig.TemplatesDirectory, options.Templates, options.Workflows)
validateTemplatePaths(options.Logger, config.DefaultConfig.TemplatesDirectory, options.Templates, options.Workflows)
}
if options.DAST {
if err := validateDASTOptions(options); err != nil {
@ -182,7 +182,7 @@ func ValidateOptions(options *types.Options) error {
if generic.EqualsAny("", options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile) {
return errors.New("if a client certification option is provided, then all three must be provided")
}
validateCertificatePaths(options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile)
validateCertificatePaths(options.Logger, options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile)
}
// Verify AWS secrets are passed if a S3 template bucket is passed
if options.AwsBucketName != "" && options.UpdateTemplates && !options.AwsTemplateDisableDownload {
@ -304,7 +304,9 @@ func createReportingOptions(options *types.Options) (*reporting.Options, error)
if err != nil {
return nil, errors.Wrap(err, "could not open reporting config file")
}
defer file.Close()
defer func() {
_ = file.Close()
}()
if err := yaml.DecodeAndValidate(file, reportingOptions); err != nil {
return nil, errors.Wrap(err, "could not parse reporting config file")
@ -342,32 +344,33 @@ func createReportingOptions(options *types.Options) (*reporting.Options, error)
}
reportingOptions.OmitRaw = options.OmitRawRequests
reportingOptions.ExecutionId = options.ExecutionId
return reportingOptions, nil
}
// configureOutput configures the output logging levels to be displayed on the screen
func configureOutput(options *types.Options) {
if options.NoColor {
gologger.DefaultLogger.SetFormatter(formatter.NewCLI(true))
options.Logger.SetFormatter(formatter.NewCLI(true))
}
// If the user desires verbose output, show verbose output
if options.Debug || options.DebugRequests || options.DebugResponse {
gologger.DefaultLogger.SetMaxLevel(levels.LevelDebug)
options.Logger.SetMaxLevel(levels.LevelDebug)
}
// Debug takes precedence before verbose
// because debug is a lower logging level.
if options.Verbose || options.Validate {
gologger.DefaultLogger.SetMaxLevel(levels.LevelVerbose)
options.Logger.SetMaxLevel(levels.LevelVerbose)
}
if options.NoColor {
gologger.DefaultLogger.SetFormatter(formatter.NewCLI(true))
options.Logger.SetFormatter(formatter.NewCLI(true))
}
if options.Silent {
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
options.Logger.SetMaxLevel(levels.LevelSilent)
}
// disable standard logger (ref: https://github.com/golang/go/issues/19895)
logutil.DisableDefaultLogger()
// logutil.DisableDefaultLogger()
}
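
configureOutput above now operates on an injected options.Logger instead of the package-global gologger.DefaultLogger, a pattern this merge applies across the codebase (r.Logger, u.Logger, options.Logger). A minimal sketch of that wiring follows; the trimmed options struct and the fallback to gologger.DefaultLogger are assumptions for illustration, while the SetFormatter and SetMaxLevel calls mirror the diff above.

// Sketch only: per-instance logger wiring as used in the hunks above.
package main

import (
	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/gologger/formatter"
	"github.com/projectdiscovery/gologger/levels"
)

type options struct {
	NoColor bool
	Verbose bool
	Logger  *gologger.Logger
}

func configure(opts *options) {
	if opts.Logger == nil {
		opts.Logger = gologger.DefaultLogger // assumed fallback, not shown in the diff
	}
	if opts.NoColor {
		opts.Logger.SetFormatter(formatter.NewCLI(true))
	}
	if opts.Verbose {
		opts.Logger.SetMaxLevel(levels.LevelVerbose)
	}
	opts.Logger.Info().Msgf("logger configured")
}

func main() {
	configure(&options{Verbose: true})
}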
// loadResolvers loads resolvers from both user-provided flags and file
@ -378,9 +381,11 @@ func loadResolvers(options *types.Options) {
file, err := os.Open(options.ResolversFile)
if err != nil {
gologger.Fatal().Msgf("Could not open resolvers file: %s\n", err)
options.Logger.Fatal().Msgf("Could not open resolvers file: %s\n", err)
}
defer file.Close()
defer func() {
_ = file.Close()
}()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
@ -396,7 +401,7 @@ func loadResolvers(options *types.Options) {
}
}
func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPaths []string) {
func validateTemplatePaths(logger *gologger.Logger, templatesDirectory string, templatePaths, workflowPaths []string) {
allGivenTemplatePaths := append(templatePaths, workflowPaths...)
for _, templatePath := range allGivenTemplatePaths {
if templatesDirectory != templatePath && filepath.IsAbs(templatePath) {
@ -404,7 +409,7 @@ func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPat
if err == nil && fileInfo.IsDir() {
relativizedPath, err2 := filepath.Rel(templatesDirectory, templatePath)
if err2 != nil || (len(relativizedPath) >= 2 && relativizedPath[:2] == "..") {
gologger.Warning().Msgf("The given path (%s) is outside the default template directory path (%s)! "+
logger.Warning().Msgf("The given path (%s) is outside the default template directory path (%s)! "+
"Referenced sub-templates with relative paths in workflows will be resolved against the default template directory.", templatePath, templatesDirectory)
break
}
@ -413,12 +418,12 @@ func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPat
}
}
func validateCertificatePaths(certificatePaths ...string) {
func validateCertificatePaths(logger *gologger.Logger, certificatePaths ...string) {
for _, certificatePath := range certificatePaths {
if !fileutil.FileExists(certificatePath) {
// The provided path to the PEM certificate does not exist for the client authentication. As this is
// required for successful authentication, log and return an error
gologger.Fatal().Msgf("The given path (%s) to the certificate does not exist!", certificatePath)
logger.Fatal().Msgf("The given path (%s) to the certificate does not exist!", certificatePath)
break
}
}
@ -445,7 +450,7 @@ func readEnvInputVars(options *types.Options) {
// Attempt to convert the repo ID to an integer
repoIDInt, err := strconv.Atoi(repoID)
if err != nil {
gologger.Warning().Msgf("Invalid GitLab template repository ID: %s", repoID)
options.Logger.Warning().Msgf("Invalid GitLab template repository ID: %s", repoID)
continue
}

View File

@ -7,9 +7,8 @@ import (
"os"
"strings"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file"
proxyutils "github.com/projectdiscovery/utils/proxy"
)
@ -30,7 +29,9 @@ func loadProxyServers(options *types.Options) error {
if err != nil {
return fmt.Errorf("could not open proxy file: %w", err)
}
defer file.Close()
defer func() {
_ = file.Close()
}()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
proxy := scanner.Text()
@ -49,17 +50,18 @@ func loadProxyServers(options *types.Options) error {
}
proxyURL, err := url.Parse(aliveProxy)
if err != nil {
return errorutil.WrapfWithNil(err, "failed to parse proxy got %v", err)
return errkit.Wrapf(err, "failed to parse proxy got %v", err)
}
if options.ProxyInternal {
os.Setenv(HTTP_PROXY_ENV, proxyURL.String())
_ = os.Setenv(HTTP_PROXY_ENV, proxyURL.String())
}
if proxyURL.Scheme == proxyutils.HTTP || proxyURL.Scheme == proxyutils.HTTPS {
gologger.Verbose().Msgf("Using %s as proxy server", proxyURL.String())
switch proxyURL.Scheme {
case proxyutils.HTTP, proxyutils.HTTPS:
options.Logger.Verbose().Msgf("Using %s as proxy server", proxyURL.String())
options.AliveHttpProxy = proxyURL.String()
} else if proxyURL.Scheme == proxyutils.SOCKS5 {
case proxyutils.SOCKS5:
options.AliveSocksProxy = proxyURL.String()
gologger.Verbose().Msgf("Using %s as socket proxy server", proxyURL.String())
options.Logger.Verbose().Msgf("Using %s as socket proxy server", proxyURL.String())
}
return nil
}

View File

@ -10,6 +10,7 @@ import (
"sync/atomic"
"time"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/pdcp"
"github.com/projectdiscovery/nuclei/v3/internal/server"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
@ -32,7 +33,6 @@ import (
"github.com/pkg/errors"
"github.com/projectdiscovery/ratelimit"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/colorizer"
"github.com/projectdiscovery/nuclei/v3/internal/httpapi"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
@ -95,6 +95,7 @@ type Runner struct {
inputProvider provider.InputProvider
fuzzFrequencyCache *frequency.Tracker
httpStats *outputstats.Tracker
Logger *gologger.Logger
//general purpose temporary directory
tmpDir string
@ -108,10 +109,11 @@ type Runner struct {
func New(options *types.Options) (*Runner, error) {
runner := &Runner{
options: options,
Logger: options.Logger,
}
if options.HealthCheck {
gologger.Print().Msgf("%s\n", DoHealthCheck(options))
runner.Logger.Print().Msgf("%s\n", DoHealthCheck(options))
os.Exit(0)
}
@ -119,14 +121,22 @@ func New(options *types.Options) (*Runner, error) {
if config.DefaultConfig.CanCheckForUpdates() {
if err := installer.NucleiVersionCheck(); err != nil {
if options.Verbose || options.Debug {
gologger.Error().Msgf("nuclei version check failed got: %s\n", err)
runner.Logger.Error().Msgf("nuclei version check failed got: %s\n", err)
}
}
// if template list or template display is enabled, enable all templates
if options.TemplateList || options.TemplateDisplay {
options.EnableCodeTemplates = true
options.EnableFileTemplates = true
options.EnableSelfContainedTemplates = true
options.EnableGlobalMatchersTemplates = true
}
// check for custom template updates and update if available
ctm, err := customtemplates.NewCustomTemplatesManager(options)
if err != nil {
gologger.Error().Label("custom-templates").Msgf("Failed to create custom templates manager: %s\n", err)
runner.Logger.Error().Label("custom-templates").Msgf("Failed to create custom templates manager: %s\n", err)
}
// Check for template updates and update if available.
@ -136,15 +146,15 @@ func New(options *types.Options) (*Runner, error) {
DisablePublicTemplates: options.PublicTemplateDisableDownload,
}
if err := tm.FreshInstallIfNotExists(); err != nil {
gologger.Warning().Msgf("failed to install nuclei templates: %s\n", err)
runner.Logger.Warning().Msgf("failed to install nuclei templates: %s\n", err)
}
if err := tm.UpdateIfOutdated(); err != nil {
gologger.Warning().Msgf("failed to update nuclei templates: %s\n", err)
runner.Logger.Warning().Msgf("failed to update nuclei templates: %s\n", err)
}
if config.DefaultConfig.NeedsIgnoreFileUpdate() {
if err := installer.UpdateIgnoreFile(); err != nil {
gologger.Warning().Msgf("failed to update nuclei ignore file: %s\n", err)
runner.Logger.Warning().Msgf("failed to update nuclei ignore file: %s\n", err)
}
}
@ -152,7 +162,7 @@ func New(options *types.Options) (*Runner, error) {
// we automatically check for updates unless explicitly disabled
// this print statement is only to inform the user that there are no updates
if !config.DefaultConfig.NeedsTemplateUpdate() {
gologger.Info().Msgf("No new updates found for nuclei templates")
runner.Logger.Info().Msgf("No new updates found for nuclei templates")
}
// manually trigger update of custom templates
if ctm != nil {
@ -161,20 +171,25 @@ func New(options *types.Options) (*Runner, error) {
}
}
if op, ok := options.Parser.(*templates.Parser); ok {
// Enable passing in an existing parser instance
// This uses a type assertion to avoid an import loop
runner.parser = op
} else {
parser := templates.NewParser()
if options.Validate {
parser.ShouldValidate = true
}
// TODO: refactor to pass options reference globally without cycles
parser.NoStrictSyntax = options.NoStrictSyntax
runner.parser = parser
}
yaml.StrictSyntax = !options.NoStrictSyntax
if options.Headless {
if engine.MustDisableSandbox() {
gologger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox\n")
runner.Logger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox\n")
}
browser, err := engine.New(options)
if err != nil {
@ -226,11 +241,11 @@ func New(options *types.Options) (*Runner, error) {
if options.HttpApiEndpoint != "" {
apiServer := httpapi.New(options.HttpApiEndpoint, options)
gologger.Info().Msgf("Listening api endpoint on: %s", options.HttpApiEndpoint)
runner.Logger.Info().Msgf("Listening api endpoint on: %s", options.HttpApiEndpoint)
runner.httpApiEndpoint = apiServer
go func() {
if err := apiServer.Start(); err != nil {
gologger.Error().Msgf("Failed to start API server: %s", err)
runner.Logger.Error().Msgf("Failed to start API server: %s", err)
}
}()
}
@ -284,7 +299,7 @@ func New(options *types.Options) (*Runner, error) {
// create the resume configuration structure
resumeCfg := types.NewResumeCfg()
if runner.options.ShouldLoadResume() {
gologger.Info().Msg("Resuming from save checkpoint")
runner.Logger.Info().Msg("Resuming from save checkpoint")
file, err := os.ReadFile(runner.options.Resume)
if err != nil {
return nil, err
@ -326,6 +341,7 @@ func New(options *types.Options) (*Runner, error) {
}
opts := interactsh.DefaultOptions(runner.output, runner.issuesClient, runner.progress)
opts.Logger = runner.Logger
opts.Debug = runner.options.Debug
opts.NoColor = runner.options.NoColor
if options.InteractshURL != "" {
@ -355,24 +371,20 @@ func New(options *types.Options) (*Runner, error) {
}
interactshClient, err := interactsh.New(opts)
if err != nil {
gologger.Error().Msgf("Could not create interactsh client: %s", err)
runner.Logger.Error().Msgf("Could not create interactsh client: %s", err)
} else {
runner.interactsh = interactshClient
}
if options.RateLimitMinute > 0 {
gologger.Print().Msgf("[%v] %v", aurora.BrightYellow("WRN"), "rate limit per minute is deprecated - use rate-limit-duration")
runner.Logger.Print().Msgf("[%v] %v", aurora.BrightYellow("WRN"), "rate limit per minute is deprecated - use rate-limit-duration")
options.RateLimit = options.RateLimitMinute
options.RateLimitDuration = time.Minute
}
if options.RateLimit > 0 && options.RateLimitDuration == 0 {
options.RateLimitDuration = time.Second
}
if options.RateLimit == 0 && options.RateLimitDuration == 0 {
runner.rateLimiter = ratelimit.NewUnlimited(context.Background())
} else {
runner.rateLimiter = ratelimit.New(context.Background(), uint(options.RateLimit), options.RateLimitDuration)
}
runner.rateLimiter = utils.GetRateLimiter(context.Background(), options.RateLimit, options.RateLimitDuration)
if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err == nil {
runner.tmpDir = tmpDir
@ -382,7 +394,7 @@ func New(options *types.Options) (*Runner, error) {
}
// runStandardEnumeration runs standard enumeration
func (r *Runner) runStandardEnumeration(executerOpts protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
func (r *Runner) runStandardEnumeration(executerOpts *protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
if r.options.AutomaticScan {
return r.executeSmartWorkflowInput(executerOpts, store, engine)
}
@ -413,7 +425,7 @@ func (r *Runner) Close() {
if r.inputProvider != nil {
r.inputProvider.Close()
}
protocolinit.Close()
protocolinit.Close(r.options.ExecutionId)
if r.pprofServer != nil {
r.pprofServer.Stop()
}
@ -439,23 +451,22 @@ func (r *Runner) setupPDCPUpload(writer output.Writer) output.Writer {
if r.options.ScanID != "" {
r.options.EnableCloudUpload = true
}
if !(r.options.EnableCloudUpload || EnableCloudUpload) {
r.pdcpUploadErrMsg = fmt.Sprintf("[%v] Scan results upload to cloud is disabled.", r.colorizer.BrightYellow("WRN"))
if !r.options.EnableCloudUpload && !EnableCloudUpload {
r.pdcpUploadErrMsg = "Scan results upload to cloud is disabled."
return writer
}
color := aurora.NewAurora(!r.options.NoColor)
h := &pdcpauth.PDCPCredHandler{}
creds, err := h.GetCreds()
if err != nil {
if err != pdcpauth.ErrNoCreds && !HideAutoSaveMsg {
gologger.Verbose().Msgf("Could not get credentials for cloud upload: %s\n", err)
r.Logger.Verbose().Msgf("Could not get credentials for cloud upload: %s\n", err)
}
r.pdcpUploadErrMsg = fmt.Sprintf("[%v] To view results on Cloud Dashboard, Configure API key from %v", color.BrightYellow("WRN"), pdcpauth.DashBoardURL)
r.pdcpUploadErrMsg = fmt.Sprintf("To view results on Cloud Dashboard, configure API key from %v", pdcpauth.DashBoardURL)
return writer
}
uploadWriter, err := pdcp.NewUploadWriter(context.Background(), creds)
uploadWriter, err := pdcp.NewUploadWriter(context.Background(), r.Logger, creds)
if err != nil {
r.pdcpUploadErrMsg = fmt.Sprintf("[%v] PDCP (%v) Auto-Save Failed: %s\n", color.BrightYellow("WRN"), pdcpauth.DashBoardURL, err)
r.pdcpUploadErrMsg = fmt.Sprintf("PDCP (%v) Auto-Save Failed: %s\n", pdcpauth.DashBoardURL, err)
return writer
}
if r.options.ScanID != "" {
@ -491,6 +502,7 @@ func (r *Runner) RunEnumeration() error {
Parser: r.parser,
TemporaryDirectory: r.tmpDir,
FuzzStatsDB: r.fuzzStats,
Logger: r.Logger,
}
dastServer, err := server.New(&server.Options{
Address: r.options.DASTServerAddress,
@ -532,7 +544,7 @@ func (r *Runner) RunEnumeration() error {
// Create the executor options which will be used throughout the execution
// stage by the nuclei engine modules.
executorOpts := protocols.ExecutorOptions{
executorOpts := &protocols.ExecutorOptions{
Output: r.output,
Options: r.options,
Progress: r.progress,
@ -550,6 +562,8 @@ func (r *Runner) RunEnumeration() error {
Parser: r.parser,
FuzzParamsFrequency: fuzzFreqCache,
GlobalMatchers: globalmatchers.New(),
DoNotCache: r.options.DoNotCacheTemplates,
Logger: r.Logger,
}
if config.DefaultConfig.IsDebugArgEnabled(config.DebugExportURLPattern) {
@ -558,7 +572,7 @@ func (r *Runner) RunEnumeration() error {
}
if len(r.options.SecretsFile) > 0 && !r.options.Validate {
authTmplStore, err := GetAuthTmplStore(*r.options, r.catalog, executorOpts)
authTmplStore, err := GetAuthTmplStore(r.options, r.catalog, executorOpts)
if err != nil {
return errors.Wrap(err, "failed to load dynamic auth templates")
}
@ -578,8 +592,8 @@ func (r *Runner) RunEnumeration() error {
if r.options.ShouldUseHostError() {
maxHostError := r.options.MaxHostError
if r.options.TemplateThreads > maxHostError {
gologger.Print().Msgf("[%v] The concurrency value is higher than max-host-error", r.colorizer.BrightYellow("WRN"))
gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", r.options.TemplateThreads)
r.Logger.Print().Msgf("[%v] The concurrency value is higher than max-host-error", r.colorizer.BrightYellow("WRN"))
r.Logger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", r.options.TemplateThreads)
maxHostError = r.options.TemplateThreads
}
@ -594,7 +608,7 @@ func (r *Runner) RunEnumeration() error {
executorEngine := core.New(r.options)
executorEngine.SetExecuterOptions(executorOpts)
workflowLoader, err := parsers.NewLoader(&executorOpts)
workflowLoader, err := parsers.NewLoader(executorOpts)
if err != nil {
return errors.Wrap(err, "Could not create loader.")
}
@ -633,7 +647,7 @@ func (r *Runner) RunEnumeration() error {
return err
}
if stats.GetValue(templates.SyntaxErrorStats) == 0 && stats.GetValue(templates.SyntaxWarningStats) == 0 && stats.GetValue(templates.RuntimeWarningsStats) == 0 {
gologger.Info().Msgf("All templates validated successfully\n")
r.Logger.Info().Msgf("All templates validated successfully")
} else {
return errors.New("encountered errors while performing template validation")
}
@ -655,7 +669,7 @@ func (r *Runner) RunEnumeration() error {
}
ret := uncover.GetUncoverTargetsFromMetadata(context.TODO(), store.Templates(), r.options.UncoverField, uncoverOpts)
for host := range ret {
_ = r.inputProvider.SetWithExclusions(host)
_ = r.inputProvider.SetWithExclusions(r.options.ExecutionId, host)
}
}
// display execution info like version, templates used, etc.
@ -663,7 +677,7 @@ func (r *Runner) RunEnumeration() error {
// prefetch secrets if enabled
if executorOpts.AuthProvider != nil && r.options.PreFetchSecrets {
gologger.Info().Msgf("Pre-fetching secrets from authprovider[s]")
r.Logger.Info().Msgf("Pre-fetching secrets from authprovider[s]")
if err := executorOpts.AuthProvider.PreFetchSecrets(); err != nil {
return errors.Wrap(err, "could not pre-fetch secrets")
}
@ -697,11 +711,12 @@ func (r *Runner) RunEnumeration() error {
if r.dastServer != nil {
go func() {
if err := r.dastServer.Start(); err != nil {
gologger.Error().Msgf("could not start dast server: %v", err)
r.Logger.Error().Msgf("could not start dast server: %v", err)
}
}()
}
now := time.Now()
enumeration := false
var results *atomic.Bool
results, err = r.runStandardEnumeration(executorOpts, store, executorEngine)
@ -725,11 +740,17 @@ func (r *Runner) RunEnumeration() error {
}
r.fuzzFrequencyCache.Close()
r.progress.Stop()
timeTaken := time.Since(now)
// todo: error propagation without canonical straight error check is required by cloud?
// use safe dereferencing to avoid potential panics in case of previous unchecked errors
if v := ptrutil.Safe(results); !v.Load() {
gologger.Info().Msgf("No results found. Better luck next time!")
r.Logger.Info().Msgf("Scan completed in %s. No results found.", shortDur(timeTaken))
} else {
matchCount := r.output.ResultCount()
r.Logger.Info().Msgf("Scan completed in %s. %d matches found.", shortDur(timeTaken), matchCount)
}
// check if a passive scan was requested but no target was provided
if r.options.OfflineHTTP && len(r.options.Targets) == 0 && r.options.TargetsFilePath == "" {
return errors.Wrap(err, "missing required input (http response) to run passive templates")
@ -738,6 +759,24 @@ func (r *Runner) RunEnumeration() error {
return err
}
func shortDur(d time.Duration) string {
if d < time.Minute {
return d.String()
}
// Truncate to the nearest minute
d = d.Truncate(time.Minute)
s := d.String()
if strings.HasSuffix(s, "m0s") {
s = s[:len(s)-2]
}
if strings.HasSuffix(s, "h0m") {
s = s[:len(s)-2]
}
return s
}
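For reference, a few inputs and the strings shortDur produces, worked out from the truncation and suffix trimming above (illustrative comment only, not part of this change):

// shortDur(45 * time.Second)  -> "45s"    sub-minute durations are returned unchanged
// shortDur(90 * time.Minute)  -> "1h30m"  truncated to the minute, trailing "0s" removed
// shortDur(2 * time.Hour)     -> "2h"     both the "0s" and "0m" suffixes are removed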
func (r *Runner) isInputNonHTTP() bool {
var nonURLInput bool
r.inputProvider.Iterate(func(value *contextargs.MetaInput) bool {
@ -750,7 +789,7 @@ func (r *Runner) isInputNonHTTP() bool {
return nonURLInput
}
func (r *Runner) executeSmartWorkflowInput(executorOpts protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
func (r *Runner) executeSmartWorkflowInput(executorOpts *protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
r.progress.Init(r.inputProvider.Count(), 0, 0)
service, err := automaticscan.New(automaticscan.Options{
@ -818,7 +857,7 @@ func (r *Runner) displayExecutionInfo(store *loader.Store) {
if tmplCount == 0 && workflowCount == 0 {
// if dast flag is used print explicit warning
if r.options.DAST {
gologger.DefaultLogger.Print().Msgf("[%v] No DAST templates found", aurora.BrightYellow("WRN"))
r.Logger.Print().Msgf("[%v] No DAST templates found", aurora.BrightYellow("WRN"))
}
stats.ForceDisplayWarning(templates.SkippedCodeTmplTamperedStats)
} else {
@ -838,38 +877,38 @@ func (r *Runner) displayExecutionInfo(store *loader.Store) {
return fmt.Sprintf("Current %s version: %v %v", versionType, version, updateutils.GetVersionDescription(version, latestVersion))
}
gologger.Info().Msgf(versionInfo(config.Version, cfg.LatestNucleiVersion, "nuclei"))
gologger.Info().Msgf(versionInfo(cfg.TemplateVersion, cfg.LatestNucleiTemplatesVersion, "nuclei-templates"))
gologger.Info().Msg(versionInfo(config.Version, cfg.LatestNucleiVersion, "nuclei"))
gologger.Info().Msg(versionInfo(cfg.TemplateVersion, cfg.LatestNucleiTemplatesVersion, "nuclei-templates"))
if !HideAutoSaveMsg {
if r.pdcpUploadErrMsg != "" {
gologger.Print().Msgf("%s", r.pdcpUploadErrMsg)
r.Logger.Warning().Msgf("%s", r.pdcpUploadErrMsg)
} else {
gologger.Info().Msgf("To view results on cloud dashboard, visit %v/scans upon scan completion.", pdcpauth.DashBoardURL)
r.Logger.Info().Msgf("To view results on cloud dashboard, visit %v/scans upon scan completion.", pdcpauth.DashBoardURL)
}
}
if tmplCount > 0 || workflowCount > 0 {
if len(store.Templates()) > 0 {
gologger.Info().Msgf("New templates added in latest release: %d", len(config.DefaultConfig.GetNewAdditions()))
gologger.Info().Msgf("Templates loaded for current scan: %d", len(store.Templates()))
r.Logger.Info().Msgf("New templates added in latest release: %d", len(config.DefaultConfig.GetNewAdditions()))
r.Logger.Info().Msgf("Templates loaded for current scan: %d", len(store.Templates()))
}
if len(store.Workflows()) > 0 {
gologger.Info().Msgf("Workflows loaded for current scan: %d", len(store.Workflows()))
r.Logger.Info().Msgf("Workflows loaded for current scan: %d", len(store.Workflows()))
}
for k, v := range templates.SignatureStats {
value := v.Load()
if value > 0 {
if k == templates.Unsigned && !r.options.Silent && !config.DefaultConfig.HideTemplateSigWarning {
gologger.Print().Msgf("[%v] Loading %d unsigned templates for scan. Use with caution.", r.colorizer.BrightYellow("WRN"), value)
r.Logger.Print().Msgf("[%v] Loading %d unsigned templates for scan. Use with caution.", r.colorizer.BrightYellow("WRN"), value)
} else {
gologger.Info().Msgf("Executing %d signed templates from %s", value, k)
r.Logger.Info().Msgf("Executing %d signed templates from %s", value, k)
}
}
}
}
if r.inputProvider.Count() > 0 {
gologger.Info().Msgf("Targets loaded for current scan: %d", r.inputProvider.Count())
r.Logger.Info().Msgf("Targets loaded for current scan: %d", r.inputProvider.Count())
}
}
@ -896,7 +935,7 @@ func UploadResultsToCloud(options *types.Options) error {
return errors.Wrap(err, "could not get credentials for cloud upload")
}
ctx := context.TODO()
uploadWriter, err := pdcp.NewUploadWriter(ctx, creds)
uploadWriter, err := pdcp.NewUploadWriter(ctx, options.Logger, creds)
if err != nil {
return errors.Wrap(err, "could not create upload writer")
}
@ -915,19 +954,21 @@ func UploadResultsToCloud(options *types.Options) error {
if err != nil {
return errors.Wrap(err, "could not open scan upload file")
}
defer file.Close()
defer func() {
_ = file.Close()
}()
gologger.Info().Msgf("Uploading scan results to cloud dashboard from %s", options.ScanUploadFile)
options.Logger.Info().Msgf("Uploading scan results to cloud dashboard from %s", options.ScanUploadFile)
dec := json.NewDecoder(file)
for dec.More() {
var r output.ResultEvent
err := dec.Decode(&r)
if err != nil {
gologger.Warning().Msgf("Could not decode jsonl: %s\n", err)
options.Logger.Warning().Msgf("Could not decode jsonl: %s\n", err)
continue
}
if err = uploadWriter.Write(&r); err != nil {
gologger.Warning().Msgf("[%s] failed to upload: %s\n", r.TemplateID, err)
options.Logger.Warning().Msgf("[%s] failed to upload: %s\n", r.TemplateID, err)
}
}
uploadWriter.Close()

View File

@ -64,8 +64,8 @@ func TestWalkReflectStructAssignsEnvVars(t *testing.T) {
B: "$VAR_TWO",
},
}
os.Setenv("VAR_EXAMPLE", "value")
os.Setenv("VAR_TWO", "value2")
_ = os.Setenv("VAR_EXAMPLE", "value")
_ = os.Setenv("VAR_TWO", "value2")
Walk(testStruct, expandEndVars)
@ -79,9 +79,9 @@ func TestWalkReflectStructHandlesDifferentTypes(t *testing.T) {
B: "$VAR_TWO",
C: "$VAR_THREE",
}
os.Setenv("VAR_EXAMPLE", "value")
os.Setenv("VAR_TWO", "2")
os.Setenv("VAR_THREE", "true")
_ = os.Setenv("VAR_EXAMPLE", "value")
_ = os.Setenv("VAR_TWO", "2")
_ = os.Setenv("VAR_THREE", "true")
Walk(testStruct, expandEndVars)
@ -96,9 +96,9 @@ func TestWalkReflectStructEmpty(t *testing.T) {
B: "",
C: "$VAR_THREE",
}
os.Setenv("VAR_EXAMPLE", "value")
os.Setenv("VAR_TWO", "2")
os.Setenv("VAR_THREE", "true")
_ = os.Setenv("VAR_EXAMPLE", "value")
_ = os.Setenv("VAR_TWO", "2")
_ = os.Setenv("VAR_THREE", "true")
Walk(testStruct, expandEndVars)
@ -116,7 +116,7 @@ func TestWalkReflectStructWithNoYamlTag(t *testing.T) {
C: "$GITHUB_USER",
}
os.Setenv("GITHUB_USER", "testuser")
_ = os.Setenv("GITHUB_USER", "testuser")
Walk(test, expandEndVars)
require.Equal(t, "testuser", test.A)
@ -132,9 +132,9 @@ func TestWalkReflectStructHandlesNestedStructs(t *testing.T) {
C: "$VAR_THREE",
},
}
os.Setenv("VAR_EXAMPLE", "value")
os.Setenv("VAR_TWO", "2")
os.Setenv("VAR_THREE", "true")
_ = os.Setenv("VAR_EXAMPLE", "value")
_ = os.Setenv("VAR_TWO", "2")
_ = os.Setenv("VAR_THREE", "true")
Walk(testStruct, expandEndVars)

View File

@ -12,7 +12,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
)
@ -25,7 +24,7 @@ func (r *Runner) logAvailableTemplate(tplPath string) {
panic("not a template")
}
if err != nil {
gologger.Error().Msgf("Could not parse file '%s': %s\n", tplPath, err)
r.Logger.Error().Msgf("Could not parse file '%s': %s\n", tplPath, err)
} else {
r.verboseTemplate(tpl)
}
@ -33,14 +32,14 @@ func (r *Runner) logAvailableTemplate(tplPath string) {
// log available templates for verbose (-vv)
func (r *Runner) verboseTemplate(tpl *templates.Template) {
gologger.Print().Msgf("%s\n", templates.TemplateLogMessage(tpl.ID,
r.Logger.Print().Msgf("%s\n", templates.TemplateLogMessage(tpl.ID,
types.ToString(tpl.Info.Name),
tpl.Info.Authors.ToSlice(),
tpl.Info.SeverityHolder.Severity))
}
func (r *Runner) listAvailableStoreTemplates(store *loader.Store) {
gologger.Print().Msgf(
r.Logger.Print().Msgf(
"\nListing available %v nuclei templates for %v",
config.DefaultConfig.TemplateVersion,
config.DefaultConfig.TemplatesDirectory,
@ -52,20 +51,20 @@ func (r *Runner) listAvailableStoreTemplates(store *loader.Store) {
path := tpl.Path
tplBody, err := store.ReadTemplateFromURI(path, true)
if err != nil {
gologger.Error().Msgf("Could not read the template %s: %s", path, err)
r.Logger.Error().Msgf("Could not read the template %s: %s", path, err)
continue
}
if colorize {
path = aurora.Cyan(tpl.Path).String()
tplBody, err = r.highlightTemplate(&tplBody)
if err != nil {
gologger.Error().Msgf("Could not highlight the template %s: %s", tpl.Path, err)
r.Logger.Error().Msgf("Could not highlight the template %s: %s", tpl.Path, err)
continue
}
}
gologger.Silent().Msgf("Template: %s\n\n%s", path, tplBody)
r.Logger.Print().Msgf("Template: %s\n\n%s", path, tplBody)
} else {
gologger.Silent().Msgf("%s\n", strings.TrimPrefix(tpl.Path, config.DefaultConfig.TemplatesDirectory+string(filepath.Separator)))
r.Logger.Print().Msgf("%s\n", strings.TrimPrefix(tpl.Path, config.DefaultConfig.TemplatesDirectory+string(filepath.Separator)))
}
} else {
r.verboseTemplate(tpl)
@ -74,7 +73,7 @@ func (r *Runner) listAvailableStoreTemplates(store *loader.Store) {
}
func (r *Runner) listAvailableStoreTags(store *loader.Store) {
gologger.Print().Msgf(
r.Logger.Print().Msgf(
"\nListing available %v nuclei tags for %v",
config.DefaultConfig.TemplateVersion,
config.DefaultConfig.TemplatesDirectory,
@ -100,9 +99,9 @@ func (r *Runner) listAvailableStoreTags(store *loader.Store) {
for _, tag := range tagsList {
if r.options.JSONL {
marshalled, _ := jsoniter.Marshal(tag)
gologger.Silent().Msgf("%s\n", string(marshalled))
r.Logger.Debug().Msgf("%s", string(marshalled))
} else {
gologger.Silent().Msgf("%s (%d)\n", tag.Key, tag.Value)
r.Logger.Debug().Msgf("%s (%d)", tag.Key, tag.Value)
}
}
}

View File

@ -41,7 +41,7 @@ type nucleiExecutor struct {
engine *core.Engine
store *loader.Store
options *NucleiExecutorOptions
executorOpts protocols.ExecutorOptions
executorOpts *protocols.ExecutorOptions
}
type NucleiExecutorOptions struct {
@ -58,6 +58,7 @@ type NucleiExecutorOptions struct {
Colorizer aurora.Aurora
Parser parser.Parser
TemporaryDirectory string
Logger *gologger.Logger
}
func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
@ -66,7 +67,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
// Create the executor options which will be used throughout the execution
// stage by the nuclei engine modules.
executorOpts := protocols.ExecutorOptions{
executorOpts := &protocols.ExecutorOptions{
Output: opts.Output,
Options: opts.Options,
Progress: opts.Progress,
@ -85,6 +86,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
FuzzParamsFrequency: fuzzFreqCache,
GlobalMatchers: globalmatchers.New(),
FuzzStatsDB: opts.FuzzStatsDB,
Logger: opts.Logger,
}
if opts.Options.ShouldUseHostError() {
@ -93,7 +95,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
maxHostError = 100 // auto adjust for fuzzings
}
if opts.Options.TemplateThreads > maxHostError {
gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", opts.Options.TemplateThreads)
opts.Logger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", opts.Options.TemplateThreads)
maxHostError = opts.Options.TemplateThreads
}
@ -107,7 +109,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
executorEngine := core.New(opts.Options)
executorEngine.SetExecuterOptions(executorOpts)
workflowLoader, err := parsers.NewLoader(&executorOpts)
workflowLoader, err := parsers.NewLoader(executorOpts)
if err != nil {
return nil, errors.Wrap(err, "Could not create loader options.")
}

View File

@ -112,7 +112,7 @@ func New(options *Options) (*DASTServer, error) {
func NewStatsServer(fuzzStatsDB *stats.Tracker) (*DASTServer, error) {
server := &DASTServer{
nucleiExecutor: &nucleiExecutor{
executorOpts: protocols.ExecutorOptions{
executorOpts: &protocols.ExecutorOptions{
FuzzStatsDB: fuzzStatsDB,
},
},
@ -125,7 +125,7 @@ func NewStatsServer(fuzzStatsDB *stats.Tracker) (*DASTServer, error) {
func (s *DASTServer) Close() {
s.nucleiExecutor.Close()
s.echo.Close()
_ = s.echo.Close()
s.tasksPool.StopAndWaitFor(1 * time.Minute)
}

View File

@ -7,7 +7,8 @@ import (
"github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/ratelimit"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/projectdiscovery/utils/errkit"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
@ -19,6 +20,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/utils/vardump"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless/engine"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
pkgtypes "github.com/projectdiscovery/nuclei/v3/pkg/types"
)
// TemplateSources contains template sources
@ -101,7 +103,7 @@ type InteractshOpts interactsh.Options
func WithInteractshOptions(opts InteractshOpts) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithInteractshOptions")
return errkit.Wrap(ErrOptionsNotSupported, "WithInteractshOptions")
}
optsPtr := &opts
e.interactshOpts = (*interactsh.Options)(optsPtr)
@ -179,7 +181,7 @@ func WithGlobalRateLimitCtx(ctx context.Context, maxTokens int, duration time.Du
return func(e *NucleiEngine) error {
e.opts.RateLimit = maxTokens
e.opts.RateLimitDuration = duration
e.rateLimiter = ratelimit.New(ctx, uint(e.opts.RateLimit), e.opts.RateLimitDuration)
e.rateLimiter = utils.GetRateLimiter(ctx, e.opts.RateLimit, e.opts.RateLimitDuration)
return nil
}
}
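Both this SDK option and the CLI runner now funnel through utils.GetRateLimiter instead of branching on zero values at every call site. The helper's body is not part of this diff; judging from the code it replaces, a plausible sketch is the following (an assumption about the helper, not its actual source — callers still normalize a zero duration to one second themselves before calling it):

// Hypothetical reconstruction of utils.GetRateLimiter based on the branching it replaces.
package utils

import (
	"context"
	"time"

	"github.com/projectdiscovery/ratelimit"
)

func GetRateLimiter(ctx context.Context, rateLimit int, duration time.Duration) *ratelimit.Limiter {
	if rateLimit == 0 && duration == 0 {
		// nothing configured: return an unlimited limiter
		return ratelimit.NewUnlimited(ctx)
	}
	return ratelimit.New(ctx, uint(rateLimit), duration)
}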
@ -205,7 +207,7 @@ func EnableHeadlessWithOpts(hopts *HeadlessOpts) NucleiSDKOptions {
e.opts.UseInstalledChrome = hopts.UseChrome
}
if engine.MustDisableSandbox() {
gologger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox\n")
e.Logger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox")
}
browser, err := engine.New(e.opts)
if err != nil {
@ -228,7 +230,7 @@ type StatsOptions struct {
func EnableStatsWithOpts(opts StatsOptions) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("EnableStatsWithOpts")
return errkit.Wrap(ErrOptionsNotSupported, "EnableStatsWithOpts")
}
if opts.Interval == 0 {
opts.Interval = 5 //sec
@ -256,7 +258,7 @@ type VerbosityOptions struct {
func WithVerbosity(opts VerbosityOptions) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithVerbosity")
return errkit.Wrap(ErrOptionsNotSupported, "WithVerbosity")
}
e.opts.Verbose = opts.Verbose
e.opts.Silent = opts.Silent
@ -289,15 +291,15 @@ type NetworkConfig struct {
func WithNetworkConfig(opts NetworkConfig) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithNetworkConfig")
return errkit.Wrap(ErrOptionsNotSupported, "WithNetworkConfig")
}
e.opts.NoHostErrors = opts.DisableMaxHostErr
e.opts.MaxHostError = opts.MaxHostError
if e.opts.ShouldUseHostError() {
maxHostError := opts.MaxHostError
if e.opts.TemplateThreads > maxHostError {
gologger.Print().Msgf("[%v] The concurrency value is higher than max-host-error", e.executerOpts.Colorizer.BrightYellow("WRN"))
gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", e.opts.TemplateThreads)
e.Logger.Warning().Msg("The concurrency value is higher than max-host-error")
e.Logger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", e.opts.TemplateThreads)
maxHostError = e.opts.TemplateThreads
e.opts.MaxHostError = maxHostError
}
@ -320,7 +322,7 @@ func WithNetworkConfig(opts NetworkConfig) NucleiSDKOptions {
func WithProxy(proxy []string, proxyInternalRequests bool) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithProxy")
return errkit.Wrap(ErrOptionsNotSupported, "WithProxy")
}
e.opts.Proxy = proxy
e.opts.ProxyInternal = proxyInternalRequests
@ -345,7 +347,7 @@ type OutputWriter output.Writer
func UseOutputWriter(writer OutputWriter) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("UseOutputWriter")
return errkit.Wrap(ErrOptionsNotSupported, "UseOutputWriter")
}
e.customWriter = writer
return nil
@ -360,7 +362,7 @@ type StatsWriter progress.Progress
func UseStatsWriter(writer StatsWriter) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("UseStatsWriter")
return errkit.Wrap(ErrOptionsNotSupported, "UseStatsWriter")
}
e.customProgress = writer
return nil
@ -374,7 +376,7 @@ func UseStatsWriter(writer StatsWriter) NucleiSDKOptions {
func WithTemplateUpdateCallback(disableTemplatesAutoUpgrade bool, callback func(newVersion string)) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithTemplateUpdateCallback")
return errkit.Wrap(ErrOptionsNotSupported, "WithTemplateUpdateCallback")
}
e.disableTemplatesAutoUpgrade = disableTemplatesAutoUpgrade
e.onUpdateAvailableCallback = callback
@ -386,7 +388,7 @@ func WithTemplateUpdateCallback(disableTemplatesAutoUpgrade bool, callback func(
func WithSandboxOptions(allowLocalFileAccess bool, restrictLocalNetworkAccess bool) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithSandboxOptions")
return errkit.Wrap(ErrOptionsNotSupported, "WithSandboxOptions")
}
e.opts.AllowLocalFileAccess = allowLocalFileAccess
e.opts.RestrictLocalNetworkAccess = restrictLocalNetworkAccess
@ -419,6 +421,14 @@ func EnableGlobalMatchersTemplates() NucleiSDKOptions {
}
}
// DisableTemplateCache disables template caching
func DisableTemplateCache() NucleiSDKOptions {
return func(e *NucleiEngine) error {
e.opts.DoNotCacheTemplates = true
return nil
}
}
// EnableFileTemplates allows loading/executing file protocol templates
func EnableFileTemplates() NucleiSDKOptions {
return func(e *NucleiEngine) error {
@ -463,6 +473,14 @@ func EnablePassiveMode() NucleiSDKOptions {
}
}
// EnableMatcherStatus allows enabling matcher status
func EnableMatcherStatus() NucleiSDKOptions {
return func(e *NucleiEngine) error {
e.opts.MatcherStatus = true
return nil
}
}
// WithAuthProvider allows setting a custom authprovider implementation
func WithAuthProvider(provider authprovider.AuthProvider) NucleiSDKOptions {
return func(e *NucleiEngine) error {
@ -519,3 +537,25 @@ func WithResumeFile(file string) NucleiSDKOptions {
return nil
}
}
// WithLogger allows setting a shared gologger instance
func WithLogger(logger *gologger.Logger) NucleiSDKOptions {
return func(e *NucleiEngine) error {
e.Logger = logger
if e.opts != nil {
e.opts.Logger = logger
}
if e.executerOpts != nil {
e.executerOpts.Logger = logger
}
return nil
}
}
// WithOptions sets all options at once
func WithOptions(opts *pkgtypes.Options) NucleiSDKOptions {
return func(e *NucleiEngine) error {
e.opts = opts
return nil
}
}
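The functional options in this file compose at engine construction time. A short usage sketch using only options and methods that appear in this changeset (the target URL is the same placeholder used in the new SDK test):

package main

import (
	"github.com/projectdiscovery/gologger"
	nuclei "github.com/projectdiscovery/nuclei/v3/lib"
)

func main() {
	logger := &gologger.Logger{}

	ne, err := nuclei.NewNucleiEngine(
		nuclei.WithLogger(logger),     // share one logger across engine, options and executor
		nuclei.DisableTemplateCache(), // opt out of template caching
		nuclei.EnableMatcherStatus(),  // include matcher status in results
	)
	if err != nil {
		logger.Fatal().Msgf("could not create engine: %s", err)
	}
	defer ne.Close()

	// load targets and run; a nil callback prints JSON results to stdout
	ne.LoadTargets([]string{"http://honey.scanme.sh"}, false)
	if err := ne.ExecuteWithCallback(nil); err != nil {
		logger.Error().Msgf("scan failed: %s", err)
	}
}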

View File

@ -12,8 +12,9 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/ratelimit"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/projectdiscovery/utils/errkit"
"github.com/rs/xid"
)
// unsafeOptions are those nuclei objects/instances/types
@ -21,14 +22,14 @@ import (
// hence they are ephemeral and are created on every ExecuteNucleiWithOpts invocation
// in ThreadSafeNucleiEngine
type unsafeOptions struct {
executerOpts protocols.ExecutorOptions
executerOpts *protocols.ExecutorOptions
engine *core.Engine
}
// createEphemeralObjects creates ephemeral nuclei objects/instances/types
func createEphemeralObjects(ctx context.Context, base *NucleiEngine, opts *types.Options) (*unsafeOptions, error) {
u := &unsafeOptions{}
u.executerOpts = protocols.ExecutorOptions{
u.executerOpts = &protocols.ExecutorOptions{
Output: base.customWriter,
Options: opts,
Progress: base.customProgress,
@ -52,11 +53,7 @@ func createEphemeralObjects(ctx context.Context, base *NucleiEngine, opts *types
if opts.RateLimit > 0 && opts.RateLimitDuration == 0 {
opts.RateLimitDuration = time.Second
}
if opts.RateLimit == 0 && opts.RateLimitDuration == 0 {
u.executerOpts.RateLimiter = ratelimit.NewUnlimited(ctx)
} else {
u.executerOpts.RateLimiter = ratelimit.New(ctx, uint(opts.RateLimit), opts.RateLimitDuration)
}
u.executerOpts.RateLimiter = utils.GetRateLimiter(ctx, opts.RateLimit, opts.RateLimitDuration)
u.engine = core.New(opts)
u.engine.SetExecuterOptions(u.executerOpts)
return u, nil
@ -88,9 +85,11 @@ type ThreadSafeNucleiEngine struct {
// whose methods are thread-safe and can be used concurrently
// Note: Non-thread-safe methods start with Global prefix
func NewThreadSafeNucleiEngineCtx(ctx context.Context, opts ...NucleiSDKOptions) (*ThreadSafeNucleiEngine, error) {
defaultOptions := types.DefaultOptions()
defaultOptions.ExecutionId = xid.New().String()
// default options
e := &NucleiEngine{
opts: types.DefaultOptions(),
opts: defaultOptions,
mode: threadSafe,
}
for _, option := range opts {
@ -125,8 +124,8 @@ func (e *ThreadSafeNucleiEngine) GlobalResultCallback(callback func(event *outpu
// by invoking this method with different options and targets
// Note: Not all options are thread-safe. This method will throw an error if you try to use non-thread-safe options
func (e *ThreadSafeNucleiEngine) ExecuteNucleiWithOptsCtx(ctx context.Context, targets []string, opts ...NucleiSDKOptions) error {
baseOpts := *e.eng.opts
tmpEngine := &NucleiEngine{opts: &baseOpts, mode: threadSafe}
baseOpts := e.eng.opts.Copy()
tmpEngine := &NucleiEngine{opts: baseOpts, mode: threadSafe}
for _, option := range opts {
if err := option(tmpEngine); err != nil {
return err
@ -142,19 +141,19 @@ func (e *ThreadSafeNucleiEngine) ExecuteNucleiWithOptsCtx(ctx context.Context, t
defer closeEphemeralObjects(unsafeOpts)
// load templates
workflowLoader, err := workflow.NewLoader(&unsafeOpts.executerOpts)
workflowLoader, err := workflow.NewLoader(unsafeOpts.executerOpts)
if err != nil {
return errorutil.New("Could not create workflow loader: %s\n", err)
return errkit.Wrapf(err, "Could not create workflow loader: %s", err)
}
unsafeOpts.executerOpts.WorkflowLoader = workflowLoader
store, err := loader.New(loader.NewConfig(tmpEngine.opts, e.eng.catalog, unsafeOpts.executerOpts))
if err != nil {
return errorutil.New("Could not create loader client: %s\n", err)
return errkit.Wrapf(err, "Could not create loader client: %s", err)
}
store.Load()
inputProvider := provider.NewSimpleInputProviderWithUrls(targets...)
inputProvider := provider.NewSimpleInputProviderWithUrls(e.eng.opts.ExecutionId, targets...)
if len(store.Templates()) == 0 && len(store.Workflows()) == 0 {
return ErrNoTemplatesAvailable

View File

@ -5,7 +5,9 @@ import (
"bytes"
"context"
"io"
"sync"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
@ -26,7 +28,8 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/ratelimit"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
"github.com/rs/xid"
)
// NucleiSDKOptions contains options for nuclei SDK
@ -34,13 +37,13 @@ type NucleiSDKOptions func(e *NucleiEngine) error
var (
// ErrNotImplemented is returned when a feature is not implemented
ErrNotImplemented = errorutil.New("Not implemented")
ErrNotImplemented = errkit.New("Not implemented")
// ErrNoTemplatesAvailable is returned when no templates are available to execute
ErrNoTemplatesAvailable = errorutil.New("No templates available")
ErrNoTemplatesAvailable = errkit.New("No templates available")
// ErrNoTargetsAvailable is returned when no targets are available to scan
ErrNoTargetsAvailable = errorutil.New("No targets available")
ErrNoTargetsAvailable = errkit.New("No targets available")
// ErrOptionsNotSupported is returned when an option is not supported in thread safe mode
ErrOptionsNotSupported = errorutil.NewWithFmt("Option %v not supported in thread safe mode")
ErrOptionsNotSupported = errkit.New("Option not supported in thread safe mode")
)
type engineMode uint
@ -64,6 +67,7 @@ type NucleiEngine struct {
templatesLoaded bool
// unexported core fields
ctx context.Context
interactshClient *interactsh.Client
catalog catalog.Catalog
rateLimiter *ratelimit.Limiter
@ -84,20 +88,23 @@ type NucleiEngine struct {
customWriter output.Writer
customProgress progress.Progress
rc reporting.Client
executerOpts protocols.ExecutorOptions
executerOpts *protocols.ExecutorOptions
// Logger instance for the engine
Logger *gologger.Logger
}
// LoadAllTemplates loads all nuclei template based on given options
func (e *NucleiEngine) LoadAllTemplates() error {
workflowLoader, err := workflow.NewLoader(&e.executerOpts)
workflowLoader, err := workflow.NewLoader(e.executerOpts)
if err != nil {
return errorutil.New("Could not create workflow loader: %s\n", err)
return errkit.Wrapf(err, "Could not create workflow loader: %s", err)
}
e.executerOpts.WorkflowLoader = workflowLoader
e.store, err = loader.New(loader.NewConfig(e.opts, e.catalog, e.executerOpts))
if err != nil {
return errorutil.New("Could not create loader client: %s\n", err)
return errkit.Wrapf(err, "Could not create loader client: %s", err)
}
e.store.Load()
e.templatesLoaded = true
@ -124,9 +131,9 @@ func (e *NucleiEngine) GetWorkflows() []*templates.Template {
func (e *NucleiEngine) LoadTargets(targets []string, probeNonHttp bool) {
for _, target := range targets {
if probeNonHttp {
_ = e.inputProvider.SetWithProbe(target, e.httpxClient)
_ = e.inputProvider.SetWithProbe(e.opts.ExecutionId, target, e.httpxClient)
} else {
e.inputProvider.Set(target)
e.inputProvider.Set(e.opts.ExecutionId, target)
}
}
}
@ -136,9 +143,9 @@ func (e *NucleiEngine) LoadTargetsFromReader(reader io.Reader, probeNonHttp bool
buff := bufio.NewScanner(reader)
for buff.Scan() {
if probeNonHttp {
_ = e.inputProvider.SetWithProbe(buff.Text(), e.httpxClient)
_ = e.inputProvider.SetWithProbe(e.opts.ExecutionId, buff.Text(), e.httpxClient)
} else {
e.inputProvider.Set(buff.Text())
e.inputProvider.Set(e.opts.ExecutionId, buff.Text())
}
}
}
@ -161,7 +168,7 @@ func (e *NucleiEngine) LoadTargetsWithHttpData(filePath string, filemode string)
// GetExecuterOptions returns the nuclei executor options
func (e *NucleiEngine) GetExecuterOptions() *protocols.ExecutorOptions {
return &e.executerOpts
return e.executerOpts
}
// ParseTemplate parses a template from given data
@ -229,7 +236,7 @@ func (e *NucleiEngine) closeInternal() {
// Close all resources used by nuclei engine
func (e *NucleiEngine) Close() {
e.closeInternal()
protocolinit.Close()
protocolinit.Close(e.opts.ExecutionId)
}
// ExecuteCallbackWithCtx executes templates on targets and calls callback on each result(only if results are found)
@ -246,9 +253,9 @@ func (e *NucleiEngine) ExecuteCallbackWithCtx(ctx context.Context, callback ...f
}
filtered := []func(event *output.ResultEvent){}
for _, callback := range callback {
if callback != nil {
filtered = append(filtered, callback)
for _, cb := range callback {
if cb != nil {
filtered = append(filtered, cb)
}
}
e.resultCallbacks = append(e.resultCallbacks, filtered...)
@ -258,15 +265,32 @@ func (e *NucleiEngine) ExecuteCallbackWithCtx(ctx context.Context, callback ...f
return ErrNoTemplatesAvailable
}
var wg sync.WaitGroup
wg.Add(1)
go func() {
defer wg.Done()
_ = e.engine.ExecuteScanWithOpts(ctx, templatesAndWorkflows, e.inputProvider, false)
defer e.engine.WorkPool().Wait()
}()
// wait for the scan to finish or for the context to be cancelled
select {
case <-ctx.Done():
<-wait(&wg) // wait for scan to finish
return ctx.Err()
case <-wait(&wg):
// scan finished
}
return nil
}
// ExecuteWithCallback is same as ExecuteCallbackWithCtx but with default context
// Note: this is deprecated and will be removed in a future major release
func (e *NucleiEngine) ExecuteWithCallback(callback ...func(event *output.ResultEvent)) error {
return e.ExecuteCallbackWithCtx(context.Background(), callback...)
ctx := context.Background()
if e.ctx != nil {
ctx = e.ctx
}
return e.ExecuteCallbackWithCtx(ctx, callback...)
}
// Options return nuclei Type Options
@ -287,9 +311,12 @@ func (e *NucleiEngine) Store() *loader.Store {
// NewNucleiEngineCtx creates a new nuclei engine instance with given context
func NewNucleiEngineCtx(ctx context.Context, options ...NucleiSDKOptions) (*NucleiEngine, error) {
// default options
defaultOptions := types.DefaultOptions()
defaultOptions.ExecutionId = xid.New().String()
e := &NucleiEngine{
opts: types.DefaultOptions(),
opts: defaultOptions,
mode: singleInstance,
ctx: ctx,
}
for _, option := range options {
if err := option(e); err != nil {
@ -306,3 +333,18 @@ func NewNucleiEngineCtx(ctx context.Context, options ...NucleiSDKOptions) (*Nucl
func NewNucleiEngine(options ...NucleiSDKOptions) (*NucleiEngine, error) {
return NewNucleiEngineCtx(context.Background(), options...)
}
// GetParser returns the template parser with cache
func (e *NucleiEngine) GetParser() *templates.Parser {
return e.parser
}
// wait for a waitgroup to finish
func wait(wg *sync.WaitGroup) <-chan struct{} {
ch := make(chan struct{})
go func() {
defer close(ch)
wg.Wait()
}()
return ch
}
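The wait helper turns a sync.WaitGroup into a channel so ExecuteCallbackWithCtx can select between scan completion and ctx.Done(), draining the group after cancellation. A standalone sketch of the same pattern (illustrative, outside the SDK):

package main

import (
	"context"
	"fmt"
	"sync"
	"time"
)

// wait closes the returned channel once wg.Wait() returns, mirroring the helper above.
func wait(wg *sync.WaitGroup) <-chan struct{} {
	ch := make(chan struct{})
	go func() {
		defer close(ch)
		wg.Wait()
	}()
	return ch
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond)
	defer cancel()

	var wg sync.WaitGroup
	wg.Add(1)
	go func() {
		defer wg.Done()
		time.Sleep(time.Second) // stand-in for a long-running scan
	}()

	select {
	case <-ctx.Done():
		<-wait(&wg) // context fired first: still wait for the worker to unwind
		fmt.Println("cancelled:", ctx.Err())
	case <-wait(&wg):
		fmt.Println("finished")
	}
}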

View File

@ -8,6 +8,7 @@ import (
"time"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/logrusorgru/aurora"
"github.com/pkg/errors"
@ -29,7 +30,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolinit"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/http/httpclientpool"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
@ -37,8 +37,6 @@ import (
"github.com/projectdiscovery/ratelimit"
)
var sharedInit *sync.Once
// applyRequiredDefaults to options
func (e *NucleiEngine) applyRequiredDefaults(ctx context.Context) {
mockoutput := testutils.NewMockOutputWriter(e.opts.OmitTemplate)
@ -98,27 +96,39 @@ func (e *NucleiEngine) applyRequiredDefaults(ctx context.Context) {
// init
func (e *NucleiEngine) init(ctx context.Context) error {
// Set a default logger if one isn't provided in the options
if e.opts.Logger != nil {
e.Logger = e.opts.Logger
} else {
e.opts.Logger = &gologger.Logger{}
}
e.Logger = e.opts.Logger
if e.opts.Verbose {
gologger.DefaultLogger.SetMaxLevel(levels.LevelVerbose)
e.Logger.SetMaxLevel(levels.LevelVerbose)
} else if e.opts.Debug {
gologger.DefaultLogger.SetMaxLevel(levels.LevelDebug)
e.Logger.SetMaxLevel(levels.LevelDebug)
} else if e.opts.Silent {
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
e.Logger.SetMaxLevel(levels.LevelSilent)
}
if err := runner.ValidateOptions(e.opts); err != nil {
return err
}
e.parser = templates.NewParser()
if sharedInit == nil || protocolstate.ShouldInit() {
sharedInit = &sync.Once{}
if e.opts.Parser != nil {
if op, ok := e.opts.Parser.(*templates.Parser); ok {
e.parser = op
}
}
sharedInit.Do(func() {
if e.parser == nil {
e.parser = templates.NewParser()
}
if protocolstate.ShouldInit(e.opts.ExecutionId) {
_ = protocolinit.Init(e.opts)
})
}
if e.opts.ProxyInternal && e.opts.AliveHttpProxy != "" || e.opts.AliveSocksProxy != "" {
httpclient, err := httpclientpool.Get(e.opts, &httpclientpool.Configuration{})
@ -160,7 +170,7 @@ func (e *NucleiEngine) init(ctx context.Context) error {
e.catalog = disk.NewCatalog(config.DefaultConfig.TemplatesDirectory)
}
e.executerOpts = protocols.ExecutorOptions{
e.executerOpts = &protocols.ExecutorOptions{
Output: e.customWriter,
Options: e.opts,
Progress: e.customProgress,
@ -173,12 +183,13 @@ func (e *NucleiEngine) init(ctx context.Context) error {
Browser: e.browserInstance,
Parser: e.parser,
InputHelper: input.NewHelper(),
Logger: e.opts.Logger,
}
if e.opts.ShouldUseHostError() && e.hostErrCache != nil {
e.executerOpts.HostErrorsCache = e.hostErrCache
}
if len(e.opts.SecretsFile) > 0 {
authTmplStore, err := runner.GetAuthTmplStore(*e.opts, e.catalog, e.executerOpts)
authTmplStore, err := runner.GetAuthTmplStore(e.opts, e.catalog, e.executerOpts)
if err != nil {
return errors.Wrap(err, "failed to load dynamic auth templates")
}
@ -220,6 +231,25 @@ func (e *NucleiEngine) init(ctx context.Context) error {
}
}
// Handle the case where the user passed an existing parser that we can use as a cache
if e.opts.Parser != nil {
if cachedParser, ok := e.opts.Parser.(*templates.Parser); ok {
e.parser = cachedParser
e.opts.Parser = cachedParser
e.executerOpts.Parser = cachedParser
e.executerOpts.Options.Parser = cachedParser
}
}
// Create a new parser if necessary
if e.parser == nil {
op := templates.NewParser()
e.parser = op
e.opts.Parser = op
e.executerOpts.Parser = op
e.executerOpts.Options.Parser = op
}
e.engine = core.New(e.opts)
e.engine.SetExecuterOptions(e.executerOpts)

37
lib/sdk_test.go Normal file
View File

@ -0,0 +1,37 @@
package nuclei_test
import (
"context"
"log"
"testing"
"time"
nuclei "github.com/projectdiscovery/nuclei/v3/lib"
"github.com/stretchr/testify/require"
)
func TestContextCancelNucleiEngine(t *testing.T) {
// create nuclei engine with options
ctx, cancel := context.WithCancel(context.Background())
ne, err := nuclei.NewNucleiEngineCtx(ctx,
nuclei.WithTemplateFilters(nuclei.TemplateFilters{Tags: []string{"oast"}}),
nuclei.EnableStatsWithOpts(nuclei.StatsOptions{MetricServerPort: 0}),
)
require.NoError(t, err, "could not create nuclei engine")
go func() {
time.Sleep(time.Second * 2)
cancel()
log.Println("Test: context cancelled")
}()
// load targets and optionally probe non http/https targets
ne.LoadTargets([]string{"http://honey.scanme.sh"}, false)
// when callback is nil, nuclei will print JSON output to stdout
err = ne.ExecuteWithCallback(nil)
if err != nil {
// we expect a context cancellation error
require.ErrorIs(t, err, context.Canceled, "was expecting context cancellation error")
}
defer ne.Close()
}

View File

@ -3,12 +3,12 @@ package authx
import (
"fmt"
"strings"
"sync"
"sync/atomic"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/replacer"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
sliceutil "github.com/projectdiscovery/utils/slice"
)
@ -30,8 +30,8 @@ type Dynamic struct {
Input string `json:"input" yaml:"input"` // (optional) target for the dynamic secret
Extracted map[string]interface{} `json:"-" yaml:"-"` // extracted values from the dynamic secret
fetchCallback LazyFetchSecret `json:"-" yaml:"-"`
m *sync.Mutex `json:"-" yaml:"-"` // mutex for lazy fetch
fetched bool `json:"-" yaml:"-"` // flag to check if the secret has been fetched
fetched *atomic.Bool `json:"-" yaml:"-"` // atomic flag to check if the secret has been fetched
fetching *atomic.Bool `json:"-" yaml:"-"` // atomic flag to prevent recursive fetch calls
error error `json:"-" yaml:"-"` // error if any
}
@ -43,8 +43,8 @@ func (d *Dynamic) GetDomainAndDomainRegex() ([]string, []string) {
domainRegex = append(domainRegex, secret.DomainsRegex...)
}
if d.Secret != nil {
domains = append(domains, d.Secret.Domains...)
domainRegex = append(domainRegex, d.Secret.DomainsRegex...)
domains = append(domains, d.Domains...)
domainRegex = append(domainRegex, d.DomainsRegex...)
}
uniqueDomains := sliceutil.Dedupe(domains)
uniqueDomainRegex := sliceutil.Dedupe(domainRegex)
@ -52,29 +52,35 @@ func (d *Dynamic) GetDomainAndDomainRegex() ([]string, []string) {
}
func (d *Dynamic) UnmarshalJSON(data []byte) error {
if err := json.Unmarshal(data, &d); err != nil {
if d == nil {
return errkit.New("cannot unmarshal into nil Dynamic struct")
}
// Use an alias type (auxiliary) to avoid a recursive call in this method.
type Alias Dynamic
// If d.Secret was nil, json.Unmarshal will allocate a new Secret object
// and populate it from the top level JSON fields.
if err := json.Unmarshal(data, (*Alias)(d)); err != nil {
return err
}
var s Secret
if err := json.Unmarshal(data, &s); err != nil {
return err
}
d.Secret = &s
return nil
}
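The Alias type above is the usual Go idiom for custom UnmarshalJSON: converting the receiver to a defined type with identical fields but no methods stops json.Unmarshal from re-invoking UnmarshalJSON and recursing. A minimal standalone illustration (the Config type and its default are invented for the example):

package main

import (
	"encoding/json"
	"fmt"
)

type Config struct {
	Name  string `json:"name"`
	Level int    `json:"level"`
}

func (c *Config) UnmarshalJSON(data []byte) error {
	// Alias shares Config's fields but not its methods, so decoding into it
	// does not call UnmarshalJSON again.
	type Alias Config
	if err := json.Unmarshal(data, (*Alias)(c)); err != nil {
		return err
	}
	if c.Level == 0 {
		c.Level = 1 // post-decode default
	}
	return nil
}

func main() {
	var c Config
	_ = json.Unmarshal([]byte(`{"name":"scan"}`), &c)
	fmt.Println(c.Name, c.Level) // scan 1
}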
// Validate validates the dynamic secret
func (d *Dynamic) Validate() error {
d.m = &sync.Mutex{}
d.fetched = &atomic.Bool{}
d.fetching = &atomic.Bool{}
if d.TemplatePath == "" {
return errorutil.New(" template-path is required for dynamic secret")
return errkit.New(" template-path is required for dynamic secret")
}
if len(d.Variables) == 0 {
return errorutil.New("variables are required for dynamic secret")
return errkit.New("variables are required for dynamic secret")
}
if d.Secret != nil {
d.Secret.skipCookieParse = true // skip cookie parsing in dynamic secrets during validation
d.skipCookieParse = true // skip cookie parsing in dynamic secrets during validation
if err := d.Secret.Validate(); err != nil {
return err
}
@ -92,9 +98,7 @@ func (d *Dynamic) Validate() error {
func (d *Dynamic) SetLazyFetchCallback(callback LazyFetchSecret) {
d.fetchCallback = func(d *Dynamic) error {
err := callback(d)
d.fetched = true
if err != nil {
d.error = err
return err
}
if len(d.Extracted) == 0 {
@ -179,15 +183,21 @@ func (d *Dynamic) applyValuesToSecret(secret *Secret) error {
// GetStrategy returns the auth strategies for the dynamic secret
func (d *Dynamic) GetStrategies() []AuthStrategy {
if !d.fetched {
if d.fetched.Load() {
if d.error != nil {
return nil
}
} else {
// Try to fetch if not already fetched
_ = d.Fetch(true)
}
if d.error != nil {
return nil
}
var strategies []AuthStrategy
if d.Secret != nil {
strategies = append(strategies, d.Secret.GetStrategy())
strategies = append(strategies, d.GetStrategy())
}
for _, secret := range d.Secrets {
strategies = append(strategies, secret.GetStrategy())
@ -198,12 +208,23 @@ func (d *Dynamic) GetStrategies() []AuthStrategy {
// Fetch fetches the dynamic secret
// if isFatal is true, it will stop the execution if the secret could not be fetched
func (d *Dynamic) Fetch(isFatal bool) error {
d.m.Lock()
defer d.m.Unlock()
if d.fetched {
return nil
if d.fetched.Load() {
return d.error
}
// Try to set fetching flag atomically
if !d.fetching.CompareAndSwap(false, true) {
// Already fetching, return current error
return d.error
}
// We're the only one fetching, call the callback
d.error = d.fetchCallback(d)
// Mark as fetched and clear fetching flag
d.fetched.Store(true)
d.fetching.Store(false)
if d.error != nil && isFatal {
gologger.Fatal().Msgf("Could not fetch dynamic secret: %s\n", d.error)
}
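Fetch above swaps the previous mutex-and-bool for two atomic flags: fetched replays the stored result to later callers, while fetching uses CompareAndSwap so only one goroutine ever runs the callback; callers that lose the race return immediately instead of blocking. A reduced sketch of the same guard (here the losing caller gets an explicit sentinel error rather than reading shared state, a race-free simplification of the code above):

package main

import (
	"errors"
	"fmt"
	"sync"
	"sync/atomic"
)

var errInProgress = errors.New("fetch already in progress")

type lazySecret struct {
	fetched  atomic.Bool
	fetching atomic.Bool
	value    string
	err      error
}

func (l *lazySecret) get(fetch func() (string, error)) (string, error) {
	if l.fetched.Load() {
		return l.value, l.err // resolved earlier: replay the stored result
	}
	if !l.fetching.CompareAndSwap(false, true) {
		return "", errInProgress // someone else is fetching: do not block
	}
	l.value, l.err = fetch()
	l.fetched.Store(true)
	l.fetching.Store(false)
	return l.value, l.err
}

func main() {
	var l lazySecret
	var wg sync.WaitGroup
	for i := 0; i < 3; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			v, err := l.get(func() (string, error) { return "token-abc", nil })
			fmt.Println(v, err)
		}()
	}
	wg.Wait()
}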

View File

@ -0,0 +1,125 @@
package authx
import (
"testing"
"github.com/stretchr/testify/require"
)
func TestDynamicUnmarshalJSON(t *testing.T) {
t.Run("basic-unmarshal", func(t *testing.T) {
data := []byte(`{
"template": "test-template.yaml",
"variables": [
{
"key": "username",
"value": "testuser"
}
],
"secrets": [
{
"type": "BasicAuth",
"domains": ["example.com"],
"username": "user1",
"password": "pass1"
}
],
"type": "BasicAuth",
"domains": ["test.com"],
"username": "testuser",
"password": "testpass"
}`)
var d Dynamic
err := d.UnmarshalJSON(data)
require.NoError(t, err)
// Secret
require.NotNil(t, d.Secret)
require.Equal(t, "BasicAuth", d.Type)
require.Equal(t, []string{"test.com"}, d.Domains)
require.Equal(t, "testuser", d.Username)
require.Equal(t, "testpass", d.Password)
// Dynamic fields
require.Equal(t, "test-template.yaml", d.TemplatePath)
require.Len(t, d.Variables, 1)
require.Equal(t, "username", d.Variables[0].Key)
require.Equal(t, "testuser", d.Variables[0].Value)
require.Len(t, d.Secrets, 1)
require.Equal(t, "BasicAuth", d.Secrets[0].Type)
require.Equal(t, []string{"example.com"}, d.Secrets[0].Domains)
require.Equal(t, "user1", d.Secrets[0].Username)
require.Equal(t, "pass1", d.Secrets[0].Password)
})
t.Run("complex-unmarshal", func(t *testing.T) {
data := []byte(`{
"template": "test-template.yaml",
"variables": [
{
"key": "token",
"value": "Bearer xyz"
}
],
"secrets": [
{
"type": "CookiesAuth",
"domains": ["example.com"],
"cookies": [
{
"key": "session",
"value": "abc123"
}
]
}
],
"type": "HeadersAuth",
"domains": ["api.test.com"],
"headers": [
{
"key": "X-API-Key",
"value": "secret-key"
}
]
}`)
var d Dynamic
err := d.UnmarshalJSON(data)
require.NoError(t, err)
// Secret
require.NotNil(t, d.Secret)
require.Equal(t, "HeadersAuth", d.Type)
require.Equal(t, []string{"api.test.com"}, d.Domains)
require.Len(t, d.Headers, 1)
require.Equal(t, "X-API-Key", d.Secret.Headers[0].Key)
require.Equal(t, "secret-key", d.Secret.Headers[0].Value)
// Dynamic fields
require.Equal(t, "test-template.yaml", d.TemplatePath)
require.Len(t, d.Variables, 1)
require.Equal(t, "token", d.Variables[0].Key)
require.Equal(t, "Bearer xyz", d.Variables[0].Value)
require.Len(t, d.Secrets, 1)
require.Equal(t, "CookiesAuth", d.Secrets[0].Type)
require.Equal(t, []string{"example.com"}, d.Secrets[0].Domains)
require.Len(t, d.Secrets[0].Cookies, 1)
require.Equal(t, "session", d.Secrets[0].Cookies[0].Key)
require.Equal(t, "abc123", d.Secrets[0].Cookies[0].Value)
})
t.Run("invalid-json", func(t *testing.T) {
data := []byte(`{invalid json}`)
var d Dynamic
err := d.UnmarshalJSON(data)
require.Error(t, err)
})
t.Run("empty-json", func(t *testing.T) {
data := []byte(`{}`)
var d Dynamic
err := d.UnmarshalJSON(data)
require.NoError(t, err)
})
}

View File

@ -8,7 +8,7 @@ import (
"strings"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
"github.com/projectdiscovery/utils/generic"
stringsutil "github.com/projectdiscovery/utils/strings"
"gopkg.in/yaml.v3"
@ -55,7 +55,7 @@ type Secret struct {
Type string `json:"type" yaml:"type"`
Domains []string `json:"domains" yaml:"domains"`
DomainsRegex []string `json:"domains-regex" yaml:"domains-regex"`
Headers []KV `json:"headers" yaml:"headers"`
Headers []KV `json:"headers" yaml:"headers"` // Headers preserve exact casing (useful for case-sensitive APIs)
Cookies []Cookie `json:"cookies" yaml:"cookies"`
Params []KV `json:"params" yaml:"params"`
Username string `json:"username" yaml:"username"` // can be either email or username
@ -148,7 +148,7 @@ func (s *Secret) Validate() error {
}
type KV struct {
Key string `json:"key" yaml:"key"`
Key string `json:"key" yaml:"key"` // Header key (preserves exact casing)
Value string `json:"value" yaml:"value"`
}
@ -237,7 +237,9 @@ func GetAuthDataFromYAML(data []byte) (*Authx, error) {
var auth Authx
err := yaml.Unmarshal(data, &auth)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not unmarshal yaml")
errorErr := errkit.FromError(err)
errorErr.Msgf("could not unmarshal yaml")
return nil, errorErr
}
return &auth, nil
}
@ -247,7 +249,9 @@ func GetAuthDataFromJSON(data []byte) (*Authx, error) {
var auth Authx
err := json.Unmarshal(data, &auth)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not unmarshal json")
errorErr := errkit.FromError(err)
errorErr.Msgf("could not unmarshal json")
return nil, errorErr
}
return &auth, nil
}
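
These hunks swap errorutil for errkit when wrapping unmarshal failures; the intent is the usual one of attaching a readable message while keeping the original error reachable as the cause. As a rough standard-library analogue of that wrapping (this uses fmt.Errorf with %w rather than the errkit API itself):

```go
package main

import (
	"errors"
	"fmt"

	"gopkg.in/yaml.v3"
)

// parseAuth wraps the unmarshal failure with context while keeping the
// original error reachable via errors.Unwrap/Is/As. This is a stdlib
// approximation of the errkit usage above, not the errkit API.
func parseAuth(data []byte) (map[string]any, error) {
	var auth map[string]any
	if err := yaml.Unmarshal(data, &auth); err != nil {
		return nil, fmt.Errorf("could not unmarshal yaml: %w", err)
	}
	return auth, nil
}

func main() {
	_, err := parseAuth([]byte("{")) // deliberately invalid YAML
	fmt.Println(err)
	fmt.Println("cause preserved:", errors.Unwrap(err) != nil) // true
}
```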

View File

@ -21,15 +21,19 @@ func NewHeadersAuthStrategy(data *Secret) *HeadersAuthStrategy {
}
// Apply applies the headers auth strategy to the request
// NOTE: This preserves exact header casing (e.g., barAuthToken stays as barAuthToken)
// This is useful for APIs that require case-sensitive header names
func (s *HeadersAuthStrategy) Apply(req *http.Request) {
for _, header := range s.Data.Headers {
req.Header.Set(header.Key, header.Value)
req.Header[header.Key] = []string{header.Value}
}
}
// ApplyOnRR applies the headers auth strategy to the retryable request
// NOTE: This preserves exact header casing (e.g., barAuthToken stays as barAuthToken)
// This is useful for APIs that require case-sensitive header names
func (s *HeadersAuthStrategy) ApplyOnRR(req *retryablehttp.Request) {
for _, header := range s.Data.Headers {
req.Header.Set(header.Key, header.Value)
req.Header[header.Key] = []string{header.Value}
}
}
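
Switching from Header.Set to direct map assignment is what preserves header casing: Set canonicalizes keys via textproto, while writing into the http.Header map keeps them byte-for-byte. A small demonstration:

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	req, err := http.NewRequest(http.MethodGet, "https://example.com", nil)
	if err != nil {
		panic(err)
	}

	// Header.Set canonicalizes the key: "barAuthToken" is stored as "Barauthtoken".
	req.Header.Set("barAuthToken", "via-set")

	// Writing into the map directly keeps the key exactly as given, which is
	// what the strategy relies on for case-sensitive APIs.
	req.Header["barAuthToken"] = []string{"via-map"}

	for key := range req.Header {
		fmt.Println(key)
	}
	// Output contains both "Barauthtoken" (canonicalized) and "barAuthToken" (preserved).
}
```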

View File

@ -12,6 +12,8 @@ info:
# static secrets
static:
# for header based auth session
# NOTE: Headers preserve exact casing (e.g., x-pdcp-key stays as x-pdcp-key)
# This is useful for APIs that require case-sensitive header names
- type: header
domains:
- api.projectdiscovery.io
@ -20,6 +22,8 @@ static:
headers:
- key: x-pdcp-key
value: <api-key-here>
- key: barAuthToken
value: <auth-token-here>
# for query based auth session
- type: Query

View File

@ -7,7 +7,7 @@ import (
"strings"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider/authx"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
urlutil "github.com/projectdiscovery/utils/url"
)
@ -30,16 +30,20 @@ func NewFileAuthProvider(path string, callback authx.LazyFetchSecret) (AuthProvi
return nil, ErrNoSecrets
}
if len(store.Dynamic) > 0 && callback == nil {
return nil, errorutil.New("lazy fetch callback is required for dynamic secrets")
return nil, errkit.New("lazy fetch callback is required for dynamic secrets")
}
for _, secret := range store.Secrets {
if err := secret.Validate(); err != nil {
return nil, errorutil.NewWithErr(err).Msgf("invalid secret in file: %s", path)
errorErr := errkit.FromError(err)
errorErr.Msgf("invalid secret in file: %s", path)
return nil, errorErr
}
}
for i, dynamic := range store.Dynamic {
if err := dynamic.Validate(); err != nil {
return nil, errorutil.NewWithErr(err).Msgf("invalid dynamic in file: %s", path)
errorErr := errkit.FromError(err)
errorErr.Msgf("invalid dynamic in file: %s", path)
return nil, errorErr
}
dynamic.SetLazyFetchCallback(callback)
store.Dynamic[i] = dynamic

View File

@ -7,6 +7,7 @@ import (
"fmt"
"io"
"path"
"slices"
"strings"
"github.com/aws/aws-sdk-go-v2/aws"
@ -140,11 +141,9 @@ func (c Catalog) ResolvePath(templateName, second string) (string, error) {
}
// check if templateName is already an absolute path to c key
for _, key := range keys {
if key == templateName {
if slices.Contains(keys, templateName) {
return templateName, nil
}
}
return "", fmt.Errorf("no such path found: %s%s for keys: %v", second, templateName, keys)
}
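
slices.Contains (standard library since Go 1.21) replaces the hand-rolled membership loops here and in the S3 test below. For reference:

```go
package main

import (
	"fmt"
	"slices"
)

func main() {
	keys := []string{"templates/a.yaml", "templates/b.yaml"}

	// slices.Contains performs the same linear scan the removed loops did.
	fmt.Println(slices.Contains(keys, "templates/b.yaml")) // true
	fmt.Println(slices.Contains(keys, "templates/c.yaml")) // false
}
```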

View File

@ -3,6 +3,7 @@ package aws
import (
"io"
"reflect"
"slices"
"strings"
"testing"
@ -250,13 +251,7 @@ func (m mocks3svc) getAllKeys() ([]string, error) {
}
func (m mocks3svc) downloadKey(name string) (io.ReadCloser, error) {
found := false
for _, key := range m.keys {
if key == name {
found = true
break
}
}
found := slices.Contains(m.keys, name)
if !found {
return nil, errors.New("key not found")
}

View File

@ -31,7 +31,7 @@ const (
CLIConfigFileName = "config.yaml"
ReportingConfigFilename = "reporting-config.yaml"
// Version is the current version of nuclei
Version = `v3.4.2`
Version = `v3.4.10`
// Directory Names of custom templates
CustomS3TemplatesDirName = "s3"
CustomGitHubTemplatesDirName = "github"
@ -46,18 +46,21 @@ const (
// if the current version is outdated
func IsOutdatedVersion(current, latest string) bool {
if latest == "" {
// if pdtm api call failed it's assumed that the current version is outdated
// and it will be confirmed while updating from GitHub
// this fixes `version string empty` errors
return true
// NOTE(dwisiswant0): if PDTM API call failed or returned empty, we
// cannot determine if templates are outdated w/o additional checks
// return false to avoid unnecessary updates.
return false
}
current = trimDevIfExists(current)
currentVer, _ := semver.NewVersion(current)
newVer, _ := semver.NewVersion(latest)
if currentVer == nil || newVer == nil {
// fallback to naive comparison
return current == latest
// fallback to naive comparison - return true only if they are different
return current != latest
}
return newVer.GreaterThan(currentVer)
}
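
The updated IsOutdatedVersion treats an empty latest version as "unknown" rather than "outdated", and the parse-failure fallback now reports outdated only when the strings differ. A sketch of the same comparison, assuming the commonly used Masterminds semver module (the real function also strips a -dev suffix before parsing):

```go
package main

import (
	"fmt"

	"github.com/Masterminds/semver/v3"
)

// isOutdated mirrors the updated logic: empty latest means "cannot tell",
// semver comparison when both sides parse, otherwise a naive inequality check.
func isOutdated(current, latest string) bool {
	if latest == "" {
		return false // latest unknown: do not force an update
	}
	currentVer, _ := semver.NewVersion(current)
	latestVer, _ := semver.NewVersion(latest)
	if currentVer == nil || latestVer == nil {
		return current != latest // fallback when either string is not semver
	}
	return latestVer.GreaterThan(currentVer)
}

func main() {
	fmt.Println(isOutdated("v3.4.2", "v3.4.10")) // true
	fmt.Println(isOutdated("v3.4.10", ""))       // false
	fmt.Println(isOutdated("dev", "dev"))        // false (naive fallback)
}
```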

View File

@ -20,7 +20,9 @@ func ReadIgnoreFile() IgnoreFile {
gologger.Error().Msgf("Could not read nuclei-ignore file: %s\n", err)
return IgnoreFile{}
}
defer file.Close()
defer func() {
_ = file.Close()
}()
ignore := IgnoreFile{}
if err := yaml.NewDecoder(file).Decode(&ignore); err != nil {

View File

@ -4,16 +4,16 @@ import (
"bytes"
"crypto/md5"
"fmt"
"log"
"os"
"path/filepath"
"slices"
"strings"
"sync"
"github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
"github.com/projectdiscovery/utils/env"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file"
folderutil "github.com/projectdiscovery/utils/folder"
)
@ -44,12 +44,15 @@ type Config struct {
LatestNucleiVersion string `json:"nuclei-latest-version"`
LatestNucleiTemplatesVersion string `json:"nuclei-templates-latest-version"`
LatestNucleiIgnoreHash string `json:"nuclei-latest-ignore-hash,omitempty"`
Logger *gologger.Logger `json:"-"` // logger
// internal / unexported fields
disableUpdates bool `json:"-"` // disable updates both version check and template updates
homeDir string `json:"-"` // User Home Directory
configDir string `json:"-"` // Nuclei Global Config Directory
debugArgs []string `json:"-"` // debug args
m sync.Mutex
}
// IsCustomTemplate determines whether a given template is custom-built or part of the official Nuclei templates.
@ -104,21 +107,29 @@ func (c *Config) GetTemplateDir() string {
// DisableUpdateCheck disables update check and template updates
func (c *Config) DisableUpdateCheck() {
c.m.Lock()
defer c.m.Unlock()
c.disableUpdates = true
}
// CanCheckForUpdates returns true if update check is enabled
func (c *Config) CanCheckForUpdates() bool {
c.m.Lock()
defer c.m.Unlock()
return !c.disableUpdates
}
// NeedsTemplateUpdate returns true if template installation/update is required
func (c *Config) NeedsTemplateUpdate() bool {
c.m.Lock()
defer c.m.Unlock()
return !c.disableUpdates && (c.TemplateVersion == "" || IsOutdatedVersion(c.TemplateVersion, c.LatestNucleiTemplatesVersion) || !fileutil.FolderExists(c.TemplatesDirectory))
}
// NeedsIgnoreFileUpdate returns true if Ignore file hash is different (aka ignore file is outdated)
func (c *Config) NeedsIgnoreFileUpdate() bool {
c.m.Lock()
defer c.m.Unlock()
return c.NucleiIgnoreHash == "" || c.NucleiIgnoreHash != c.LatestNucleiIgnoreHash
}
@ -129,13 +140,13 @@ func (c *Config) UpdateNucleiIgnoreHash() error {
if fileutil.FileExists(ignoreFilePath) {
bin, err := os.ReadFile(ignoreFilePath)
if err != nil {
return errorutil.NewWithErr(err).Msgf("could not read nuclei ignore file")
return errkit.Newf("could not read nuclei ignore file: %v", err)
}
c.NucleiIgnoreHash = fmt.Sprintf("%x", md5.Sum(bin))
// write config to disk
return c.WriteTemplatesConfig()
}
return errorutil.NewWithTag("config", "ignore file not found: could not update nuclei ignore hash")
return errkit.New("ignore file not found: could not update nuclei ignore hash")
}
// GetConfigDir returns the nuclei configuration directory
@ -210,7 +221,7 @@ func (c *Config) GetCacheDir() string {
func (c *Config) SetConfigDir(dir string) {
c.configDir = dir
if err := c.createConfigDirIfNotExists(); err != nil {
gologger.Fatal().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err)
c.Logger.Fatal().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err)
}
// if folder already exists read config or create new
@ -218,7 +229,7 @@ func (c *Config) SetConfigDir(dir string) {
// create new config
applyDefaultConfig()
if err2 := c.WriteTemplatesConfig(); err2 != nil {
gologger.Fatal().Msgf("Could not create nuclei config file at %s: %s", c.getTemplatesConfigFilePath(), err2)
c.Logger.Fatal().Msgf("Could not create nuclei config file at %s: %s", c.getTemplatesConfigFilePath(), err2)
}
}
@ -246,7 +257,7 @@ func (c *Config) SetTemplatesVersion(version string) error {
c.TemplateVersion = version
// write config to disk
if err := c.WriteTemplatesConfig(); err != nil {
return errorutil.NewWithErr(err).Msgf("could not write nuclei config file at %s", c.getTemplatesConfigFilePath())
return errkit.Newf("could not write nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
}
return nil
}
@ -254,15 +265,15 @@ func (c *Config) SetTemplatesVersion(version string) error {
// ReadTemplatesConfig reads the nuclei templates config file
func (c *Config) ReadTemplatesConfig() error {
if !fileutil.FileExists(c.getTemplatesConfigFilePath()) {
return errorutil.NewWithTag("config", "nuclei config file at %s does not exist", c.getTemplatesConfigFilePath())
return errkit.Newf("nuclei config file at %s does not exist", c.getTemplatesConfigFilePath())
}
var cfg *Config
bin, err := os.ReadFile(c.getTemplatesConfigFilePath())
if err != nil {
return errorutil.NewWithErr(err).Msgf("could not read nuclei config file at %s", c.getTemplatesConfigFilePath())
return errkit.Newf("could not read nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
}
if err := json.Unmarshal(bin, &cfg); err != nil {
return errorutil.NewWithErr(err).Msgf("could not unmarshal nuclei config file at %s", c.getTemplatesConfigFilePath())
return errkit.Newf("could not unmarshal nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
}
// apply config
c.TemplatesDirectory = cfg.TemplatesDirectory
@ -281,10 +292,10 @@ func (c *Config) WriteTemplatesConfig() error {
}
bin, err := json.Marshal(c)
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to marshal nuclei config")
return errkit.Newf("failed to marshal nuclei config: %v", err)
}
if err = os.WriteFile(c.getTemplatesConfigFilePath(), bin, 0600); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to write nuclei config file at %s", c.getTemplatesConfigFilePath())
return errkit.Newf("failed to write nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
}
return nil
}
@ -308,7 +319,7 @@ func (c *Config) getTemplatesConfigFilePath() string {
func (c *Config) createConfigDirIfNotExists() error {
if !fileutil.FolderExists(c.configDir) {
if err := fileutil.CreateFolder(c.configDir); err != nil {
return errorutil.NewWithErr(err).Msgf("could not create nuclei config directory at %s", c.configDir)
return errkit.Newf("could not create nuclei config directory at %s: %v", c.configDir, err)
}
}
return nil
@ -318,14 +329,14 @@ func (c *Config) createConfigDirIfNotExists() error {
// to the current config directory
func (c *Config) copyIgnoreFile() {
if err := c.createConfigDirIfNotExists(); err != nil {
gologger.Error().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err)
c.Logger.Error().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err)
return
}
ignoreFilePath := c.GetIgnoreFilePath()
if !fileutil.FileExists(ignoreFilePath) {
// copy ignore file from default config directory
if err := fileutil.CopyFile(filepath.Join(folderutil.AppConfigDirOrDefault(FallbackConfigFolderName, BinaryName), NucleiIgnoreFileName), ignoreFilePath); err != nil {
gologger.Error().Msgf("Could not copy nuclei ignore file at %s: %s", ignoreFilePath, err)
c.Logger.Error().Msgf("Could not copy nuclei ignore file at %s: %s", ignoreFilePath, err)
}
}
}
@ -334,12 +345,7 @@ func (c *Config) copyIgnoreFile() {
// this could be a feature specific to debugging like PPROF or printing stats
// of max host error etc
func (c *Config) IsDebugArgEnabled(arg string) bool {
for _, v := range c.debugArgs {
if v == arg {
return true
}
}
return false
return slices.Contains(c.debugArgs, arg)
}
// parseDebugArgs from string
@ -371,9 +377,6 @@ func (c *Config) parseDebugArgs(data string) {
}
func init() {
// first attempt to migrate all files from old config directory to new config directory
goflags.AttemptConfigMigration() // regardless how many times this is called it will only migrate once based on condition
ConfigDir := folderutil.AppConfigDirOrDefault(FallbackConfigFolderName, BinaryName)
if cfgDir := os.Getenv(NucleiConfigDirEnv); cfgDir != "" {
@ -389,6 +392,7 @@ func init() {
DefaultConfig = &Config{
homeDir: folderutil.HomeDirOrDefault(""),
configDir: ConfigDir,
Logger: gologger.DefaultLogger,
}
// when enabled will log events in more verbosity than -v or -debug
@ -410,9 +414,7 @@ func init() {
gologger.Error().Msgf("failed to write config file at %s got: %s", DefaultConfig.getTemplatesConfigFilePath(), err)
}
}
// attempt to migrate resume files
// this also happens once regardless of how many times this is called
migrateResumeFiles()
// Loads/updates paths of custom templates
// Note: custom templates paths should not be updated in config file
// and even if it is changed we don't follow it since it is not expected behavior
@ -427,61 +429,3 @@ func applyDefaultConfig() {
// updates all necessary paths
DefaultConfig.SetTemplatesDir(DefaultConfig.TemplatesDirectory)
}
func migrateResumeFiles() {
// attempt to migrate old resume files to new directory structure
// after migration has been done in goflags
oldResumeDir := DefaultConfig.GetConfigDir()
// migrate old resume file to new directory structure
if !fileutil.FileOrFolderExists(DefaultConfig.GetCacheDir()) && fileutil.FileOrFolderExists(oldResumeDir) {
// this means new cache dir doesn't exist, so we need to migrate
// first check if old resume file exists if not then no need to migrate
exists := false
files, err := os.ReadDir(oldResumeDir)
if err != nil {
// log silently
log.Printf("could not read old resume dir: %s\n", err)
return
}
for _, file := range files {
if strings.HasSuffix(file.Name(), ".cfg") {
exists = true
break
}
}
if !exists {
// no need to migrate
return
}
// create new cache dir
err = os.MkdirAll(DefaultConfig.GetCacheDir(), os.ModePerm)
if err != nil {
// log silently
log.Printf("could not create new cache dir: %s\n", err)
return
}
err = filepath.WalkDir(oldResumeDir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
if !strings.HasSuffix(path, ".cfg") {
return nil
}
err = os.Rename(path, filepath.Join(DefaultConfig.GetCacheDir(), filepath.Base(path)))
if err != nil {
return err
}
return nil
})
if err != nil {
// log silently
log.Printf("could not migrate old resume files: %s\n", err)
return
}
}
}

View File

@ -7,7 +7,6 @@ import (
"path/filepath"
"strings"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/extensions"
fileutil "github.com/projectdiscovery/utils/file"
stringsutil "github.com/projectdiscovery/utils/strings"
@ -74,7 +73,9 @@ func getTemplateID(filePath string) (string, error) {
return "", err
}
defer file.Close()
defer func() {
_ = file.Close()
}()
return GetTemplateIDFromReader(file, filePath)
}
@ -96,7 +97,7 @@ func GetNucleiTemplatesIndex() (map[string]string, error) {
return index, nil
}
}
gologger.Error().Msgf("failed to read index file creating new one: %v", err)
DefaultConfig.Logger.Error().Msgf("failed to read index file creating new one: %v", err)
}
ignoreDirs := DefaultConfig.GetAllCustomTemplateDirs()
@ -107,7 +108,7 @@ func GetNucleiTemplatesIndex() (map[string]string, error) {
}
err := filepath.WalkDir(DefaultConfig.TemplatesDirectory, func(path string, d os.DirEntry, err error) error {
if err != nil {
gologger.Verbose().Msgf("failed to walk path=%v err=%v", path, err)
DefaultConfig.Logger.Verbose().Msgf("failed to walk path=%v err=%v", path, err)
return nil
}
if d.IsDir() || !IsTemplate(path) || stringsutil.ContainsAny(path, ignoreDirs...) {
@ -116,7 +117,7 @@ func GetNucleiTemplatesIndex() (map[string]string, error) {
// get template id from file
id, err := getTemplateID(path)
if err != nil || id == "" {
gologger.Verbose().Msgf("failed to get template id from file=%v got id=%v err=%v", path, id, err)
DefaultConfig.Logger.Verbose().Msgf("failed to get template id from file=%v got id=%v err=%v", path, id, err)
return nil
}
index[id] = path

View File

@ -8,7 +8,6 @@ import (
"github.com/logrusorgru/aurora"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
stringsutil "github.com/projectdiscovery/utils/strings"
updateutils "github.com/projectdiscovery/utils/update"
@ -84,7 +83,7 @@ func (c *DiskCatalog) GetTemplatePath(target string) ([]string, error) {
absPath = BackwardsCompatiblePaths(c.templatesDirectory, target)
if absPath != target && strings.TrimPrefix(absPath, c.templatesDirectory+string(filepath.Separator)) != target {
if config.DefaultConfig.LogAllEvents {
gologger.DefaultLogger.Print().Msgf("[%v] requested Template path %s is deprecated, please update to %s\n", aurora.Yellow("WRN").String(), target, absPath)
config.DefaultConfig.Logger.Print().Msgf("[%v] requested Template path %s is deprecated, please update to %s\n", aurora.Yellow("WRN").String(), target, absPath)
}
deprecatedPathsCounter++
}
@ -302,6 +301,6 @@ func PrintDeprecatedPathsMsgIfApplicable(isSilent bool) {
return
}
if deprecatedPathsCounter > 0 && !isSilent {
gologger.Print().Msgf("[%v] Found %v template[s] loaded with deprecated paths, update before v3 for continued support.\n", aurora.Yellow("WRN").String(), deprecatedPathsCounter)
config.DefaultConfig.Logger.Print().Msgf("[%v] Found %v template[s] loaded with deprecated paths, update before v3 for continued support.\n", aurora.Yellow("WRN").String(), deprecatedPathsCounter)
}
}

View File

@ -10,12 +10,11 @@ import (
"strings"
"github.com/alecthomas/chroma/quick"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/retryablehttp-go"
pdcpauth "github.com/projectdiscovery/utils/auth/pdcp"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
)
const (
@ -34,31 +33,31 @@ type AITemplateResponse struct {
func getAIGeneratedTemplates(prompt string, options *types.Options) ([]string, error) {
prompt = strings.TrimSpace(prompt)
if len(prompt) < 5 {
return nil, errorutil.New("Prompt is too short. Please provide a more descriptive prompt")
return nil, errkit.Newf("Prompt is too short. Please provide a more descriptive prompt")
}
if len(prompt) > 3000 {
return nil, errorutil.New("Prompt is too long. Please limit to 3000 characters")
return nil, errkit.Newf("Prompt is too long. Please limit to 3000 characters")
}
template, templateID, err := generateAITemplate(prompt)
if err != nil {
return nil, errorutil.New("Failed to generate template: %v", err)
return nil, errkit.Newf("Failed to generate template: %v", err)
}
pdcpTemplateDir := filepath.Join(config.DefaultConfig.GetTemplateDir(), "pdcp")
if err := os.MkdirAll(pdcpTemplateDir, 0755); err != nil {
return nil, errorutil.New("Failed to create pdcp template directory: %v", err)
return nil, errkit.Newf("Failed to create pdcp template directory: %v", err)
}
templateFile := filepath.Join(pdcpTemplateDir, templateID+".yaml")
err = os.WriteFile(templateFile, []byte(template), 0644)
if err != nil {
return nil, errorutil.New("Failed to generate template: %v", err)
return nil, errkit.Newf("Failed to generate template: %v", err)
}
gologger.Info().Msgf("Generated template available at: https://cloud.projectdiscovery.io/templates/%s", templateID)
gologger.Info().Msgf("Generated template path: %s", templateFile)
options.Logger.Info().Msgf("Generated template available at: https://cloud.projectdiscovery.io/templates/%s", templateID)
options.Logger.Info().Msgf("Generated template path: %s", templateFile)
// Check if we should display the template
// This happens when:
@ -76,7 +75,7 @@ func getAIGeneratedTemplates(prompt string, options *types.Options) ([]string, e
template = buf.String()
}
}
gologger.Silent().Msgf("\n%s", template)
options.Logger.Debug().Msgf("\n%s", template)
// FIXME:
// we should not be exiting the program here
// but we need to find a better way to handle this
@ -92,22 +91,22 @@ func generateAITemplate(prompt string) (string, string, error) {
}
jsonBody, err := json.Marshal(reqBody)
if err != nil {
return "", "", errorutil.New("Failed to marshal request body: %v", err)
return "", "", errkit.Newf("Failed to marshal request body: %v", err)
}
req, err := http.NewRequest(http.MethodPost, aiTemplateGeneratorAPIEndpoint, bytes.NewBuffer(jsonBody))
if err != nil {
return "", "", errorutil.New("Failed to create HTTP request: %v", err)
return "", "", errkit.Newf("Failed to create HTTP request: %v", err)
}
ph := pdcpauth.PDCPCredHandler{}
creds, err := ph.GetCreds()
if err != nil {
return "", "", errorutil.New("Failed to get PDCP credentials: %v", err)
return "", "", errkit.Newf("Failed to get PDCP credentials: %v", err)
}
if creds == nil {
return "", "", errorutil.New("PDCP API Key not configured, Create one for free at https://cloud.projectdiscovery.io/")
return "", "", errkit.Newf("PDCP API Key not configured, Create one for free at https://cloud.projectdiscovery.io/")
}
req.Header.Set("Content-Type", "application/json")
@ -115,26 +114,28 @@ func generateAITemplate(prompt string) (string, string, error) {
resp, err := retryablehttp.DefaultClient().Do(req)
if err != nil {
return "", "", errorutil.New("Failed to send HTTP request: %v", err)
return "", "", errkit.Newf("Failed to send HTTP request: %v", err)
}
defer resp.Body.Close()
defer func() {
_ = resp.Body.Close()
}()
if resp.StatusCode == http.StatusUnauthorized {
return "", "", errorutil.New("Invalid API Key or API Key not configured, Create one for free at https://cloud.projectdiscovery.io/")
return "", "", errkit.Newf("Invalid API Key or API Key not configured, Create one for free at https://cloud.projectdiscovery.io/")
}
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return "", "", errorutil.New("API returned status code %d: %s", resp.StatusCode, string(body))
return "", "", errkit.Newf("API returned status code %d: %s", resp.StatusCode, string(body))
}
var result AITemplateResponse
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
return "", "", errorutil.New("Failed to decode API response: %v", err)
return "", "", errkit.Newf("Failed to decode API response: %v", err)
}
if result.TemplateID == "" || result.Completion == "" {
return "", "", errorutil.New("Failed to generate template")
return "", "", errkit.Newf("Failed to generate template")
}
return result.Completion, result.TemplateID, nil

View File

@ -7,7 +7,6 @@ import (
"os"
"sort"
"strings"
"sync"
"github.com/logrusorgru/aurora"
"github.com/pkg/errors"
@ -18,16 +17,20 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/keys"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
templateTypes "github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/workflows"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
mapsutil "github.com/projectdiscovery/utils/maps"
sliceutil "github.com/projectdiscovery/utils/slice"
stringsutil "github.com/projectdiscovery/utils/strings"
syncutil "github.com/projectdiscovery/utils/sync"
urlutil "github.com/projectdiscovery/utils/url"
"github.com/rs/xid"
)
const (
@ -65,7 +68,8 @@ type Config struct {
IncludeConditions []string
Catalog catalog.Catalog
ExecutorOptions protocols.ExecutorOptions
ExecutorOptions *protocols.ExecutorOptions
Logger *gologger.Logger
}
// Store is a storage for loaded nuclei templates
@ -82,13 +86,15 @@ type Store struct {
preprocessor templates.Preprocessor
logger *gologger.Logger
// NotFoundCallback is called for each not found template
// This overrides error handling for not found templates
NotFoundCallback func(template string) bool
}
// NewConfig returns a new loader config
func NewConfig(options *types.Options, catalog catalog.Catalog, executerOpts protocols.ExecutorOptions) *Config {
func NewConfig(options *types.Options, catalog catalog.Catalog, executerOpts *protocols.ExecutorOptions) *Config {
loaderConfig := Config{
Templates: options.Templates,
Workflows: options.Workflows,
@ -111,6 +117,7 @@ func NewConfig(options *types.Options, catalog catalog.Catalog, executerOpts pro
Catalog: catalog,
ExecutorOptions: executerOpts,
AITemplatePrompt: options.AITemplatePrompt,
Logger: options.Logger,
}
loaderConfig.RemoteTemplateDomainList = append(loaderConfig.RemoteTemplateDomainList, TrustedTemplateDomains...)
return &loaderConfig
@ -145,6 +152,7 @@ func New(cfg *Config) (*Store, error) {
}, cfg.Catalog),
finalTemplates: cfg.Templates,
finalWorkflows: cfg.Workflows,
logger: cfg.Logger,
}
// Do a check to see if we have URLs in templates flag, if so
@ -231,13 +239,15 @@ func (store *Store) ReadTemplateFromURI(uri string, remote bool) ([]byte, error)
uri = handleTemplatesEditorURLs(uri)
remoteTemplates, _, err := getRemoteTemplatesAndWorkflows([]string{uri}, nil, store.config.RemoteTemplateDomainList)
if err != nil || len(remoteTemplates) == 0 {
return nil, errorutil.NewWithErr(err).Msgf("Could not load template %s: got %v", uri, remoteTemplates)
return nil, errkit.Wrapf(err, "Could not load template %s: got %v", uri, remoteTemplates)
}
resp, err := retryablehttp.Get(remoteTemplates[0])
if err != nil {
return nil, err
}
defer resp.Body.Close()
defer func() {
_ = resp.Body.Close()
}()
return io.ReadAll(resp.Body)
} else {
return os.ReadFile(uri)
@ -293,11 +303,11 @@ func (store *Store) LoadTemplatesOnlyMetadata() error {
if strings.Contains(err.Error(), templates.ErrExcluded.Error()) {
stats.Increment(templates.TemplatesExcludedStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
store.logger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
}
continue
}
gologger.Warning().Msg(err.Error())
store.logger.Warning().Msg(err.Error())
}
}
parserItem, ok := store.config.ExecutorOptions.Parser.(*templates.Parser)
@ -306,6 +316,8 @@ func (store *Store) LoadTemplatesOnlyMetadata() error {
}
templatesCache := parserItem.Cache()
loadedTemplateIDs := mapsutil.NewSyncLockMap[string, struct{}]()
for templatePath := range validPaths {
template, _, _ := templatesCache.Has(templatePath)
@ -330,6 +342,12 @@ func (store *Store) LoadTemplatesOnlyMetadata() error {
}
if template != nil {
if loadedTemplateIDs.Has(template.ID) {
store.logger.Debug().Msgf("Skipping duplicate template ID '%s' from path '%s'", template.ID, templatePath)
continue
}
_ = loadedTemplateIDs.Set(template.ID, struct{}{})
template.Path = templatePath
store.templates = append(store.templates, template)
}
@ -356,15 +374,13 @@ func (store *Store) ValidateTemplates() error {
func (store *Store) areWorkflowsValid(filteredWorkflowPaths map[string]struct{}) bool {
return store.areWorkflowOrTemplatesValid(filteredWorkflowPaths, true, func(templatePath string, tagFilter *templates.TagFilter) (bool, error) {
return false, nil
// return store.config.ExecutorOptions.Parser.LoadWorkflow(templatePath, store.config.Catalog)
return store.config.ExecutorOptions.Parser.LoadWorkflow(templatePath, store.config.Catalog)
})
}
func (store *Store) areTemplatesValid(filteredTemplatePaths map[string]struct{}) bool {
return store.areWorkflowOrTemplatesValid(filteredTemplatePaths, false, func(templatePath string, tagFilter *templates.TagFilter) (bool, error) {
return false, nil
// return store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog)
return store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog)
})
}
@ -373,7 +389,7 @@ func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string
for templatePath := range filteredTemplatePaths {
if _, err := load(templatePath, store.tagFilter); err != nil {
if isParsingError("Error occurred loading template %s: %s\n", templatePath, err) {
if isParsingError(store, "Error occurred loading template %s: %s\n", templatePath, err) {
areTemplatesValid = false
continue
}
@ -381,7 +397,7 @@ func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string
template, err := templates.Parse(templatePath, store.preprocessor, store.config.ExecutorOptions)
if err != nil {
if isParsingError("Error occurred parsing template %s: %s\n", templatePath, err) {
if isParsingError(store, "Error occurred parsing template %s: %s\n", templatePath, err) {
areTemplatesValid = false
continue
}
@ -406,7 +422,7 @@ func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string
// TODO: until https://github.com/projectdiscovery/nuclei-templates/issues/11324 is deployed
// disable strict validation to allow GH actions to run
// areTemplatesValid = false
gologger.Warning().Msgf("Found duplicate template ID during validation '%s' => '%s': %s\n", templatePath, existingTemplatePath, template.ID)
store.logger.Warning().Msgf("Found duplicate template ID during validation '%s' => '%s': %s\n", templatePath, existingTemplatePath, template.ID)
}
if !isWorkflow && len(template.Workflows) > 0 {
continue
@ -429,7 +445,7 @@ func areWorkflowTemplatesValid(store *Store, workflows []*workflows.WorkflowTemp
}
_, err := store.config.Catalog.GetTemplatePath(workflow.Template)
if err != nil {
if isParsingError("Error occurred loading template %s: %s\n", workflow.Template, err) {
if isParsingError(store, "Error occurred loading template %s: %s\n", workflow.Template, err) {
return false
}
}
@ -437,14 +453,14 @@ func areWorkflowTemplatesValid(store *Store, workflows []*workflows.WorkflowTemp
return true
}
func isParsingError(message string, template string, err error) bool {
func isParsingError(store *Store, message string, template string, err error) bool {
if errors.Is(err, templates.ErrExcluded) {
return false
}
if errors.Is(err, templates.ErrCreateTemplateExecutor) {
return false
}
gologger.Error().Msgf(message, template, err)
store.logger.Error().Msgf(message, template, err)
return true
}
@ -463,12 +479,12 @@ func (store *Store) LoadWorkflows(workflowsList []string) []*templates.Template
for workflowPath := range workflowPathMap {
loaded, err := store.config.ExecutorOptions.Parser.LoadWorkflow(workflowPath, store.config.Catalog)
if err != nil {
gologger.Warning().Msgf("Could not load workflow %s: %s\n", workflowPath, err)
store.logger.Warning().Msgf("Could not load workflow %s: %s\n", workflowPath, err)
}
if loaded {
parsed, err := templates.Parse(workflowPath, store.preprocessor, store.config.ExecutorOptions)
if err != nil {
gologger.Warning().Msgf("Could not parse workflow %s: %s\n", workflowPath, err)
store.logger.Warning().Msgf("Could not parse workflow %s: %s\n", workflowPath, err)
} else if parsed != nil {
loadedWorkflows = append(loadedWorkflows, parsed)
}
@ -485,8 +501,16 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
templatePathMap := store.pathFilter.Match(includedTemplates)
loadedTemplates := sliceutil.NewSyncSlice[*templates.Template]()
loadedTemplateIDs := mapsutil.NewSyncLockMap[string, struct{}]()
loadTemplate := func(tmpl *templates.Template) {
if loadedTemplateIDs.Has(tmpl.ID) {
store.logger.Debug().Msgf("Skipping duplicate template ID '%s' from path '%s'", tmpl.ID, tmpl.Path)
return
}
_ = loadedTemplateIDs.Set(tmpl.ID, struct{}{})
loadedTemplates.Append(tmpl)
// increment signed/unsigned counters
if tmpl.Verified {
@ -500,10 +524,22 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
}
}
var wgLoadTemplates sync.WaitGroup
wgLoadTemplates, errWg := syncutil.New(syncutil.WithSize(50))
if errWg != nil {
panic("could not create wait group")
}
if store.config.ExecutorOptions.Options.ExecutionId == "" {
store.config.ExecutorOptions.Options.ExecutionId = xid.New().String()
}
dialers := protocolstate.GetDialersWithId(store.config.ExecutorOptions.Options.ExecutionId)
if dialers == nil {
panic("dialers with executionId " + store.config.ExecutorOptions.Options.ExecutionId + " not found")
}
for templatePath := range templatePathMap {
wgLoadTemplates.Add(1)
wgLoadTemplates.Add()
go func(templatePath string) {
defer wgLoadTemplates.Done()
@ -515,7 +551,7 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
if !errors.Is(err, templates.ErrIncompatibleWithOfflineMatching) {
stats.Increment(templates.RuntimeWarningsStats)
}
gologger.Warning().Msgf("Could not parse template %s: %s\n", templatePath, err)
store.logger.Warning().Msgf("Could not parse template %s: %s\n", templatePath, err)
} else if parsed != nil {
if !parsed.Verified && store.config.ExecutorOptions.Options.DisableUnsignedTemplates {
// skip unverified templates when prompted to
@ -544,19 +580,26 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
// check if the template is a DAST template
// also allow global matchers template to be loaded
if parsed.IsFuzzing() || parsed.Options.GlobalMatchers != nil && parsed.Options.GlobalMatchers.HasMatchers() {
if len(parsed.RequestsHeadless) > 0 && !store.config.ExecutorOptions.Options.Headless {
stats.Increment(templates.ExcludedHeadlessTmplStats)
if config.DefaultConfig.LogAllEvents {
store.logger.Print().Msgf("[%v] Headless flag is required for headless template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else {
loadTemplate(parsed)
}
}
} else if len(parsed.RequestsHeadless) > 0 && !store.config.ExecutorOptions.Options.Headless {
// donot include headless template in final list if headless flag is not set
stats.Increment(templates.ExcludedHeadlessTmplStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] Headless flag is required for headless template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
store.logger.Print().Msgf("[%v] Headless flag is required for headless template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else if len(parsed.RequestsCode) > 0 && !store.config.ExecutorOptions.Options.EnableCodeTemplates {
// donot include 'Code' protocol custom template in final list if code flag is not set
stats.Increment(templates.ExcludedCodeTmplStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] Code flag is required for code protocol template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
store.logger.Print().Msgf("[%v] Code flag is required for code protocol template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else if len(parsed.RequestsCode) > 0 && !parsed.Verified && len(parsed.Workflows) == 0 {
// donot include unverified 'Code' protocol custom template in final list
@ -564,12 +607,12 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
// these will be skipped so increment skip counter
stats.Increment(templates.SkippedUnsignedStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] Tampered/Unsigned template at %v.\n", aurora.Yellow("WRN").String(), templatePath)
store.logger.Print().Msgf("[%v] Tampered/Unsigned template at %v.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else if parsed.IsFuzzing() && !store.config.ExecutorOptions.Options.DAST {
stats.Increment(templates.ExludedDastTmplStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] -dast flag is required for DAST template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
store.logger.Print().Msgf("[%v] -dast flag is required for DAST template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else {
loadTemplate(parsed)
@ -580,11 +623,11 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
if strings.Contains(err.Error(), templates.ErrExcluded.Error()) {
stats.Increment(templates.TemplatesExcludedStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
store.logger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
}
return
}
gologger.Warning().Msg(err.Error())
store.logger.Warning().Msg(err.Error())
}
}(templatePath)
}
@ -640,7 +683,7 @@ func workflowContainsProtocol(workflow []*workflows.WorkflowTemplate) bool {
func (s *Store) logErroredTemplates(erred map[string]error) {
for template, err := range erred {
if s.NotFoundCallback == nil || !s.NotFoundCallback(template) {
gologger.Error().Msgf("Could not find template '%s': %s", template, err)
s.logger.Error().Msgf("Could not find template '%s': %s", template, err)
}
}
}
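
Both metadata loading and LoadTemplatesWithTags now skip templates whose ID has already been seen, using a concurrent map. A plain map-plus-mutex sketch of that duplicate-ID guard (the loader itself uses a SyncLockMap from projectdiscovery/utils; the IDs and paths below are hypothetical):

```go
package main

import (
	"fmt"
	"sync"
)

// dedupLoader keeps the first template seen for each ID and skips the rest,
// mirroring the duplicate-ID guard in the loader above.
type dedupLoader struct {
	mu     sync.Mutex
	seen   map[string]struct{}
	loaded []string
}

func (d *dedupLoader) load(id, path string) {
	d.mu.Lock()
	defer d.mu.Unlock()
	if _, ok := d.seen[id]; ok {
		fmt.Printf("skipping duplicate template ID %q from %s\n", id, path)
		return
	}
	d.seen[id] = struct{}{}
	d.loaded = append(d.loaded, path)
}

func main() {
	d := &dedupLoader{seen: map[string]struct{}{}}
	var wg sync.WaitGroup
	for _, p := range [][2]string{
		{"CVE-2024-0001", "a.yaml"},
		{"CVE-2024-0001", "copy-of-a.yaml"},
		{"CVE-2024-0002", "b.yaml"},
	} {
		wg.Add(1)
		go func(id, path string) { defer wg.Done(); d.load(id, path) }(p[0], p[1])
	}
	wg.Wait()
	fmt.Println("loaded templates:", len(d.loaded)) // 2
}
```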

View File

@ -5,13 +5,16 @@ import (
"fmt"
"net/url"
"strings"
"sync"
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/extensions"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/projectdiscovery/retryablehttp-go"
sliceutil "github.com/projectdiscovery/utils/slice"
stringsutil "github.com/projectdiscovery/utils/strings"
syncutil "github.com/projectdiscovery/utils/sync"
)
type ContentType string
@ -28,64 +31,73 @@ type RemoteContent struct {
}
func getRemoteTemplatesAndWorkflows(templateURLs, workflowURLs, remoteTemplateDomainList []string) ([]string, []string, error) {
remoteContentChannel := make(chan RemoteContent)
var (
err error
muErr sync.Mutex
)
remoteTemplateList := sliceutil.NewSyncSlice[string]()
remoteWorkFlowList := sliceutil.NewSyncSlice[string]()
for _, templateURL := range templateURLs {
go getRemoteContent(templateURL, remoteTemplateDomainList, remoteContentChannel, Template)
}
for _, workflowURL := range workflowURLs {
go getRemoteContent(workflowURL, remoteTemplateDomainList, remoteContentChannel, Workflow)
awg, errAwg := syncutil.New(syncutil.WithSize(50))
if errAwg != nil {
return nil, nil, errAwg
}
var remoteTemplateList []string
var remoteWorkFlowList []string
var err error
for i := 0; i < (len(templateURLs) + len(workflowURLs)); i++ {
remoteContent := <-remoteContentChannel
loadItem := func(URL string, contentType ContentType) {
defer awg.Done()
remoteContent := getRemoteContent(URL, remoteTemplateDomainList, contentType)
if remoteContent.Error != nil {
muErr.Lock()
if err != nil {
err = errors.New(remoteContent.Error.Error() + ": " + err.Error())
} else {
err = remoteContent.Error
}
muErr.Unlock()
} else {
if remoteContent.Type == Template {
remoteTemplateList = append(remoteTemplateList, remoteContent.Content...)
} else if remoteContent.Type == Workflow {
remoteWorkFlowList = append(remoteWorkFlowList, remoteContent.Content...)
switch remoteContent.Type {
case Template:
remoteTemplateList.Append(remoteContent.Content...)
case Workflow:
remoteWorkFlowList.Append(remoteContent.Content...)
}
}
}
return remoteTemplateList, remoteWorkFlowList, err
for _, templateURL := range templateURLs {
awg.Add()
go loadItem(templateURL, Template)
}
for _, workflowURL := range workflowURLs {
awg.Add()
go loadItem(workflowURL, Workflow)
}
awg.Wait()
return remoteTemplateList.Slice, remoteWorkFlowList.Slice, err
}
func getRemoteContent(URL string, remoteTemplateDomainList []string, remoteContentChannel chan<- RemoteContent, contentType ContentType) {
func getRemoteContent(URL string, remoteTemplateDomainList []string, contentType ContentType) RemoteContent {
if err := validateRemoteTemplateURL(URL, remoteTemplateDomainList); err != nil {
remoteContentChannel <- RemoteContent{
Error: err,
}
return
return RemoteContent{Error: err}
}
if strings.HasPrefix(URL, "http") && stringsutil.HasSuffixAny(URL, extensions.YAML) {
remoteContentChannel <- RemoteContent{
return RemoteContent{
Content: []string{URL},
Type: contentType,
}
return
}
response, err := retryablehttp.DefaultClient().Get(URL)
if err != nil {
remoteContentChannel <- RemoteContent{
Error: err,
return RemoteContent{Error: err}
}
return
}
defer response.Body.Close()
defer func() {
_ = response.Body.Close()
}()
if response.StatusCode < 200 || response.StatusCode > 299 {
remoteContentChannel <- RemoteContent{
Error: fmt.Errorf("get \"%s\": unexpect status %d", URL, response.StatusCode),
}
return
return RemoteContent{Error: fmt.Errorf("get \"%s\": unexpect status %d", URL, response.StatusCode)}
}
scanner := bufio.NewScanner(response.Body)
@ -97,23 +109,17 @@ func getRemoteContent(URL string, remoteTemplateDomainList []string, remoteConte
}
if utils.IsURL(text) {
if err := validateRemoteTemplateURL(text, remoteTemplateDomainList); err != nil {
remoteContentChannel <- RemoteContent{
Error: err,
}
return
return RemoteContent{Error: err}
}
}
templateList = append(templateList, text)
}
if err := scanner.Err(); err != nil {
remoteContentChannel <- RemoteContent{
Error: errors.Wrap(err, "get \"%s\""),
}
return
return RemoteContent{Error: errors.Wrap(err, "get \"%s\"")}
}
remoteContentChannel <- RemoteContent{
return RemoteContent{
Content: templateList,
Type: contentType,
}
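
The remote template/workflow fetch no longer funnels results through a channel counted by hand; each URL runs in a bounded goroutine that appends to shared, lock-protected lists. A sketch of that shape using only the standard library (a buffered channel stands in for the size-50 adaptive wait group, and fetch stands in for the real remote-content request):

```go
package main

import (
	"fmt"
	"sync"
)

// fetchAll runs fetch for every URL with at most `limit` concurrent calls and
// collects results under a mutex, instead of counting channel receives.
func fetchAll(urls []string, limit int, fetch func(string) ([]string, error)) ([]string, error) {
	var (
		mu       sync.Mutex
		wg       sync.WaitGroup
		results  []string
		firstErr error
	)
	sem := make(chan struct{}, limit) // bounds concurrency, like the size-50 wait group

	for _, u := range urls {
		wg.Add(1)
		sem <- struct{}{}
		go func(u string) {
			defer wg.Done()
			defer func() { <-sem }()
			content, err := fetch(u)
			mu.Lock()
			defer mu.Unlock()
			if err != nil {
				if firstErr == nil {
					firstErr = err
				}
				return
			}
			results = append(results, content...)
		}(u)
	}
	wg.Wait()
	return results, firstErr
}

func main() {
	out, err := fetchAll([]string{"u1", "u2", "u3"}, 2, func(u string) ([]string, error) {
		return []string{u + "/template.yaml"}, nil
	})
	fmt.Println(out, err)
}
```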

View File

@ -1,6 +1,7 @@
package core
import (
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
@ -17,14 +18,16 @@ import (
type Engine struct {
workPool *WorkPool
options *types.Options
executerOpts protocols.ExecutorOptions
executerOpts *protocols.ExecutorOptions
Callback func(*output.ResultEvent) // Executed on results
Logger *gologger.Logger
}
// New returns a new Engine instance
func New(options *types.Options) *Engine {
engine := &Engine{
options: options,
Logger: options.Logger,
}
engine.workPool = engine.GetWorkPool()
return engine
@ -47,12 +50,12 @@ func (e *Engine) GetWorkPool() *WorkPool {
// SetExecuterOptions sets the executer options for the engine. This is required
// before using the engine to perform any execution.
func (e *Engine) SetExecuterOptions(options protocols.ExecutorOptions) {
func (e *Engine) SetExecuterOptions(options *protocols.ExecutorOptions) {
e.executerOpts = options
}
// ExecuterOptions returns protocols.ExecutorOptions for nuclei engine.
func (e *Engine) ExecuterOptions() protocols.ExecutorOptions {
func (e *Engine) ExecuterOptions() *protocols.ExecutorOptions {
return e.executerOpts
}

View File

@ -5,7 +5,6 @@ import (
"sync"
"sync/atomic"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
@ -50,7 +49,7 @@ func (e *Engine) ExecuteScanWithOpts(ctx context.Context, templatesList []*templ
totalReqAfterClustering := getRequestCount(finalTemplates) * int(target.Count())
if !noCluster && totalReqAfterClustering < totalReqBeforeCluster {
gologger.Info().Msgf("Templates clustered: %d (Reduced %d Requests)", clusterCount, totalReqBeforeCluster-totalReqAfterClustering)
e.Logger.Info().Msgf("Templates clustered: %d (Reduced %d Requests)", clusterCount, totalReqBeforeCluster-totalReqAfterClustering)
}
// 0 matches means no templates were found in the directory
@ -110,6 +109,8 @@ func (e *Engine) executeTemplateSpray(ctx context.Context, templatesList []*temp
defer wp.Wait()
for _, template := range templatesList {
template := template
select {
case <-ctx.Done():
return results

View File

@ -4,9 +4,10 @@ import (
"context"
"sync"
"sync/atomic"
"time"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/scan"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
@ -38,7 +39,7 @@ func (e *Engine) executeAllSelfContained(ctx context.Context, alltemplates []*te
match, err = template.Executer.Execute(ctx)
}
if err != nil {
gologger.Warning().Msgf("[%s] Could not execute step: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
e.options.Logger.Warning().Msgf("[%s] Could not execute step (self-contained): %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
}
results.CompareAndSwap(false, match)
}(v)
@ -47,8 +48,15 @@ func (e *Engine) executeAllSelfContained(ctx context.Context, alltemplates []*te
// executeTemplateWithTargets executes a given template on x targets (with a internal targetpool(i.e concurrency))
func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templates.Template, target provider.InputProvider, results *atomic.Bool) {
// this is target pool i.e max target to execute
wg := e.workPool.InputPool(template.Type())
if e.workPool == nil {
e.workPool = e.GetWorkPool()
}
// Bounded worker pool using input concurrency
pool := e.workPool.InputPool(template.Type())
workerCount := 1
if pool != nil && pool.Size > 0 {
workerCount = pool.Size
}
var (
index uint32
@ -77,6 +85,41 @@ func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templ
currentInfo.Unlock()
}
// task represents a single target execution unit
type task struct {
index uint32
skip bool
value *contextargs.MetaInput
}
tasks := make(chan task)
var workersWg sync.WaitGroup
workersWg.Add(workerCount)
for i := 0; i < workerCount; i++ {
go func() {
defer workersWg.Done()
for t := range tasks {
func() {
defer cleanupInFlight(t.index)
select {
case <-ctx.Done():
return
default:
}
if t.skip {
return
}
match, err := e.executeTemplateOnInput(ctx, template, t.value)
if err != nil {
e.options.Logger.Warning().Msgf("[%s] Could not execute step on %s: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), t.value.Input, err)
}
results.CompareAndSwap(false, match)
}()
}
}()
}
target.Iterate(func(scannedValue *contextargs.MetaInput) bool {
select {
case <-ctx.Done():
@ -88,13 +131,13 @@ func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templ
// skips indexes lower than the minimum in-flight at interruption time
var skip bool
if resumeFromInfo.Completed { // the template was completed
gologger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Template already completed\n", template.ID, scannedValue.Input)
e.options.Logger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Template already completed", template.ID, scannedValue.Input)
skip = true
} else if index < resumeFromInfo.SkipUnder { // index lower than the sliding window (bulk-size)
gologger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Target already processed\n", template.ID, scannedValue.Input)
e.options.Logger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Target already processed", template.ID, scannedValue.Input)
skip = true
} else if _, isInFlight := resumeFromInfo.InFlight[index]; isInFlight { // the target wasn't completed successfully
gologger.Debug().Msgf("[%s] Repeating \"%s\": Resume - Target wasn't completed\n", template.ID, scannedValue.Input)
e.options.Logger.Debug().Msgf("[%s] Repeating \"%s\": Resume - Target wasn't completed", template.ID, scannedValue.Input)
// skip is already false, but leaving it here for clarity
skip = false
} else if index > resumeFromInfo.DoAbove { // index above the sliding window (bulk-size)
@ -108,46 +151,32 @@ func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templ
// Skip if the host has had errors
if e.executerOpts.HostErrorsCache != nil && e.executerOpts.HostErrorsCache.Check(e.executerOpts.ProtocolType.String(), contextargs.NewWithMetaInput(ctx, scannedValue)) {
skipEvent := &output.ResultEvent{
TemplateID: template.ID,
TemplatePath: template.Path,
Info: template.Info,
Type: e.executerOpts.ProtocolType.String(),
Host: scannedValue.Input,
MatcherStatus: false,
Error: "host was skipped as it was found unresponsive",
Timestamp: time.Now(),
}
if e.Callback != nil {
e.Callback(skipEvent)
} else if e.executerOpts.Output != nil {
_ = e.executerOpts.Output.Write(skipEvent)
}
return true
}
wg.Add()
go func(index uint32, skip bool, value *contextargs.MetaInput) {
defer wg.Done()
defer cleanupInFlight(index)
if skip {
return
}
var match bool
var err error
ctxArgs := contextargs.New(ctx)
ctxArgs.MetaInput = value
ctx := scan.NewScanContext(ctx, ctxArgs)
switch template.Type() {
case types.WorkflowProtocol:
match = e.executeWorkflow(ctx, template.CompiledWorkflow)
default:
if e.Callback != nil {
if results, err := template.Executer.ExecuteWithResults(ctx); err == nil {
for _, result := range results {
e.Callback(result)
}
}
match = true
} else {
match, err = template.Executer.Execute(ctx)
}
}
if err != nil {
gologger.Warning().Msgf("[%s] Could not execute step: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
}
results.CompareAndSwap(false, match)
}(index, skip, scannedValue)
tasks <- task{index: index, skip: skip, value: scannedValue}
index++
return true
})
wg.Wait()
close(tasks)
workersWg.Wait()
// on completion marks the template as completed
currentInfo.Lock()
@ -185,30 +214,35 @@ func (e *Engine) executeTemplatesOnTarget(ctx context.Context, alltemplates []*t
go func(template *templates.Template, value *contextargs.MetaInput, wg *syncutil.AdaptiveWaitGroup) {
defer wg.Done()
var match bool
var err error
ctxArgs := contextargs.New(ctx)
ctxArgs.MetaInput = value
ctx := scan.NewScanContext(ctx, ctxArgs)
switch template.Type() {
case types.WorkflowProtocol:
match = e.executeWorkflow(ctx, template.CompiledWorkflow)
default:
if e.Callback != nil {
if results, err := template.Executer.ExecuteWithResults(ctx); err == nil {
for _, result := range results {
e.Callback(result)
}
}
match = true
} else {
match, err = template.Executer.Execute(ctx)
}
}
match, err := e.executeTemplateOnInput(ctx, template, value)
if err != nil {
gologger.Warning().Msgf("[%s] Could not execute step: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
e.options.Logger.Warning().Msgf("[%s] Could not execute step on %s: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), value.Input, err)
}
results.CompareAndSwap(false, match)
}(tpl, target, sg)
}
}
// executeTemplateOnInput performs template execution for a single input and returns match status and error
func (e *Engine) executeTemplateOnInput(ctx context.Context, template *templates.Template, value *contextargs.MetaInput) (bool, error) {
ctxArgs := contextargs.New(ctx)
ctxArgs.MetaInput = value
scanCtx := scan.NewScanContext(ctx, ctxArgs)
switch template.Type() {
case types.WorkflowProtocol:
return e.executeWorkflow(scanCtx, template.CompiledWorkflow), nil
default:
if e.Callback != nil {
results, err := template.Executer.ExecuteWithResults(scanCtx)
if err != nil {
return false, err
}
for _, result := range results {
e.Callback(result)
}
return len(results) > 0, nil
}
return template.Executer.Execute(scanCtx)
}
}
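
executeTemplateWithTargets now feeds targets into a channel drained by a fixed number of workers instead of spawning one goroutine per target, and any match flips a shared atomic flag. A condensed sketch of that worker-pool shape (execute stands in for executeTemplateOnInput; the real code also handles resume bookkeeping and host-error skipping):

```go
package main

import (
	"context"
	"fmt"
	"sync"
	"sync/atomic"
)

// runTargets drains a task channel with a fixed number of workers so that
// iterating targets never spawns an unbounded number of goroutines.
func runTargets(ctx context.Context, targets []string, workers int, execute func(string) bool) bool {
	tasks := make(chan string)
	var matched atomic.Bool
	var wg sync.WaitGroup

	wg.Add(workers)
	for i := 0; i < workers; i++ {
		go func() {
			defer wg.Done()
			for t := range tasks {
				select {
				case <-ctx.Done():
					return // stop draining once the scan is cancelled
				default:
				}
				matched.CompareAndSwap(false, execute(t))
			}
		}()
	}

	for _, v := range targets {
		tasks <- v // the real producer also checks ctx.Done() while iterating
	}
	close(tasks) // workers exit once the queue is drained
	wg.Wait()
	return matched.Load()
}

func main() {
	ok := runTargets(context.Background(), []string{"a", "b", "c"}, 2, func(t string) bool {
		fmt.Println("scanning", t)
		return t == "b"
	})
	fmt.Println("matched:", ok)
}
```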

Some files were not shown because too many files have changed in this diff.