Compare commits

...

131 Commits
v3.4.7 ... dev

Author SHA1 Message Date
Mzack9999
75016d1e96
Merge pull request #6500 from projectdiscovery/dwisiswant0/fix/issue-6499-6498
fix: suppress warn code flag not found & excludes known misc dir
2025-10-06 11:06:48 +02:00
dependabot[bot]
6208dbe06a chore(deps): bump the modules group with 10 updates
Bumps the modules group with 10 updates:

| Package | From | To |
| --- | --- | --- |
| [github.com/projectdiscovery/fastdialer](https://github.com/projectdiscovery/fastdialer) | `0.4.11` | `0.4.12` |
| [github.com/projectdiscovery/hmap](https://github.com/projectdiscovery/hmap) | `0.0.94` | `0.0.95` |
| [github.com/projectdiscovery/retryabledns](https://github.com/projectdiscovery/retryabledns) | `1.0.107` | `1.0.108` |
| [github.com/projectdiscovery/retryablehttp-go](https://github.com/projectdiscovery/retryablehttp-go) | `1.0.126` | `1.0.127` |
| [github.com/projectdiscovery/dsl](https://github.com/projectdiscovery/dsl) | `0.7.1` | `0.7.2` |
| [github.com/projectdiscovery/gologger](https://github.com/projectdiscovery/gologger) | `1.1.56` | `1.1.57` |
| [github.com/projectdiscovery/networkpolicy](https://github.com/projectdiscovery/networkpolicy) | `0.1.25` | `0.1.26` |
| [github.com/projectdiscovery/useragent](https://github.com/projectdiscovery/useragent) | `0.0.101` | `0.0.102` |
| [github.com/projectdiscovery/wappalyzergo](https://github.com/projectdiscovery/wappalyzergo) | `0.2.48` | `0.2.49` |
| [github.com/projectdiscovery/cdncheck](https://github.com/projectdiscovery/cdncheck) | `1.2.3` | `1.2.4` |


Updates `github.com/projectdiscovery/fastdialer` from 0.4.11 to 0.4.12
- [Release notes](https://github.com/projectdiscovery/fastdialer/releases)
- [Commits](https://github.com/projectdiscovery/fastdialer/compare/v0.4.11...v0.4.12)

Updates `github.com/projectdiscovery/hmap` from 0.0.94 to 0.0.95
- [Release notes](https://github.com/projectdiscovery/hmap/releases)
- [Commits](https://github.com/projectdiscovery/hmap/compare/v0.0.94...v0.0.95)

Updates `github.com/projectdiscovery/retryabledns` from 1.0.107 to 1.0.108
- [Release notes](https://github.com/projectdiscovery/retryabledns/releases)
- [Commits](https://github.com/projectdiscovery/retryabledns/compare/v1.0.107...v1.0.108)

Updates `github.com/projectdiscovery/retryablehttp-go` from 1.0.126 to 1.0.127
- [Release notes](https://github.com/projectdiscovery/retryablehttp-go/releases)
- [Commits](https://github.com/projectdiscovery/retryablehttp-go/compare/v1.0.126...v1.0.127)

Updates `github.com/projectdiscovery/dsl` from 0.7.1 to 0.7.2
- [Release notes](https://github.com/projectdiscovery/dsl/releases)
- [Commits](https://github.com/projectdiscovery/dsl/compare/v0.7.1...v0.7.2)

Updates `github.com/projectdiscovery/gologger` from 1.1.56 to 1.1.57
- [Release notes](https://github.com/projectdiscovery/gologger/releases)
- [Commits](https://github.com/projectdiscovery/gologger/compare/v1.1.56...v1.1.57)

Updates `github.com/projectdiscovery/networkpolicy` from 0.1.25 to 0.1.26
- [Release notes](https://github.com/projectdiscovery/networkpolicy/releases)
- [Commits](https://github.com/projectdiscovery/networkpolicy/compare/v0.1.25...v0.1.26)

Updates `github.com/projectdiscovery/useragent` from 0.0.101 to 0.0.102
- [Release notes](https://github.com/projectdiscovery/useragent/releases)
- [Commits](https://github.com/projectdiscovery/useragent/compare/v0.0.101...v0.0.102)

Updates `github.com/projectdiscovery/wappalyzergo` from 0.2.48 to 0.2.49
- [Release notes](https://github.com/projectdiscovery/wappalyzergo/releases)
- [Commits](https://github.com/projectdiscovery/wappalyzergo/compare/v0.2.48...v0.2.49)

Updates `github.com/projectdiscovery/cdncheck` from 1.2.3 to 1.2.4
- [Release notes](https://github.com/projectdiscovery/cdncheck/releases)
- [Changelog](https://github.com/projectdiscovery/cdncheck/blob/main/.goreleaser.yaml)
- [Commits](https://github.com/projectdiscovery/cdncheck/compare/v1.2.3...v1.2.4)

---
updated-dependencies:
- dependency-name: github.com/projectdiscovery/fastdialer
  dependency-version: 0.4.12
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/hmap
  dependency-version: 0.0.95
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/retryabledns
  dependency-version: 1.0.108
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/retryablehttp-go
  dependency-version: 1.0.127
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/dsl
  dependency-version: 0.7.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/gologger
  dependency-version: 1.1.57
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/networkpolicy
  dependency-version: 0.1.26
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/useragent
  dependency-version: 0.0.102
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/wappalyzergo
  dependency-version: 0.2.49
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/cdncheck
  dependency-version: 1.2.4
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-06 06:19:17 +00:00
Dwi Siswanto
c903da3a0c
fix(config): normalize fpath in IsTemplate
* normalize file `fpath` in `IsTemplate` using
  filepath.FromSlash to ensure consistent matching
  across platforms.
* update `GetKnownMiscDirectories` docs to clarify
  that trailing slashes prevent false positives,
  since `IsTemplate` compares against normalized
  full paths.

Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-09-30 00:40:47 +07:00
dependabot[bot]
86be8429ed
chore(deps): bump the modules group with 7 updates (#6505)
Bumps the modules group with 7 updates:

| Package | From | To |
| --- | --- | --- |
| [github.com/projectdiscovery/retryablehttp-go](https://github.com/projectdiscovery/retryablehttp-go) | `1.0.125` | `1.0.126` |
| [github.com/projectdiscovery/dsl](https://github.com/projectdiscovery/dsl) | `0.7.0` | `0.7.1` |
| [github.com/projectdiscovery/gologger](https://github.com/projectdiscovery/gologger) | `1.1.55` | `1.1.56` |
| [github.com/projectdiscovery/mapcidr](https://github.com/projectdiscovery/mapcidr) | `1.1.34` | `1.1.95` |
| [github.com/projectdiscovery/utils](https://github.com/projectdiscovery/utils) | `0.5.0` | `0.6.0` |
| [github.com/projectdiscovery/wappalyzergo](https://github.com/projectdiscovery/wappalyzergo) | `0.2.47` | `0.2.48` |
| [github.com/projectdiscovery/cdncheck](https://github.com/projectdiscovery/cdncheck) | `1.2.0` | `1.2.3` |


Updates `github.com/projectdiscovery/retryablehttp-go` from 1.0.125 to 1.0.126
- [Release notes](https://github.com/projectdiscovery/retryablehttp-go/releases)
- [Commits](https://github.com/projectdiscovery/retryablehttp-go/compare/v1.0.125...v1.0.126)

Updates `github.com/projectdiscovery/dsl` from 0.7.0 to 0.7.1
- [Release notes](https://github.com/projectdiscovery/dsl/releases)
- [Commits](https://github.com/projectdiscovery/dsl/compare/v0.7.0...v0.7.1)

Updates `github.com/projectdiscovery/gologger` from 1.1.55 to 1.1.56
- [Release notes](https://github.com/projectdiscovery/gologger/releases)
- [Commits](https://github.com/projectdiscovery/gologger/compare/v1.1.55...v1.1.56)

Updates `github.com/projectdiscovery/mapcidr` from 1.1.34 to 1.1.95
- [Release notes](https://github.com/projectdiscovery/mapcidr/releases)
- [Changelog](https://github.com/projectdiscovery/mapcidr/blob/main/.goreleaser.yml)
- [Commits](https://github.com/projectdiscovery/mapcidr/compare/v1.1.34...v1.1.95)

Updates `github.com/projectdiscovery/utils` from 0.5.0 to 0.6.0
- [Release notes](https://github.com/projectdiscovery/utils/releases)
- [Changelog](https://github.com/projectdiscovery/utils/blob/main/CHANGELOG.md)
- [Commits](https://github.com/projectdiscovery/utils/compare/v0.5.0...v0.6.0)

Updates `github.com/projectdiscovery/wappalyzergo` from 0.2.47 to 0.2.48
- [Release notes](https://github.com/projectdiscovery/wappalyzergo/releases)
- [Commits](https://github.com/projectdiscovery/wappalyzergo/compare/v0.2.47...v0.2.48)

Updates `github.com/projectdiscovery/cdncheck` from 1.2.0 to 1.2.3
- [Release notes](https://github.com/projectdiscovery/cdncheck/releases)
- [Changelog](https://github.com/projectdiscovery/cdncheck/blob/main/.goreleaser.yaml)
- [Commits](https://github.com/projectdiscovery/cdncheck/compare/v1.2.0...v1.2.3)

---
updated-dependencies:
- dependency-name: github.com/projectdiscovery/retryablehttp-go
  dependency-version: 1.0.126
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/dsl
  dependency-version: 0.7.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/gologger
  dependency-version: 1.1.56
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/mapcidr
  dependency-version: 1.1.95
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/utils
  dependency-version: 0.6.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/wappalyzergo
  dependency-version: 0.2.48
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/cdncheck
  dependency-version: 1.2.3
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: modules
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-29 23:35:43 +07:00
Dwi Siswanto
b529125031
refactor(config): update known misc dirs & improve IsTemplate func
Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-09-27 16:02:12 +07:00
Dwi Siswanto
3ef581c5e8
chore(make): rm unnecessary flag on template-validate
Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-09-27 15:25:25 +07:00
Dwi Siswanto
ca11a2fad6
fix(disk): uses config.IsTemplate instead
fixes #6499

Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-09-27 15:21:38 +07:00
Dwi Siswanto
7d450507f7
feat(config): adds known misc directories
and excludes them in IsTemplate func.

Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-09-27 15:20:45 +07:00
Dwi Siswanto
95a72cfd50
fix(templates): suppress warn code flag not found
on validate.

fixes #6498

Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-09-27 13:34:28 +07:00
Mzack9999
f8f89bb721
Merge pull request #6204 from projectdiscovery/RDP-Enc-func
CheckRDPEncryption function
2025-09-26 01:06:50 +02:00
Mzack9999
cb2d93174a fixing logic 2025-09-25 22:46:40 +02:00
Mzack9999
61bd0828dc Merge branch 'dev' into RDP-Enc-func 2025-09-25 22:07:17 +02:00
Dogan Can Bakir
d44f07f648
Merge pull request #6495 from projectdiscovery/fix_headless_loading
fix headless template loading logic when `-dast` option is enabled
2025-09-24 13:11:28 +03:00
Nakul Bharti
93be3b8291
fix: improve cleanup in parallel execution (#6490) 2025-09-24 01:12:43 +05:30
Doğan Can Bakır
202524283b
fix headless template loading logic when -dast option is enabled 2025-09-23 16:43:08 +03:00
Nakul Bharti
8ea5061f5e
jira: hotfix for Cloud to use /rest/api/3/search/jql (#6489)
* jira: hotfix for Cloud to use /rest/api/3/search/jql in FindExistingIssue; add live test verifying v3 endpoint

* jira: fix Cloud v3 search response handling (no total); set Self from base

* fix lint error

* tests(jira): apply De Morgan to satisfy staticcheck QF1001
2025-09-22 22:44:10 +05:30
Dwi Siswanto
d2cf69aebb
feat(fuzz): enhance MultiPartForm with metadata APIs (#6486)
* feat(fuzz): enhance `MultiPartForm` with metadata APIs

* add `SetFileMetadata`/`GetFileMetadata` APIs for
  file metadata management.
* implement RFC-2046 boundary validation
  (max 70 chars).
* add boundary validation in `Decode` method.

* fix `filesMetadata` initialization.
* fix mem leak by removing defer from file reading
  loop.
* fix file metadata overwriting by storing first
  file's metadata instead of last.

Closes #6405, #6406

Signed-off-by: Dwi Siswanto <git@dw1.io>

* chore(fuzz): satisfy lint errs

Signed-off-by: Dwi Siswanto <git@dw1.io>

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-09-22 22:09:24 +05:30
Mzack9999
39e9286371
Feat 6231 deadlock (#6469)
* fixing recursive deadlock

* using atomics

* fixing init
2025-09-22 21:49:56 +05:30
dependabot[bot]
0ea42e5f66 chore(deps): bump the modules group with 6 updates
Bumps the modules group with 6 updates:

| Package | From | To |
| --- | --- | --- |
| [github.com/projectdiscovery/fastdialer](https://github.com/projectdiscovery/fastdialer) | `0.4.10` | `0.4.11` |
| [github.com/projectdiscovery/retryablehttp-go](https://github.com/projectdiscovery/retryablehttp-go) | `1.0.124` | `1.0.125` |
| [github.com/projectdiscovery/gologger](https://github.com/projectdiscovery/gologger) | `1.1.54` | `1.1.55` |
| [github.com/projectdiscovery/networkpolicy](https://github.com/projectdiscovery/networkpolicy) | `0.1.24` | `0.1.25` |
| [github.com/projectdiscovery/wappalyzergo](https://github.com/projectdiscovery/wappalyzergo) | `0.2.46` | `0.2.47` |
| [github.com/projectdiscovery/cdncheck](https://github.com/projectdiscovery/cdncheck) | `1.1.36` | `1.2.0` |


Updates `github.com/projectdiscovery/fastdialer` from 0.4.10 to 0.4.11
- [Release notes](https://github.com/projectdiscovery/fastdialer/releases)
- [Commits](https://github.com/projectdiscovery/fastdialer/compare/v0.4.10...v0.4.11)

Updates `github.com/projectdiscovery/retryablehttp-go` from 1.0.124 to 1.0.125
- [Release notes](https://github.com/projectdiscovery/retryablehttp-go/releases)
- [Commits](https://github.com/projectdiscovery/retryablehttp-go/compare/v1.0.124...v1.0.125)

Updates `github.com/projectdiscovery/gologger` from 1.1.54 to 1.1.55
- [Release notes](https://github.com/projectdiscovery/gologger/releases)
- [Commits](https://github.com/projectdiscovery/gologger/compare/v1.1.54...v1.1.55)

Updates `github.com/projectdiscovery/networkpolicy` from 0.1.24 to 0.1.25
- [Release notes](https://github.com/projectdiscovery/networkpolicy/releases)
- [Commits](https://github.com/projectdiscovery/networkpolicy/compare/v0.1.24...v0.1.25)

Updates `github.com/projectdiscovery/wappalyzergo` from 0.2.46 to 0.2.47
- [Release notes](https://github.com/projectdiscovery/wappalyzergo/releases)
- [Commits](https://github.com/projectdiscovery/wappalyzergo/compare/v0.2.46...v0.2.47)

Updates `github.com/projectdiscovery/cdncheck` from 1.1.36 to 1.2.0
- [Release notes](https://github.com/projectdiscovery/cdncheck/releases)
- [Changelog](https://github.com/projectdiscovery/cdncheck/blob/main/.goreleaser.yaml)
- [Commits](https://github.com/projectdiscovery/cdncheck/compare/v1.1.36...v1.2.0)

---
updated-dependencies:
- dependency-name: github.com/projectdiscovery/fastdialer
  dependency-version: 0.4.11
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/retryablehttp-go
  dependency-version: 1.0.125
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/gologger
  dependency-version: 1.1.55
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/networkpolicy
  dependency-version: 0.1.25
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/wappalyzergo
  dependency-version: 0.2.47
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/cdncheck
  dependency-version: 1.2.0
  dependency-type: indirect
  update-type: version-update:semver-minor
  dependency-group: modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-22 12:00:08 +00:00
ghost
f610ed4cab docs: update syntax & JSON schema 🤖 2025-09-15 23:06:45 +00:00
halcyondream
792998d8e2
Refactored header-based auth scans not to normalize the header names. (#6479)
* Refactored header-based auth scans not to normalize the header names.

* Removed the header validation as it's not really useful here.

* adding docs

---------

Co-authored-by: Mzack9999 <mzack9999@protonmail.com>
2025-09-16 04:35:00 +05:30
Nakul Bharti
c4fa2c74c1
cache, goroutine and unbounded workers management (#6420)
* Enhance matcher compilation with caching for regex and DSL expressions to improve performance. Update template parsing to conditionally retain raw templates based on size constraints.

* Implement caching for regex and DSL expressions in extractors and matchers to enhance performance. Introduce a buffer pool in raw requests to reduce memory allocations. Update template cache management for improved efficiency.

* feat: improve concurrency to be bound

* refactor: replace fmt.Sprintf with fmt.Fprintf for improved performance in header handling

* feat: add regex matching tests and benchmarks for performance evaluation

* feat: add prefix check in regex extraction to optimize matching process

* feat: implement regex caching mechanism to enhance performance in extractors and matchers, along with tests and benchmarks for validation

* feat: add unit tests for template execution in the core engine, enhancing test coverage and reliability

* feat: enhance error handling in template execution and improve regex caching logic for better performance

* Implement caching for regex and DSL expressions in the cache package, replacing previous sync.Map usage. Add unit tests for cache functionality, including eviction by capacity and retrieval of cached items. Update extractors and matchers to utilize the new cache system for improved performance and memory efficiency.

* Add tests for SetCapacities in cache package to ensure cache behavior on capacity changes

- Implemented TestSetCapacities_NoRebuildOnZero to verify that setting capacities to zero does not clear existing caches.
- Added TestSetCapacities_BeforeFirstUse to confirm that initial cache settings are respected and not overridden by subsequent capacity changes.

* Refactor matchers and update load test generator to use io package

- Removed maxRegexScanBytes constant from match.go.
- Replaced ioutil with io package in load_test.go for NopCloser usage.
- Restored TestValidate_AllowsInlineMultiline in load_test.go to ensure inline validation functionality.

* Add cancellation support in template execution and enhance test coverage

- Updated executeTemplateWithTargets to respect context cancellation.
- Introduced fakeTargetProvider and slowExecuter for testing.
- Added Test_executeTemplateWithTargets_RespectsCancellation to validate cancellation behavior during template execution.
2025-09-15 23:48:02 +05:30
Nakul Bharti
d4f1a815ed
fix: update go jira deps (#6475)
* fix: handle jira deprecated endpoint

* refactor: update Jira issue search result structure to include 'Self' field

* Revert "refactor: update Jira issue search result structure to include 'Self' field"

This reverts commit b0953419d33dff3fb61f1bcdcddab0ae759379b8.

* Revert "fix: handle jira deprecated endpoint"

This reverts commit 1fc05076cdb31906f403d80455b2e1609a66e2ae.

* chore(deps): bump github.com/andygrunwald/go-jira to v1.16.1 and tidy

* fix(jira): migrate Issue.Search to SearchV2JQL with explicit Fields
2025-09-15 18:23:08 +05:30
dependabot[bot]
a65841c034 chore(deps): bump the modules group with 7 updates
Bumps the modules group with 7 updates:

| Package | From | To |
| --- | --- | --- |
| [github.com/projectdiscovery/fastdialer](https://github.com/projectdiscovery/fastdialer) | `0.4.9` | `0.4.10` |
| [github.com/projectdiscovery/hmap](https://github.com/projectdiscovery/hmap) | `0.0.93` | `0.0.94` |
| [github.com/projectdiscovery/retryablehttp-go](https://github.com/projectdiscovery/retryablehttp-go) | `1.0.123` | `1.0.124` |
| [github.com/projectdiscovery/dsl](https://github.com/projectdiscovery/dsl) | `0.6.0` | `0.7.0` |
| [github.com/projectdiscovery/networkpolicy](https://github.com/projectdiscovery/networkpolicy) | `0.1.23` | `0.1.24` |
| [github.com/projectdiscovery/wappalyzergo](https://github.com/projectdiscovery/wappalyzergo) | `0.2.45` | `0.2.46` |
| [github.com/projectdiscovery/cdncheck](https://github.com/projectdiscovery/cdncheck) | `1.1.35` | `1.1.36` |


Updates `github.com/projectdiscovery/fastdialer` from 0.4.9 to 0.4.10
- [Release notes](https://github.com/projectdiscovery/fastdialer/releases)
- [Commits](https://github.com/projectdiscovery/fastdialer/compare/v0.4.9...v0.4.10)

Updates `github.com/projectdiscovery/hmap` from 0.0.93 to 0.0.94
- [Release notes](https://github.com/projectdiscovery/hmap/releases)
- [Commits](https://github.com/projectdiscovery/hmap/compare/v0.0.93...v0.0.94)

Updates `github.com/projectdiscovery/retryablehttp-go` from 1.0.123 to 1.0.124
- [Release notes](https://github.com/projectdiscovery/retryablehttp-go/releases)
- [Commits](https://github.com/projectdiscovery/retryablehttp-go/compare/v1.0.123...v1.0.124)

Updates `github.com/projectdiscovery/dsl` from 0.6.0 to 0.7.0
- [Release notes](https://github.com/projectdiscovery/dsl/releases)
- [Commits](https://github.com/projectdiscovery/dsl/compare/v0.6.0...v0.7.0)

Updates `github.com/projectdiscovery/networkpolicy` from 0.1.23 to 0.1.24
- [Release notes](https://github.com/projectdiscovery/networkpolicy/releases)
- [Commits](https://github.com/projectdiscovery/networkpolicy/compare/v0.1.23...v0.1.24)

Updates `github.com/projectdiscovery/wappalyzergo` from 0.2.45 to 0.2.46
- [Release notes](https://github.com/projectdiscovery/wappalyzergo/releases)
- [Commits](https://github.com/projectdiscovery/wappalyzergo/compare/v0.2.45...v0.2.46)

Updates `github.com/projectdiscovery/cdncheck` from 1.1.35 to 1.1.36
- [Release notes](https://github.com/projectdiscovery/cdncheck/releases)
- [Changelog](https://github.com/projectdiscovery/cdncheck/blob/main/.goreleaser.yaml)
- [Commits](https://github.com/projectdiscovery/cdncheck/compare/v1.1.35...v1.1.36)

---
updated-dependencies:
- dependency-name: github.com/projectdiscovery/fastdialer
  dependency-version: 0.4.10
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/hmap
  dependency-version: 0.0.94
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/retryablehttp-go
  dependency-version: 1.0.124
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/dsl
  dependency-version: 0.7.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/networkpolicy
  dependency-version: 0.1.24
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/wappalyzergo
  dependency-version: 0.2.46
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/cdncheck
  dependency-version: 1.1.36
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-15 07:12:24 +00:00
Mzack9999
876974f38b
Merge pull request #6422 from zy9ard3/dev
Change "no changes" message for GitHub custom template update from ERR to INF for better logging
2025-09-12 21:21:40 +02:00
nu11z
ca543d7885
Remove the stack trace when the nuclei-ignore file does not exist (#6455)
* remove the stack trace when the nuclei-ignore file does not exist

* removing useless debug stack

---------

Co-authored-by: Mzack9999 <mzack9999@protonmail.com>
2025-09-12 23:36:36 +05:30
Mzack9999
fde6f72934 refactor 2025-09-12 19:03:56 +02:00
Mzack9999
3af37362e3
Merge pull request #6472 from projectdiscovery/maint-rate-unlimit
centralizing ratelimiter logic
2025-09-12 18:59:13 +02:00
Mzack9999
99a9ce398d Merge branch 'dev' into pr/6422 2025-09-12 18:25:18 +02:00
Mzack9999
48af0b4f6c
Merge pull request #6464 from projectdiscovery/jira-custom-template-syntax
feat: added new text/template syntax to jira custom fields
2025-09-12 18:21:29 +02:00
Mzack9999
e1dfa1baa7 adding me 2025-09-12 18:13:23 +02:00
Mzack9999
089e2a4ee0 centralizing ratelimiter logic 2025-09-12 17:46:42 +02:00
Mzack9999
46555bcd1e
Merge pull request #6413 from projectdiscovery/feat-4842-vnc
adding vnc auth
2025-09-12 13:17:18 +02:00
Mzack9999
521a21c06a Merge branch 'dev' into feat-4842-vnc 2025-09-12 11:51:17 +02:00
Mzack9999
1acd40f97f
Merge pull request #6465 from projectdiscovery/4690_dont_load_dup_templates
dont load templates with the same ID
2025-09-12 11:46:51 +02:00
Mzack9999
09c2ca540a
Merge pull request #6471 from projectdiscovery/feat-4872-oracle-atp2
code from https://github.com/projectdiscovery/nuclei/pull/6427
2025-09-12 11:45:50 +02:00
Mzack9999
c863143771 lint 2025-09-12 10:35:09 +02:00
Mzack9999
5c8da8d88b code from https://github.com/projectdiscovery/nuclei/pull/6427 2025-09-12 10:29:42 +02:00
Mzack9999
4b22a3d53e release fix 2025-09-11 21:14:55 +00:00
Mzack9999
94c77c1a28 httpx fix 2025-09-11 21:14:55 +00:00
dependabot[bot]
ee1c847626 chore(deps): bump the modules group with 9 updates
Bumps the modules group with 9 updates:

| Package | From | To |
| --- | --- | --- |
| [github.com/projectdiscovery/fastdialer](https://github.com/projectdiscovery/fastdialer) | `0.4.7` | `0.4.9` |
| [github.com/projectdiscovery/retryabledns](https://github.com/projectdiscovery/retryabledns) | `1.0.106` | `1.0.107` |
| [github.com/projectdiscovery/retryablehttp-go](https://github.com/projectdiscovery/retryablehttp-go) | `1.0.121` | `1.0.123` |
| [github.com/projectdiscovery/dsl](https://github.com/projectdiscovery/dsl) | `0.5.1` | `0.6.0` |
| [github.com/projectdiscovery/httpx](https://github.com/projectdiscovery/httpx) | `1.7.1-0.20250902174407-8d6c2658663f` | `1.7.1` |
| [github.com/projectdiscovery/networkpolicy](https://github.com/projectdiscovery/networkpolicy) | `0.1.21` | `0.1.23` |
| [github.com/projectdiscovery/utils](https://github.com/projectdiscovery/utils) | `0.4.24-0.20250823123502-bd7f2849ddb4` | `0.5.0` |
| [github.com/projectdiscovery/wappalyzergo](https://github.com/projectdiscovery/wappalyzergo) | `0.2.43` | `0.2.45` |
| [github.com/projectdiscovery/cdncheck](https://github.com/projectdiscovery/cdncheck) | `1.1.33` | `1.1.35` |


Updates `github.com/projectdiscovery/fastdialer` from 0.4.7 to 0.4.9
- [Release notes](https://github.com/projectdiscovery/fastdialer/releases)
- [Commits](https://github.com/projectdiscovery/fastdialer/compare/v0.4.7...v0.4.9)

Updates `github.com/projectdiscovery/retryabledns` from 1.0.106 to 1.0.107
- [Release notes](https://github.com/projectdiscovery/retryabledns/releases)
- [Commits](https://github.com/projectdiscovery/retryabledns/compare/v1.0.106...v1.0.107)

Updates `github.com/projectdiscovery/retryablehttp-go` from 1.0.121 to 1.0.123
- [Release notes](https://github.com/projectdiscovery/retryablehttp-go/releases)
- [Commits](https://github.com/projectdiscovery/retryablehttp-go/compare/v1.0.121...v1.0.123)

Updates `github.com/projectdiscovery/dsl` from 0.5.1 to 0.6.0
- [Release notes](https://github.com/projectdiscovery/dsl/releases)
- [Commits](https://github.com/projectdiscovery/dsl/compare/v0.5.1...v0.6.0)

Updates `github.com/projectdiscovery/httpx` from 1.7.1-0.20250902174407-8d6c2658663f to 1.7.1
- [Release notes](https://github.com/projectdiscovery/httpx/releases)
- [Changelog](https://github.com/projectdiscovery/httpx/blob/dev/.goreleaser.yml)
- [Commits](https://github.com/projectdiscovery/httpx/commits/v1.7.1)

Updates `github.com/projectdiscovery/networkpolicy` from 0.1.21 to 0.1.23
- [Release notes](https://github.com/projectdiscovery/networkpolicy/releases)
- [Commits](https://github.com/projectdiscovery/networkpolicy/compare/v0.1.21...v0.1.23)

Updates `github.com/projectdiscovery/utils` from 0.4.24-0.20250823123502-bd7f2849ddb4 to 0.5.0
- [Release notes](https://github.com/projectdiscovery/utils/releases)
- [Changelog](https://github.com/projectdiscovery/utils/blob/main/CHANGELOG.md)
- [Commits](https://github.com/projectdiscovery/utils/commits/v0.5.0)

Updates `github.com/projectdiscovery/wappalyzergo` from 0.2.43 to 0.2.45
- [Release notes](https://github.com/projectdiscovery/wappalyzergo/releases)
- [Commits](https://github.com/projectdiscovery/wappalyzergo/compare/v0.2.43...v0.2.45)

Updates `github.com/projectdiscovery/cdncheck` from 1.1.33 to 1.1.35
- [Release notes](https://github.com/projectdiscovery/cdncheck/releases)
- [Changelog](https://github.com/projectdiscovery/cdncheck/blob/main/.goreleaser.yaml)
- [Commits](https://github.com/projectdiscovery/cdncheck/compare/v1.1.33...v1.1.35)

---
updated-dependencies:
- dependency-name: github.com/projectdiscovery/fastdialer
  dependency-version: 0.4.9
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/retryabledns
  dependency-version: 1.0.107
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/retryablehttp-go
  dependency-version: 1.0.123
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/dsl
  dependency-version: 0.6.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/httpx
  dependency-version: 1.7.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/networkpolicy
  dependency-version: 0.1.23
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/utils
  dependency-version: 0.5.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/wappalyzergo
  dependency-version: 0.2.45
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/cdncheck
  dependency-version: 1.1.35
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-11 21:14:55 +00:00
Mzack9999
a21bfc4303
Merge pull request #6261 from alban-stourbe-wmx/feature/ytt-yaml-templating
feat(templating): add vars templating into yaml inputs (ytt)
2025-09-11 22:54:54 +02:00
Mzack9999
c487e59602 lint 2025-09-11 21:41:59 +02:00
Mzack9999
1f8dc4c358 Merge branch 'dev' into pr/6261 2025-09-11 21:33:40 +02:00
Mzack9999
608159bbbe lint 2025-09-10 19:53:23 +02:00
Mzack9999
b05359bc82 using synclockmap 2025-09-10 19:48:36 +02:00
Doğan Can Bakır
4916cf34f0
dont load templates with the same ID 2025-09-10 16:44:12 +03:00
Ice3man
f460bf926d feat: added additional text/template helpers 2025-09-10 17:32:43 +05:30
Ice3man
218a2f69a5 feat: added new text/template syntax to jira custom fields 2025-09-10 16:51:20 +05:30
dependabot[bot]
ff5734ba15
chore(deps): bump the workflows group across 1 directory with 2 updates (#6462)
Bumps the workflows group with 2 updates in the / directory: [actions/checkout](https://github.com/actions/checkout) and [actions/stale](https://github.com/actions/stale).


Updates `actions/checkout` from 4 to 5
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v4...v5)

Updates `actions/stale` from 9 to 10
- [Release notes](https://github.com/actions/stale/releases)
- [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/stale/compare/v9...v10)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
  dependency-group: workflows
- dependency-name: actions/stale
  dependency-version: '10'
  dependency-type: direct:production
  update-type: version-update:semver-major
  dependency-group: workflows
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-08 17:34:42 +07:00
mkrs2404
9c64a1cb9b
Reporting validation (#6456)
* add custom validator for reporting issues

* use httpx dev branch

* remove yaml marshal/unmarshal for validator callback
2025-09-05 19:53:26 +05:30
dependabot[bot]
32dfeacd9d
chore(deps): bump the modules group across 1 directory with 11 updates (#6438)
* chore(deps): bump the modules group across 1 directory with 11 updates

Bumps the modules group with 10 updates in the / directory:

| Package | From | To |
| --- | --- | --- |
| [github.com/projectdiscovery/fastdialer](https://github.com/projectdiscovery/fastdialer) | `0.4.6` | `0.4.7` |
| [github.com/projectdiscovery/hmap](https://github.com/projectdiscovery/hmap) | `0.0.92` | `0.0.93` |
| [github.com/projectdiscovery/retryabledns](https://github.com/projectdiscovery/retryabledns) | `1.0.105` | `1.0.106` |
| [github.com/projectdiscovery/retryablehttp-go](https://github.com/projectdiscovery/retryablehttp-go) | `1.0.120` | `1.0.121` |
| [github.com/projectdiscovery/dsl](https://github.com/projectdiscovery/dsl) | `0.5.0` | `0.5.1` |
| [github.com/projectdiscovery/gozero](https://github.com/projectdiscovery/gozero) | `0.0.3` | `0.1.0` |
| [github.com/projectdiscovery/ratelimit](https://github.com/projectdiscovery/ratelimit) | `0.0.81` | `0.0.82` |
| [github.com/projectdiscovery/tlsx](https://github.com/projectdiscovery/tlsx) | `1.1.9` | `1.2.0` |
| [github.com/projectdiscovery/wappalyzergo](https://github.com/projectdiscovery/wappalyzergo) | `0.2.37` | `0.2.43` |
| [github.com/projectdiscovery/cdncheck](https://github.com/projectdiscovery/cdncheck) | `1.1.27` | `1.1.33` |



Updates `github.com/projectdiscovery/fastdialer` from 0.4.6 to 0.4.7
- [Release notes](https://github.com/projectdiscovery/fastdialer/releases)
- [Commits](https://github.com/projectdiscovery/fastdialer/compare/v0.4.6...v0.4.7)

Updates `github.com/projectdiscovery/hmap` from 0.0.92 to 0.0.93
- [Release notes](https://github.com/projectdiscovery/hmap/releases)
- [Commits](https://github.com/projectdiscovery/hmap/compare/v0.0.92...v0.0.93)

Updates `github.com/projectdiscovery/retryabledns` from 1.0.105 to 1.0.106
- [Release notes](https://github.com/projectdiscovery/retryabledns/releases)
- [Commits](https://github.com/projectdiscovery/retryabledns/compare/v1.0.105...v1.0.106)

Updates `github.com/projectdiscovery/retryablehttp-go` from 1.0.120 to 1.0.121
- [Release notes](https://github.com/projectdiscovery/retryablehttp-go/releases)
- [Commits](https://github.com/projectdiscovery/retryablehttp-go/compare/v1.0.120...v1.0.121)

Updates `github.com/projectdiscovery/dsl` from 0.5.0 to 0.5.1
- [Release notes](https://github.com/projectdiscovery/dsl/releases)
- [Commits](https://github.com/projectdiscovery/dsl/compare/v0.5.0...v0.5.1)

Updates `github.com/projectdiscovery/gozero` from 0.0.3 to 0.1.0
- [Release notes](https://github.com/projectdiscovery/gozero/releases)
- [Commits](https://github.com/projectdiscovery/gozero/compare/v0.0.3...v0.1.0)

Updates `github.com/projectdiscovery/networkpolicy` from 0.1.20 to 0.1.21
- [Release notes](https://github.com/projectdiscovery/networkpolicy/releases)
- [Commits](https://github.com/projectdiscovery/networkpolicy/compare/v0.1.20...v0.1.21)

Updates `github.com/projectdiscovery/ratelimit` from 0.0.81 to 0.0.82
- [Release notes](https://github.com/projectdiscovery/ratelimit/releases)
- [Commits](https://github.com/projectdiscovery/ratelimit/compare/v0.0.81...v0.0.82)

Updates `github.com/projectdiscovery/tlsx` from 1.1.9 to 1.2.0
- [Release notes](https://github.com/projectdiscovery/tlsx/releases)
- [Changelog](https://github.com/projectdiscovery/tlsx/blob/main/.goreleaser.yml)
- [Commits](https://github.com/projectdiscovery/tlsx/compare/v1.1.9...v1.2.0)

Updates `github.com/projectdiscovery/wappalyzergo` from 0.2.37 to 0.2.43
- [Release notes](https://github.com/projectdiscovery/wappalyzergo/releases)
- [Commits](https://github.com/projectdiscovery/wappalyzergo/compare/v0.2.37...v0.2.43)

Updates `github.com/projectdiscovery/cdncheck` from 1.1.27 to 1.1.33
- [Release notes](https://github.com/projectdiscovery/cdncheck/releases)
- [Changelog](https://github.com/projectdiscovery/cdncheck/blob/main/.goreleaser.yaml)
- [Commits](https://github.com/projectdiscovery/cdncheck/compare/v1.1.27...v1.1.33)

---
updated-dependencies:
- dependency-name: github.com/projectdiscovery/fastdialer
  dependency-version: 0.4.7
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/hmap
  dependency-version: 0.0.93
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/retryabledns
  dependency-version: 1.0.106
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/retryablehttp-go
  dependency-version: 1.0.121
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/dsl
  dependency-version: 0.5.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/gozero
  dependency-version: 0.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/networkpolicy
  dependency-version: 0.1.21
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/ratelimit
  dependency-version: 0.0.82
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/tlsx
  dependency-version: 1.2.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/wappalyzergo
  dependency-version: 0.2.43
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/cdncheck
  dependency-version: 1.1.33
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: modules
...

Signed-off-by: dependabot[bot] <support@github.com>

* bump

* httpx dev

* mod tidy

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Mzack9999 <mzack9999@protonmail.com>
2025-09-01 17:52:46 +07:00
PDTeamX
48c389b063 link update 2025-08-31 03:16:13 +05:30
PDTeamX
36b4f68eec misc hyperlink update 2025-08-31 03:13:14 +05:30
PDTeamX
af7b2f166e issue / discussion template update 2025-08-31 03:10:51 +05:30
Dogan Can Bakir
b25937b310
Merge pull request #6425 from projectdiscovery/bump_httpx_version
bump httpx version
2025-08-28 10:04:35 +03:00
Dogan Can Bakir
100d6528f5
Merge branch 'dev' into bump_httpx_version 2025-08-28 08:55:23 +03:00
cui
d76187f99a
Refactor to use reflect.TypeFor (#6428) 2025-08-27 22:31:04 +05:30
Lorenzo Susini
8194fabcf8
test(reporting/exporters/mongo): add mongo integration test with test… (#6237)
* test(reporting/exporters/mongo): add mongo integration test with testcontainer-go module

Signed-off-by: Lorenzo Susini <susinilorenzo1@gmail.com>

* execute exportes only on linux

---------

Signed-off-by: Lorenzo Susini <susinilorenzo1@gmail.com>
Co-authored-by: Mzack9999 <mzack9999@protonmail.com>
2025-08-27 04:25:31 +05:30
dependabot[bot]
5063af46b1 chore(deps): bump github.com/go-viper/mapstructure/v2
Bumps the go_modules group with 1 update in the / directory: [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure).


Updates `github.com/go-viper/mapstructure/v2` from 2.3.0 to 2.4.0
- [Release notes](https://github.com/go-viper/mapstructure/releases)
- [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md)
- [Commits](https://github.com/go-viper/mapstructure/compare/v2.3.0...v2.4.0)

---
updated-dependencies:
- dependency-name: github.com/go-viper/mapstructure/v2
  dependency-version: 2.4.0
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-08-26 07:08:27 +00:00
Doğan Can Bakır
776cb4fcf2
bump httpx version 2025-08-26 10:04:36 +03:00
zy9ard3
1f0aef970c
fix for error.Is false return 2025-08-26 10:48:10 +05:30
zy9ard3
5b7debf349
Update pkg/external/customtemplates/github.go
Co-authored-by: Dwi Siswanto <25837540+dwisiswant0@users.noreply.github.com>
2025-08-26 09:05:31 +05:30
Mzack9999
e83382d4e4 lint 2025-08-25 15:33:21 +02:00
Mzack9999
b61321cd19 Merge branch 'dev' into feat-4842-vnc 2025-08-25 15:22:14 +02:00
Mzack9999
f20f95f67e integration test 2025-08-25 15:13:23 +02:00
Mzack9999
efcef55681 lint 2025-08-25 13:59:01 +02:00
PDTeamX
0f7b33cebf limited test, instead of all 2025-08-25 13:56:03 +05:30
Dwi Siswanto
a1b5a0ed99
fix(fuzz): handles duplicate multipart form field names (#6404)
* fix: handle duplicate field names in multipart form encoding

* fix(fuzz): handles `[]any` type in `*MultiPartForm.Encode`

Signed-off-by: Dwi Siswanto <git@dw1.io>

* test(fuzz): adds panic recovery & display encoded out

Signed-off-by: Dwi Siswanto <git@dw1.io>

* fix(fuzz): incorrectly treated mixed type field

in `*MultiPartForm.Encode`

Signed-off-by: Dwi Siswanto <git@dw1.io>

* test(fuzz): refactor compare w decoded instead

Signed-off-by: Dwi Siswanto <git@dw1.io>

* chore(fuzz): prealloc for `[]any` type

Signed-off-by: Dwi Siswanto <git@dw1.io>

* fix(fuzz): treats nil value as empty string

Signed-off-by: Dwi Siswanto <git@dw1.io>

* chore(fuzz): rm early error return for non-array file

Signed-off-by: Dwi Siswanto <git@dw1.io>

* test(fuzz): adds `TestMultiPartFormFileUpload` test

Signed-off-by: Dwi Siswanto <git@dw1.io>

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
Co-authored-by: yusei-wy <31252054+yusei-wy@users.noreply.github.com>
2025-08-25 13:42:51 +05:30
Tarun Koyalwar
19247ae74b
Path-Based Fuzzing SQL fix (#6400)
* setup claude

* migrate to using errkit

* fix unused imports + lint errors

* update settings.json

* fix url encoding issue

* fix lint error

* fix the path fuzzing component

* fix lint error
2025-08-25 13:36:58 +05:30
zy9ard3
5be258f948
no changes custom template message should be INF not ERR 2025-08-25 01:15:56 +05:30
Dwi Siswanto
309018fbf4
fix: segfault in template caching logic (#6421)
* fix: segfault in template caching logic

when templates had no executable requests after
option updates.

the cached templates could end up with 0 requests
and no flow execution path, resulting in a nil
engine pointer that was later derefer w/o
validation.

bug seq:
caching template (w/ valid requests) -> get cached
template -> `*ExecutorOptions.Options` copied and
modified (inconsistent) -> requests updated (with
new options -- some may be invalid, and without
recompile) -> template returned w/o validation ->
`compileProtocolRequests` -> `NewTemplateExecuter`
receive empty requests + empty flow = nil engine
-> `*TemplateExecuter.{Compile,Execute}` invoked
on nil engine = panic.

RCA:
1. `*ExecutorOptions.ApplyNewEngineOptions`
   overwriting many fields.
2. copy op pointless; create a copy of options and
   then immediately replace it with original
   pointer.
3. missing executable requests validation after
   cached templates is reconstructed with updated
   options.

Thus, this affected `--automatic-scan` mode where
tech detection templates often have conditional
requests that may be filtered based on runtime
options.

Fixes #6417

Signed-off-by: Dwi Siswanto <git@dw1.io>

* fix(templates): recompile workflow with `tplCopy.Options`

Signed-off-by: Dwi Siswanto <git@dw1.io>

* fix(templates): strengthen cache hit guard

Signed-off-by: Dwi Siswanto <git@dw1.io>

* fix(protocols): skips template-specific fields

Signed-off-by: Dwi Siswanto <git@dw1.io>

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-08-23 20:01:23 +05:30
PDTeamX
5e9ada23b2 Update constants.go 2025-08-23 19:51:23 +05:30
PDTeamX
00f4595f0b version update 2025-08-22 20:27:44 +05:30
Ice3man
30e520754b
feat: fixed output event for skipped hosts (#6415)
* feat: fixed output event for skipped hosts

* misc
2025-08-22 20:25:07 +05:30
Mzack9999
6b358b39a3 lint 2025-08-21 23:38:58 +02:00
Mzack9999
b41f4d97d6 gen go+js 2025-08-21 22:04:55 +02:00
Mzack9999
5c15c77777 adding vnc auth 2025-08-21 22:02:47 +02:00
Sandeep Singh
b4644af80a
Lint + test fixes after utils dep update (#6393)
* fix: remove undefined errorutil.ShowStackTrace

* feat: add make lint support and integrate with test

* refactor: migrate errorutil to errkit across codebase

- Replace deprecated errorutil with modern errkit
- Convert error declarations from var to func for better compatibility
- Fix all SA1019 deprecation warnings
- Maintain error chain support and stack traces

* fix: improve DNS test reliability using Google DNS

- Configure test to use Google DNS (8.8.8.8) for stability
- Fix nil pointer issue in DNS client initialization
- Keep production defaults unchanged

* fixing logic

* removing unwanted branches in makefile

---------

Co-authored-by: Mzack9999 <mzack9999@protonmail.com>
2025-08-20 05:28:23 +05:30
Dogan Can Bakir
44eeb5a60b
enable templates for template listing and displaying (#6343) 2025-08-17 01:50:22 +05:30
PDTeamX
e1f8a18d38 dep update + removed unused code 2025-08-16 15:51:32 +05:30
Dwi Siswanto
6a6fa4d38f
feat(fuzz): eval variables (#6358)
* feat(fuzz): eval vars for rule keys & values

Signed-off-by: Dwi Siswanto <git@dw1.io>

* chore: re-fmt fuzzing/dast errors

Signed-off-by: Dwi Siswanto <git@dw1.io>

* test(fuzz): adds `TestEvaluateVariables`

Signed-off-by: Dwi Siswanto <git@dw1.io>

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-08-16 14:41:22 +05:30
Dwi Siswanto
9fcacd0f86
ci(tests): migrate to golangci-lint v2 (#6380)
* chore: satisfy lints

Signed-off-by: Dwi Siswanto <git@dw1.io>

* ci(tests): migrate to golangci-lint v2

Signed-off-by: Dwi Siswanto <git@dw1.io>

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-08-16 13:20:09 +07:00
Dwi Siswanto
70eeb6c210
fix: prevent unnecessary template updates (#6379)
* test(installer): adds `TestIsOutdatedVersionFix`

Signed-off-by: Dwi Siswanto <git@dw1.io>

* fix: prevent unnecessary template updates

when version API fails.

* fix `catalog/config.IsOutdatedVersion` logic for
  empty version strings
* add GitHub API fallback when PDTM API is unavail
* only show outdated msg for actual version
  mismatches

Signed-off-by: Dwi Siswanto <git@dw1.io>

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-08-16 04:50:20 +05:30
ysokolovsky
d569cfe864
fix(headless): merge extra headers (#6376)
* headless: fix extra headers overwrite

* headless: set Accept-Language when no custom headers
2025-08-16 04:48:34 +05:30
dependabot[bot]
89de8a5a59 chore(deps): bump the go_modules group across 1 directory with 2 updates
Bumps the go_modules group with 2 updates in the / directory: [github.com/docker/docker](https://github.com/docker/docker) and [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure).


Updates `github.com/docker/docker` from 27.1.1+incompatible to 28.0.0+incompatible
- [Release notes](https://github.com/docker/docker/releases)
- [Commits](https://github.com/docker/docker/compare/v27.1.1...v28.0.0)

Updates `github.com/go-viper/mapstructure/v2` from 2.2.1 to 2.3.0
- [Release notes](https://github.com/go-viper/mapstructure/releases)
- [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md)
- [Commits](https://github.com/go-viper/mapstructure/compare/v2.2.1...v2.3.0)

---
updated-dependencies:
- dependency-name: github.com/docker/docker
  dependency-version: 28.0.0+incompatible
  dependency-type: indirect
  dependency-group: go_modules
- dependency-name: github.com/go-viper/mapstructure/v2
  dependency-version: 2.3.0
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-08-15 20:52:01 +00:00
Dwi Siswanto
7e95d9a185
build(make): update template-validate cmds (#6385)
Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-08-15 06:05:51 +05:30
Doğan Can Bakır
6996b4ab75
bump version 2025-08-13 19:22:34 -07:00
Ice3man
8ef3662634
Merge pull request #6364 from projectdiscovery/loading-performance-improvements-v2
feat: loading templates performance improvements
2025-08-06 01:58:03 +05:30
Ice3man
1b6ae44bb7 Merge branch 'dev' of https://github.com/projectdiscovery/nuclei into loading-performance-improvements-v2 2025-08-06 01:57:41 +05:30
Ice3man
bba2c3a576
Merge pull request #6368 from projectdiscovery/fix/waf-detector-nil-pointer
fix: prevent nil pointer panic in WAF detector
2025-08-06 01:53:14 +05:30
knakul853
b685d637f3 fix: prevent nil pointer panic in WAF detector
- Add nil checks for detector and regexCache in DetectWAF()
- Add nil check for individual regex entries before MatchString()
- Add comprehensive unit tests for nil pointer scenarios
- Prevents runtime panic when WAF detector encounters nil pointers during regex matching
2025-08-04 21:12:43 +05:30
Dwi Siswanto
cff86b5c98
fix(events): correct JSON encoder type in ScanStatsWorker (#6366)
Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-08-02 21:49:48 -07:00
Ice3man
3d7f995ddc use bounded concurrency for templates loading 2025-08-03 03:53:34 +05:30
Ice3man
5ba21e272a feat: loading templates performance improvements 2025-08-02 15:58:18 +05:30
PDTeamX
b0fe565a8b Merge branch 'main' into dev 2025-08-02 02:06:03 -07:00
poning
3ac3146ef9
fix(offlinehttp): Replace "-" in headers with "_" for DSL variables (#6363)
* Replace "-" in headers with "_" for DSL variables in passive mode

* test(offlinehttp): adjust haystack & needle in `TestHTTPOperatorExtract`

Signed-off-by: Dwi Siswanto <git@dw1.io>

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
Co-authored-by: Dwi Siswanto <git@dw1.io>
2025-08-02 05:35:03 +07:00
Ice3man
06707ea76f
bugfix: preserve original transport for linear http client (#6357) 2025-07-30 21:38:07 +05:30
Štefan Baebler
91adfeb91c
Bump github.com/bytedance/sonic to v1.14.0 for Go 1.25 compatibility (#6348)
* Bump github.com/bytedance/sonic to v1.14.0  for Go 1.25 compatibility

Fixes #6335
by using https://github.com/bytedance/sonic/releases/tag/v1.14.0

$ go get github.com/bytedance/sonic@v1.14.0 && go mod tidy
go: upgraded github.com/bytedance/sonic v1.13.3 => v1.14.0
go: upgraded github.com/bytedance/sonic/loader v0.2.4 => v0.3.0

* doc(json): update supported plats

Signed-off-by: Dwi Siswanto <git@dw1.io>

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
Co-authored-by: Dwi Siswanto <git@dw1.io>
2025-07-28 19:46:44 +07:00
Dogan Can Bakir
5daf84dd6b
Merge pull request #6338 from jishudashen/dev
chore: fix inconsistent function name in comment
2025-07-21 11:02:48 +03:00
jishudashen
0337b33490 chore: fix inconsistent function name in comment
Signed-off-by: jishudashen <jishudashen@foxmail.com>
2025-07-21 14:13:22 +08:00
Dwi Siswanto
9133e0d2d0
feat(code): log unavail engines as an err while validating (#6326)
* feat(code): log unavail engines as an err while validating

Signed-off-by: Dwi Siswanto <git@dw1.io>

* chore(chore): i meant highest level

Signed-off-by: Dwi Siswanto <git@dw1.io>

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-07-19 00:12:50 +05:30
Ice3man
05f69a6b24
feat: log event for template host skipped during scanning (#6324)
* feat: log event for template host skipped during scanning

* misc changes
2025-07-19 00:11:25 +05:30
HD Moore
5b89811b90
Support concurrent Nuclei engines in the same process (#6322)
* support for concurrent nuclei engines

* clarify LfaAllowed race

* remove unused mutex

* update LfaAllowed logic to prevent races until it can be reworked for per-execution ID

* Update pkg/templates/parser.go

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>

* debug tests

* debug gh action

* fixig gh template test

* using atomic

* using synclockmap

* restore tests concurrency

* lint

* wiring executionId in js fs

---------

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
Co-authored-by: Mzack9999 <mzack9999@protonmail.com>
2025-07-19 00:10:58 +05:30
Mzack9999
3e9bee7400
Merge pull request #6321 from hdm/bug/various-race-conditions
Address race conditions in http.Request and MemGuardian
2025-07-15 15:19:02 +02:00
HD Moore
875941ce8d avoid data races using mutex for memguardian 2025-07-15 02:34:47 -05:00
HD Moore
6bf3f14798 avoid data races by using request clones 2025-07-15 02:34:29 -05:00
gopherorg
1079498182
refactor: use maps.Copy for cleaner map handling (#6283)
Signed-off-by: gopherorg <gopherworld@icloud.com>
2025-07-12 02:50:47 +05:30
Dwi Siswanto
a13ea39461
build(docker): bump builder image golang:1.23-alpine => golang:1.24-alpine (#6316)
Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-07-12 02:16:35 +05:30
HD Moore
f26996cb89
Remove singletons from Nuclei engine (continuation of #6210) (#6296)
* introducing execution id

* wip

* .

* adding separate execution context id

* lint

* vet

* fixing pg dialers

* test ignore

* fixing loader FD limit

* test

* fd fix

* wip: remove CloseProcesses() from dev merge

* wip: fix merge issue

* protocolstate: stop memguarding on last dialer delete

* avoid data race in dialers.RawHTTPClient

* use shared logger and avoid race conditions

* use shared logger and avoid race conditions

* go mod

* patch executionId into compiled template cache

* clean up comment in Parse

* go mod update

* bump echarts

* address merge issues

* fix use of gologger

* switch cmd/nuclei to options.Logger

* address merge issues with go.mod

* go vet: address copy of lock with new Copy function

* fixing tests

* disable speed control

* fix nil ExecuterOptions

* removing deprecated code

* fixing result print

* default logger

* cli default logger

* filter warning from results

* fix performance test

* hardcoding path

* disable upload

* refactor(runner): uses `Warning` instead of `Print` for `pdcpUploadErrMsg`

Signed-off-by: Dwi Siswanto <git@dw1.io>

* Revert "disable upload"

This reverts commit 114fbe6663361bf41cf8b2645fd2d57083d53682.

* Revert "hardcoding path"

This reverts commit cf12ca800e0a0e974bd9fd4826a24e51547f7c00.

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
Co-authored-by: Mzack9999 <mzack9999@protonmail.com>
Co-authored-by: Dwi Siswanto <git@dw1.io>
Co-authored-by: Dwi Siswanto <25837540+dwisiswant0@users.noreply.github.com>
2025-07-10 01:17:26 +05:30
Jose De La O Hernandez
285c5e1442
fixing panic caused by uninitialized colorizer (#6315) 2025-07-09 04:34:05 +05:30
Dwi Siswanto
7e2ec686ae
fix(lib): scans didn't stop on ctx cancellation (#6310)
* fix(lib): scans didn't stop on ctx cancellation

Signed-off-by: Dwi Siswanto <git@dw1.io>

* Update lib/sdk_test.go

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>

* fix(lib): wait resources to be released b4 return

Signed-off-by: Dwi Siswanto <git@dw1.io>

---------

Signed-off-by: Dwi Siswanto <git@dw1.io>
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2025-07-09 01:04:16 +07:00
Mzack9999
3991cc6ec1
Merge pull request #6311 from projectdiscovery/dwisiswant0/chore/config/rm-deprecated-codes-and-calls
chore(config): rm deprecated codes and calls
2025-07-08 15:45:25 +02:00
dependabot[bot]
b756b2706f
chore(deps): bump the modules group with 3 updates (#6305)
Bumps the modules group with 3 updates: [github.com/projectdiscovery/retryablehttp-go](https://github.com/projectdiscovery/retryablehttp-go), [github.com/projectdiscovery/wappalyzergo](https://github.com/projectdiscovery/wappalyzergo) and [github.com/projectdiscovery/cdncheck](https://github.com/projectdiscovery/cdncheck).


Updates `github.com/projectdiscovery/retryablehttp-go` from 1.0.116 to 1.0.117
- [Release notes](https://github.com/projectdiscovery/retryablehttp-go/releases)
- [Commits](https://github.com/projectdiscovery/retryablehttp-go/compare/v1.0.116...v1.0.117)

Updates `github.com/projectdiscovery/wappalyzergo` from 0.2.35 to 0.2.36
- [Release notes](https://github.com/projectdiscovery/wappalyzergo/releases)
- [Commits](https://github.com/projectdiscovery/wappalyzergo/compare/v0.2.35...v0.2.36)

Updates `github.com/projectdiscovery/cdncheck` from 1.1.15 to 1.1.26
- [Release notes](https://github.com/projectdiscovery/cdncheck/releases)
- [Changelog](https://github.com/projectdiscovery/cdncheck/blob/main/.goreleaser.yaml)
- [Commits](https://github.com/projectdiscovery/cdncheck/compare/v1.1.15...v1.1.26)

---
updated-dependencies:
- dependency-name: github.com/projectdiscovery/retryablehttp-go
  dependency-version: 1.0.117
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/wappalyzergo
  dependency-version: 0.2.36
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: modules
- dependency-name: github.com/projectdiscovery/cdncheck
  dependency-version: 1.1.26
  dependency-type: indirect
  update-type: version-update:semver-patch
  dependency-group: modules
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-07-08 17:57:09 +07:00
Dwi Siswanto
bd5864dbb5
chore(config): rm deprecated codes and calls
Signed-off-by: Dwi Siswanto <git@dw1.io>
2025-07-08 17:35:55 +07:00
Mzack9999
13754956ff
Merge pull request #6307 from projectdiscovery/6297-bugfix-tablewriter-memory-leak
bumping version + memory cleanup
2025-07-07 20:13:59 +02:00
Mzack9999
87de71dee9 bumping version + memory cleanup 2025-07-07 18:12:50 +02:00
alban-stourbe-wmx
eccd90d53c
fix(headless): Variables are now available into headless template (#6301)
* fix(headless): variables now available into simple headless template

* chore: erase debug logs
2025-07-04 21:51:09 +07:00
Mzack9999
d55ab2f827 use bytes slice 2025-07-03 18:05:08 +02:00
Mzack9999
cf8d067fea fixing test 2025-07-03 17:28:55 +02:00
Mzack9999
4baf46f080 fixing path 2025-07-03 17:05:14 +02:00
Mzack9999
8304462420 retain required empty spaces 2025-07-03 16:50:21 +02:00
Mzack9999
1f538bcac6 Merge branch 'dev' into pr/6261 2025-07-03 16:11:54 +02:00
Alban Stourbe
937fa1252b fix(main.go): add errcheck 2025-06-26 09:42:14 +02:00
Alban Stourbe
99914e1a32 fix code rabbit 2025-06-24 18:45:35 +02:00
Alban Stourbe
1a9a7563c0 feat: send struct from var file 2025-06-24 18:39:29 +02:00
Alban Stourbe
248548e075 feat(ytt): add ytt files var + add vars from cli and config 2025-06-24 18:32:45 +02:00
Alban Stourbe
3eb3f66897 fix: change gologger runner version 2025-06-13 13:59:24 +02:00
Alban Stourbe
5f501da063 fix: enhance code rabbit 2025-06-12 15:44:11 +02:00
Alban Stourbe
a0bd3b854e feat(templating): add vars templating into yaml inputs 2025-06-12 15:03:33 +02:00
pussycat0x
32845bccf2 CheckRDPEncryption 2025-05-01 18:20:02 +05:30
302 changed files with 8801 additions and 2283 deletions

View File

@@ -0,0 +1,35 @@
{
"permissions": {
"allow": [
"Bash(find:*)",
"Bash(mkdir:*)",
"Bash(cp:*)",
"Bash(ls:*)",
"Bash(make:*)",
"Bash(go:*)",
"Bash(golangci-lint:*)",
"Bash(git merge:*)",
"Bash(git add:*)",
"Bash(git commit:*)",
"Bash(git push:*)",
"Bash(git pull:*)",
"Bash(git fetch:*)",
"Bash(git checkout:*)",
"WebFetch(*)",
"Write(*)",
"WebSearch(*)",
"MultiEdit(*)",
"Edit(*)",
"Bash(gh:*)",
"Bash(grep:*)",
"Bash(tree:*)",
"Bash(./nuclei:*)",
"WebFetch(domain:github.com)"
],
"deny": [
"Bash(make run:*)",
"Bash(./bin/nuclei:*)"
],
"defaultMode": "acceptEdits"
}
}

76
.github/DISCUSSION_TEMPLATE.md vendored Normal file
View File

@@ -0,0 +1,76 @@
# Nuclei Discussion Guidelines
## Before Creating a Discussion
1. **Search existing discussions and issues** to avoid duplicates
2. **Check the documentation** and README first
3. **Browse the FAQ** and common questions
## Bug Reports in Discussions
When reporting a bug in [Q&A Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/q-a), please include:
### Required Information:
- **Clear title** with `[BUG]` prefix (e.g., "[BUG] Nuclei crashes when...")
- **Current behavior** - What's happening now?
- **Expected behavior** - What should happen instead?
- **Steps to reproduce** - Commands or actions that trigger the issue
- **Environment details**:
- OS and version
- Nuclei version (`nuclei -version`)
- Go version (if installed via `go install`)
- **Log output** - Run with `-verbose` or `-debug` for detailed logs
- **Redact sensitive information** - Remove target URLs, credentials, etc.
### After Discussion:
- Maintainers will review and validate the bug report
- Valid bugs will be converted to issues with proper labels and tracking
- Questions and misconfigurations will be resolved in the discussion
## Feature Requests in Discussions
When requesting a feature in [Ideas Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/ideas), please include:
### Required Information:
- **Clear title** with `[FEATURE]` prefix (e.g., "[FEATURE] Add support for...")
- **Feature description** - What do you want to be added?
- **Use case** - Why is this feature needed? What problem does it solve?
- **Implementation ideas** - If you have suggestions on how it could work
- **Alternatives considered** - What other solutions have you thought about?
### After Discussion:
- Community and maintainers will discuss the feasibility
- Popular and viable features will be converted to issues
- Similar features may be grouped together
- Rejected features will be explained in the discussion
## Getting Help
For general questions, troubleshooting, and "how-to" topics:
- Use [Q&A Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/q-a)
- Join the [Discord server](https://discord.gg/projectdiscovery) #nuclei channel
- Check existing discussions for similar questions
## Discussion to Issue Conversion Process
Only maintainers can convert discussions to issues. The process:
1. **Validation** - Maintainers review the discussion for completeness and validity
2. **Classification** - Determine if it's a bug, feature, enhancement, etc.
3. **Issue creation** - Create a properly formatted issue with appropriate labels
4. **Linking** - Link the issue back to the original discussion
5. **Resolution** - Mark the discussion as resolved or close it
This process ensures:
- High-quality issues that are actionable
- Proper triage and labeling
- Reduced noise in the issue tracker
- Community involvement in the validation process
## Why This Process?
- **Better organization** - Issues contain only validated, actionable items
- **Community input** - Discussions allow for community feedback before escalation
- **Quality control** - Maintainers ensure proper formatting and information
- **Reduced maintenance** - Fewer invalid or duplicate issues to manage
- **Clear separation** - Questions vs. actual bugs/features are clearly distinguished

View File

@ -2,14 +2,22 @@ blank_issues_enabled: false
contact_links:
- name: Ask a question / get advice on using nuclei
url: https://github.com/projectdiscovery/nuclei/discussions/categories/q-a
about: Ask a question or request support for using nuclei
- name: 🐛 Report a Bug (Start with Discussion)
url: https://github.com/orgs/projectdiscovery/discussions/new?category=q-a
about: Start by reporting your issue in discussions for proper triage. Issues will be created after review to avoid duplicate/invalid reports.
- name: Share idea / feature to discuss for nuclei
url: https://github.com/projectdiscovery/nuclei/discussions/categories/ideas
about: Share idea / feature to discuss for nuclei
- name: 💡 Request a Feature (Start with Discussion)
url: https://github.com/orgs/projectdiscovery/discussions/new?category=ideas
about: Share your feature idea in discussions first. This helps validate and refine the request before creating an issue.
- name: Connect with PD Team (Discord)
- name: ❓ Ask Questions / Get Help
url: https://github.com/orgs/projectdiscovery/discussions
about: Get help and ask questions about using Nuclei. Many questions don't require issues.
- name: 🔍 Browse Existing Issues
url: https://github.com/projectdiscovery/nuclei/issues
about: Check existing issues to see if your problem has already been reported or is being worked on.
- name: 💬 Connect with PD Team (Discord)
url: https://discord.gg/projectdiscovery
about: Connect with PD Team for direct communication
about: Join our Discord for real-time discussions and community support on the #nuclei channel.

View File

@ -0,0 +1,45 @@
# Issue Template References
## Overview
This folder contains the preserved issue templates that are **not** directly accessible to users. These templates serve as references for maintainers when converting discussions to issues.
## New Workflow
### For Users:
1. **All reports start in Discussions** - Users cannot create issues directly
2. Bug reports go to [Q&A Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/q-a)
3. Feature requests go to [Ideas Discussions](https://github.com/projectdiscovery/nuclei/discussions/categories/ideas)
4. This helps filter out duplicate questions, invalid reports, and ensures proper triage
### For Maintainers:
1. **Review discussions** in both Q&A and Ideas categories
2. **Validate the reports** - ensure they're actual bugs/valid feature requests
3. **Use reference templates** when converting discussions to issues:
- Copy content from `bug-report-reference.yml` or `feature-request-reference.yml`
- Create a new issue manually with the appropriate template structure
- Link back to the original discussion
- Close the discussion or mark it as resolved
## Benefits
- **Better triage**: Avoid cluttering issues with questions and invalid reports
- **Community involvement**: Discussions allow for community input before creating issues
- **Quality control**: Maintainers can ensure issues follow proper format and contain necessary information
- **Reduced noise**: Only validated, actionable items become issues
## Reference Templates
- `bug-report-reference.yml` - Use when converting bug reports from discussions to issues
- `feature-request-reference.yml` - Use when converting feature requests from discussions to issues
## Converting a Discussion to Issue
1. Identify a valid discussion that needs to become an issue
2. Go to the main repository's Issues tab
3. Click "New Issue"
4. Manually create the issue using the reference template structure
5. Include all relevant information from the discussion
6. Add a comment linking back to the original discussion
7. Apply appropriate labels
8. Close or mark the discussion as resolved with a link to the created issue

View File

@ -2,6 +2,7 @@ addReviewers: true
reviewers:
- dogancanbakir
- dwisiswant0
- mzack9999
numberOfReviewers: 1
skipKeywords:

View File

@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
if: github.actor == 'dependabot[bot]'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
token: ${{ secrets.DEPENDABOT_PAT }}

View File

@ -13,7 +13,7 @@ jobs:
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go/compat-checks@v1
with:
release-test: true

View File

@ -11,7 +11,7 @@ jobs:
if: "${{ !endsWith(github.actor, '[bot]') }}"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/setup/git@v1
- run: make syntax-docs

View File

@ -28,7 +28,7 @@ jobs:
LIST_FILE: "/tmp/targets-${{ matrix.targets }}.txt"
PROFILE_MEM: "/tmp/nuclei-profile-${{ matrix.targets }}-targets"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/git@v1
- uses: projectdiscovery/actions/setup/go@v1
- name: Generate list

View File

@ -16,7 +16,7 @@ jobs:
env:
OUTPUT: "/tmp/results.sarif"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- run: go install golang.org/x/vuln/cmd/govulncheck@latest
- run: govulncheck -scan package -format sarif ./... > $OUTPUT

View File

@ -11,7 +11,7 @@ jobs:
env:
BENCH_OUT: "/tmp/bench.out"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- run: make build-test
- run: ./bin/nuclei.test -test.run - -test.bench=. -test.benchmem ./cmd/nuclei/ | tee $BENCH_OUT

View File

@ -16,7 +16,7 @@ jobs:
LIST_FILE: "/tmp/targets-${{ matrix.count }}.txt"
PROFILE_MEM: "/tmp/nuclei-perf-test-${{ matrix.count }}"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- run: make verify
- name: Generate list

View File

@ -10,7 +10,7 @@ jobs:
release:
runs-on: ubuntu-latest-16-cores
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0
- uses: projectdiscovery/actions/setup/go@v1

View File

@ -13,7 +13,7 @@ jobs:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v9
- uses: actions/stale@v10
with:
days-before-stale: 90
days-before-close: 7

View File

@ -22,9 +22,9 @@ jobs:
if: "${{ !endsWith(github.actor, '[bot]') }}"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/golangci-lint@v1
- uses: projectdiscovery/actions/golangci-lint/v2@v1
tests:
name: "Tests"
@ -35,7 +35,7 @@ jobs:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: "${{ matrix.os }}"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- run: make vet
- run: make build
@ -52,16 +52,18 @@ jobs:
needs: ["tests"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- name: "Simple"
run: go run .
working-directory: examples/simple/
# - run: go run . # Temporarily disabled very flaky in github actions
# working-directory: examples/advanced/
- name: "with Speed Control"
run: go run .
working-directory: examples/with_speed_control/
# TODO: FIX with ExecutionID (ref: https://github.com/projectdiscovery/nuclei/pull/6296)
# - name: "with Speed Control"
# run: go run .
# working-directory: examples/with_speed_control/
integration:
name: "Integration tests"
@ -72,7 +74,7 @@ jobs:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/setup/python@v1
- run: bash run.sh "${{ matrix.os }}"
@ -91,7 +93,7 @@ jobs:
os: [ubuntu-latest, windows-latest, macOS-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/setup/python@v1
- run: bash run.sh
@ -104,7 +106,7 @@ jobs:
needs: ["tests"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- run: make template-validate
@ -117,7 +119,7 @@ jobs:
contents: read
security-events: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: github/codeql-action/init@v3
with:
languages: 'go'
@ -129,7 +131,7 @@ jobs:
needs: ["tests"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: projectdiscovery/actions/setup/go@v1
- uses: projectdiscovery/actions/goreleaser@v1
@ -141,7 +143,7 @@ jobs:
TARGET_URL: "http://scanme.sh/a/?b=c"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- run: make build
- name: "Setup environment (push)"
if: ${{ github.event_name == 'push' }}

2
.gitignore vendored
View File

@ -28,6 +28,8 @@
/scrapefunc
/scrapefuncs
/tsgen
/integration_tests/integration-test
/integration_tests/nuclei
# Templates
/*.yaml

83
CLAUDE.md Normal file
View File

@ -0,0 +1,83 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
Nuclei is a modern, high-performance vulnerability scanner built in Go that leverages YAML-based templates for customizable vulnerability detection. It supports multiple protocols (HTTP, DNS, TCP, SSL, WebSocket, WHOIS, JavaScript, Code) and is designed for zero false positives through real-world condition simulation.
## Development Commands
### Building and Testing
- `make build` - Build the main nuclei binary to ./bin/nuclei
- `make test` - Run unit tests with race detection
- `make integration` - Run integration tests (builds and runs test suite)
- `make functional` - Run functional tests
- `make vet` - Run go vet for code analysis
- `make tidy` - Clean up go modules
### Validation and Linting
- `make template-validate` - Validate nuclei templates using the built binary
- `go fmt ./...` - Format Go code
- `go vet ./...` - Static analysis
### Development Tools
- `make devtools-all` - Build all development tools (bindgen, tsgen, scrapefuncs)
- `make jsupdate-all` - Update JavaScript bindings and TypeScript definitions
- `make docs` - Generate documentation
- `make memogen` - Generate memoization code for JavaScript libraries
### Testing Specific Components
- Run single test: `go test -v ./pkg/path/to/package -run TestName`
- Integration tests are in `integration_tests/` and can be run via `make integration`
## Architecture Overview
### Core Components
- **cmd/nuclei** - Main CLI entry point with flag parsing and configuration
- **internal/runner** - Core runner that orchestrates the entire scanning process
- **pkg/core** - Execution engine with work pools and template clustering
- **pkg/templates** - Template parsing, compilation, and management
- **pkg/protocols** - Protocol implementations (HTTP, DNS, Network, etc.)
- **pkg/operators** - Matching and extraction logic (matchers/extractors)
- **pkg/catalog** - Template discovery and loading from disk/remote sources
### Protocol Architecture
Each protocol (HTTP, DNS, Network, etc.) implements:
- Request interface with Compile(), ExecuteWithResults(), Match(), Extract() methods
- Operators embedding for matching/extraction functionality
- Protocol-specific request building and execution logic
### Template System
- Templates are YAML files defining vulnerability detection logic
- Compiled into executable requests with operators (matchers/extractors)
- Support for workflows (multi-step template execution)
- Template clustering optimizes identical requests across multiple templates
### Key Execution Flow
1. Template loading and compilation via pkg/catalog/loader
2. Input provider setup for targets
3. Engine creation with work pools for concurrency
4. Template execution with result collection via operators
5. Output writing and reporting integration
### JavaScript Integration
- Custom JavaScript runtime for code protocol templates
- Auto-generated bindings in pkg/js/generated/
- Library implementations in pkg/js/libs/
- Development tools for binding generation in pkg/js/devtools/
## Template Development
- Templates located in separate nuclei-templates repository
- YAML format with info, requests, and operators sections
- Support for multiple protocol types in single template
- Built-in DSL functions for dynamic content generation
- Template validation available via `make template-validate`
## Key Directories
- **lib/** - SDK for embedding nuclei as a library
- **examples/** - Usage examples for different scenarios
- **integration_tests/** - Integration test suite with protocol-specific tests
- **pkg/fuzz/** - Fuzzing engine and DAST capabilities
- **pkg/input/** - Input processing for various formats (Burp, OpenAPI, etc.)
- **pkg/reporting/** - Result export and issue tracking integrations

View File

@ -1,5 +1,5 @@
# Build
FROM golang:1.23-alpine AS builder
FROM golang:1.24-alpine AS builder
RUN apk add build-base
WORKDIR /app

View File

@ -15,8 +15,8 @@ ifneq ($(shell go env GOOS),darwin)
endif
.PHONY: all build build-stats clean devtools-all devtools-bindgen devtools-scrapefuncs
.PHONY: devtools-tsgen docs docgen dsl-docs functional fuzzplayground go-build syntax-docs
.PHONY: integration jsupdate-all jsupdate-bindgen jsupdate-tsgen memogen scan-charts test
.PHONY: devtools-tsgen docs docgen dsl-docs functional fuzzplayground go-build lint lint-strict syntax-docs
.PHONY: integration jsupdate-all jsupdate-bindgen jsupdate-tsgen memogen scan-charts test test-with-lint
.PHONY: tidy ts verify download vet template-validate
all: build
@ -146,5 +146,14 @@ dsl-docs:
template-validate: build
template-validate:
./bin/nuclei -ut
./bin/nuclei -validate -et http/technologies
./bin/nuclei -validate -w workflows -et http/technologies
./bin/nuclei -validate \
-et http/technologies \
-t dns \
-t ssl \
-t network \
-t http/exposures \
-ept code
./bin/nuclei -validate \
-w workflows \
-et http/technologies \
-ept code

View File

@ -356,6 +356,7 @@ CLOUD:
AUTHENTICATION:
-sf, -secret-file string[] path to config file containing secrets for nuclei authenticated scan
-ps, -prefetch-secrets prefetch secrets from the secrets file
# NOTE: Headers in secrets files preserve exact casing (useful for case-sensitive APIs)
EXAMPLES:

View File

@ -42,8 +42,8 @@ func runFunctionalTests(debug bool) (error, bool) {
return errors.Wrap(err, "could not open test cases"), true
}
defer func() {
_ = file.Close()
}()
_ = file.Close()
}()
errored, failedTestCases := runTestCases(file, debug)

View File

@ -0,0 +1,104 @@
package main
import (
"context"
"fmt"
"log"
"time"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/mongo"
"github.com/testcontainers/testcontainers-go"
mongocontainer "github.com/testcontainers/testcontainers-go/modules/mongodb"
osutil "github.com/projectdiscovery/utils/os"
mongoclient "go.mongodb.org/mongo-driver/mongo"
mongooptions "go.mongodb.org/mongo-driver/mongo/options"
)
const (
	// dbName is the MongoDB database name the exporter test writes to and
	// reads back from.
	dbName = "test"
	// dbImage is the container image pulled for the throwaway database.
	dbImage = "mongo:8"
)

// exportersTestCases registers the exporter integration tests. The mongo
// test is disabled on Windows and macOS, where the container runtime is
// not available in CI.
var exportersTestCases = []TestCaseInfo{
	{Path: "exporters/mongo", TestCase: &mongoExporter{}, DisableOn: func() bool {
		return osutil.IsWindows() || osutil.IsOSX()
	}},
}

// mongoExporter verifies the end-to-end export of a result event to MongoDB.
type mongoExporter struct{}
// Execute starts a disposable MongoDB container, writes a single result
// event through the mongo exporter, then reads it back with a raw client to
// verify the round trip. The filepath argument is unused; this test case is
// fully self-contained.
func (m *mongoExporter) Execute(filepath string) error {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
	defer cancel()

	// Start a MongoDB container. Termination is deferred before the error
	// check on purpose: a container may have been created even when Run
	// returns an error, and TerminateContainer tolerates a nil container.
	mongodbContainer, err := mongocontainer.Run(ctx, dbImage)
	defer func() {
		if err := testcontainers.TerminateContainer(mongodbContainer); err != nil {
			log.Printf("failed to terminate container: %s", err)
		}
	}()
	if err != nil {
		return fmt.Errorf("failed to start container: %w", err)
	}

	connString, err := mongodbContainer.ConnectionString(ctx)
	if err != nil {
		return fmt.Errorf("failed to get connection string for MongoDB container: %w", err)
	}
	// Appending the db name scopes the URI to dbName — assumes the returned
	// connection string ends with a trailing slash (TODO confirm against the
	// testcontainers-go mongodb module docs).
	connString = connString + dbName

	// Create a MongoDB exporter and write a test result to the database.
	opts := mongo.Options{
		ConnectionString: connString,
		CollectionName:   "test",
		BatchSize:        1, // Ensure we write the result immediately
	}
	exporter, err := mongo.New(&opts)
	if err != nil {
		return fmt.Errorf("failed to create MongoDB exporter: %w", err)
	}
	defer func() {
		if err := exporter.Close(); err != nil {
			fmt.Printf("failed to close exporter: %s\n", err)
		}
	}()

	res := &output.ResultEvent{
		Request:  "test request",
		Response: "test response",
	}
	if err := exporter.Export(res); err != nil {
		return fmt.Errorf("failed to export result event to MongoDB: %w", err)
	}

	// Verify that the result was written to the database.
	clientOptions := mongooptions.Client().ApplyURI(connString)
	client, err := mongoclient.Connect(ctx, clientOptions)
	if err != nil {
		return fmt.Errorf("error creating MongoDB client: %w", err)
	}
	defer func() {
		if err := client.Disconnect(ctx); err != nil {
			fmt.Printf("failed to disconnect from MongoDB: %s\n", err)
		}
	}()

	collection := client.Database(dbName).Collection(opts.CollectionName)
	var actualRes output.ResultEvent
	if err := collection.FindOne(ctx, map[string]interface{}{"request": res.Request}).Decode(&actualRes); err != nil {
		return fmt.Errorf("failed to find document in MongoDB: %w", err)
	}
	if actualRes.Request != res.Request || actualRes.Response != res.Response {
		return fmt.Errorf("exported result does not match expected result: got %v, want %v", actualRes, res)
	}
	return nil
}

View File

@ -179,8 +179,8 @@ func (h *headlessFileUpload) Execute(filePath string) error {
}
defer func() {
_ = file.Close()
}()
_ = file.Close()
}()
content, err := io.ReadAll(file)
if err != nil {
@ -238,8 +238,8 @@ func (h *headlessFileUploadNegative) Execute(filePath string) error {
}
defer func() {
_ = file.Close()
}()
_ = file.Close()
}()
content, err := io.ReadAll(file)
if err != nil {

View File

@ -19,7 +19,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
logutil "github.com/projectdiscovery/utils/log"
sliceutil "github.com/projectdiscovery/utils/slice"
stringsutil "github.com/projectdiscovery/utils/strings"
@ -196,7 +196,7 @@ func (d *httpDefaultMatcherCondition) Execute(filePath string) error {
return err
}
if routerErr != nil {
return errorutil.NewWithErr(routerErr).Msgf("failed to send http request to interactsh server")
return errkit.Wrap(routerErr, "failed to send http request to interactsh server")
}
if err := expectResultsCount(results, 1); err != nil {
return err
@ -628,10 +628,10 @@ func (h *httpRawWithParams) Execute(filePath string) error {
// we intentionally use params["test"] instead of params.Get("test") to test the case where
// there are multiple parameters with the same name
if !reflect.DeepEqual(params["key1"], []string{"value1"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"value1"}, params["key1"])
errx = errkit.Append(errx, errkit.New("key1 not found in params", "expected", []string{"value1"}, "got", params["key1"]))
}
if !reflect.DeepEqual(params["key2"], []string{"value2"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"value2"}, params["key2"])
errx = errkit.Append(errx, errkit.New("key2 not found in params", "expected", []string{"value2"}, "got", params["key2"]))
}
_, _ = fmt.Fprintf(w, "Test is test raw-params-matcher text")
})
@ -948,8 +948,8 @@ func (h *httpRequestSelfContained) Execute(filePath string) error {
_ = server.ListenAndServe()
}()
defer func() {
_ = server.Close()
}()
_ = server.Close()
}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc")
if err != nil {
@ -971,10 +971,10 @@ func (h *httpRequestSelfContainedWithParams) Execute(filePath string) error {
// we intentionally use params["test"] instead of params.Get("test") to test the case where
// there are multiple parameters with the same name
if !reflect.DeepEqual(params["something"], []string{"here"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"here"}, params["something"])
errx = errkit.Append(errx, errkit.New("something not found in params", "expected", []string{"here"}, "got", params["something"]))
}
if !reflect.DeepEqual(params["key"], []string{"value"}) {
errx = errorutil.WrapfWithNil(errx, "expected %v, got %v", []string{"value"}, params["key"])
errx = errkit.Append(errx, errkit.New("key not found in params", "expected", []string{"value"}, "got", params["key"]))
}
_, _ = w.Write([]byte("This is self-contained response"))
})
@ -986,8 +986,8 @@ func (h *httpRequestSelfContainedWithParams) Execute(filePath string) error {
_ = server.ListenAndServe()
}()
defer func() {
_ = server.Close()
}()
_ = server.Close()
}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-esc")
if err != nil {
@ -1021,20 +1021,20 @@ func (h *httpRequestSelfContainedFileInput) Execute(filePath string) error {
_ = server.ListenAndServe()
}()
defer func() {
_ = server.Close()
}()
_ = server.Close()
}()
// create temp file
FileLoc, err := os.CreateTemp("", "self-contained-payload-*.txt")
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to create temp file")
return errkit.Wrap(err, "failed to create temp file")
}
if _, err := FileLoc.Write([]byte("one\ntwo\n")); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to write payload to temp file")
return errkit.Wrap(err, "failed to write payload to temp file")
}
defer func() {
_ = FileLoc.Close()
}()
_ = FileLoc.Close()
}()
results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "", debug, "-V", "test="+FileLoc.Name(), "-esc")
if err != nil {
@ -1046,7 +1046,7 @@ func (h *httpRequestSelfContainedFileInput) Execute(filePath string) error {
}
if !sliceutil.ElementsMatch(gotReqToEndpoints, []string{"/one", "/two", "/one", "/two"}) {
return errorutil.NewWithTag(filePath, "expected requests to be sent to `/one` and `/two` endpoints but were sent to `%v`", gotReqToEndpoints)
return errkit.New("expected requests to be sent to `/one` and `/two` endpoints but were sent to `%v`", gotReqToEndpoints, "filePath", filePath)
}
return nil
}

View File

@ -4,6 +4,7 @@ import (
"flag"
"fmt"
"os"
"regexp"
"runtime"
"strings"
@ -56,6 +57,7 @@ var (
"flow": flowTestcases,
"javascript": jsTestcases,
"matcher-status": matcherStatusTestcases,
"exporters": exportersTestCases,
}
// flakyTests are run with a retry count of 3
flakyTests = map[string]bool{
@ -90,8 +92,8 @@ func main() {
defer fuzzplayground.Cleanup()
server := fuzzplayground.GetPlaygroundServer()
defer func() {
_ = server.Close()
}()
_ = server.Close()
}()
go func() {
if err := server.Start("localhost:8082"); err != nil {
if !strings.Contains(err.Error(), "Server closed") {
@ -210,7 +212,7 @@ func execute(testCase testutils.TestCase, templatePath string) (string, error) {
}
func expectResultsCount(results []string, expectedNumbers ...int) error {
results = filterHeadlessLogs(results)
results = filterLines(results)
match := sliceutil.Contains(expectedNumbers, len(results))
if !match {
return fmt.Errorf("incorrect number of results: %d (actual) vs %v (expected) \nResults:\n\t%s\n", len(results), expectedNumbers, strings.Join(results, "\n\t")) // nolint:all
@ -224,6 +226,13 @@ func normalizeSplit(str string) []string {
})
}
// filterLines applies all filtering functions to the results
func filterLines(results []string) []string {
results = filterHeadlessLogs(results)
results = filterUnsignedTemplatesWarnings(results)
return results
}
// if chromium is not installed go-rod installs it in .cache directory
// this function filters out the logs from download and installation
func filterHeadlessLogs(results []string) []string {
@ -237,3 +246,16 @@ func filterHeadlessLogs(results []string) []string {
}
return filtered
}
// unsignedTemplatesWarningRegex matches the runtime warning nuclei prints
// when unsigned templates are loaded. Compiled once at package scope so the
// regex is not recompiled on every call.
var unsignedTemplatesWarningRegex = regexp.MustCompile(`Loading \d+ unsigned templates for scan\. Use with caution\.`)

// filterUnsignedTemplatesWarnings filters out warning messages about
// unsigned templates so they do not skew result-count assertions.
func filterUnsignedTemplatesWarnings(results []string) []string {
	filtered := make([]string, 0, len(results))
	for _, result := range results {
		if unsignedTemplatesWarningRegex.MatchString(result) {
			continue
		}
		filtered = append(filtered, result)
	}
	return filtered
}

View File

@ -15,13 +15,17 @@ var jsTestcases = []TestCaseInfo{
{Path: "protocols/javascript/ssh-server-fingerprint.yaml", TestCase: &javascriptSSHServerFingerprint{}, DisableOn: func() bool { return osutils.IsWindows() || osutils.IsOSX() }},
{Path: "protocols/javascript/net-multi-step.yaml", TestCase: &networkMultiStep{}},
{Path: "protocols/javascript/net-https.yaml", TestCase: &javascriptNetHttps{}},
{Path: "protocols/javascript/oracle-auth-test.yaml", TestCase: &javascriptOracleAuthTest{}, DisableOn: func() bool { return osutils.IsWindows() || osutils.IsOSX() }},
{Path: "protocols/javascript/vnc-pass-brute.yaml", TestCase: &javascriptVncPassBrute{}},
}
var (
redisResource *dockertest.Resource
sshResource *dockertest.Resource
pool *dockertest.Pool
defaultRetry = 3
redisResource *dockertest.Resource
sshResource *dockertest.Resource
oracleResource *dockertest.Resource
vncResource *dockertest.Resource
pool *dockertest.Pool
defaultRetry = 3
)
type javascriptNetHttps struct{}
@ -98,6 +102,71 @@ func (j *javascriptSSHServerFingerprint) Execute(filePath string) error {
return multierr.Combine(errs...)
}
// javascriptOracleAuthTest exercises the oracle-auth-test javascript
// template against a dockerised Oracle XE instance.
type javascriptOracleAuthTest struct{}

// Execute runs the template against the Oracle container started in init().
// It returns nil (skip) when the container or docker pool is unavailable,
// and retries up to defaultRetry times because the database can take a
// while to accept connections.
func (j *javascriptOracleAuthTest) Execute(filePath string) error {
	if oracleResource == nil || pool == nil {
		// skip test as oracle is not running
		return nil
	}
	tempPort := oracleResource.GetPort("1521/tcp")
	finalURL := "localhost:" + tempPort
	defer purge(oracleResource)
	errs := []error{}
	for i := 0; i < defaultRetry; i++ {
		results := []string{}
		var err error
		// pool.Retry's return value is ignored: the closure always returns
		// nil, so it runs exactly once per loop iteration.
		_ = pool.Retry(func() error {
			// give the Oracle listener time to start accepting connections
			time.Sleep(3 * time.Second)
			results, err = testutils.RunNucleiTemplateAndGetResults(filePath, finalURL, debug)
			return nil
		})
		if err != nil {
			return err
		}
		if err := expectResultsCount(results, 1); err == nil {
			return nil
		} else {
			errs = append(errs, err)
		}
	}
	return multierr.Combine(errs...)
}
// javascriptVncPassBrute exercises the vnc-pass-brute javascript template
// against a dockerised VNC server.
type javascriptVncPassBrute struct{}

// Execute runs the template against the VNC container started in init().
// It returns nil (skip) when the container or docker pool is unavailable,
// and retries up to defaultRetry times to allow the server to come up.
func (j *javascriptVncPassBrute) Execute(filePath string) error {
	if vncResource == nil || pool == nil {
		// skip test as vnc is not running
		return nil
	}
	tempPort := vncResource.GetPort("5900/tcp")
	finalURL := "localhost:" + tempPort
	defer purge(vncResource)
	errs := []error{}
	for i := 0; i < defaultRetry; i++ {
		results := []string{}
		var err error
		// pool.Retry's return value is ignored: the closure always returns
		// nil, so it runs exactly once per loop iteration.
		_ = pool.Retry(func() error {
			// give the VNC server time to start accepting connections
			time.Sleep(3 * time.Second)
			results, err = testutils.RunNucleiTemplateAndGetResults(filePath, finalURL, debug)
			return nil
		})
		if err != nil {
			return err
		}
		if err := expectResultsCount(results, 1); err == nil {
			return nil
		} else {
			errs = append(errs, err)
		}
	}
	return multierr.Combine(errs...)
}
// purge any given resource if it is not nil
func purge(resource *dockertest.Resource) {
if resource != nil && pool != nil {
@ -163,4 +232,41 @@ func init() {
if err := sshResource.Expire(30); err != nil {
log.Printf("Could not expire resource: %s", err)
}
// setup a temporary oracle instance
oracleResource, err = pool.RunWithOptions(&dockertest.RunOptions{
Repository: "gvenzl/oracle-xe",
Tag: "latest",
Env: []string{
"ORACLE_PASSWORD=mysecret",
},
Platform: "linux/amd64",
})
if err != nil {
log.Printf("Could not start Oracle resource: %s", err)
return
}
// by default expire after 30 sec
if err := oracleResource.Expire(30); err != nil {
log.Printf("Could not expire Oracle resource: %s", err)
}
// setup a temporary vnc server
vncResource, err = pool.RunWithOptions(&dockertest.RunOptions{
Repository: "dorowu/ubuntu-desktop-lxde-vnc",
Tag: "latest",
Env: []string{
"VNC_PASSWORD=mysecret",
},
Platform: "linux/amd64",
})
if err != nil {
log.Printf("Could not start resource: %s", err)
return
}
// by default expire after 30 sec
if err := vncResource.Expire(30); err != nil {
log.Printf("Could not expire resource: %s", err)
}
}

View File

@ -68,17 +68,21 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
cache := hosterrorscache.New(30, hosterrorscache.DefaultMaxHostsCount, nil)
defer cache.Close()
defaultOpts := types.DefaultOptions()
defaultOpts.ExecutionId = "test"
mockProgress := &testutils.MockProgressClient{}
reportingClient, err := reporting.New(&reporting.Options{}, "", false)
reportingClient, err := reporting.New(&reporting.Options{ExecutionId: defaultOpts.ExecutionId}, "", false)
if err != nil {
return nil, err
}
defer reportingClient.Close()
defaultOpts := types.DefaultOptions()
_ = protocolstate.Init(defaultOpts)
_ = protocolinit.Init(defaultOpts)
defer protocolstate.Close(defaultOpts.ExecutionId)
defaultOpts.Templates = goflags.StringSlice{templatePath}
defaultOpts.ExcludeTags = config.ReadIgnoreFile().Tags
@ -100,7 +104,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
ratelimiter := ratelimit.New(context.Background(), 150, time.Second)
defer ratelimiter.Stop()
executerOpts := protocols.ExecutorOptions{
executerOpts := &protocols.ExecutorOptions{
Output: outputWriter,
Options: defaultOpts,
Progress: mockProgress,
@ -116,7 +120,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
engine := core.New(defaultOpts)
engine.SetExecuterOptions(executerOpts)
workflowLoader, err := parsers.NewLoader(&executerOpts)
workflowLoader, err := parsers.NewLoader(executerOpts)
if err != nil {
log.Fatalf("Could not create workflow loader: %s\n", err)
}
@ -128,7 +132,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
}
store.Load()
_ = engine.Execute(context.Background(), store.Templates(), provider.NewSimpleInputProviderWithUrls(templateURL))
_ = engine.Execute(context.Background(), store.Templates(), provider.NewSimpleInputProviderWithUrls(defaultOpts.ExecutionId, templateURL))
engine.WorkPool().Wait() // Wait for the scan to finish
return results, nil

View File

@ -10,7 +10,7 @@ import (
"github.com/julienschmidt/httprouter"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
permissionutil "github.com/projectdiscovery/utils/permission"
)
@ -223,7 +223,7 @@ type loadTemplateWithID struct{}
func (h *loadTemplateWithID) Execute(nooop string) error {
results, err := testutils.RunNucleiBareArgsAndGetResults(debug, nil, "-target", "scanme.sh", "-id", "self-signed-ssl")
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id")
return errkit.Wrap(err, "failed to load template with id")
}
return expectResultsCount(results, 1)
}

View File

@ -34,8 +34,8 @@ func (h *networkBasic) Execute(filePath string) error {
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer func() {
_ = conn.Close()
}()
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil {
@ -71,8 +71,8 @@ func (h *networkMultiStep) Execute(filePath string) error {
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer func() {
_ = conn.Close()
}()
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 5, time.Duration(5)*time.Second)
if err != nil {
@ -119,8 +119,8 @@ type networkRequestSelContained struct{}
func (h *networkRequestSelContained) Execute(filePath string) error {
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer func() {
_ = conn.Close()
}()
_ = conn.Close()
}()
_, _ = conn.Write([]byte("Authentication successful"))
})
@ -141,8 +141,8 @@ func (h *networkVariables) Execute(filePath string) error {
ts := testutils.NewTCPServer(nil, defaultStaticPort, func(conn net.Conn) {
defer func() {
_ = conn.Close()
}()
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil {
@ -171,8 +171,8 @@ type networkPort struct{}
func (n *networkPort) Execute(filePath string) error {
ts := testutils.NewTCPServer(nil, 23846, func(conn net.Conn) {
defer func() {
_ = conn.Close()
}()
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil {
@ -206,8 +206,8 @@ func (n *networkPort) Execute(filePath string) error {
// this is positive test case where we expect port to be overridden and 34567 to be used
ts2 := testutils.NewTCPServer(nil, 34567, func(conn net.Conn) {
defer func() {
_ = conn.Close()
}()
_ = conn.Close()
}()
data, err := reader.ConnReadNWithTimeout(conn, 4, time.Duration(5)*time.Second)
if err != nil {

View File

@ -4,7 +4,7 @@ import (
"fmt"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
)
var profileLoaderTestcases = []TestCaseInfo{
@ -16,9 +16,9 @@ var profileLoaderTestcases = []TestCaseInfo{
type profileLoaderByRelFile struct{}
func (h *profileLoaderByRelFile) Execute(testName string) error {
results, err := testutils.RunNucleiWithArgsAndGetResults(false, "-tl", "-tp", "cloud.yml")
results, err := testutils.RunNucleiWithArgsAndGetResults(debug, "-tl", "-tp", "cloud.yml")
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id")
return errkit.Wrap(err, "failed to load template with id")
}
if len(results) <= 10 {
return fmt.Errorf("incorrect result: expected more results than %d, got %v", 10, len(results))
@ -29,9 +29,9 @@ func (h *profileLoaderByRelFile) Execute(testName string) error {
type profileLoaderById struct{}
func (h *profileLoaderById) Execute(testName string) error {
results, err := testutils.RunNucleiWithArgsAndGetResults(false, "-tl", "-tp", "cloud")
results, err := testutils.RunNucleiWithArgsAndGetResults(debug, "-tl", "-tp", "cloud")
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id")
return errkit.Wrap(err, "failed to load template with id")
}
if len(results) <= 10 {
return fmt.Errorf("incorrect result: expected more results than %d, got %v", 10, len(results))
@ -43,9 +43,9 @@ func (h *profileLoaderById) Execute(testName string) error {
type customProfileLoader struct{}
func (h *customProfileLoader) Execute(filepath string) error {
results, err := testutils.RunNucleiWithArgsAndGetResults(false, "-tl", "-tp", filepath)
results, err := testutils.RunNucleiWithArgsAndGetResults(debug, "-tl", "-tp", filepath)
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to load template with id")
return errkit.Wrap(err, "failed to load template with id")
}
if len(results) < 1 {
return fmt.Errorf("incorrect result: expected more results than %d, got %v", 1, len(results))

View File

@ -4,7 +4,7 @@ import (
"os"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
)
var templatesDirTestCases = []TestCaseInfo{
@ -17,7 +17,7 @@ type templateDirWithTargetTest struct{}
func (h *templateDirWithTargetTest) Execute(filePath string) error {
tempdir, err := os.MkdirTemp("", "nuclei-update-dir-*")
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to create temp dir")
return errkit.Wrap(err, "failed to create temp dir")
}
defer func() {
_ = os.RemoveAll(tempdir)

View File

@ -13,14 +13,16 @@ import (
"strings"
"time"
"github.com/projectdiscovery/gologger"
_pdcp "github.com/projectdiscovery/nuclei/v3/internal/pdcp"
"github.com/projectdiscovery/utils/auth/pdcp"
"github.com/projectdiscovery/utils/env"
_ "github.com/projectdiscovery/utils/pprof"
stringsutil "github.com/projectdiscovery/utils/strings"
"github.com/rs/xid"
"gopkg.in/yaml.v2"
"github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/gologger/levels"
"github.com/projectdiscovery/interactsh/pkg/client"
"github.com/projectdiscovery/nuclei/v3/internal/runner"
@ -38,7 +40,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/types/scanstrategy"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/monitor"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file"
unitutils "github.com/projectdiscovery/utils/unit"
updateutils "github.com/projectdiscovery/utils/update"
@ -52,16 +54,18 @@ var (
)
func main() {
options.Logger = gologger.DefaultLogger
// enables CLI specific configs mostly interactive behavior
config.CurrentAppMode = config.AppModeCLI
if err := runner.ConfigureOptions(); err != nil {
gologger.Fatal().Msgf("Could not initialize options: %s\n", err)
options.Logger.Fatal().Msgf("Could not initialize options: %s\n", err)
}
_ = readConfig()
if options.ListDslSignatures {
gologger.Info().Msgf("The available custom DSL functions are:")
options.Logger.Info().Msgf("The available custom DSL functions are:")
fmt.Println(dsl.GetPrintableDslFunctionSignatures(options.NoColor))
return
}
@ -72,7 +76,7 @@ func main() {
templates.UseOptionsForSigner(options)
tsigner, err := signer.NewTemplateSigner(nil, nil) // will read from env , config or generate new keys
if err != nil {
gologger.Fatal().Msgf("couldn't initialize signer crypto engine: %s\n", err)
options.Logger.Fatal().Msgf("couldn't initialize signer crypto engine: %s\n", err)
}
successCounter := 0
@ -88,7 +92,7 @@ func main() {
if err != templates.ErrNotATemplate {
// skip warnings and errors as given items are not templates
errorCounter++
gologger.Error().Msgf("could not sign '%s': %s\n", iterItem, err)
options.Logger.Error().Msgf("could not sign '%s': %s\n", iterItem, err)
}
} else {
successCounter++
@ -97,10 +101,10 @@ func main() {
return nil
})
if err != nil {
gologger.Error().Msgf("%s\n", err)
options.Logger.Error().Msgf("%s\n", err)
}
}
gologger.Info().Msgf("All templates signatures were elaborated success=%d failed=%d\n", successCounter, errorCounter)
options.Logger.Info().Msgf("All templates signatures were elaborated success=%d failed=%d\n", successCounter, errorCounter)
return
}
@ -111,7 +115,7 @@ func main() {
createProfileFile := func(ext, profileType string) *os.File {
f, err := os.Create(memProfile + ext)
if err != nil {
gologger.Fatal().Msgf("profile: could not create %s profile %q file: %v", profileType, f.Name(), err)
options.Logger.Fatal().Msgf("profile: could not create %s profile %q file: %v", profileType, f.Name(), err)
}
return f
}
@ -125,18 +129,18 @@ func main() {
// Start tracing
if err := trace.Start(traceFile); err != nil {
gologger.Fatal().Msgf("profile: could not start trace: %v", err)
options.Logger.Fatal().Msgf("profile: could not start trace: %v", err)
}
// Start CPU profiling
if err := pprof.StartCPUProfile(cpuProfileFile); err != nil {
gologger.Fatal().Msgf("profile: could not start CPU profile: %v", err)
options.Logger.Fatal().Msgf("profile: could not start CPU profile: %v", err)
}
defer func() {
// Start heap memory snapshot
if err := pprof.WriteHeapProfile(memProfileFile); err != nil {
gologger.Fatal().Msgf("profile: could not write memory profile: %v", err)
options.Logger.Fatal().Msgf("profile: could not write memory profile: %v", err)
}
pprof.StopCPUProfile()
@ -146,24 +150,26 @@ func main() {
runtime.MemProfileRate = oldMemProfileRate
gologger.Info().Msgf("CPU profile saved at %q", cpuProfileFile.Name())
gologger.Info().Msgf("Memory usage snapshot saved at %q", memProfileFile.Name())
gologger.Info().Msgf("Traced at %q", traceFile.Name())
options.Logger.Info().Msgf("CPU profile saved at %q", cpuProfileFile.Name())
options.Logger.Info().Msgf("Memory usage snapshot saved at %q", memProfileFile.Name())
options.Logger.Info().Msgf("Traced at %q", traceFile.Name())
}()
}
options.ExecutionId = xid.New().String()
runner.ParseOptions(options)
if options.ScanUploadFile != "" {
if err := runner.UploadResultsToCloud(options); err != nil {
gologger.Fatal().Msgf("could not upload scan results to cloud dashboard: %s\n", err)
options.Logger.Fatal().Msgf("could not upload scan results to cloud dashboard: %s\n", err)
}
return
}
nucleiRunner, err := runner.New(options)
if err != nil {
gologger.Fatal().Msgf("Could not create runner: %s\n", err)
options.Logger.Fatal().Msgf("Could not create runner: %s\n", err)
}
if nucleiRunner == nil {
return
@ -176,13 +182,13 @@ func main() {
stackMonitor.RegisterCallback(func(dumpID string) error {
resumeFileName := fmt.Sprintf("crash-resume-file-%s.dump", dumpID)
if options.EnableCloudUpload {
gologger.Info().Msgf("Uploading scan results to cloud...")
options.Logger.Info().Msgf("Uploading scan results to cloud...")
}
nucleiRunner.Close()
gologger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
options.Logger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
err := nucleiRunner.SaveResumeConfig(resumeFileName)
if err != nil {
return errorutil.NewWithErr(err).Msgf("couldn't create crash resume file")
return errkit.Wrap(err, "couldn't create crash resume file")
}
return nil
})
@ -191,37 +197,35 @@ func main() {
// Setup graceful exits
resumeFileName := types.DefaultResumeFilePath()
c := make(chan os.Signal, 1)
defer close(c)
signal.Notify(c, os.Interrupt)
go func() {
for range c {
gologger.Info().Msgf("CTRL+C pressed: Exiting\n")
if options.DASTServer {
nucleiRunner.Close()
os.Exit(1)
}
gologger.Info().Msgf("Attempting graceful shutdown...")
if options.EnableCloudUpload {
gologger.Info().Msgf("Uploading scan results to cloud...")
}
<-c
options.Logger.Info().Msgf("CTRL+C pressed: Exiting\n")
if options.DASTServer {
nucleiRunner.Close()
if options.ShouldSaveResume() {
gologger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
err := nucleiRunner.SaveResumeConfig(resumeFileName)
if err != nil {
gologger.Error().Msgf("Couldn't create resume file: %s\n", err)
}
}
os.Exit(1)
}
options.Logger.Info().Msgf("Attempting graceful shutdown...")
if options.EnableCloudUpload {
options.Logger.Info().Msgf("Uploading scan results to cloud...")
}
nucleiRunner.Close()
if options.ShouldSaveResume() {
options.Logger.Info().Msgf("Creating resume file: %s\n", resumeFileName)
err := nucleiRunner.SaveResumeConfig(resumeFileName)
if err != nil {
options.Logger.Error().Msgf("Couldn't create resume file: %s\n", err)
}
}
os.Exit(1)
}()
if err := nucleiRunner.RunEnumeration(); err != nil {
if options.Validate {
gologger.Fatal().Msgf("Could not validate templates: %s\n", err)
options.Logger.Fatal().Msgf("Could not validate templates: %s\n", err)
} else {
gologger.Fatal().Msgf("Could not run nuclei: %s\n", err)
options.Logger.Fatal().Msgf("Could not run nuclei: %s\n", err)
}
}
nucleiRunner.Close()
@ -260,6 +264,8 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.StringVarP(&options.InputFileMode, "input-mode", "im", "list", fmt.Sprintf("mode of input file (%v)", provider.SupportedInputFormats())),
flagSet.BoolVarP(&options.FormatUseRequiredOnly, "required-only", "ro", false, "use only required fields in input format when generating requests"),
flagSet.BoolVarP(&options.SkipFormatValidation, "skip-format-validation", "sfv", false, "skip format validation (like missing vars) when parsing input file"),
flagSet.BoolVarP(&options.VarsTextTemplating, "vars-text-templating", "vtt", false, "enable text templating for vars in input file (only for yaml input mode)"),
flagSet.StringSliceVarP(&options.VarsFilePaths, "var-file-paths", "vfp", nil, "list of yaml file contained vars to inject into yaml input", goflags.CommaSeparatedStringSliceOptions),
)
flagSet.CreateGroup("templates", "Templates",
@ -542,11 +548,11 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
h := &pdcp.PDCPCredHandler{}
_, err := h.GetCreds()
if err != nil {
gologger.Fatal().Msg("To utilize the `-ai` flag, please configure your API key with the `-auth` flag or set the `PDCP_API_KEY` environment variable")
options.Logger.Fatal().Msg("To utilize the `-ai` flag, please configure your API key with the `-auth` flag or set the `PDCP_API_KEY` environment variable")
}
}
gologger.DefaultLogger.SetTimestamp(options.Timestamp, levels.LevelDebug)
options.Logger.SetTimestamp(options.Timestamp, levels.LevelDebug)
if options.VerboseVerbose {
// hide release notes if silent mode is enabled
@ -568,13 +574,49 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
config.DefaultConfig.SetConfigDir(customConfigDir)
readFlagsConfig(flagSet)
}
if cfgFile != "" {
if !fileutil.FileExists(cfgFile) {
gologger.Fatal().Msgf("given config file '%s' does not exist", cfgFile)
options.Logger.Fatal().Msgf("given config file '%s' does not exist", cfgFile)
}
// merge config file with flags
if err := flagSet.MergeConfigFile(cfgFile); err != nil {
gologger.Fatal().Msgf("Could not read config: %s\n", err)
options.Logger.Fatal().Msgf("Could not read config: %s\n", err)
}
if !options.Vars.IsEmpty() {
// Maybe we should add vars to the config file as well even if they are set via flags?
file, err := os.Open(cfgFile)
if err != nil {
gologger.Fatal().Msgf("Could not open config file: %s\n", err)
}
defer func() {
_ = file.Close()
}()
data := make(map[string]interface{})
err = yaml.NewDecoder(file).Decode(&data)
if err != nil {
gologger.Fatal().Msgf("Could not decode config file: %s\n", err)
}
variables := data["var"]
if variables != nil {
if varSlice, ok := variables.([]interface{}); ok {
for _, value := range varSlice {
if strVal, ok := value.(string); ok {
err = options.Vars.Set(strVal)
if err != nil {
gologger.Warning().Msgf("Could not set variable from config file: %s\n", err)
}
} else {
gologger.Warning().Msgf("Skipping non-string variable in config: %#v", value)
}
}
} else {
gologger.Warning().Msgf("No 'var' section found in config file: %s", cfgFile)
}
}
}
}
if options.NewTemplatesDirectory != "" {
@ -587,7 +629,7 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
if tp := findProfilePathById(templateProfile, defaultProfilesPath); tp != "" {
templateProfile = tp
} else {
gologger.Fatal().Msgf("'%s' is not a profile-id or profile path", templateProfile)
options.Logger.Fatal().Msgf("'%s' is not a profile-id or profile path", templateProfile)
}
}
if !filepath.IsAbs(templateProfile) {
@ -602,17 +644,17 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
}
}
if !fileutil.FileExists(templateProfile) {
gologger.Fatal().Msgf("given template profile file '%s' does not exist", templateProfile)
options.Logger.Fatal().Msgf("given template profile file '%s' does not exist", templateProfile)
}
if err := flagSet.MergeConfigFile(templateProfile); err != nil {
gologger.Fatal().Msgf("Could not read template profile: %s\n", err)
options.Logger.Fatal().Msgf("Could not read template profile: %s\n", err)
}
}
if len(options.SecretsFile) > 0 {
for _, secretFile := range options.SecretsFile {
if !fileutil.FileExists(secretFile) {
gologger.Fatal().Msgf("given secrets file '%s' does not exist", options.SecretsFile)
options.Logger.Fatal().Msgf("given secrets file '%s' does not exist", secretFile)
}
}
}
@ -638,25 +680,25 @@ func readFlagsConfig(flagset *goflags.FlagSet) {
if err != nil {
// something went wrong either dir is not readable or something else went wrong upstream in `goflags`
// warn and exit in this case
gologger.Warning().Msgf("Could not read config file: %s\n", err)
options.Logger.Warning().Msgf("Could not read config file: %s\n", err)
return
}
cfgFile := config.DefaultConfig.GetFlagsConfigFilePath()
if !fileutil.FileExists(cfgFile) {
if !fileutil.FileExists(defaultCfgFile) {
// if default config does not exist, warn and exit
gologger.Warning().Msgf("missing default config file : %s", defaultCfgFile)
options.Logger.Warning().Msgf("missing default config file : %s", defaultCfgFile)
return
}
// if does not exist copy it from the default config
if err = fileutil.CopyFile(defaultCfgFile, cfgFile); err != nil {
gologger.Warning().Msgf("Could not copy config file: %s\n", err)
options.Logger.Warning().Msgf("Could not copy config file: %s\n", err)
}
return
}
// if config file exists, merge it with the default config
if err = flagset.MergeConfigFile(cfgFile); err != nil {
gologger.Warning().Msgf("failed to merge configfile with flags got: %s\n", err)
options.Logger.Warning().Msgf("failed to merge configfile with flags got: %s\n", err)
}
}
@ -667,29 +709,29 @@ func disableUpdatesCallback() {
// printVersion prints the nuclei version and exits.
func printVersion() {
gologger.Info().Msgf("Nuclei Engine Version: %s", config.Version)
gologger.Info().Msgf("Nuclei Config Directory: %s", config.DefaultConfig.GetConfigDir())
gologger.Info().Msgf("Nuclei Cache Directory: %s", config.DefaultConfig.GetCacheDir()) // cache dir contains resume files
gologger.Info().Msgf("PDCP Directory: %s", pdcp.PDCPDir)
options.Logger.Info().Msgf("Nuclei Engine Version: %s", config.Version)
options.Logger.Info().Msgf("Nuclei Config Directory: %s", config.DefaultConfig.GetConfigDir())
options.Logger.Info().Msgf("Nuclei Cache Directory: %s", config.DefaultConfig.GetCacheDir()) // cache dir contains resume files
options.Logger.Info().Msgf("PDCP Directory: %s", pdcp.PDCPDir)
os.Exit(0)
}
// printTemplateVersion prints the nuclei template version and exits.
func printTemplateVersion() {
cfg := config.DefaultConfig
gologger.Info().Msgf("Public nuclei-templates version: %s (%s)\n", cfg.TemplateVersion, cfg.TemplatesDirectory)
options.Logger.Info().Msgf("Public nuclei-templates version: %s (%s)\n", cfg.TemplateVersion, cfg.TemplatesDirectory)
if fileutil.FolderExists(cfg.CustomS3TemplatesDirectory) {
gologger.Info().Msgf("Custom S3 templates location: %s\n", cfg.CustomS3TemplatesDirectory)
options.Logger.Info().Msgf("Custom S3 templates location: %s\n", cfg.CustomS3TemplatesDirectory)
}
if fileutil.FolderExists(cfg.CustomGitHubTemplatesDirectory) {
gologger.Info().Msgf("Custom GitHub templates location: %s ", cfg.CustomGitHubTemplatesDirectory)
options.Logger.Info().Msgf("Custom GitHub templates location: %s ", cfg.CustomGitHubTemplatesDirectory)
}
if fileutil.FolderExists(cfg.CustomGitLabTemplatesDirectory) {
gologger.Info().Msgf("Custom GitLab templates location: %s ", cfg.CustomGitLabTemplatesDirectory)
options.Logger.Info().Msgf("Custom GitLab templates location: %s ", cfg.CustomGitLabTemplatesDirectory)
}
if fileutil.FolderExists(cfg.CustomAzureTemplatesDirectory) {
gologger.Info().Msgf("Custom Azure templates location: %s ", cfg.CustomAzureTemplatesDirectory)
options.Logger.Info().Msgf("Custom Azure templates location: %s ", cfg.CustomAzureTemplatesDirectory)
}
os.Exit(0)
}
@ -705,13 +747,13 @@ Following files will be deleted:
Note: Make sure you have backup of your custom nuclei-templates before proceeding
`, config.DefaultConfig.GetConfigDir(), config.DefaultConfig.TemplatesDirectory)
gologger.Print().Msg(warning)
options.Logger.Print().Msg(warning)
reader := bufio.NewReader(os.Stdin)
for {
fmt.Print("Are you sure you want to continue? [y/n]: ")
resp, err := reader.ReadString('\n')
if err != nil {
gologger.Fatal().Msgf("could not read response: %s", err)
options.Logger.Fatal().Msgf("could not read response: %s", err)
}
resp = strings.TrimSpace(resp)
if stringsutil.EqualFoldAny(resp, "y", "yes") {
@ -724,13 +766,13 @@ Note: Make sure you have backup of your custom nuclei-templates before proceedin
}
err := os.RemoveAll(config.DefaultConfig.GetConfigDir())
if err != nil {
gologger.Fatal().Msgf("could not delete config dir: %s", err)
options.Logger.Fatal().Msgf("could not delete config dir: %s", err)
}
err = os.RemoveAll(config.DefaultConfig.TemplatesDirectory)
if err != nil {
gologger.Fatal().Msgf("could not delete templates dir: %s", err)
options.Logger.Fatal().Msgf("could not delete templates dir: %s", err)
}
gologger.Info().Msgf("Successfully deleted all nuclei configurations files and nuclei-templates")
options.Logger.Info().Msgf("Successfully deleted all nuclei configurations files and nuclei-templates")
os.Exit(0)
}
@ -750,14 +792,7 @@ func findProfilePathById(profileId, templatesDir string) string {
return nil
})
if err != nil && err.Error() != "FOUND" {
gologger.Error().Msgf("%s\n", err)
options.Logger.Error().Msgf("%s\n", err)
}
return profilePath
}
func init() {
// print stacktrace of errors in debug mode
if strings.EqualFold(os.Getenv("DEBUG"), "true") {
errorutil.ShowStackTrace = true
}
}

View File

@ -20,7 +20,6 @@ var (
func TestMain(m *testing.M) {
// Set up
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
_ = os.Setenv("DISABLE_STDOUT", "true")
@ -93,6 +92,8 @@ func getDefaultOptions() *types.Options {
LoadHelperFileFunction: types.DefaultOptions().LoadHelperFileFunction,
// DialerKeepAlive: time.Duration(0),
// DASTServerAddress: "localhost:9055",
ExecutionId: "test",
Logger: gologger.DefaultLogger,
}
}

View File

@ -23,7 +23,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
"gopkg.in/yaml.v3"
)
@ -146,8 +146,8 @@ func process(opts options) error {
gologger.Fatal().Msgf("could not open error log file: %s\n", err)
}
defer func() {
_ = errFile.Close()
}()
_ = errFile.Close()
}()
}
templateCatalog := disk.NewCatalog(filepath.Dir(opts.input))
@ -243,7 +243,7 @@ func enhanceTemplate(data string) (string, bool, error) {
return data, false, err
}
if resp.StatusCode != 200 {
return data, false, errorutil.New("unexpected status code: %v", resp.Status)
return data, false, errkit.New("unexpected status code: %v", resp.Status)
}
var templateResp TemplateResp
if err := json.NewDecoder(resp.Body).Decode(&templateResp); err != nil {
@ -254,20 +254,20 @@ func enhanceTemplate(data string) (string, bool, error) {
}
if templateResp.ValidateErrorCount > 0 {
if len(templateResp.ValidateError) > 0 {
return data, false, errorutil.NewWithTag("validate", templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line)
return data, false, errkit.New(templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line, "tag", "validate")
}
return data, false, errorutil.New("validation failed").WithTag("validate")
return data, false, errkit.New("validation failed", "tag", "validate")
}
if templateResp.Error.Name != "" {
return data, false, errorutil.New("%s", templateResp.Error.Name)
return data, false, errkit.New("%s", templateResp.Error.Name)
}
if strings.TrimSpace(templateResp.Enhanced) == "" && !templateResp.Lint {
if templateResp.LintError.Reason != "" {
return data, false, errorutil.NewWithTag("lint", templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line)
return data, false, errkit.New(templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line, "tag", "lint")
}
return data, false, errorutil.NewWithTag("lint", "at line: %v", templateResp.LintError.Mark.Line)
return data, false, errkit.New("at line: %v", templateResp.LintError.Mark.Line, "tag", "lint")
}
return data, false, errorutil.New("template enhance failed")
return data, false, errkit.New("template enhance failed")
}
// formatTemplate formats template data using templateman format api
@ -277,7 +277,7 @@ func formatTemplate(data string) (string, bool, error) {
return data, false, err
}
if resp.StatusCode != 200 {
return data, false, errorutil.New("unexpected status code: %v", resp.Status)
return data, false, errkit.New("unexpected status code: %v", resp.Status)
}
var templateResp TemplateResp
if err := json.NewDecoder(resp.Body).Decode(&templateResp); err != nil {
@ -288,20 +288,20 @@ func formatTemplate(data string) (string, bool, error) {
}
if templateResp.ValidateErrorCount > 0 {
if len(templateResp.ValidateError) > 0 {
return data, false, errorutil.NewWithTag("validate", templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line)
return data, false, errkit.New(templateResp.ValidateError[0].Message+": at line %v", templateResp.ValidateError[0].Mark.Line, "tag", "validate")
}
return data, false, errorutil.New("validation failed").WithTag("validate")
return data, false, errkit.New("validation failed", "tag", "validate")
}
if templateResp.Error.Name != "" {
return data, false, errorutil.New("%s", templateResp.Error.Name)
return data, false, errkit.New("%s", templateResp.Error.Name)
}
if strings.TrimSpace(templateResp.Updated) == "" && !templateResp.Lint {
if templateResp.LintError.Reason != "" {
return data, false, errorutil.NewWithTag("lint", templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line)
return data, false, errkit.New(templateResp.LintError.Reason+" : at line %v", templateResp.LintError.Mark.Line, "tag", "lint")
}
return data, false, errorutil.NewWithTag("lint", "at line: %v", templateResp.LintError.Mark.Line)
return data, false, errkit.New("at line: %v", templateResp.LintError.Mark.Line, "tag", "lint")
}
return data, false, errorutil.New("template format failed")
return data, false, errkit.New("template format failed")
}
// lintTemplate lints template data using templateman lint api
@ -311,7 +311,7 @@ func lintTemplate(data string) (bool, error) {
return false, err
}
if resp.StatusCode != 200 {
return false, errorutil.New("unexpected status code: %v", resp.Status)
return false, errkit.New("unexpected status code: %v", resp.Status)
}
var lintResp TemplateLintResp
if err := json.NewDecoder(resp.Body).Decode(&lintResp); err != nil {
@ -321,9 +321,9 @@ func lintTemplate(data string) (bool, error) {
return true, nil
}
if lintResp.LintError.Reason != "" {
return false, errorutil.NewWithTag("lint", lintResp.LintError.Reason+" : at line %v", lintResp.LintError.Mark.Line)
return false, errkit.New(lintResp.LintError.Reason+" : at line %v", lintResp.LintError.Mark.Line, "tag", "lint")
}
return false, errorutil.NewWithTag("lint", "at line: %v", lintResp.LintError.Mark.Line)
return false, errkit.New("at line: %v", lintResp.LintError.Mark.Line, "tag", "lint")
}
// validateTemplate validates template data using templateman validate api
@ -333,7 +333,7 @@ func validateTemplate(data string) (bool, error) {
return false, err
}
if resp.StatusCode != 200 {
return false, errorutil.New("unexpected status code: %v", resp.Status)
return false, errkit.New("unexpected status code: %v", resp.Status)
}
var validateResp TemplateResp
if err := json.NewDecoder(resp.Body).Decode(&validateResp); err != nil {
@ -344,14 +344,14 @@ func validateTemplate(data string) (bool, error) {
}
if validateResp.ValidateErrorCount > 0 {
if len(validateResp.ValidateError) > 0 {
return false, errorutil.NewWithTag("validate", validateResp.ValidateError[0].Message+": at line %v", validateResp.ValidateError[0].Mark.Line)
return false, errkit.New(validateResp.ValidateError[0].Message+": at line %v", validateResp.ValidateError[0].Mark.Line, "tag", "validate")
}
return false, errorutil.New("validation failed").WithTag("validate")
return false, errkit.New("validation failed", "tag", "validate")
}
if validateResp.Error.Name != "" {
return false, errorutil.New("%s", validateResp.Error.Name)
return false, errkit.New("%s", validateResp.Error.Name)
}
return false, errorutil.New("template validation failed")
return false, errkit.New("template validation failed")
}
// parseAndAddMaxRequests parses and adds max requests to templates
@ -401,7 +401,7 @@ func parseAndAddMaxRequests(catalog catalog.Catalog, path, data string) (string,
// parseTemplate parses a template and returns the template object
func parseTemplate(catalog catalog.Catalog, templatePath string) (*templates.Template, error) {
executorOpts := protocols.ExecutorOptions{
executorOpts := &protocols.ExecutorOptions{
Catalog: catalog,
Options: defaultOpts,
}

View File

@ -99,12 +99,12 @@ func main() {
gologger.Info().Msgf("✓ Template signed & verified successfully")
}
func defaultExecutorOpts(templatePath string) protocols.ExecutorOptions {
func defaultExecutorOpts(templatePath string) *protocols.ExecutorOptions {
// use parsed options when initializing signer instead of default options
options := types.DefaultOptions()
templates.UseOptionsForSigner(options)
catalog := disk.NewCatalog(filepath.Dir(templatePath))
executerOpts := protocols.ExecutorOptions{
executerOpts := &protocols.ExecutorOptions{
Catalog: catalog,
Options: options,
TemplatePath: templatePath,

215
go.mod
View File

@ -1,10 +1,12 @@
module github.com/projectdiscovery/nuclei/v3
go 1.24.1
go 1.24.2
toolchain go1.24.4
require (
github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible
github.com/andygrunwald/go-jira v1.16.0
github.com/andygrunwald/go-jira v1.16.1
github.com/antchfx/htmlquery v1.3.4
github.com/bluele/gcache v0.0.2
github.com/go-playground/validator/v10 v10.26.0
@ -16,16 +18,16 @@ require (
github.com/json-iterator/go v1.1.12
github.com/julienschmidt/httprouter v1.3.0
github.com/logrusorgru/aurora v2.0.3+incompatible
github.com/miekg/dns v1.1.66
github.com/olekukonko/tablewriter v0.0.5
github.com/miekg/dns v1.1.68
github.com/olekukonko/tablewriter v1.0.8
github.com/pkg/errors v0.9.1
github.com/projectdiscovery/clistats v0.1.1
github.com/projectdiscovery/fastdialer v0.4.1
github.com/projectdiscovery/hmap v0.0.91
github.com/projectdiscovery/fastdialer v0.4.12
github.com/projectdiscovery/hmap v0.0.95
github.com/projectdiscovery/interactsh v1.2.4
github.com/projectdiscovery/rawhttp v0.1.90
github.com/projectdiscovery/retryabledns v1.0.103
github.com/projectdiscovery/retryablehttp-go v1.0.116
github.com/projectdiscovery/retryabledns v1.0.108
github.com/projectdiscovery/retryablehttp-go v1.0.127
github.com/projectdiscovery/yamldoc-go v1.0.6
github.com/remeh/sizedwaitgroup v1.0.0
github.com/rs/xid v1.6.0
@ -35,42 +37,42 @@ require (
github.com/spf13/cast v1.9.2
github.com/syndtr/goleveldb v1.0.0
github.com/valyala/fasttemplate v1.2.2
github.com/weppos/publicsuffix-go v0.40.3-0.20250311103038-7794c8c0723b
github.com/weppos/publicsuffix-go v0.50.0
go.uber.org/multierr v1.11.0
golang.org/x/net v0.41.0
golang.org/x/net v0.44.0
golang.org/x/oauth2 v0.30.0
golang.org/x/text v0.26.0
golang.org/x/text v0.29.0
gopkg.in/yaml.v2 v2.4.0
)
require (
code.gitea.io/sdk/gitea v0.21.0
carvel.dev/ytt v0.52.0
code.gitea.io/sdk/gitea v0.17.0
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.1
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.1.0
github.com/DataDog/gostackparse v0.7.0
github.com/Masterminds/semver/v3 v3.4.0
github.com/Masterminds/semver/v3 v3.2.1
github.com/Mzack9999/gcache v0.0.0-20230410081825-519e28eab057
github.com/alecthomas/chroma v0.10.0
github.com/Mzack9999/goja v0.0.0-20250507184235-e46100e9c697
github.com/Mzack9999/goja_nodejs v0.0.0-20250507184139-66bcbf65c883
github.com/alexsnet/go-vnc v0.1.0
github.com/alitto/pond v1.9.2
github.com/antchfx/xmlquery v1.4.4
github.com/antchfx/xpath v1.3.4
github.com/antchfx/xpath v1.3.3
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
github.com/aws/aws-sdk-go-v2 v1.36.5
github.com/aws/aws-sdk-go-v2/config v1.29.17
github.com/aws/aws-sdk-go-v2/credentials v1.17.70
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.82
github.com/aws/aws-sdk-go-v2/service/s3 v1.82.0
github.com/bytedance/sonic v1.13.3
github.com/bytedance/sonic v1.14.0
github.com/cespare/xxhash v1.1.0
github.com/charmbracelet/glamour v0.10.0
github.com/clbanning/mxj/v2 v2.7.0
github.com/ditashi/jsbeautifier-go v0.0.0-20141206144643-2520a8026a9c
github.com/docker/go-units v0.5.0
github.com/dop251/goja v0.0.0-20250624190929-4d26883d182a
github.com/dop251/goja_nodejs v0.0.0-20250409162600-f7acab6894b0
github.com/fatih/structs v1.1.0
github.com/getkin/kin-openapi v0.132.0
github.com/go-echarts/go-echarts/v2 v2.6.0
github.com/go-git/go-git/v5 v5.16.2
github.com/go-ldap/ldap/v3 v3.4.11
github.com/go-pg/pg v8.0.7+incompatible
@ -89,52 +91,51 @@ require (
github.com/microsoft/go-mssqldb v1.9.2
github.com/ory/dockertest/v3 v3.12.0
github.com/praetorian-inc/fingerprintx v1.1.15
github.com/projectdiscovery/dsl v0.5.0
github.com/projectdiscovery/dsl v0.7.2
github.com/projectdiscovery/fasttemplate v0.0.2
github.com/projectdiscovery/gcache v0.0.0-20241015120333-12546c6e3f4c
github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb
github.com/projectdiscovery/goflags v0.1.74
github.com/projectdiscovery/gologger v1.1.54
github.com/projectdiscovery/gologger v1.1.57
github.com/projectdiscovery/gostruct v0.0.2
github.com/projectdiscovery/gozero v0.0.3
github.com/projectdiscovery/httpx v1.7.0
github.com/projectdiscovery/mapcidr v1.1.34
github.com/projectdiscovery/gozero v0.1.0
github.com/projectdiscovery/httpx v1.7.2-0.20250911192144-fc425deb041a
github.com/projectdiscovery/mapcidr v1.1.95
github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5
github.com/projectdiscovery/networkpolicy v0.1.17
github.com/projectdiscovery/ratelimit v0.0.81
github.com/projectdiscovery/networkpolicy v0.1.26
github.com/projectdiscovery/ratelimit v0.0.82
github.com/projectdiscovery/rdap v0.9.0
github.com/projectdiscovery/sarif v0.0.1
github.com/projectdiscovery/tlsx v1.1.9
github.com/projectdiscovery/tlsx v1.2.1
github.com/projectdiscovery/uncover v1.1.0
github.com/projectdiscovery/useragent v0.0.101
github.com/projectdiscovery/utils v0.4.21
github.com/projectdiscovery/wappalyzergo v0.2.35
github.com/projectdiscovery/useragent v0.0.102
github.com/projectdiscovery/utils v0.6.0
github.com/projectdiscovery/wappalyzergo v0.2.49
github.com/redis/go-redis/v9 v9.11.0
github.com/seh-msft/burpxml v1.0.1
github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466
github.com/stretchr/testify v1.10.0
github.com/sijms/go-ora/v2 v2.9.0
github.com/stretchr/testify v1.11.1
github.com/tarunKoyalwar/goleak v0.0.0-20240429141123-0efa90dbdcf9
github.com/trivago/tgo v1.0.7
github.com/testcontainers/testcontainers-go v0.38.0
github.com/testcontainers/testcontainers-go/modules/mongodb v0.37.0
github.com/yassinebenaid/godump v0.11.1
github.com/zmap/zgrab2 v0.1.8
gitlab.com/gitlab-org/api/client-go v0.130.1
go.mongodb.org/mongo-driver v1.17.4
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
golang.org/x/term v0.32.0
golang.org/x/tools v0.34.0
golang.org/x/term v0.35.0
gopkg.in/yaml.v3 v3.0.1
moul.io/http2curl v1.0.0
)
require (
aead.dev/minisign v0.2.0 // indirect
dario.cat/mergo v1.0.0 // indirect
dario.cat/mergo v1.0.2 // indirect
filippo.io/edwards25519 v1.1.0 // indirect
git.mills.io/prologic/smtpd v0.0.0-20210710122116-a525b76c287a // indirect
github.com/42wim/httpsig v1.2.2 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
@ -174,7 +175,7 @@ require (
github.com/bodgit/sevenzip v1.6.0 // indirect
github.com/bodgit/windows v1.0.1 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/bytedance/sonic/loader v0.2.4 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/caddyserver/certmagic v0.19.2 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
@ -184,28 +185,35 @@ require (
github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/cheggaaa/pb/v3 v3.1.4 // indirect
github.com/cheggaaa/pb/v3 v3.1.6 // indirect
github.com/cloudflare/cfssl v1.6.4 // indirect
github.com/cloudflare/circl v1.6.1 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect
github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 // indirect
github.com/cnf/structhash v0.0.0-20250313080605-df4c6cc74a9a // indirect
github.com/containerd/continuity v0.4.5 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
github.com/containerd/errdefs/pkg v0.3.0 // indirect
github.com/containerd/log v0.1.0 // indirect
github.com/containerd/platforms v0.2.1 // indirect
github.com/cpuguy83/dockercfg v0.3.2 // indirect
github.com/cyphar/filepath-securejoin v0.4.1 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/davidmz/go-pageant v1.0.2 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/dimchansky/utfbom v1.1.1 // indirect
github.com/distribution/reference v0.6.0 // indirect
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/docker/cli v27.4.1+incompatible // indirect
github.com/docker/docker v27.1.1+incompatible // indirect
github.com/docker/go-connections v0.5.0 // indirect
github.com/docker/docker v28.3.3+incompatible // indirect
github.com/docker/go-connections v0.6.0 // indirect
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect
github.com/ebitengine/purego v0.8.4 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/fatih/color v1.16.0 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/felixge/fgprof v0.9.5 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/free5gc/util v1.0.5-0.20230511064842-2e120956883b // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/gaissmai/bart v0.20.4 // indirect
github.com/gaissmai/bart v0.25.0 // indirect
github.com/geoffgarside/ber v1.1.0 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/gin-gonic/gin v1.9.1 // indirect
@ -213,39 +221,37 @@ require (
github.com/go-fed/httpsig v1.1.0 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.6.2 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect
github.com/go-openapi/swag v0.23.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect
github.com/go-viper/mapstructure/v2 v2.3.0 // indirect
github.com/goburrow/cache v0.1.4 // indirect
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
github.com/golang-jwt/jwt/v5 v5.2.2 // indirect
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
github.com/golang-sql/sqlexp v0.1.0 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/certificate-transparency-go v1.1.4 // indirect
github.com/google/certificate-transparency-go v1.3.2 // indirect
github.com/google/go-github/v30 v30.1.0 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 // indirect
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/hashicorp/go-version v1.7.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hbakhtiyor/strsim v0.0.0-20190107154042-4d2bbb273edf // indirect
github.com/hdm/jarm-go v0.0.7 // indirect
github.com/imdario/mergo v0.3.13 // indirect
github.com/iangcarroll/cookiemonster v1.6.0 // indirect
github.com/imdario/mergo v0.3.16 // indirect
github.com/itchyny/timefmt-go v0.1.6 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jcmturner/aescts/v2 v2.0.0 // indirect
@ -254,6 +260,7 @@ require (
github.com/jcmturner/rpc/v2 v2.0.3 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/k14s/starlark-go v0.0.0-20200720175618-3a5c849cc368 // indirect
github.com/kataras/jwt v0.1.10 // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/klauspost/compress v1.18.0 // indirect
@ -266,8 +273,9 @@ require (
github.com/logrusorgru/aurora/v4 v4.0.0 // indirect
github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
github.com/lufia/plan9stats v0.0.0-20250821153705-5981dea3221d // indirect
github.com/mackerelio/go-osstat v0.2.4 // indirect
github.com/magiconair/properties v1.8.10 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
@ -279,19 +287,26 @@ require (
github.com/minio/selfupdate v0.6.1-0.20230907112617-f11e74f84ca7 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/moby/docker-image-spec v1.3.1 // indirect
github.com/moby/sys/user v0.3.0 // indirect
github.com/moby/term v0.5.0 // indirect
github.com/moby/go-archive v0.1.0 // indirect
github.com/moby/patternmatcher v0.6.0 // indirect
github.com/moby/sys/sequential v0.6.0 // indirect
github.com/moby/sys/user v0.4.0 // indirect
github.com/moby/sys/userns v0.1.0 // indirect
github.com/moby/term v0.5.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/morikuni/aec v1.0.0 // indirect
github.com/muesli/reflow v0.3.0 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/nwaples/rardecode/v2 v2.1.0 // indirect
github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect
github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect
github.com/olekukonko/errors v1.1.0 // indirect
github.com/olekukonko/ll v0.0.9 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.0 // indirect
github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/opencontainers/runc v1.2.3 // indirect
github.com/openrdap/rdap v0.9.1 // indirect
github.com/pelletier/go-toml/v2 v2.0.8 // indirect
@ -300,19 +315,18 @@ require (
github.com/pjbgf/sha1cd v0.3.2 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
github.com/projectdiscovery/asnmap v1.1.1 // indirect
github.com/projectdiscovery/blackrock v0.0.1 // indirect
github.com/projectdiscovery/cdncheck v1.1.15 // indirect
github.com/projectdiscovery/cdncheck v1.2.4 // indirect
github.com/projectdiscovery/freeport v0.0.7 // indirect
github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb // indirect
github.com/projectdiscovery/machineid v0.0.0-20240226150047-2e2c51e35983 // indirect
github.com/refraction-networking/utls v1.7.0 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d // indirect
github.com/refraction-networking/utls v1.7.1 // indirect
github.com/sashabaranov/go-openai v1.37.0 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect
github.com/shirou/gopsutil/v4 v4.25.7 // indirect
github.com/shoenig/go-m1cpu v0.1.6 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.3.1 // indirect
@ -321,16 +335,16 @@ require (
github.com/tidwall/buntdb v1.3.1 // indirect
github.com/tidwall/gjson v1.18.0 // indirect
github.com/tidwall/grect v0.1.4 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/match v1.2.0 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/rtred v0.1.2 // indirect
github.com/tidwall/tinyqueue v0.1.1 // indirect
github.com/tim-ywliu/nested-logrus-formatter v1.3.2 // indirect
github.com/tklauser/go-sysconf v0.3.12 // indirect
github.com/tklauser/numcpus v0.6.1 // indirect
github.com/tklauser/go-sysconf v0.3.15 // indirect
github.com/tklauser/numcpus v0.10.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect
github.com/ulikunitz/xz v0.5.12 // indirect
github.com/ulikunitz/xz v0.5.15 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
@ -343,34 +357,65 @@ require (
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
github.com/ysmood/fetchup v0.2.3 // indirect
github.com/ysmood/goob v0.4.0 // indirect
github.com/ysmood/got v0.40.0 // indirect
github.com/ysmood/gson v0.7.3 // indirect
github.com/ysmood/leakless v0.9.0 // indirect
github.com/yuin/goldmark v1.7.8 // indirect
github.com/yuin/goldmark v1.7.13 // indirect
github.com/yuin/goldmark-emoji v1.0.5 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/zcalusic/sysinfo v1.0.2 // indirect
github.com/zeebo/blake3 v0.2.3 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 // indirect
go.opentelemetry.io/otel v1.37.0 // indirect
go.opentelemetry.io/otel/metric v1.37.0 // indirect
go.opentelemetry.io/otel/trace v1.37.0 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/arch v0.3.0 // indirect
golang.org/x/sync v0.17.0 // indirect
gopkg.in/djherbis/times.v1 v1.3.0 // indirect
mellium.im/sasl v0.3.2 // indirect
)
require (
github.com/dimchansky/utfbom v1.1.1 // indirect
github.com/goburrow/cache v0.1.4 // indirect
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d // indirect
github.com/trivago/tgo v1.0.7
github.com/ysmood/goob v0.4.0 // indirect
github.com/ysmood/gson v0.7.3 // indirect
github.com/ysmood/leakless v0.9.0 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/zmap/rc2 v0.0.0-20190804163417-abaa70531248 // indirect
github.com/zmap/zcrypto v0.0.0-20240512203510-0fef58d9a9db // indirect
go.etcd.io/bbolt v1.3.10 // indirect
go.uber.org/zap v1.25.0 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
go.etcd.io/bbolt v1.4.0 // indirect
go.uber.org/zap v1.27.0 // indirect
goftp.io/server/v2 v2.0.1 // indirect
golang.org/x/arch v0.3.0 // indirect
golang.org/x/crypto v0.39.0 // indirect
golang.org/x/mod v0.25.0 // indirect
golang.org/x/sync v0.15.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/crypto v0.42.0 // indirect
golang.org/x/exp v0.0.0-20250911091902-df9299821621
golang.org/x/mod v0.28.0 // indirect
golang.org/x/sys v0.36.0 // indirect
golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.37.0
google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect
gopkg.in/corvus-ch/zbase32.v1 v1.0.0 // indirect
gopkg.in/djherbis/times.v1 v1.3.0 // indirect
)
require (
github.com/alecthomas/chroma v0.10.0
github.com/go-echarts/go-echarts/v2 v2.6.0
gopkg.in/warnings.v0 v0.1.2 // indirect
mellium.im/sasl v0.3.2 // indirect
)
// https://go.dev/ref/mod#go-mod-file-retract
retract v3.2.0 // retract due to broken js protocol issue
// Fix genproto version conflicts
replace (
google.golang.org/genproto => google.golang.org/genproto v0.0.0-20240814211410-ddb44dafa142
google.golang.org/genproto/googleapis/api => google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142
google.golang.org/genproto/googleapis/rpc => google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1
)

1300
go.sum

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,38 @@
id: fuzz-body
info:
name: fuzzing error sqli payloads in http req body
author: pdteam
severity: info
description: |
This template attempts to find SQL injection vulnerabilities by fuzzing http body
It automatically handles and parses json,xml,multipart form and x-www-form-urlencoded data
and performs fuzzing on the value of every key
http:
- pre-condition:
- type: dsl
dsl:
- method != "GET"
- method != "HEAD"
condition: and
payloads:
injection:
- "'"
- "\""
- ";"
fuzzing:
- part: body
type: postfix
mode: single
fuzz:
- '{{injection}}'
stop-at-first-match: true
matchers:
- type: word
words:
- "unrecognized token:"
- "null"

View File

@ -0,0 +1,38 @@
id: vnc-password-test
info:
name: VNC Password Authentication Test
author: pdteam
severity: high
description: |
Tests VNC authentication with correct and incorrect passwords.
metadata:
shodan-query: product:"vnc"
tags: js,network,vnc,authentication
javascript:
- pre-condition: |
isPortOpen(Host,Port)
code: |
let vnc = require('nuclei/vnc');
let client = new vnc.VNCClient();
client.Connect(Host, Port, Password);
args:
Host: "{{Host}}"
Port: "5900"
Password: "{{passwords}}"
payloads:
passwords:
- ""
- root
- password
- admin
- mysecret
stop-at-first-match: true
matchers:
- type: dsl
dsl:
- "success == true"

View File

@ -19,7 +19,7 @@ import (
"github.com/projectdiscovery/retryablehttp-go"
pdcpauth "github.com/projectdiscovery/utils/auth/pdcp"
"github.com/projectdiscovery/utils/env"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
unitutils "github.com/projectdiscovery/utils/unit"
updateutils "github.com/projectdiscovery/utils/update"
urlutil "github.com/projectdiscovery/utils/url"
@ -55,10 +55,11 @@ type UploadWriter struct {
scanName string
counter atomic.Int32
TeamID string
Logger *gologger.Logger
}
// NewUploadWriter creates a new upload writer
func NewUploadWriter(ctx context.Context, creds *pdcpauth.PDCPCredentials) (*UploadWriter, error) {
func NewUploadWriter(ctx context.Context, logger *gologger.Logger, creds *pdcpauth.PDCPCredentials) (*UploadWriter, error) {
if creds == nil {
return nil, fmt.Errorf("no credentials provided")
}
@ -66,6 +67,7 @@ func NewUploadWriter(ctx context.Context, creds *pdcpauth.PDCPCredentials) (*Upl
creds: creds,
done: make(chan struct{}, 1),
TeamID: NoneTeamID,
Logger: logger,
}
var err error
reader, writer := io.Pipe()
@ -75,11 +77,11 @@ func NewUploadWriter(ctx context.Context, creds *pdcpauth.PDCPCredentials) (*Upl
output.WithJson(true, true),
)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not create output writer")
return nil, errkit.Wrap(err, "could not create output writer")
}
tmp, err := urlutil.Parse(creds.Server)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not parse server url")
return nil, errkit.Wrap(err, "could not parse server url")
}
tmp.Path = uploadEndpoint
tmp.Update()
@ -128,8 +130,8 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// continuously read from the reader and send to channel
go func() {
defer func() {
_ = r.Close()
}()
_ = r.Close()
}()
defer close(ch)
for {
data, err := reader.ReadString('\n')
@ -147,9 +149,9 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
close(u.done)
// if no scanid is generated no results were uploaded
if u.scanID == "" {
gologger.Verbose().Msgf("Scan results upload to cloud skipped, no results found to upload")
u.Logger.Verbose().Msgf("Scan results upload to cloud skipped, no results found to upload")
} else {
gologger.Info().Msgf("%v Scan results uploaded to cloud, you can view scan results at %v", u.counter.Load(), getScanDashBoardURL(u.scanID, u.TeamID))
u.Logger.Info().Msgf("%v Scan results uploaded to cloud, you can view scan results at %v", u.counter.Load(), getScanDashBoardURL(u.scanID, u.TeamID))
}
}()
// temporary buffer to store the results
@ -162,7 +164,7 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// flush before exit
if buff.Len() > 0 {
if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
}
}
return
@ -170,14 +172,14 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// flush the buffer
if buff.Len() > 0 {
if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
}
}
case line, ok := <-ch:
if !ok {
if buff.Len() > 0 {
if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
}
}
return
@ -185,7 +187,7 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
if buff.Len()+len(line) > MaxChunkSize {
// flush existing buffer
if err := u.uploadChunk(buff); err != nil {
gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
u.Logger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
}
} else {
buff.WriteString(line)
@ -197,37 +199,37 @@ func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
// uploadChunk uploads a chunk of data to the server
func (u *UploadWriter) uploadChunk(buff *bytes.Buffer) error {
if err := u.upload(buff.Bytes()); err != nil {
return errorutil.NewWithErr(err).Msgf("could not upload chunk")
return errkit.Wrap(err, "could not upload chunk")
}
// if successful, reset the buffer
buff.Reset()
// log in verbose mode
gologger.Warning().Msgf("Uploaded results chunk, you can view scan results at %v", getScanDashBoardURL(u.scanID, u.TeamID))
u.Logger.Warning().Msgf("Uploaded results chunk, you can view scan results at %v", getScanDashBoardURL(u.scanID, u.TeamID))
return nil
}
func (u *UploadWriter) upload(data []byte) error {
req, err := u.getRequest(data)
if err != nil {
return errorutil.NewWithErr(err).Msgf("could not create upload request")
return errkit.Wrap(err, "could not create upload request")
}
resp, err := u.client.Do(req)
if err != nil {
return errorutil.NewWithErr(err).Msgf("could not upload results")
return errkit.Wrap(err, "could not upload results")
}
defer func() {
_ = resp.Body.Close()
}()
_ = resp.Body.Close()
}()
bin, err := io.ReadAll(resp.Body)
if err != nil {
return errorutil.NewWithErr(err).Msgf("could not get id from response")
return errkit.Wrap(err, "could not get id from response")
}
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("could not upload results got status code %v on %v", resp.StatusCode, resp.Request.URL.String())
}
var uploadResp uploadResponse
if err := json.Unmarshal(bin, &uploadResp); err != nil {
return errorutil.NewWithErr(err).Msgf("could not unmarshal response got %v", string(bin))
return errkit.Wrap(err, fmt.Sprintf("could not unmarshal response got %v", string(bin)))
}
if uploadResp.ID != "" && u.scanID == "" {
u.scanID = uploadResp.ID
@ -252,7 +254,7 @@ func (u *UploadWriter) getRequest(bin []byte) (*retryablehttp.Request, error) {
}
req, err := retryablehttp.NewRequest(method, url, bytes.NewReader(bin))
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not create cloud upload request")
return nil, errkit.Wrap(err, "could not create cloud upload request")
}
// add pdtm meta params
req.Params.Merge(updateutils.GetpdtmParams(config.Version))
@ -260,7 +262,7 @@ func (u *UploadWriter) getRequest(bin []byte) (*retryablehttp.Request, error) {
if u.scanName != "" && req.Path == uploadEndpoint {
req.Params.Add("name", u.scanName)
}
req.URL.Update()
req.Update()
req.Header.Set(pdcpauth.ApiKeyHeaderName, u.creds.APIKey)
if u.TeamID != NoneTeamID && u.TeamID != "" {

View File

@ -2,11 +2,11 @@ package runner
import (
"context"
"fmt"
"sync/atomic"
"time"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/hmap/store/hybrid"
"github.com/projectdiscovery/httpx/common/httpx"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
@ -28,7 +28,7 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
// currently http probing for input mode types is not supported
return hm, nil
}
gologger.Info().Msgf("Running httpx on input host")
r.Logger.Info().Msgf("Running httpx on input host")
httpxOptions := httpx.DefaultOptions
if r.options.AliveHttpProxy != "" {
@ -38,7 +38,13 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
}
httpxOptions.RetryMax = r.options.Retries
httpxOptions.Timeout = time.Duration(r.options.Timeout) * time.Second
httpxOptions.NetworkPolicy = protocolstate.NetworkPolicy
dialers := protocolstate.GetDialersWithId(r.options.ExecutionId)
if dialers == nil {
return nil, fmt.Errorf("dialers not initialized for %s", r.options.ExecutionId)
}
httpxOptions.NetworkPolicy = dialers.NetworkPolicy
httpxClient, err := httpx.New(&httpxOptions)
if err != nil {
return nil, errors.Wrap(err, "could not create httpx client")
@ -57,7 +63,7 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
if r.options.ProbeConcurrency > 0 && swg.Size != r.options.ProbeConcurrency {
if err := swg.Resize(context.Background(), r.options.ProbeConcurrency); err != nil {
gologger.Error().Msgf("Could not resize workpool: %s\n", err)
r.Logger.Error().Msgf("Could not resize workpool: %s\n", err)
}
}
@ -74,6 +80,6 @@ func (r *Runner) initializeTemplatesHTTPInput() (*hybrid.HybridMap, error) {
})
swg.Wait()
gologger.Info().Msgf("Found %d URL from httpx", count.Load())
r.Logger.Info().Msgf("Found %d URL from httpx", count.Load())
return hm, nil
}

View File

@ -17,22 +17,22 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/scan"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/utils/env"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
)
type AuthLazyFetchOptions struct {
TemplateStore *loader.Store
ExecOpts protocols.ExecutorOptions
ExecOpts *protocols.ExecutorOptions
OnError func(error)
}
// GetAuthTmplStore create new loader for loading auth templates
func GetAuthTmplStore(opts types.Options, catalog catalog.Catalog, execOpts protocols.ExecutorOptions) (*loader.Store, error) {
func GetAuthTmplStore(opts *types.Options, catalog catalog.Catalog, execOpts *protocols.ExecutorOptions) (*loader.Store, error) {
tmpls := []string{}
for _, file := range opts.SecretsFile {
data, err := authx.GetTemplatePathsFromSecretFile(file)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("failed to get template paths from secrets file")
return nil, errkit.Wrap(err, "failed to get template paths from secrets file")
}
tmpls = append(tmpls, data...)
}
@ -54,11 +54,11 @@ func GetAuthTmplStore(opts types.Options, catalog catalog.Catalog, execOpts prot
opts.Protocols = nil
opts.ExcludeProtocols = nil
opts.IncludeConditions = nil
cfg := loader.NewConfig(&opts, catalog, execOpts)
cfg := loader.NewConfig(opts, catalog, execOpts)
cfg.StoreId = loader.AuthStoreId
store, err := loader.New(cfg)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("failed to initialize dynamic auth templates store")
return nil, errkit.Wrap(err, "failed to initialize dynamic auth templates store")
}
return store, nil
}

View File

@ -31,7 +31,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/utils/yaml"
fileutil "github.com/projectdiscovery/utils/file"
"github.com/projectdiscovery/utils/generic"
logutil "github.com/projectdiscovery/utils/log"
stringsutil "github.com/projectdiscovery/utils/strings"
)
@ -40,6 +39,8 @@ const (
DefaultDumpTrafficOutputFolder = "output"
)
var validateOptions = validator.New()
func ConfigureOptions() error {
// with FileStringSliceOptions, FileNormalizedStringSliceOptions, FileCommaSeparatedStringSliceOptions
// if file has the extension `.yaml` or `.json` we consider those as strings and not files to be read
@ -71,17 +72,17 @@ func ParseOptions(options *types.Options) {
vardump.Limit = options.VarDumpLimit
}
if options.ShowActions {
gologger.Info().Msgf("Showing available headless actions: ")
options.Logger.Info().Msgf("Showing available headless actions: ")
for action := range engine.ActionStringToAction {
gologger.Print().Msgf("\t%s", action)
options.Logger.Print().Msgf("\t%s", action)
}
os.Exit(0)
}
defaultProfilesPath := filepath.Join(config.DefaultConfig.GetTemplateDir(), "profiles")
if options.ListTemplateProfiles {
gologger.Print().Msgf(
"\nListing available %v nuclei template profiles for %v",
options.Logger.Print().Msgf(
"Listing available %v nuclei template profiles for %v",
config.DefaultConfig.TemplateVersion,
config.DefaultConfig.TemplatesDirectory,
)
@ -93,23 +94,23 @@ func ParseOptions(options *types.Options) {
return nil
}
if profileRelPath, err := filepath.Rel(templatesRootDir, iterItem); err == nil {
gologger.Print().Msgf("%s (%s)\n", profileRelPath, strings.TrimSuffix(filepath.Base(iterItem), ext))
options.Logger.Print().Msgf("%s (%s)\n", profileRelPath, strings.TrimSuffix(filepath.Base(iterItem), ext))
}
return nil
})
if err != nil {
gologger.Error().Msgf("%s\n", err)
options.Logger.Error().Msgf("%s\n", err)
}
os.Exit(0)
}
if options.StoreResponseDir != DefaultDumpTrafficOutputFolder && !options.StoreResponse {
gologger.Debug().Msgf("Store response directory specified, enabling \"store-resp\" flag automatically\n")
options.Logger.Debug().Msgf("Store response directory specified, enabling \"store-resp\" flag automatically\n")
options.StoreResponse = true
}
// Validate the options passed by the user and if any
// invalid options have been used, exit.
if err := ValidateOptions(options); err != nil {
gologger.Fatal().Msgf("Program exiting: %s\n", err)
options.Logger.Fatal().Msgf("Program exiting: %s\n", err)
}
// Load the resolvers if user asked for them
@ -117,7 +118,7 @@ func ParseOptions(options *types.Options) {
err := protocolinit.Init(options)
if err != nil {
gologger.Fatal().Msgf("Could not initialize protocols: %s\n", err)
options.Logger.Fatal().Msgf("Could not initialize protocols: %s\n", err)
}
// Set GitHub token in env variable. runner.getGHClientWithToken() reads token from env
@ -139,8 +140,7 @@ func ParseOptions(options *types.Options) {
// validateOptions validates the configuration options passed
func ValidateOptions(options *types.Options) error {
validate := validator.New()
if err := validate.Struct(options); err != nil {
if err := validateOptions.Struct(options); err != nil {
if _, ok := err.(*validator.InvalidValidationError); ok {
return err
}
@ -169,7 +169,7 @@ func ValidateOptions(options *types.Options) error {
return err
}
if options.Validate {
validateTemplatePaths(config.DefaultConfig.TemplatesDirectory, options.Templates, options.Workflows)
validateTemplatePaths(options.Logger, config.DefaultConfig.TemplatesDirectory, options.Templates, options.Workflows)
}
if options.DAST {
if err := validateDASTOptions(options); err != nil {
@ -182,7 +182,7 @@ func ValidateOptions(options *types.Options) error {
if generic.EqualsAny("", options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile) {
return errors.New("if a client certification option is provided, then all three must be provided")
}
validateCertificatePaths(options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile)
validateCertificatePaths(options.Logger, options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile)
}
// Verify AWS secrets are passed if a S3 template bucket is passed
if options.AwsBucketName != "" && options.UpdateTemplates && !options.AwsTemplateDisableDownload {
@ -305,8 +305,8 @@ func createReportingOptions(options *types.Options) (*reporting.Options, error)
return nil, errors.Wrap(err, "could not open reporting config file")
}
defer func() {
_ = file.Close()
}()
_ = file.Close()
}()
if err := yaml.DecodeAndValidate(file, reportingOptions); err != nil {
return nil, errors.Wrap(err, "could not parse reporting config file")
@ -344,32 +344,33 @@ func createReportingOptions(options *types.Options) (*reporting.Options, error)
}
reportingOptions.OmitRaw = options.OmitRawRequests
reportingOptions.ExecutionId = options.ExecutionId
return reportingOptions, nil
}
// configureOutput configures the output logging levels to be displayed on the screen
func configureOutput(options *types.Options) {
if options.NoColor {
gologger.DefaultLogger.SetFormatter(formatter.NewCLI(true))
options.Logger.SetFormatter(formatter.NewCLI(true))
}
// If the user desires verbose output, show verbose output
if options.Debug || options.DebugRequests || options.DebugResponse {
gologger.DefaultLogger.SetMaxLevel(levels.LevelDebug)
options.Logger.SetMaxLevel(levels.LevelDebug)
}
// Debug takes precedence before verbose
// because debug is a lower logging level.
if options.Verbose || options.Validate {
gologger.DefaultLogger.SetMaxLevel(levels.LevelVerbose)
options.Logger.SetMaxLevel(levels.LevelVerbose)
}
if options.NoColor {
gologger.DefaultLogger.SetFormatter(formatter.NewCLI(true))
options.Logger.SetFormatter(formatter.NewCLI(true))
}
if options.Silent {
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
options.Logger.SetMaxLevel(levels.LevelSilent)
}
// disable standard logger (ref: https://github.com/golang/go/issues/19895)
logutil.DisableDefaultLogger()
// logutil.DisableDefaultLogger()
}
// loadResolvers loads resolvers from both user-provided flags and file
@ -380,11 +381,11 @@ func loadResolvers(options *types.Options) {
file, err := os.Open(options.ResolversFile)
if err != nil {
gologger.Fatal().Msgf("Could not open resolvers file: %s\n", err)
options.Logger.Fatal().Msgf("Could not open resolvers file: %s\n", err)
}
defer func() {
_ = file.Close()
}()
_ = file.Close()
}()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
@ -400,7 +401,7 @@ func loadResolvers(options *types.Options) {
}
}
func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPaths []string) {
func validateTemplatePaths(logger *gologger.Logger, templatesDirectory string, templatePaths, workflowPaths []string) {
allGivenTemplatePaths := append(templatePaths, workflowPaths...)
for _, templatePath := range allGivenTemplatePaths {
if templatesDirectory != templatePath && filepath.IsAbs(templatePath) {
@ -408,7 +409,7 @@ func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPat
if err == nil && fileInfo.IsDir() {
relativizedPath, err2 := filepath.Rel(templatesDirectory, templatePath)
if err2 != nil || (len(relativizedPath) >= 2 && relativizedPath[:2] == "..") {
gologger.Warning().Msgf("The given path (%s) is outside the default template directory path (%s)! "+
logger.Warning().Msgf("The given path (%s) is outside the default template directory path (%s)! "+
"Referenced sub-templates with relative paths in workflows will be resolved against the default template directory.", templatePath, templatesDirectory)
break
}
@ -417,12 +418,12 @@ func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPat
}
}
func validateCertificatePaths(certificatePaths ...string) {
func validateCertificatePaths(logger *gologger.Logger, certificatePaths ...string) {
for _, certificatePath := range certificatePaths {
if !fileutil.FileExists(certificatePath) {
// The provided path to the PEM certificate does not exist for the client authentication. As this is
// required for successful authentication, log and return an error
gologger.Fatal().Msgf("The given path (%s) to the certificate does not exist!", certificatePath)
logger.Fatal().Msgf("The given path (%s) to the certificate does not exist!", certificatePath)
break
}
}
@ -449,7 +450,7 @@ func readEnvInputVars(options *types.Options) {
// Attempt to convert the repo ID to an integer
repoIDInt, err := strconv.Atoi(repoID)
if err != nil {
gologger.Warning().Msgf("Invalid GitLab template repository ID: %s", repoID)
options.Logger.Warning().Msgf("Invalid GitLab template repository ID: %s", repoID)
continue
}

View File

@ -7,9 +7,8 @@ import (
"os"
"strings"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file"
proxyutils "github.com/projectdiscovery/utils/proxy"
)
@ -31,8 +30,8 @@ func loadProxyServers(options *types.Options) error {
return fmt.Errorf("could not open proxy file: %w", err)
}
defer func() {
_ = file.Close()
}()
_ = file.Close()
}()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
proxy := scanner.Text()
@ -51,18 +50,18 @@ func loadProxyServers(options *types.Options) error {
}
proxyURL, err := url.Parse(aliveProxy)
if err != nil {
return errorutil.WrapfWithNil(err, "failed to parse proxy got %v", err)
return errkit.Wrapf(err, "failed to parse proxy got %v", err)
}
if options.ProxyInternal {
_ = os.Setenv(HTTP_PROXY_ENV, proxyURL.String())
}
switch proxyURL.Scheme {
case proxyutils.HTTP, proxyutils.HTTPS:
gologger.Verbose().Msgf("Using %s as proxy server", proxyURL.String())
options.Logger.Verbose().Msgf("Using %s as proxy server", proxyURL.String())
options.AliveHttpProxy = proxyURL.String()
case proxyutils.SOCKS5:
options.AliveSocksProxy = proxyURL.String()
gologger.Verbose().Msgf("Using %s as socket proxy server", proxyURL.String())
options.Logger.Verbose().Msgf("Using %s as socket proxy server", proxyURL.String())
}
return nil
}

View File

@ -10,6 +10,7 @@ import (
"sync/atomic"
"time"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/pdcp"
"github.com/projectdiscovery/nuclei/v3/internal/server"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
@ -32,7 +33,6 @@ import (
"github.com/pkg/errors"
"github.com/projectdiscovery/ratelimit"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/colorizer"
"github.com/projectdiscovery/nuclei/v3/internal/httpapi"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
@ -95,6 +95,7 @@ type Runner struct {
inputProvider provider.InputProvider
fuzzFrequencyCache *frequency.Tracker
httpStats *outputstats.Tracker
Logger *gologger.Logger
//general purpose temporary directory
tmpDir string
@ -108,10 +109,11 @@ type Runner struct {
func New(options *types.Options) (*Runner, error) {
runner := &Runner{
options: options,
Logger: options.Logger,
}
if options.HealthCheck {
gologger.Print().Msgf("%s\n", DoHealthCheck(options))
runner.Logger.Print().Msgf("%s\n", DoHealthCheck(options))
os.Exit(0)
}
@ -119,14 +121,22 @@ func New(options *types.Options) (*Runner, error) {
if config.DefaultConfig.CanCheckForUpdates() {
if err := installer.NucleiVersionCheck(); err != nil {
if options.Verbose || options.Debug {
gologger.Error().Msgf("nuclei version check failed got: %s\n", err)
runner.Logger.Error().Msgf("nuclei version check failed got: %s\n", err)
}
}
// if template list or template display is enabled, enable all templates
if options.TemplateList || options.TemplateDisplay {
options.EnableCodeTemplates = true
options.EnableFileTemplates = true
options.EnableSelfContainedTemplates = true
options.EnableGlobalMatchersTemplates = true
}
// check for custom template updates and update if available
ctm, err := customtemplates.NewCustomTemplatesManager(options)
if err != nil {
gologger.Error().Label("custom-templates").Msgf("Failed to create custom templates manager: %s\n", err)
runner.Logger.Error().Label("custom-templates").Msgf("Failed to create custom templates manager: %s\n", err)
}
// Check for template updates and update if available.
@ -136,15 +146,15 @@ func New(options *types.Options) (*Runner, error) {
DisablePublicTemplates: options.PublicTemplateDisableDownload,
}
if err := tm.FreshInstallIfNotExists(); err != nil {
gologger.Warning().Msgf("failed to install nuclei templates: %s\n", err)
runner.Logger.Warning().Msgf("failed to install nuclei templates: %s\n", err)
}
if err := tm.UpdateIfOutdated(); err != nil {
gologger.Warning().Msgf("failed to update nuclei templates: %s\n", err)
runner.Logger.Warning().Msgf("failed to update nuclei templates: %s\n", err)
}
if config.DefaultConfig.NeedsIgnoreFileUpdate() {
if err := installer.UpdateIgnoreFile(); err != nil {
gologger.Warning().Msgf("failed to update nuclei ignore file: %s\n", err)
runner.Logger.Warning().Msgf("failed to update nuclei ignore file: %s\n", err)
}
}
@ -152,7 +162,7 @@ func New(options *types.Options) (*Runner, error) {
// we automatically check for updates unless explicitly disabled
// this print statement is only to inform the user that there are no updates
if !config.DefaultConfig.NeedsTemplateUpdate() {
gologger.Info().Msgf("No new updates found for nuclei templates")
runner.Logger.Info().Msgf("No new updates found for nuclei templates")
}
// manually trigger update of custom templates
if ctm != nil {
@ -161,20 +171,25 @@ func New(options *types.Options) (*Runner, error) {
}
}
parser := templates.NewParser()
if options.Validate {
parser.ShouldValidate = true
if op, ok := options.Parser.(*templates.Parser); ok {
// Enable passing in an existing parser instance
// This uses a type assertion to avoid an import loop
runner.parser = op
} else {
parser := templates.NewParser()
if options.Validate {
parser.ShouldValidate = true
}
// TODO: refactor to pass options reference globally without cycles
parser.NoStrictSyntax = options.NoStrictSyntax
runner.parser = parser
}
// TODO: refactor to pass options reference globally without cycles
parser.NoStrictSyntax = options.NoStrictSyntax
runner.parser = parser
yaml.StrictSyntax = !options.NoStrictSyntax
if options.Headless {
if engine.MustDisableSandbox() {
gologger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox\n")
runner.Logger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox\n")
}
browser, err := engine.New(options)
if err != nil {
@ -226,11 +241,11 @@ func New(options *types.Options) (*Runner, error) {
if options.HttpApiEndpoint != "" {
apiServer := httpapi.New(options.HttpApiEndpoint, options)
gologger.Info().Msgf("Listening api endpoint on: %s", options.HttpApiEndpoint)
runner.Logger.Info().Msgf("Listening api endpoint on: %s", options.HttpApiEndpoint)
runner.httpApiEndpoint = apiServer
go func() {
if err := apiServer.Start(); err != nil {
gologger.Error().Msgf("Failed to start API server: %s", err)
runner.Logger.Error().Msgf("Failed to start API server: %s", err)
}
}()
}
@ -284,7 +299,7 @@ func New(options *types.Options) (*Runner, error) {
// create the resume configuration structure
resumeCfg := types.NewResumeCfg()
if runner.options.ShouldLoadResume() {
gologger.Info().Msg("Resuming from save checkpoint")
runner.Logger.Info().Msg("Resuming from save checkpoint")
file, err := os.ReadFile(runner.options.Resume)
if err != nil {
return nil, err
@ -326,6 +341,7 @@ func New(options *types.Options) (*Runner, error) {
}
opts := interactsh.DefaultOptions(runner.output, runner.issuesClient, runner.progress)
opts.Logger = runner.Logger
opts.Debug = runner.options.Debug
opts.NoColor = runner.options.NoColor
if options.InteractshURL != "" {
@ -355,24 +371,20 @@ func New(options *types.Options) (*Runner, error) {
}
interactshClient, err := interactsh.New(opts)
if err != nil {
gologger.Error().Msgf("Could not create interactsh client: %s", err)
runner.Logger.Error().Msgf("Could not create interactsh client: %s", err)
} else {
runner.interactsh = interactshClient
}
if options.RateLimitMinute > 0 {
gologger.Print().Msgf("[%v] %v", aurora.BrightYellow("WRN"), "rate limit per minute is deprecated - use rate-limit-duration")
runner.Logger.Print().Msgf("[%v] %v", aurora.BrightYellow("WRN"), "rate limit per minute is deprecated - use rate-limit-duration")
options.RateLimit = options.RateLimitMinute
options.RateLimitDuration = time.Minute
}
if options.RateLimit > 0 && options.RateLimitDuration == 0 {
options.RateLimitDuration = time.Second
}
if options.RateLimit == 0 && options.RateLimitDuration == 0 {
runner.rateLimiter = ratelimit.NewUnlimited(context.Background())
} else {
runner.rateLimiter = ratelimit.New(context.Background(), uint(options.RateLimit), options.RateLimitDuration)
}
runner.rateLimiter = utils.GetRateLimiter(context.Background(), options.RateLimit, options.RateLimitDuration)
if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err == nil {
runner.tmpDir = tmpDir
@ -382,7 +394,7 @@ func New(options *types.Options) (*Runner, error) {
}
// runStandardEnumeration runs standard enumeration
func (r *Runner) runStandardEnumeration(executerOpts protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
func (r *Runner) runStandardEnumeration(executerOpts *protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
if r.options.AutomaticScan {
return r.executeSmartWorkflowInput(executerOpts, store, engine)
}
@ -413,7 +425,7 @@ func (r *Runner) Close() {
if r.inputProvider != nil {
r.inputProvider.Close()
}
protocolinit.Close()
protocolinit.Close(r.options.ExecutionId)
if r.pprofServer != nil {
r.pprofServer.Stop()
}
@ -440,22 +452,21 @@ func (r *Runner) setupPDCPUpload(writer output.Writer) output.Writer {
r.options.EnableCloudUpload = true
}
if !r.options.EnableCloudUpload && !EnableCloudUpload {
r.pdcpUploadErrMsg = fmt.Sprintf("[%v] Scan results upload to cloud is disabled.", r.colorizer.BrightYellow("WRN"))
r.pdcpUploadErrMsg = "Scan results upload to cloud is disabled."
return writer
}
color := aurora.NewAurora(!r.options.NoColor)
h := &pdcpauth.PDCPCredHandler{}
creds, err := h.GetCreds()
if err != nil {
if err != pdcpauth.ErrNoCreds && !HideAutoSaveMsg {
gologger.Verbose().Msgf("Could not get credentials for cloud upload: %s\n", err)
r.Logger.Verbose().Msgf("Could not get credentials for cloud upload: %s\n", err)
}
r.pdcpUploadErrMsg = fmt.Sprintf("[%v] To view results on Cloud Dashboard, Configure API key from %v", color.BrightYellow("WRN"), pdcpauth.DashBoardURL)
r.pdcpUploadErrMsg = fmt.Sprintf("To view results on Cloud Dashboard, configure API key from %v", pdcpauth.DashBoardURL)
return writer
}
uploadWriter, err := pdcp.NewUploadWriter(context.Background(), creds)
uploadWriter, err := pdcp.NewUploadWriter(context.Background(), r.Logger, creds)
if err != nil {
r.pdcpUploadErrMsg = fmt.Sprintf("[%v] PDCP (%v) Auto-Save Failed: %s\n", color.BrightYellow("WRN"), pdcpauth.DashBoardURL, err)
r.pdcpUploadErrMsg = fmt.Sprintf("PDCP (%v) Auto-Save Failed: %s\n", pdcpauth.DashBoardURL, err)
return writer
}
if r.options.ScanID != "" {
@ -491,6 +502,7 @@ func (r *Runner) RunEnumeration() error {
Parser: r.parser,
TemporaryDirectory: r.tmpDir,
FuzzStatsDB: r.fuzzStats,
Logger: r.Logger,
}
dastServer, err := server.New(&server.Options{
Address: r.options.DASTServerAddress,
@ -532,7 +544,7 @@ func (r *Runner) RunEnumeration() error {
// Create the executor options which will be used throughout the execution
// stage by the nuclei engine modules.
executorOpts := protocols.ExecutorOptions{
executorOpts := &protocols.ExecutorOptions{
Output: r.output,
Options: r.options,
Progress: r.progress,
@ -550,6 +562,8 @@ func (r *Runner) RunEnumeration() error {
Parser: r.parser,
FuzzParamsFrequency: fuzzFreqCache,
GlobalMatchers: globalmatchers.New(),
DoNotCache: r.options.DoNotCacheTemplates,
Logger: r.Logger,
}
if config.DefaultConfig.IsDebugArgEnabled(config.DebugExportURLPattern) {
@ -558,7 +572,7 @@ func (r *Runner) RunEnumeration() error {
}
if len(r.options.SecretsFile) > 0 && !r.options.Validate {
authTmplStore, err := GetAuthTmplStore(*r.options, r.catalog, executorOpts)
authTmplStore, err := GetAuthTmplStore(r.options, r.catalog, executorOpts)
if err != nil {
return errors.Wrap(err, "failed to load dynamic auth templates")
}
@ -578,8 +592,8 @@ func (r *Runner) RunEnumeration() error {
if r.options.ShouldUseHostError() {
maxHostError := r.options.MaxHostError
if r.options.TemplateThreads > maxHostError {
gologger.Print().Msgf("[%v] The concurrency value is higher than max-host-error", r.colorizer.BrightYellow("WRN"))
gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", r.options.TemplateThreads)
r.Logger.Print().Msgf("[%v] The concurrency value is higher than max-host-error", r.colorizer.BrightYellow("WRN"))
r.Logger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", r.options.TemplateThreads)
maxHostError = r.options.TemplateThreads
}
@ -594,7 +608,7 @@ func (r *Runner) RunEnumeration() error {
executorEngine := core.New(r.options)
executorEngine.SetExecuterOptions(executorOpts)
workflowLoader, err := parsers.NewLoader(&executorOpts)
workflowLoader, err := parsers.NewLoader(executorOpts)
if err != nil {
return errors.Wrap(err, "Could not create loader.")
}
@ -633,7 +647,7 @@ func (r *Runner) RunEnumeration() error {
return err
}
if stats.GetValue(templates.SyntaxErrorStats) == 0 && stats.GetValue(templates.SyntaxWarningStats) == 0 && stats.GetValue(templates.RuntimeWarningsStats) == 0 {
gologger.Info().Msgf("All templates validated successfully\n")
r.Logger.Info().Msgf("All templates validated successfully")
} else {
return errors.New("encountered errors while performing template validation")
}
@ -655,7 +669,7 @@ func (r *Runner) RunEnumeration() error {
}
ret := uncover.GetUncoverTargetsFromMetadata(context.TODO(), store.Templates(), r.options.UncoverField, uncoverOpts)
for host := range ret {
_ = r.inputProvider.SetWithExclusions(host)
_ = r.inputProvider.SetWithExclusions(r.options.ExecutionId, host)
}
}
// display execution info like version , templates used etc
@ -663,7 +677,7 @@ func (r *Runner) RunEnumeration() error {
// prefetch secrets if enabled
if executorOpts.AuthProvider != nil && r.options.PreFetchSecrets {
gologger.Info().Msgf("Pre-fetching secrets from authprovider[s]")
r.Logger.Info().Msgf("Pre-fetching secrets from authprovider[s]")
if err := executorOpts.AuthProvider.PreFetchSecrets(); err != nil {
return errors.Wrap(err, "could not pre-fetch secrets")
}
@ -697,7 +711,7 @@ func (r *Runner) RunEnumeration() error {
if r.dastServer != nil {
go func() {
if err := r.dastServer.Start(); err != nil {
gologger.Error().Msgf("could not start dast server: %v", err)
r.Logger.Error().Msgf("could not start dast server: %v", err)
}
}()
}
@ -731,10 +745,10 @@ func (r *Runner) RunEnumeration() error {
// todo: error propagation without canonical straight error check is required by cloud?
// use safe dereferencing to avoid potential panics in case of previous unchecked errors
if v := ptrutil.Safe(results); !v.Load() {
gologger.Info().Msgf("Scan completed in %s. No results found.", shortDur(timeTaken))
r.Logger.Info().Msgf("Scan completed in %s. No results found.", shortDur(timeTaken))
} else {
matchCount := r.output.ResultCount()
gologger.Info().Msgf("Scan completed in %s. %d matches found.", shortDur(timeTaken), matchCount)
r.Logger.Info().Msgf("Scan completed in %s. %d matches found.", shortDur(timeTaken), matchCount)
}
// check if a passive scan was requested but no target was provided
@ -775,7 +789,7 @@ func (r *Runner) isInputNonHTTP() bool {
return nonURLInput
}
func (r *Runner) executeSmartWorkflowInput(executorOpts protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
func (r *Runner) executeSmartWorkflowInput(executorOpts *protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
r.progress.Init(r.inputProvider.Count(), 0, 0)
service, err := automaticscan.New(automaticscan.Options{
@ -843,7 +857,7 @@ func (r *Runner) displayExecutionInfo(store *loader.Store) {
if tmplCount == 0 && workflowCount == 0 {
// if dast flag is used print explicit warning
if r.options.DAST {
gologger.DefaultLogger.Print().Msgf("[%v] No DAST templates found", aurora.BrightYellow("WRN"))
r.Logger.Print().Msgf("[%v] No DAST templates found", aurora.BrightYellow("WRN"))
}
stats.ForceDisplayWarning(templates.SkippedCodeTmplTamperedStats)
} else {
@ -867,34 +881,34 @@ func (r *Runner) displayExecutionInfo(store *loader.Store) {
gologger.Info().Msg(versionInfo(cfg.TemplateVersion, cfg.LatestNucleiTemplatesVersion, "nuclei-templates"))
if !HideAutoSaveMsg {
if r.pdcpUploadErrMsg != "" {
gologger.Print().Msgf("%s", r.pdcpUploadErrMsg)
r.Logger.Warning().Msgf("%s", r.pdcpUploadErrMsg)
} else {
gologger.Info().Msgf("To view results on cloud dashboard, visit %v/scans upon scan completion.", pdcpauth.DashBoardURL)
r.Logger.Info().Msgf("To view results on cloud dashboard, visit %v/scans upon scan completion.", pdcpauth.DashBoardURL)
}
}
if tmplCount > 0 || workflowCount > 0 {
if len(store.Templates()) > 0 {
gologger.Info().Msgf("New templates added in latest release: %d", len(config.DefaultConfig.GetNewAdditions()))
gologger.Info().Msgf("Templates loaded for current scan: %d", len(store.Templates()))
r.Logger.Info().Msgf("New templates added in latest release: %d", len(config.DefaultConfig.GetNewAdditions()))
r.Logger.Info().Msgf("Templates loaded for current scan: %d", len(store.Templates()))
}
if len(store.Workflows()) > 0 {
gologger.Info().Msgf("Workflows loaded for current scan: %d", len(store.Workflows()))
r.Logger.Info().Msgf("Workflows loaded for current scan: %d", len(store.Workflows()))
}
for k, v := range templates.SignatureStats {
value := v.Load()
if value > 0 {
if k == templates.Unsigned && !r.options.Silent && !config.DefaultConfig.HideTemplateSigWarning {
gologger.Print().Msgf("[%v] Loading %d unsigned templates for scan. Use with caution.", r.colorizer.BrightYellow("WRN"), value)
r.Logger.Print().Msgf("[%v] Loading %d unsigned templates for scan. Use with caution.", r.colorizer.BrightYellow("WRN"), value)
} else {
gologger.Info().Msgf("Executing %d signed templates from %s", value, k)
r.Logger.Info().Msgf("Executing %d signed templates from %s", value, k)
}
}
}
}
if r.inputProvider.Count() > 0 {
gologger.Info().Msgf("Targets loaded for current scan: %d", r.inputProvider.Count())
r.Logger.Info().Msgf("Targets loaded for current scan: %d", r.inputProvider.Count())
}
}
@ -921,7 +935,7 @@ func UploadResultsToCloud(options *types.Options) error {
return errors.Wrap(err, "could not get credentials for cloud upload")
}
ctx := context.TODO()
uploadWriter, err := pdcp.NewUploadWriter(ctx, creds)
uploadWriter, err := pdcp.NewUploadWriter(ctx, options.Logger, creds)
if err != nil {
return errors.Wrap(err, "could not create upload writer")
}
@ -941,20 +955,20 @@ func UploadResultsToCloud(options *types.Options) error {
return errors.Wrap(err, "could not open scan upload file")
}
defer func() {
_ = file.Close()
}()
_ = file.Close()
}()
gologger.Info().Msgf("Uploading scan results to cloud dashboard from %s", options.ScanUploadFile)
options.Logger.Info().Msgf("Uploading scan results to cloud dashboard from %s", options.ScanUploadFile)
dec := json.NewDecoder(file)
for dec.More() {
var r output.ResultEvent
err := dec.Decode(&r)
if err != nil {
gologger.Warning().Msgf("Could not decode jsonl: %s\n", err)
options.Logger.Warning().Msgf("Could not decode jsonl: %s\n", err)
continue
}
if err = uploadWriter.Write(&r); err != nil {
gologger.Warning().Msgf("[%s] failed to upload: %s\n", r.TemplateID, err)
options.Logger.Warning().Msgf("[%s] failed to upload: %s\n", r.TemplateID, err)
}
}
uploadWriter.Close()

View File

@ -12,7 +12,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
)
@ -25,7 +24,7 @@ func (r *Runner) logAvailableTemplate(tplPath string) {
panic("not a template")
}
if err != nil {
gologger.Error().Msgf("Could not parse file '%s': %s\n", tplPath, err)
r.Logger.Error().Msgf("Could not parse file '%s': %s\n", tplPath, err)
} else {
r.verboseTemplate(tpl)
}
@ -33,14 +32,14 @@ func (r *Runner) logAvailableTemplate(tplPath string) {
// log available templates for verbose (-vv)
func (r *Runner) verboseTemplate(tpl *templates.Template) {
gologger.Print().Msgf("%s\n", templates.TemplateLogMessage(tpl.ID,
r.Logger.Print().Msgf("%s\n", templates.TemplateLogMessage(tpl.ID,
types.ToString(tpl.Info.Name),
tpl.Info.Authors.ToSlice(),
tpl.Info.SeverityHolder.Severity))
}
func (r *Runner) listAvailableStoreTemplates(store *loader.Store) {
gologger.Print().Msgf(
r.Logger.Print().Msgf(
"\nListing available %v nuclei templates for %v",
config.DefaultConfig.TemplateVersion,
config.DefaultConfig.TemplatesDirectory,
@ -52,20 +51,20 @@ func (r *Runner) listAvailableStoreTemplates(store *loader.Store) {
path := tpl.Path
tplBody, err := store.ReadTemplateFromURI(path, true)
if err != nil {
gologger.Error().Msgf("Could not read the template %s: %s", path, err)
r.Logger.Error().Msgf("Could not read the template %s: %s", path, err)
continue
}
if colorize {
path = aurora.Cyan(tpl.Path).String()
tplBody, err = r.highlightTemplate(&tplBody)
if err != nil {
gologger.Error().Msgf("Could not highlight the template %s: %s", tpl.Path, err)
r.Logger.Error().Msgf("Could not highlight the template %s: %s", tpl.Path, err)
continue
}
}
gologger.Silent().Msgf("Template: %s\n\n%s", path, tplBody)
r.Logger.Print().Msgf("Template: %s\n\n%s", path, tplBody)
} else {
gologger.Silent().Msgf("%s\n", strings.TrimPrefix(tpl.Path, config.DefaultConfig.TemplatesDirectory+string(filepath.Separator)))
r.Logger.Print().Msgf("%s\n", strings.TrimPrefix(tpl.Path, config.DefaultConfig.TemplatesDirectory+string(filepath.Separator)))
}
} else {
r.verboseTemplate(tpl)
@ -74,7 +73,7 @@ func (r *Runner) listAvailableStoreTemplates(store *loader.Store) {
}
func (r *Runner) listAvailableStoreTags(store *loader.Store) {
gologger.Print().Msgf(
r.Logger.Print().Msgf(
"\nListing available %v nuclei tags for %v",
config.DefaultConfig.TemplateVersion,
config.DefaultConfig.TemplatesDirectory,
@ -100,9 +99,9 @@ func (r *Runner) listAvailableStoreTags(store *loader.Store) {
for _, tag := range tagsList {
if r.options.JSONL {
marshalled, _ := jsoniter.Marshal(tag)
gologger.Silent().Msgf("%s\n", string(marshalled))
r.Logger.Debug().Msgf("%s", string(marshalled))
} else {
gologger.Silent().Msgf("%s (%d)\n", tag.Key, tag.Value)
r.Logger.Debug().Msgf("%s (%d)", tag.Key, tag.Value)
}
}
}

View File

@ -41,7 +41,7 @@ type nucleiExecutor struct {
engine *core.Engine
store *loader.Store
options *NucleiExecutorOptions
executorOpts protocols.ExecutorOptions
executorOpts *protocols.ExecutorOptions
}
type NucleiExecutorOptions struct {
@ -58,6 +58,7 @@ type NucleiExecutorOptions struct {
Colorizer aurora.Aurora
Parser parser.Parser
TemporaryDirectory string
Logger *gologger.Logger
}
func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
@ -66,7 +67,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
// Create the executor options which will be used throughout the execution
// stage by the nuclei engine modules.
executorOpts := protocols.ExecutorOptions{
executorOpts := &protocols.ExecutorOptions{
Output: opts.Output,
Options: opts.Options,
Progress: opts.Progress,
@ -85,6 +86,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
FuzzParamsFrequency: fuzzFreqCache,
GlobalMatchers: globalmatchers.New(),
FuzzStatsDB: opts.FuzzStatsDB,
Logger: opts.Logger,
}
if opts.Options.ShouldUseHostError() {
@ -93,7 +95,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
maxHostError = 100 // auto adjust for fuzzings
}
if opts.Options.TemplateThreads > maxHostError {
gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", opts.Options.TemplateThreads)
opts.Logger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", opts.Options.TemplateThreads)
maxHostError = opts.Options.TemplateThreads
}
@ -107,7 +109,7 @@ func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
executorEngine := core.New(opts.Options)
executorEngine.SetExecuterOptions(executorOpts)
workflowLoader, err := parsers.NewLoader(&executorOpts)
workflowLoader, err := parsers.NewLoader(executorOpts)
if err != nil {
return nil, errors.Wrap(err, "Could not create loader options.")
}

View File

@ -112,7 +112,7 @@ func New(options *Options) (*DASTServer, error) {
func NewStatsServer(fuzzStatsDB *stats.Tracker) (*DASTServer, error) {
server := &DASTServer{
nucleiExecutor: &nucleiExecutor{
executorOpts: protocols.ExecutorOptions{
executorOpts: &protocols.ExecutorOptions{
FuzzStatsDB: fuzzStatsDB,
},
},

View File

@ -7,7 +7,8 @@ import (
"github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/ratelimit"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/projectdiscovery/utils/errkit"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
@ -19,6 +20,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/utils/vardump"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless/engine"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
pkgtypes "github.com/projectdiscovery/nuclei/v3/pkg/types"
)
// TemplateSources contains template sources
@ -101,7 +103,7 @@ type InteractshOpts interactsh.Options
func WithInteractshOptions(opts InteractshOpts) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithInteractshOptions")
return errkit.Wrap(ErrOptionsNotSupported, "WithInteractshOptions")
}
optsPtr := &opts
e.interactshOpts = (*interactsh.Options)(optsPtr)
@ -179,7 +181,7 @@ func WithGlobalRateLimitCtx(ctx context.Context, maxTokens int, duration time.Du
return func(e *NucleiEngine) error {
e.opts.RateLimit = maxTokens
e.opts.RateLimitDuration = duration
e.rateLimiter = ratelimit.New(ctx, uint(e.opts.RateLimit), e.opts.RateLimitDuration)
e.rateLimiter = utils.GetRateLimiter(ctx, e.opts.RateLimit, e.opts.RateLimitDuration)
return nil
}
}
@ -205,7 +207,7 @@ func EnableHeadlessWithOpts(hopts *HeadlessOpts) NucleiSDKOptions {
e.opts.UseInstalledChrome = hopts.UseChrome
}
if engine.MustDisableSandbox() {
gologger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox\n")
e.Logger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox")
}
browser, err := engine.New(e.opts)
if err != nil {
@ -228,7 +230,7 @@ type StatsOptions struct {
func EnableStatsWithOpts(opts StatsOptions) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("EnableStatsWithOpts")
return errkit.Wrap(ErrOptionsNotSupported, "EnableStatsWithOpts")
}
if opts.Interval == 0 {
opts.Interval = 5 //sec
@ -256,7 +258,7 @@ type VerbosityOptions struct {
func WithVerbosity(opts VerbosityOptions) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithVerbosity")
return errkit.Wrap(ErrOptionsNotSupported, "WithVerbosity")
}
e.opts.Verbose = opts.Verbose
e.opts.Silent = opts.Silent
@ -289,15 +291,15 @@ type NetworkConfig struct {
func WithNetworkConfig(opts NetworkConfig) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithNetworkConfig")
return errkit.Wrap(ErrOptionsNotSupported, "WithNetworkConfig")
}
e.opts.NoHostErrors = opts.DisableMaxHostErr
e.opts.MaxHostError = opts.MaxHostError
if e.opts.ShouldUseHostError() {
maxHostError := opts.MaxHostError
if e.opts.TemplateThreads > maxHostError {
gologger.Print().Msgf("[%v] The concurrency value is higher than max-host-error", e.executerOpts.Colorizer.BrightYellow("WRN"))
gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", e.opts.TemplateThreads)
e.Logger.Warning().Msg("The concurrency value is higher than max-host-error")
e.Logger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", e.opts.TemplateThreads)
maxHostError = e.opts.TemplateThreads
e.opts.MaxHostError = maxHostError
}
@ -320,7 +322,7 @@ func WithNetworkConfig(opts NetworkConfig) NucleiSDKOptions {
func WithProxy(proxy []string, proxyInternalRequests bool) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithProxy")
return errkit.Wrap(ErrOptionsNotSupported, "WithProxy")
}
e.opts.Proxy = proxy
e.opts.ProxyInternal = proxyInternalRequests
@ -345,7 +347,7 @@ type OutputWriter output.Writer
func UseOutputWriter(writer OutputWriter) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("UseOutputWriter")
return errkit.Wrap(ErrOptionsNotSupported, "UseOutputWriter")
}
e.customWriter = writer
return nil
@ -360,7 +362,7 @@ type StatsWriter progress.Progress
func UseStatsWriter(writer StatsWriter) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("UseStatsWriter")
return errkit.Wrap(ErrOptionsNotSupported, "UseStatsWriter")
}
e.customProgress = writer
return nil
@ -374,7 +376,7 @@ func UseStatsWriter(writer StatsWriter) NucleiSDKOptions {
func WithTemplateUpdateCallback(disableTemplatesAutoUpgrade bool, callback func(newVersion string)) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithTemplateUpdateCallback")
return errkit.Wrap(ErrOptionsNotSupported, "WithTemplateUpdateCallback")
}
e.disableTemplatesAutoUpgrade = disableTemplatesAutoUpgrade
e.onUpdateAvailableCallback = callback
@ -386,7 +388,7 @@ func WithTemplateUpdateCallback(disableTemplatesAutoUpgrade bool, callback func(
func WithSandboxOptions(allowLocalFileAccess bool, restrictLocalNetworkAccess bool) NucleiSDKOptions {
return func(e *NucleiEngine) error {
if e.mode == threadSafe {
return ErrOptionsNotSupported.Msgf("WithSandboxOptions")
return errkit.Wrap(ErrOptionsNotSupported, "WithSandboxOptions")
}
e.opts.AllowLocalFileAccess = allowLocalFileAccess
e.opts.RestrictLocalNetworkAccess = restrictLocalNetworkAccess
@ -419,6 +421,14 @@ func EnableGlobalMatchersTemplates() NucleiSDKOptions {
}
}
// DisableTemplateCache disables template caching
func DisableTemplateCache() NucleiSDKOptions {
	return func(engine *NucleiEngine) error {
		// opt out of the parsed-template cache for this engine instance
		engine.opts.DoNotCacheTemplates = true
		return nil
	}
}
// EnableFileTemplates allows loading/executing file protocol templates
func EnableFileTemplates() NucleiSDKOptions {
return func(e *NucleiEngine) error {
@ -527,3 +537,25 @@ func WithResumeFile(file string) NucleiSDKOptions {
return nil
}
}
// WithLogger allows setting a shared gologger instance
func WithLogger(logger *gologger.Logger) NucleiSDKOptions {
	return func(e *NucleiEngine) error {
		// propagate the logger to the engine and, where already
		// initialized, to its nested option structs as well
		e.Logger = logger
		if opts := e.opts; opts != nil {
			opts.Logger = logger
		}
		if execOpts := e.executerOpts; execOpts != nil {
			execOpts.Logger = logger
		}
		return nil
	}
}
// WithOptions sets all options at once
//
// NOTE(review): this replaces the engine's options wholesale, including
// fields the constructor pre-populates (e.g. ExecutionId is set by
// NewNucleiEngineCtx/NewThreadSafeNucleiEngineCtx before functional
// options run) — callers must pass a fully-initialized Options value.
// TODO confirm overwriting ExecutionId here is intended.
func WithOptions(opts *pkgtypes.Options) NucleiSDKOptions {
	return func(e *NucleiEngine) error {
		e.opts = opts
		return nil
	}
}

View File

@ -12,8 +12,9 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/ratelimit"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/projectdiscovery/utils/errkit"
"github.com/rs/xid"
)
// unsafeOptions are those nuclei objects/instances/types
@ -21,14 +22,14 @@ import (
// hence they are ephemeral and are created on every ExecuteNucleiWithOpts invocation
// in ThreadSafeNucleiEngine
type unsafeOptions struct {
executerOpts protocols.ExecutorOptions
executerOpts *protocols.ExecutorOptions
engine *core.Engine
}
// createEphemeralObjects creates ephemeral nuclei objects/instances/types
func createEphemeralObjects(ctx context.Context, base *NucleiEngine, opts *types.Options) (*unsafeOptions, error) {
u := &unsafeOptions{}
u.executerOpts = protocols.ExecutorOptions{
u.executerOpts = &protocols.ExecutorOptions{
Output: base.customWriter,
Options: opts,
Progress: base.customProgress,
@ -52,11 +53,7 @@ func createEphemeralObjects(ctx context.Context, base *NucleiEngine, opts *types
if opts.RateLimit > 0 && opts.RateLimitDuration == 0 {
opts.RateLimitDuration = time.Second
}
if opts.RateLimit == 0 && opts.RateLimitDuration == 0 {
u.executerOpts.RateLimiter = ratelimit.NewUnlimited(ctx)
} else {
u.executerOpts.RateLimiter = ratelimit.New(ctx, uint(opts.RateLimit), opts.RateLimitDuration)
}
u.executerOpts.RateLimiter = utils.GetRateLimiter(ctx, opts.RateLimit, opts.RateLimitDuration)
u.engine = core.New(opts)
u.engine.SetExecuterOptions(u.executerOpts)
return u, nil
@ -88,9 +85,11 @@ type ThreadSafeNucleiEngine struct {
// whose methods are thread-safe and can be used concurrently
// Note: Non-thread-safe methods start with Global prefix
func NewThreadSafeNucleiEngineCtx(ctx context.Context, opts ...NucleiSDKOptions) (*ThreadSafeNucleiEngine, error) {
defaultOptions := types.DefaultOptions()
defaultOptions.ExecutionId = xid.New().String()
// default options
e := &NucleiEngine{
opts: types.DefaultOptions(),
opts: defaultOptions,
mode: threadSafe,
}
for _, option := range opts {
@ -125,8 +124,8 @@ func (e *ThreadSafeNucleiEngine) GlobalResultCallback(callback func(event *outpu
// by invoking this method with different options and targets
// Note: Not all options are thread-safe. this method will throw error if you try to use non-thread-safe options
func (e *ThreadSafeNucleiEngine) ExecuteNucleiWithOptsCtx(ctx context.Context, targets []string, opts ...NucleiSDKOptions) error {
baseOpts := *e.eng.opts
tmpEngine := &NucleiEngine{opts: &baseOpts, mode: threadSafe}
baseOpts := e.eng.opts.Copy()
tmpEngine := &NucleiEngine{opts: baseOpts, mode: threadSafe}
for _, option := range opts {
if err := option(tmpEngine); err != nil {
return err
@ -142,19 +141,19 @@ func (e *ThreadSafeNucleiEngine) ExecuteNucleiWithOptsCtx(ctx context.Context, t
defer closeEphemeralObjects(unsafeOpts)
// load templates
workflowLoader, err := workflow.NewLoader(&unsafeOpts.executerOpts)
workflowLoader, err := workflow.NewLoader(unsafeOpts.executerOpts)
if err != nil {
return errorutil.New("Could not create workflow loader: %s\n", err)
return errkit.Wrapf(err, "Could not create workflow loader: %s", err)
}
unsafeOpts.executerOpts.WorkflowLoader = workflowLoader
store, err := loader.New(loader.NewConfig(tmpEngine.opts, e.eng.catalog, unsafeOpts.executerOpts))
if err != nil {
return errorutil.New("Could not create loader client: %s\n", err)
return errkit.Wrapf(err, "Could not create loader client: %s", err)
}
store.Load()
inputProvider := provider.NewSimpleInputProviderWithUrls(targets...)
inputProvider := provider.NewSimpleInputProviderWithUrls(e.eng.opts.ExecutionId, targets...)
if len(store.Templates()) == 0 && len(store.Workflows()) == 0 {
return ErrNoTemplatesAvailable

View File

@ -5,7 +5,9 @@ import (
"bytes"
"context"
"io"
"sync"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
@ -26,7 +28,8 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/ratelimit"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
"github.com/rs/xid"
)
// NucleiSDKOptions contains options for nuclei SDK
@ -34,13 +37,13 @@ type NucleiSDKOptions func(e *NucleiEngine) error
var (
// ErrNotImplemented is returned when a feature is not implemented
ErrNotImplemented = errorutil.New("Not implemented")
ErrNotImplemented = errkit.New("Not implemented")
// ErrNoTemplatesAvailable is returned when no templates are available to execute
ErrNoTemplatesAvailable = errorutil.New("No templates available")
ErrNoTemplatesAvailable = errkit.New("No templates available")
// ErrNoTargetsAvailable is returned when no targets are available to scan
ErrNoTargetsAvailable = errorutil.New("No targets available")
ErrNoTargetsAvailable = errkit.New("No targets available")
// ErrOptionsNotSupported is returned when an option is not supported in thread safe mode
ErrOptionsNotSupported = errorutil.NewWithFmt("Option %v not supported in thread safe mode")
ErrOptionsNotSupported = errkit.New("Option not supported in thread safe mode")
)
type engineMode uint
@ -64,6 +67,7 @@ type NucleiEngine struct {
templatesLoaded bool
// unexported core fields
ctx context.Context
interactshClient *interactsh.Client
catalog catalog.Catalog
rateLimiter *ratelimit.Limiter
@ -84,20 +88,23 @@ type NucleiEngine struct {
customWriter output.Writer
customProgress progress.Progress
rc reporting.Client
executerOpts protocols.ExecutorOptions
executerOpts *protocols.ExecutorOptions
// Logger instance for the engine
Logger *gologger.Logger
}
// LoadAllTemplates loads all nuclei template based on given options
func (e *NucleiEngine) LoadAllTemplates() error {
workflowLoader, err := workflow.NewLoader(&e.executerOpts)
workflowLoader, err := workflow.NewLoader(e.executerOpts)
if err != nil {
return errorutil.New("Could not create workflow loader: %s\n", err)
return errkit.Wrapf(err, "Could not create workflow loader: %s", err)
}
e.executerOpts.WorkflowLoader = workflowLoader
e.store, err = loader.New(loader.NewConfig(e.opts, e.catalog, e.executerOpts))
if err != nil {
return errorutil.New("Could not create loader client: %s\n", err)
return errkit.Wrapf(err, "Could not create loader client: %s", err)
}
e.store.Load()
e.templatesLoaded = true
@ -124,9 +131,9 @@ func (e *NucleiEngine) GetWorkflows() []*templates.Template {
func (e *NucleiEngine) LoadTargets(targets []string, probeNonHttp bool) {
for _, target := range targets {
if probeNonHttp {
_ = e.inputProvider.SetWithProbe(target, e.httpxClient)
_ = e.inputProvider.SetWithProbe(e.opts.ExecutionId, target, e.httpxClient)
} else {
e.inputProvider.Set(target)
e.inputProvider.Set(e.opts.ExecutionId, target)
}
}
}
@ -136,9 +143,9 @@ func (e *NucleiEngine) LoadTargetsFromReader(reader io.Reader, probeNonHttp bool
buff := bufio.NewScanner(reader)
for buff.Scan() {
if probeNonHttp {
_ = e.inputProvider.SetWithProbe(buff.Text(), e.httpxClient)
_ = e.inputProvider.SetWithProbe(e.opts.ExecutionId, buff.Text(), e.httpxClient)
} else {
e.inputProvider.Set(buff.Text())
e.inputProvider.Set(e.opts.ExecutionId, buff.Text())
}
}
}
@ -161,7 +168,7 @@ func (e *NucleiEngine) LoadTargetsWithHttpData(filePath string, filemode string)
// GetExecuterOptions returns the nuclei executor options
func (e *NucleiEngine) GetExecuterOptions() *protocols.ExecutorOptions {
return &e.executerOpts
return e.executerOpts
}
// ParseTemplate parses a template from given data
@ -229,7 +236,7 @@ func (e *NucleiEngine) closeInternal() {
// Close all resources used by nuclei engine
func (e *NucleiEngine) Close() {
e.closeInternal()
protocolinit.Close()
protocolinit.Close(e.opts.ExecutionId)
}
// ExecuteCallbackWithCtx executes templates on targets and calls callback on each result(only if results are found)
@ -246,9 +253,9 @@ func (e *NucleiEngine) ExecuteCallbackWithCtx(ctx context.Context, callback ...f
}
filtered := []func(event *output.ResultEvent){}
for _, callback := range callback {
if callback != nil {
filtered = append(filtered, callback)
for _, cb := range callback {
if cb != nil {
filtered = append(filtered, cb)
}
}
e.resultCallbacks = append(e.resultCallbacks, filtered...)
@ -258,15 +265,32 @@ func (e *NucleiEngine) ExecuteCallbackWithCtx(ctx context.Context, callback ...f
return ErrNoTemplatesAvailable
}
_ = e.engine.ExecuteScanWithOpts(ctx, templatesAndWorkflows, e.inputProvider, false)
defer e.engine.WorkPool().Wait()
var wg sync.WaitGroup
wg.Add(1)
go func() {
defer wg.Done()
_ = e.engine.ExecuteScanWithOpts(ctx, templatesAndWorkflows, e.inputProvider, false)
}()
	// wait for either context cancellation or scan completion
select {
case <-ctx.Done():
<-wait(&wg) // wait for scan to finish
return ctx.Err()
case <-wait(&wg):
// scan finished
}
return nil
}
// ExecuteWithCallback is same as ExecuteCallbackWithCtx but with default context
// Note this is deprecated and will be removed in future major release
func (e *NucleiEngine) ExecuteWithCallback(callback ...func(event *output.ResultEvent)) error {
return e.ExecuteCallbackWithCtx(context.Background(), callback...)
ctx := context.Background()
if e.ctx != nil {
ctx = e.ctx
}
return e.ExecuteCallbackWithCtx(ctx, callback...)
}
// Options return nuclei Type Options
@ -287,9 +311,12 @@ func (e *NucleiEngine) Store() *loader.Store {
// NewNucleiEngineCtx creates a new nuclei engine instance with given context
func NewNucleiEngineCtx(ctx context.Context, options ...NucleiSDKOptions) (*NucleiEngine, error) {
// default options
defaultOptions := types.DefaultOptions()
defaultOptions.ExecutionId = xid.New().String()
e := &NucleiEngine{
opts: types.DefaultOptions(),
opts: defaultOptions,
mode: singleInstance,
ctx: ctx,
}
for _, option := range options {
if err := option(e); err != nil {
@ -306,3 +333,18 @@ func NewNucleiEngineCtx(ctx context.Context, options ...NucleiSDKOptions) (*Nucl
func NewNucleiEngine(options ...NucleiSDKOptions) (*NucleiEngine, error) {
return NewNucleiEngineCtx(context.Background(), options...)
}
// GetParser returns the template parser with cache
//
// The returned parser can be passed to another engine (via the options'
// Parser field) to reuse the template parse cache across instances.
func (e *NucleiEngine) GetParser() *templates.Parser {
	return e.parser
}
// wait for a waitgroup to finish
func wait(wg *sync.WaitGroup) <-chan struct{} {
ch := make(chan struct{})
go func() {
defer close(ch)
wg.Wait()
}()
return ch
}

View File

@ -8,6 +8,7 @@ import (
"time"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/logrusorgru/aurora"
"github.com/pkg/errors"
@ -29,7 +30,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolinit"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/http/httpclientpool"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
@ -37,8 +37,6 @@ import (
"github.com/projectdiscovery/ratelimit"
)
var sharedInit *sync.Once
// applyRequiredDefaults to options
func (e *NucleiEngine) applyRequiredDefaults(ctx context.Context) {
mockoutput := testutils.NewMockOutputWriter(e.opts.OmitTemplate)
@ -98,27 +96,39 @@ func (e *NucleiEngine) applyRequiredDefaults(ctx context.Context) {
// init
func (e *NucleiEngine) init(ctx context.Context) error {
// Set a default logger if one isn't provided in the options
if e.opts.Logger != nil {
e.Logger = e.opts.Logger
} else {
e.opts.Logger = &gologger.Logger{}
}
e.Logger = e.opts.Logger
if e.opts.Verbose {
gologger.DefaultLogger.SetMaxLevel(levels.LevelVerbose)
e.Logger.SetMaxLevel(levels.LevelVerbose)
} else if e.opts.Debug {
gologger.DefaultLogger.SetMaxLevel(levels.LevelDebug)
e.Logger.SetMaxLevel(levels.LevelDebug)
} else if e.opts.Silent {
gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
e.Logger.SetMaxLevel(levels.LevelSilent)
}
if err := runner.ValidateOptions(e.opts); err != nil {
return err
}
e.parser = templates.NewParser()
if sharedInit == nil || protocolstate.ShouldInit() {
sharedInit = &sync.Once{}
if e.opts.Parser != nil {
if op, ok := e.opts.Parser.(*templates.Parser); ok {
e.parser = op
}
}
sharedInit.Do(func() {
if e.parser == nil {
e.parser = templates.NewParser()
}
if protocolstate.ShouldInit(e.opts.ExecutionId) {
_ = protocolinit.Init(e.opts)
})
}
if e.opts.ProxyInternal && e.opts.AliveHttpProxy != "" || e.opts.AliveSocksProxy != "" {
httpclient, err := httpclientpool.Get(e.opts, &httpclientpool.Configuration{})
@ -160,7 +170,7 @@ func (e *NucleiEngine) init(ctx context.Context) error {
e.catalog = disk.NewCatalog(config.DefaultConfig.TemplatesDirectory)
}
e.executerOpts = protocols.ExecutorOptions{
e.executerOpts = &protocols.ExecutorOptions{
Output: e.customWriter,
Options: e.opts,
Progress: e.customProgress,
@ -173,12 +183,13 @@ func (e *NucleiEngine) init(ctx context.Context) error {
Browser: e.browserInstance,
Parser: e.parser,
InputHelper: input.NewHelper(),
Logger: e.opts.Logger,
}
if e.opts.ShouldUseHostError() && e.hostErrCache != nil {
e.executerOpts.HostErrorsCache = e.hostErrCache
}
if len(e.opts.SecretsFile) > 0 {
authTmplStore, err := runner.GetAuthTmplStore(*e.opts, e.catalog, e.executerOpts)
authTmplStore, err := runner.GetAuthTmplStore(e.opts, e.catalog, e.executerOpts)
if err != nil {
return errors.Wrap(err, "failed to load dynamic auth templates")
}
@ -220,6 +231,25 @@ func (e *NucleiEngine) init(ctx context.Context) error {
}
}
// Handle the case where the user passed an existing parser that we can use as a cache
if e.opts.Parser != nil {
if cachedParser, ok := e.opts.Parser.(*templates.Parser); ok {
e.parser = cachedParser
e.opts.Parser = cachedParser
e.executerOpts.Parser = cachedParser
e.executerOpts.Options.Parser = cachedParser
}
}
// Create a new parser if necessary
if e.parser == nil {
op := templates.NewParser()
e.parser = op
e.opts.Parser = op
e.executerOpts.Parser = op
e.executerOpts.Options.Parser = op
}
e.engine = core.New(e.opts)
e.engine.SetExecuterOptions(e.executerOpts)

37
lib/sdk_test.go Normal file
View File

@ -0,0 +1,37 @@
package nuclei_test
import (
"context"
"log"
"testing"
"time"
nuclei "github.com/projectdiscovery/nuclei/v3/lib"
"github.com/stretchr/testify/require"
)
func TestContextCancelNucleiEngine(t *testing.T) {
// create nuclei engine with options
ctx, cancel := context.WithCancel(context.Background())
ne, err := nuclei.NewNucleiEngineCtx(ctx,
nuclei.WithTemplateFilters(nuclei.TemplateFilters{Tags: []string{"oast"}}),
nuclei.EnableStatsWithOpts(nuclei.StatsOptions{MetricServerPort: 0}),
)
require.NoError(t, err, "could not create nuclei engine")
go func() {
time.Sleep(time.Second * 2)
cancel()
log.Println("Test: context cancelled")
}()
// load targets and optionally probe non http/https targets
ne.LoadTargets([]string{"http://honey.scanme.sh"}, false)
// when callback is nil it nuclei will print JSON output to stdout
err = ne.ExecuteWithCallback(nil)
if err != nil {
// we expect a context cancellation error
require.ErrorIs(t, err, context.Canceled, "was expecting context cancellation error")
}
defer ne.Close()
}

View File

@ -3,12 +3,12 @@ package authx
import (
"fmt"
"strings"
"sync"
"sync/atomic"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/replacer"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
sliceutil "github.com/projectdiscovery/utils/slice"
)
@ -30,8 +30,8 @@ type Dynamic struct {
Input string `json:"input" yaml:"input"` // (optional) target for the dynamic secret
Extracted map[string]interface{} `json:"-" yaml:"-"` // extracted values from the dynamic secret
fetchCallback LazyFetchSecret `json:"-" yaml:"-"`
m *sync.Mutex `json:"-" yaml:"-"` // mutex for lazy fetch
fetched bool `json:"-" yaml:"-"` // flag to check if the secret has been fetched
fetched *atomic.Bool `json:"-" yaml:"-"` // atomic flag to check if the secret has been fetched
fetching *atomic.Bool `json:"-" yaml:"-"` // atomic flag to prevent recursive fetch calls
error error `json:"-" yaml:"-"` // error if any
}
@ -53,7 +53,7 @@ func (d *Dynamic) GetDomainAndDomainRegex() ([]string, []string) {
func (d *Dynamic) UnmarshalJSON(data []byte) error {
if d == nil {
return errorutil.New("cannot unmarshal into nil Dynamic struct")
return errkit.New("cannot unmarshal into nil Dynamic struct")
}
// Use an alias type (auxiliary) to avoid a recursive call in this method.
@ -70,12 +70,13 @@ func (d *Dynamic) UnmarshalJSON(data []byte) error {
// Validate validates the dynamic secret
func (d *Dynamic) Validate() error {
d.m = &sync.Mutex{}
d.fetched = &atomic.Bool{}
d.fetching = &atomic.Bool{}
if d.TemplatePath == "" {
return errorutil.New(" template-path is required for dynamic secret")
return errkit.New(" template-path is required for dynamic secret")
}
if len(d.Variables) == 0 {
return errorutil.New("variables are required for dynamic secret")
return errkit.New("variables are required for dynamic secret")
}
if d.Secret != nil {
@ -97,9 +98,7 @@ func (d *Dynamic) Validate() error {
func (d *Dynamic) SetLazyFetchCallback(callback LazyFetchSecret) {
d.fetchCallback = func(d *Dynamic) error {
err := callback(d)
d.fetched = true
if err != nil {
d.error = err
return err
}
if len(d.Extracted) == 0 {
@ -184,9 +183,15 @@ func (d *Dynamic) applyValuesToSecret(secret *Secret) error {
// GetStrategy returns the auth strategies for the dynamic secret
func (d *Dynamic) GetStrategies() []AuthStrategy {
if !d.fetched {
if d.fetched.Load() {
if d.error != nil {
return nil
}
} else {
// Try to fetch if not already fetched
_ = d.Fetch(true)
}
if d.error != nil {
return nil
}
@ -203,12 +208,23 @@ func (d *Dynamic) GetStrategies() []AuthStrategy {
// Fetch fetches the dynamic secret
// if isFatal is true, it will stop the execution if the secret could not be fetched
func (d *Dynamic) Fetch(isFatal bool) error {
d.m.Lock()
defer d.m.Unlock()
if d.fetched {
return nil
if d.fetched.Load() {
return d.error
}
// Try to set fetching flag atomically
if !d.fetching.CompareAndSwap(false, true) {
// Already fetching, return current error
return d.error
}
// We're the only one fetching, call the callback
d.error = d.fetchCallback(d)
// Mark as fetched and clear fetching flag
d.fetched.Store(true)
d.fetching.Store(false)
if d.error != nil && isFatal {
gologger.Fatal().Msgf("Could not fetch dynamic secret: %s\n", d.error)
}

View File

@ -8,7 +8,7 @@ import (
"strings"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
"github.com/projectdiscovery/utils/generic"
stringsutil "github.com/projectdiscovery/utils/strings"
"gopkg.in/yaml.v3"
@ -55,7 +55,7 @@ type Secret struct {
Type string `json:"type" yaml:"type"`
Domains []string `json:"domains" yaml:"domains"`
DomainsRegex []string `json:"domains-regex" yaml:"domains-regex"`
Headers []KV `json:"headers" yaml:"headers"`
Headers []KV `json:"headers" yaml:"headers"` // Headers preserve exact casing (useful for case-sensitive APIs)
Cookies []Cookie `json:"cookies" yaml:"cookies"`
Params []KV `json:"params" yaml:"params"`
Username string `json:"username" yaml:"username"` // can be either email or username
@ -148,7 +148,7 @@ func (s *Secret) Validate() error {
}
type KV struct {
Key string `json:"key" yaml:"key"`
Key string `json:"key" yaml:"key"` // Header key (preserves exact casing)
Value string `json:"value" yaml:"value"`
}
@ -237,7 +237,9 @@ func GetAuthDataFromYAML(data []byte) (*Authx, error) {
var auth Authx
err := yaml.Unmarshal(data, &auth)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not unmarshal yaml")
errorErr := errkit.FromError(err)
errorErr.Msgf("could not unmarshal yaml")
return nil, errorErr
}
return &auth, nil
}
@ -247,7 +249,9 @@ func GetAuthDataFromJSON(data []byte) (*Authx, error) {
var auth Authx
err := json.Unmarshal(data, &auth)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not unmarshal json")
errorErr := errkit.FromError(err)
errorErr.Msgf("could not unmarshal json")
return nil, errorErr
}
return &auth, nil
}

View File

@ -21,15 +21,19 @@ func NewHeadersAuthStrategy(data *Secret) *HeadersAuthStrategy {
}
// Apply applies the headers auth strategy to the request
// NOTE: This preserves exact header casing (e.g., barAuthToken stays as barAuthToken)
// This is useful for APIs that require case-sensitive header names
func (s *HeadersAuthStrategy) Apply(req *http.Request) {
for _, header := range s.Data.Headers {
req.Header.Set(header.Key, header.Value)
req.Header[header.Key] = []string{header.Value}
}
}
// ApplyOnRR applies the headers auth strategy to the retryable request
// NOTE: This preserves exact header casing (e.g., barAuthToken stays as barAuthToken)
// This is useful for APIs that require case-sensitive header names
func (s *HeadersAuthStrategy) ApplyOnRR(req *retryablehttp.Request) {
for _, header := range s.Data.Headers {
req.Header.Set(header.Key, header.Value)
req.Header[header.Key] = []string{header.Value}
}
}

View File

@ -12,6 +12,8 @@ info:
# static secrets
static:
# for header based auth session
# NOTE: Headers preserve exact casing (e.g., x-pdcp-key stays as x-pdcp-key)
# This is useful for APIs that require case-sensitive header names
- type: header
domains:
- api.projectdiscovery.io
@ -20,6 +22,8 @@ static:
headers:
- key: x-pdcp-key
value: <api-key-here>
- key: barAuthToken
value: <auth-token-here>
# for query based auth session
- type: Query

View File

@ -7,7 +7,7 @@ import (
"strings"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider/authx"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
urlutil "github.com/projectdiscovery/utils/url"
)
@ -30,16 +30,20 @@ func NewFileAuthProvider(path string, callback authx.LazyFetchSecret) (AuthProvi
return nil, ErrNoSecrets
}
if len(store.Dynamic) > 0 && callback == nil {
return nil, errorutil.New("lazy fetch callback is required for dynamic secrets")
return nil, errkit.New("lazy fetch callback is required for dynamic secrets")
}
for _, secret := range store.Secrets {
if err := secret.Validate(); err != nil {
return nil, errorutil.NewWithErr(err).Msgf("invalid secret in file: %s", path)
errorErr := errkit.FromError(err)
errorErr.Msgf("invalid secret in file: %s", path)
return nil, errorErr
}
}
for i, dynamic := range store.Dynamic {
if err := dynamic.Validate(); err != nil {
return nil, errorutil.NewWithErr(err).Msgf("invalid dynamic in file: %s", path)
errorErr := errkit.FromError(err)
errorErr.Msgf("invalid dynamic in file: %s", path)
return nil, errorErr
}
dynamic.SetLazyFetchCallback(callback)
store.Dynamic[i] = dynamic

View File

@ -31,7 +31,7 @@ const (
CLIConfigFileName = "config.yaml"
ReportingConfigFilename = "reporting-config.yaml"
// Version is the current version of nuclei
Version = `v3.4.7`
Version = `v3.4.10`
// Directory Names of custom templates
CustomS3TemplatesDirName = "s3"
CustomGitHubTemplatesDirName = "github"
@ -46,18 +46,21 @@ const (
// if the current version is outdated
func IsOutdatedVersion(current, latest string) bool {
if latest == "" {
// if pdtm api call failed it's assumed that the current version is outdated
// and it will be confirmed while updating from GitHub
// this fixes `version string empty` errors
return true
// NOTE(dwisiswant0): if PDTM API call failed or returned empty, we
// cannot determine if templates are outdated w/o additional checks
// return false to avoid unnecessary updates.
return false
}
current = trimDevIfExists(current)
currentVer, _ := semver.NewVersion(current)
newVer, _ := semver.NewVersion(latest)
if currentVer == nil || newVer == nil {
// fallback to naive comparison
return current == latest
// fallback to naive comparison - return true only if they are different
return current != latest
}
return newVer.GreaterThan(currentVer)
}

View File

@ -4,17 +4,16 @@ import (
"bytes"
"crypto/md5"
"fmt"
"log"
"os"
"path/filepath"
"slices"
"strings"
"sync"
"github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
"github.com/projectdiscovery/utils/env"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file"
folderutil "github.com/projectdiscovery/utils/folder"
)
@ -42,15 +41,18 @@ type Config struct {
// local cache of nuclei version check endpoint
// these fields are only update during nuclei version check
// TODO: move these fields to a separate unexported struct as they are not meant to be used directly
LatestNucleiVersion string `json:"nuclei-latest-version"`
LatestNucleiTemplatesVersion string `json:"nuclei-templates-latest-version"`
LatestNucleiIgnoreHash string `json:"nuclei-latest-ignore-hash,omitempty"`
LatestNucleiVersion string `json:"nuclei-latest-version"`
LatestNucleiTemplatesVersion string `json:"nuclei-templates-latest-version"`
LatestNucleiIgnoreHash string `json:"nuclei-latest-ignore-hash,omitempty"`
Logger *gologger.Logger `json:"-"` // logger
// internal / unexported fields
disableUpdates bool `json:"-"` // disable updates both version check and template updates
homeDir string `json:"-"` // User Home Directory
configDir string `json:"-"` // Nuclei Global Config Directory
debugArgs []string `json:"-"` // debug args
m sync.Mutex
}
// IsCustomTemplate determines whether a given template is custom-built or part of the official Nuclei templates.
@ -105,21 +107,29 @@ func (c *Config) GetTemplateDir() string {
// DisableUpdateCheck disables update check and template updates
func (c *Config) DisableUpdateCheck() {
c.m.Lock()
defer c.m.Unlock()
c.disableUpdates = true
}
// CanCheckForUpdates returns true if update check is enabled
func (c *Config) CanCheckForUpdates() bool {
c.m.Lock()
defer c.m.Unlock()
return !c.disableUpdates
}
// NeedsTemplateUpdate returns true if template installation/update is required
func (c *Config) NeedsTemplateUpdate() bool {
c.m.Lock()
defer c.m.Unlock()
return !c.disableUpdates && (c.TemplateVersion == "" || IsOutdatedVersion(c.TemplateVersion, c.LatestNucleiTemplatesVersion) || !fileutil.FolderExists(c.TemplatesDirectory))
}
// NeedsIgnoreFileUpdate returns true if Ignore file hash is different (aka ignore file is outdated)
func (c *Config) NeedsIgnoreFileUpdate() bool {
c.m.Lock()
defer c.m.Unlock()
return c.NucleiIgnoreHash == "" || c.NucleiIgnoreHash != c.LatestNucleiIgnoreHash
}
@ -130,13 +140,13 @@ func (c *Config) UpdateNucleiIgnoreHash() error {
if fileutil.FileExists(ignoreFilePath) {
bin, err := os.ReadFile(ignoreFilePath)
if err != nil {
return errorutil.NewWithErr(err).Msgf("could not read nuclei ignore file")
return errkit.Newf("could not read nuclei ignore file: %v", err)
}
c.NucleiIgnoreHash = fmt.Sprintf("%x", md5.Sum(bin))
// write config to disk
return c.WriteTemplatesConfig()
}
return errorutil.NewWithTag("config", "ignore file not found: could not update nuclei ignore hash")
return errkit.New("ignore file not found: could not update nuclei ignore hash")
}
// GetConfigDir returns the nuclei configuration directory
@ -211,7 +221,7 @@ func (c *Config) GetCacheDir() string {
func (c *Config) SetConfigDir(dir string) {
c.configDir = dir
if err := c.createConfigDirIfNotExists(); err != nil {
gologger.Fatal().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err)
c.Logger.Fatal().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err)
}
// if folder already exists read config or create new
@ -219,7 +229,7 @@ func (c *Config) SetConfigDir(dir string) {
// create new config
applyDefaultConfig()
if err2 := c.WriteTemplatesConfig(); err2 != nil {
gologger.Fatal().Msgf("Could not create nuclei config file at %s: %s", c.getTemplatesConfigFilePath(), err2)
c.Logger.Fatal().Msgf("Could not create nuclei config file at %s: %s", c.getTemplatesConfigFilePath(), err2)
}
}
@ -247,7 +257,7 @@ func (c *Config) SetTemplatesVersion(version string) error {
c.TemplateVersion = version
// write config to disk
if err := c.WriteTemplatesConfig(); err != nil {
return errorutil.NewWithErr(err).Msgf("could not write nuclei config file at %s", c.getTemplatesConfigFilePath())
return errkit.Newf("could not write nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
}
return nil
}
@ -255,15 +265,15 @@ func (c *Config) SetTemplatesVersion(version string) error {
// ReadTemplatesConfig reads the nuclei templates config file
func (c *Config) ReadTemplatesConfig() error {
if !fileutil.FileExists(c.getTemplatesConfigFilePath()) {
return errorutil.NewWithTag("config", "nuclei config file at %s does not exist", c.getTemplatesConfigFilePath())
return errkit.Newf("nuclei config file at %s does not exist", c.getTemplatesConfigFilePath())
}
var cfg *Config
bin, err := os.ReadFile(c.getTemplatesConfigFilePath())
if err != nil {
return errorutil.NewWithErr(err).Msgf("could not read nuclei config file at %s", c.getTemplatesConfigFilePath())
return errkit.Newf("could not read nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
}
if err := json.Unmarshal(bin, &cfg); err != nil {
return errorutil.NewWithErr(err).Msgf("could not unmarshal nuclei config file at %s", c.getTemplatesConfigFilePath())
return errkit.Newf("could not unmarshal nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
}
// apply config
c.TemplatesDirectory = cfg.TemplatesDirectory
@ -282,10 +292,10 @@ func (c *Config) WriteTemplatesConfig() error {
}
bin, err := json.Marshal(c)
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to marshal nuclei config")
return errkit.Newf("failed to marshal nuclei config: %v", err)
}
if err = os.WriteFile(c.getTemplatesConfigFilePath(), bin, 0600); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to write nuclei config file at %s", c.getTemplatesConfigFilePath())
return errkit.Newf("failed to write nuclei config file at %s: %v", c.getTemplatesConfigFilePath(), err)
}
return nil
}
@ -309,7 +319,7 @@ func (c *Config) getTemplatesConfigFilePath() string {
func (c *Config) createConfigDirIfNotExists() error {
if !fileutil.FolderExists(c.configDir) {
if err := fileutil.CreateFolder(c.configDir); err != nil {
return errorutil.NewWithErr(err).Msgf("could not create nuclei config directory at %s", c.configDir)
return errkit.Newf("could not create nuclei config directory at %s: %v", c.configDir, err)
}
}
return nil
@ -319,14 +329,14 @@ func (c *Config) createConfigDirIfNotExists() error {
// to the current config directory
func (c *Config) copyIgnoreFile() {
if err := c.createConfigDirIfNotExists(); err != nil {
gologger.Error().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err)
c.Logger.Error().Msgf("Could not create nuclei config directory at %s: %s", c.configDir, err)
return
}
ignoreFilePath := c.GetIgnoreFilePath()
if !fileutil.FileExists(ignoreFilePath) {
// copy ignore file from default config directory
if err := fileutil.CopyFile(filepath.Join(folderutil.AppConfigDirOrDefault(FallbackConfigFolderName, BinaryName), NucleiIgnoreFileName), ignoreFilePath); err != nil {
gologger.Error().Msgf("Could not copy nuclei ignore file at %s: %s", ignoreFilePath, err)
c.Logger.Error().Msgf("Could not copy nuclei ignore file at %s: %s", ignoreFilePath, err)
}
}
}
@ -367,9 +377,6 @@ func (c *Config) parseDebugArgs(data string) {
}
func init() {
// first attempt to migrate all files from old config directory to new config directory
goflags.AttemptConfigMigration() // regardless how many times this is called it will only migrate once based on condition
ConfigDir := folderutil.AppConfigDirOrDefault(FallbackConfigFolderName, BinaryName)
if cfgDir := os.Getenv(NucleiConfigDirEnv); cfgDir != "" {
@ -385,6 +392,7 @@ func init() {
DefaultConfig = &Config{
homeDir: folderutil.HomeDirOrDefault(""),
configDir: ConfigDir,
Logger: gologger.DefaultLogger,
}
// when enabled will log events in more verbosity than -v or -debug
@ -406,9 +414,7 @@ func init() {
gologger.Error().Msgf("failed to write config file at %s got: %s", DefaultConfig.getTemplatesConfigFilePath(), err)
}
}
// attempt to migrate resume files
// this also happens once regardless of how many times this is called
migrateResumeFiles()
// Loads/updates paths of custom templates
// Note: custom templates paths should not be updated in config file
// and even if it is changed we don't follow it since it is not expected behavior
@ -423,61 +429,3 @@ func applyDefaultConfig() {
// updates all necessary paths
DefaultConfig.SetTemplatesDir(DefaultConfig.TemplatesDirectory)
}
// migrateResumeFiles moves legacy resume files (*.cfg) from the old config
// directory into the new cache directory. It is called from init() after
// goflags has migrated the config directory itself, and effectively runs
// only once: it bails out as soon as the new cache directory exists.
// Migration is best-effort — failures are logged via the standard log
// package and otherwise ignored.
func migrateResumeFiles() {
// attempt to migrate old resume files to new directory structure
// after migration has been done in goflags
oldResumeDir := DefaultConfig.GetConfigDir()
// migrate old resume file to new directory structure
if !fileutil.FileOrFolderExists(DefaultConfig.GetCacheDir()) && fileutil.FileOrFolderExists(oldResumeDir) {
// this means new cache dir doesn't exist, so we need to migrate
// first check if old resume file exists if not then no need to migrate
// NOTE(review): this probe only scans the top level of the old dir, while
// the WalkDir below migrates *.cfg files recursively — confirm resume
// files were only ever written at the top level.
exists := false
files, err := os.ReadDir(oldResumeDir)
if err != nil {
// log silently
log.Printf("could not read old resume dir: %s\n", err)
return
}
for _, file := range files {
if strings.HasSuffix(file.Name(), ".cfg") {
exists = true
break
}
}
if !exists {
// no need to migrate
return
}
// create new cache dir
err = os.MkdirAll(DefaultConfig.GetCacheDir(), os.ModePerm)
if err != nil {
// log silently
log.Printf("could not create new cache dir: %s\n", err)
return
}
// move every *.cfg found under the old dir into the new cache dir,
// flattening any subdirectory structure via filepath.Base
err = filepath.WalkDir(oldResumeDir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
if !strings.HasSuffix(path, ".cfg") {
return nil
}
err = os.Rename(path, filepath.Join(DefaultConfig.GetCacheDir(), filepath.Base(path)))
if err != nil {
return err
}
return nil
})
if err != nil {
// log silently
log.Printf("could not migrate old resume files: %s\n", err)
return
}
}
}

View File

@ -7,13 +7,15 @@ import (
"path/filepath"
"strings"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/extensions"
fileutil "github.com/projectdiscovery/utils/file"
stringsutil "github.com/projectdiscovery/utils/strings"
)
var knownConfigFiles = []string{"cves.json", "contributors.json", "TEMPLATES-STATS.json"}
var (
knownConfigFiles = []string{"cves.json", "contributors.json", "TEMPLATES-STATS.json"}
knownMiscDirectories = []string{".git", ".github", "helpers"}
)
// TemplateFormat
type TemplateFormat uint8
@ -24,6 +26,25 @@ const (
Unknown
)
// GetKnownConfigFiles returns the known config file names (metadata files
// such as cves.json that live alongside templates but are not templates).
// A fresh copy is returned so callers cannot mutate the package-level list.
func GetKnownConfigFiles() []string {
	return append([]string(nil), knownConfigFiles...)
}
// GetKnownMiscDirectories returns the known misc directory names, each with
// a trailing OS path separator appended.
//
// The trailing separator makes directory matching explicit and avoids
// falsely matching files with similar names (e.g. "helpers" matching
// "some-helpers.yaml"), since [IsTemplate] checks against normalized full
// paths.
func GetKnownMiscDirectories() []string {
	sep := string(os.PathSeparator)
	dirs := make([]string, len(knownMiscDirectories))
	for i, dir := range knownMiscDirectories {
		dirs[i] = dir + sep
	}
	return dirs
}
// GetTemplateFormatFromExt returns template format
func GetTemplateFormatFromExt(filePath string) TemplateFormat {
fileExt := strings.ToLower(filepath.Ext(filePath))
@ -42,13 +63,22 @@ func GetSupportTemplateFileExtensions() []string {
return []string{extensions.YAML, extensions.JSON}
}
// IsTemplate is a callback function used by goflags to decide if given file should be read
// if it is not a nuclei-template file only then file is read
func IsTemplate(filename string) bool {
if stringsutil.ContainsAny(filename, knownConfigFiles...) {
// IsTemplate returns true if the file is a template based on its path.
// It used by goflags and other places to filter out non-template files.
func IsTemplate(fpath string) bool {
fpath = filepath.FromSlash(fpath)
fname := filepath.Base(fpath)
fext := strings.ToLower(filepath.Ext(fpath))
if stringsutil.ContainsAny(fname, GetKnownConfigFiles()...) {
return false
}
return stringsutil.EqualFoldAny(filepath.Ext(filename), GetSupportTemplateFileExtensions()...)
if stringsutil.ContainsAny(fpath, GetKnownMiscDirectories()...) {
return false
}
return stringsutil.EqualFoldAny(fext, GetSupportTemplateFileExtensions()...)
}
type template struct {
@ -98,7 +128,7 @@ func GetNucleiTemplatesIndex() (map[string]string, error) {
return index, nil
}
}
gologger.Error().Msgf("failed to read index file creating new one: %v", err)
DefaultConfig.Logger.Error().Msgf("failed to read index file creating new one: %v", err)
}
ignoreDirs := DefaultConfig.GetAllCustomTemplateDirs()
@ -109,7 +139,7 @@ func GetNucleiTemplatesIndex() (map[string]string, error) {
}
err := filepath.WalkDir(DefaultConfig.TemplatesDirectory, func(path string, d os.DirEntry, err error) error {
if err != nil {
gologger.Verbose().Msgf("failed to walk path=%v err=%v", path, err)
DefaultConfig.Logger.Verbose().Msgf("failed to walk path=%v err=%v", path, err)
return nil
}
if d.IsDir() || !IsTemplate(path) || stringsutil.ContainsAny(path, ignoreDirs...) {
@ -118,7 +148,7 @@ func GetNucleiTemplatesIndex() (map[string]string, error) {
// get template id from file
id, err := getTemplateID(path)
if err != nil || id == "" {
gologger.Verbose().Msgf("failed to get template id from file=%v got id=%v err=%v", path, id, err)
DefaultConfig.Logger.Verbose().Msgf("failed to get template id from file=%v got id=%v err=%v", path, id, err)
return nil
}
index[id] = path

View File

@ -8,7 +8,6 @@ import (
"github.com/logrusorgru/aurora"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
stringsutil "github.com/projectdiscovery/utils/strings"
updateutils "github.com/projectdiscovery/utils/update"
@ -84,7 +83,7 @@ func (c *DiskCatalog) GetTemplatePath(target string) ([]string, error) {
absPath = BackwardsCompatiblePaths(c.templatesDirectory, target)
if absPath != target && strings.TrimPrefix(absPath, c.templatesDirectory+string(filepath.Separator)) != target {
if config.DefaultConfig.LogAllEvents {
gologger.DefaultLogger.Print().Msgf("[%v] requested Template path %s is deprecated, please update to %s\n", aurora.Yellow("WRN").String(), target, absPath)
config.DefaultConfig.Logger.Print().Msgf("[%v] requested Template path %s is deprecated, please update to %s\n", aurora.Yellow("WRN").String(), target, absPath)
}
deprecatedPathsCounter++
}
@ -258,7 +257,7 @@ func (c *DiskCatalog) findDirectoryMatches(absPath string, processed map[string]
if err != nil {
return nil
}
if !d.IsDir() && config.GetTemplateFormatFromExt(path) != config.Unknown {
if !d.IsDir() && config.IsTemplate(path) {
if _, ok := processed[path]; !ok {
results = append(results, path)
processed[path] = struct{}{}
@ -282,7 +281,7 @@ func (c *DiskCatalog) findDirectoryMatches(absPath string, processed map[string]
if err != nil {
return nil
}
if !d.IsDir() && config.GetTemplateFormatFromExt(path) != config.Unknown {
if !d.IsDir() && config.IsTemplate(path) {
if _, ok := processed[path]; !ok {
results = append(results, path)
processed[path] = struct{}{}
@ -302,6 +301,6 @@ func PrintDeprecatedPathsMsgIfApplicable(isSilent bool) {
return
}
if deprecatedPathsCounter > 0 && !isSilent {
gologger.Print().Msgf("[%v] Found %v template[s] loaded with deprecated paths, update before v3 for continued support.\n", aurora.Yellow("WRN").String(), deprecatedPathsCounter)
config.DefaultConfig.Logger.Print().Msgf("[%v] Found %v template[s] loaded with deprecated paths, update before v3 for continued support.\n", aurora.Yellow("WRN").String(), deprecatedPathsCounter)
}
}

View File

@ -10,12 +10,11 @@ import (
"strings"
"github.com/alecthomas/chroma/quick"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/retryablehttp-go"
pdcpauth "github.com/projectdiscovery/utils/auth/pdcp"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
)
const (
@ -34,31 +33,31 @@ type AITemplateResponse struct {
func getAIGeneratedTemplates(prompt string, options *types.Options) ([]string, error) {
prompt = strings.TrimSpace(prompt)
if len(prompt) < 5 {
return nil, errorutil.New("Prompt is too short. Please provide a more descriptive prompt")
return nil, errkit.Newf("Prompt is too short. Please provide a more descriptive prompt")
}
if len(prompt) > 3000 {
return nil, errorutil.New("Prompt is too long. Please limit to 3000 characters")
return nil, errkit.Newf("Prompt is too long. Please limit to 3000 characters")
}
template, templateID, err := generateAITemplate(prompt)
if err != nil {
return nil, errorutil.New("Failed to generate template: %v", err)
return nil, errkit.Newf("Failed to generate template: %v", err)
}
pdcpTemplateDir := filepath.Join(config.DefaultConfig.GetTemplateDir(), "pdcp")
if err := os.MkdirAll(pdcpTemplateDir, 0755); err != nil {
return nil, errorutil.New("Failed to create pdcp template directory: %v", err)
return nil, errkit.Newf("Failed to create pdcp template directory: %v", err)
}
templateFile := filepath.Join(pdcpTemplateDir, templateID+".yaml")
err = os.WriteFile(templateFile, []byte(template), 0644)
if err != nil {
return nil, errorutil.New("Failed to generate template: %v", err)
return nil, errkit.Newf("Failed to generate template: %v", err)
}
gologger.Info().Msgf("Generated template available at: https://cloud.projectdiscovery.io/templates/%s", templateID)
gologger.Info().Msgf("Generated template path: %s", templateFile)
options.Logger.Info().Msgf("Generated template available at: https://cloud.projectdiscovery.io/templates/%s", templateID)
options.Logger.Info().Msgf("Generated template path: %s", templateFile)
// Check if we should display the template
// This happens when:
@ -76,7 +75,7 @@ func getAIGeneratedTemplates(prompt string, options *types.Options) ([]string, e
template = buf.String()
}
}
gologger.Silent().Msgf("\n%s", template)
options.Logger.Debug().Msgf("\n%s", template)
// FIXME:
// we should not be exiting the program here
// but we need to find a better way to handle this
@ -92,22 +91,22 @@ func generateAITemplate(prompt string) (string, string, error) {
}
jsonBody, err := json.Marshal(reqBody)
if err != nil {
return "", "", errorutil.New("Failed to marshal request body: %v", err)
return "", "", errkit.Newf("Failed to marshal request body: %v", err)
}
req, err := http.NewRequest(http.MethodPost, aiTemplateGeneratorAPIEndpoint, bytes.NewBuffer(jsonBody))
if err != nil {
return "", "", errorutil.New("Failed to create HTTP request: %v", err)
return "", "", errkit.Newf("Failed to create HTTP request: %v", err)
}
ph := pdcpauth.PDCPCredHandler{}
creds, err := ph.GetCreds()
if err != nil {
return "", "", errorutil.New("Failed to get PDCP credentials: %v", err)
return "", "", errkit.Newf("Failed to get PDCP credentials: %v", err)
}
if creds == nil {
return "", "", errorutil.New("PDCP API Key not configured, Create one for free at https://cloud.projectdiscovery.io/")
return "", "", errkit.Newf("PDCP API Key not configured, Create one for free at https://cloud.projectdiscovery.io/")
}
req.Header.Set("Content-Type", "application/json")
@ -115,28 +114,28 @@ func generateAITemplate(prompt string) (string, string, error) {
resp, err := retryablehttp.DefaultClient().Do(req)
if err != nil {
return "", "", errorutil.New("Failed to send HTTP request: %v", err)
return "", "", errkit.Newf("Failed to send HTTP request: %v", err)
}
defer func() {
_ = resp.Body.Close()
}()
if resp.StatusCode == http.StatusUnauthorized {
return "", "", errorutil.New("Invalid API Key or API Key not configured, Create one for free at https://cloud.projectdiscovery.io/")
return "", "", errkit.Newf("Invalid API Key or API Key not configured, Create one for free at https://cloud.projectdiscovery.io/")
}
if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return "", "", errorutil.New("API returned status code %d: %s", resp.StatusCode, string(body))
return "", "", errkit.Newf("API returned status code %d: %s", resp.StatusCode, string(body))
}
var result AITemplateResponse
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
return "", "", errorutil.New("Failed to decode API response: %v", err)
return "", "", errkit.Newf("Failed to decode API response: %v", err)
}
if result.TemplateID == "" || result.Completion == "" {
return "", "", errorutil.New("Failed to generate template")
return "", "", errkit.Newf("Failed to generate template")
}
return result.Completion, result.TemplateID, nil

View File

@ -7,7 +7,6 @@ import (
"os"
"sort"
"strings"
"sync"
"github.com/logrusorgru/aurora"
"github.com/pkg/errors"
@ -18,16 +17,20 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/keys"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
templateTypes "github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/workflows"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
mapsutil "github.com/projectdiscovery/utils/maps"
sliceutil "github.com/projectdiscovery/utils/slice"
stringsutil "github.com/projectdiscovery/utils/strings"
syncutil "github.com/projectdiscovery/utils/sync"
urlutil "github.com/projectdiscovery/utils/url"
"github.com/rs/xid"
)
const (
@ -65,7 +68,8 @@ type Config struct {
IncludeConditions []string
Catalog catalog.Catalog
ExecutorOptions protocols.ExecutorOptions
ExecutorOptions *protocols.ExecutorOptions
Logger *gologger.Logger
}
// Store is a storage for loaded nuclei templates
@ -82,13 +86,15 @@ type Store struct {
preprocessor templates.Preprocessor
logger *gologger.Logger
// NotFoundCallback is called for each not found template
// This overrides error handling for not found templates
NotFoundCallback func(template string) bool
}
// NewConfig returns a new loader config
func NewConfig(options *types.Options, catalog catalog.Catalog, executerOpts protocols.ExecutorOptions) *Config {
func NewConfig(options *types.Options, catalog catalog.Catalog, executerOpts *protocols.ExecutorOptions) *Config {
loaderConfig := Config{
Templates: options.Templates,
Workflows: options.Workflows,
@ -111,6 +117,7 @@ func NewConfig(options *types.Options, catalog catalog.Catalog, executerOpts pro
Catalog: catalog,
ExecutorOptions: executerOpts,
AITemplatePrompt: options.AITemplatePrompt,
Logger: options.Logger,
}
loaderConfig.RemoteTemplateDomainList = append(loaderConfig.RemoteTemplateDomainList, TrustedTemplateDomains...)
return &loaderConfig
@ -145,6 +152,7 @@ func New(cfg *Config) (*Store, error) {
}, cfg.Catalog),
finalTemplates: cfg.Templates,
finalWorkflows: cfg.Workflows,
logger: cfg.Logger,
}
// Do a check to see if we have URLs in templates flag, if so
@ -231,15 +239,15 @@ func (store *Store) ReadTemplateFromURI(uri string, remote bool) ([]byte, error)
uri = handleTemplatesEditorURLs(uri)
remoteTemplates, _, err := getRemoteTemplatesAndWorkflows([]string{uri}, nil, store.config.RemoteTemplateDomainList)
if err != nil || len(remoteTemplates) == 0 {
return nil, errorutil.NewWithErr(err).Msgf("Could not load template %s: got %v", uri, remoteTemplates)
return nil, errkit.Wrapf(err, "Could not load template %s: got %v", uri, remoteTemplates)
}
resp, err := retryablehttp.Get(remoteTemplates[0])
if err != nil {
return nil, err
}
defer func() {
_ = resp.Body.Close()
}()
_ = resp.Body.Close()
}()
return io.ReadAll(resp.Body)
} else {
return os.ReadFile(uri)
@ -295,11 +303,11 @@ func (store *Store) LoadTemplatesOnlyMetadata() error {
if strings.Contains(err.Error(), templates.ErrExcluded.Error()) {
stats.Increment(templates.TemplatesExcludedStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
store.logger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
}
continue
}
gologger.Warning().Msg(err.Error())
store.logger.Warning().Msg(err.Error())
}
}
parserItem, ok := store.config.ExecutorOptions.Parser.(*templates.Parser)
@ -308,6 +316,8 @@ func (store *Store) LoadTemplatesOnlyMetadata() error {
}
templatesCache := parserItem.Cache()
loadedTemplateIDs := mapsutil.NewSyncLockMap[string, struct{}]()
for templatePath := range validPaths {
template, _, _ := templatesCache.Has(templatePath)
@ -332,6 +342,12 @@ func (store *Store) LoadTemplatesOnlyMetadata() error {
}
if template != nil {
if loadedTemplateIDs.Has(template.ID) {
store.logger.Debug().Msgf("Skipping duplicate template ID '%s' from path '%s'", template.ID, templatePath)
continue
}
_ = loadedTemplateIDs.Set(template.ID, struct{}{})
template.Path = templatePath
store.templates = append(store.templates, template)
}
@ -358,15 +374,13 @@ func (store *Store) ValidateTemplates() error {
// areWorkflowsValid reports whether every given workflow path loads and
// validates successfully through the parser.
func (store *Store) areWorkflowsValid(filteredWorkflowPaths map[string]struct{}) bool {
	return store.areWorkflowOrTemplatesValid(filteredWorkflowPaths, true, func(templatePath string, tagFilter *templates.TagFilter) (bool, error) {
		return store.config.ExecutorOptions.Parser.LoadWorkflow(templatePath, store.config.Catalog)
	})
}
// areTemplatesValid reports whether every given template path loads and
// validates successfully through the parser.
func (store *Store) areTemplatesValid(filteredTemplatePaths map[string]struct{}) bool {
	return store.areWorkflowOrTemplatesValid(filteredTemplatePaths, false, func(templatePath string, tagFilter *templates.TagFilter) (bool, error) {
		return store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog)
	})
}
@ -375,7 +389,7 @@ func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string
for templatePath := range filteredTemplatePaths {
if _, err := load(templatePath, store.tagFilter); err != nil {
if isParsingError("Error occurred loading template %s: %s\n", templatePath, err) {
if isParsingError(store, "Error occurred loading template %s: %s\n", templatePath, err) {
areTemplatesValid = false
continue
}
@ -383,7 +397,7 @@ func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string
template, err := templates.Parse(templatePath, store.preprocessor, store.config.ExecutorOptions)
if err != nil {
if isParsingError("Error occurred parsing template %s: %s\n", templatePath, err) {
if isParsingError(store, "Error occurred parsing template %s: %s\n", templatePath, err) {
areTemplatesValid = false
continue
}
@ -408,7 +422,7 @@ func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string
// TODO: until https://github.com/projectdiscovery/nuclei-templates/issues/11324 is deployed
// disable strict validation to allow GH actions to run
// areTemplatesValid = false
gologger.Warning().Msgf("Found duplicate template ID during validation '%s' => '%s': %s\n", templatePath, existingTemplatePath, template.ID)
store.logger.Warning().Msgf("Found duplicate template ID during validation '%s' => '%s': %s\n", templatePath, existingTemplatePath, template.ID)
}
if !isWorkflow && len(template.Workflows) > 0 {
continue
@ -431,7 +445,7 @@ func areWorkflowTemplatesValid(store *Store, workflows []*workflows.WorkflowTemp
}
_, err := store.config.Catalog.GetTemplatePath(workflow.Template)
if err != nil {
if isParsingError("Error occurred loading template %s: %s\n", workflow.Template, err) {
if isParsingError(store, "Error occurred loading template %s: %s\n", workflow.Template, err) {
return false
}
}
@ -439,14 +453,14 @@ func areWorkflowTemplatesValid(store *Store, workflows []*workflows.WorkflowTemp
return true
}
func isParsingError(message string, template string, err error) bool {
func isParsingError(store *Store, message string, template string, err error) bool {
if errors.Is(err, templates.ErrExcluded) {
return false
}
if errors.Is(err, templates.ErrCreateTemplateExecutor) {
return false
}
gologger.Error().Msgf(message, template, err)
store.logger.Error().Msgf(message, template, err)
return true
}
@ -465,12 +479,12 @@ func (store *Store) LoadWorkflows(workflowsList []string) []*templates.Template
for workflowPath := range workflowPathMap {
loaded, err := store.config.ExecutorOptions.Parser.LoadWorkflow(workflowPath, store.config.Catalog)
if err != nil {
gologger.Warning().Msgf("Could not load workflow %s: %s\n", workflowPath, err)
store.logger.Warning().Msgf("Could not load workflow %s: %s\n", workflowPath, err)
}
if loaded {
parsed, err := templates.Parse(workflowPath, store.preprocessor, store.config.ExecutorOptions)
if err != nil {
gologger.Warning().Msgf("Could not parse workflow %s: %s\n", workflowPath, err)
store.logger.Warning().Msgf("Could not parse workflow %s: %s\n", workflowPath, err)
} else if parsed != nil {
loadedWorkflows = append(loadedWorkflows, parsed)
}
@ -487,8 +501,16 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
templatePathMap := store.pathFilter.Match(includedTemplates)
loadedTemplates := sliceutil.NewSyncSlice[*templates.Template]()
loadedTemplateIDs := mapsutil.NewSyncLockMap[string, struct{}]()
loadTemplate := func(tmpl *templates.Template) {
if loadedTemplateIDs.Has(tmpl.ID) {
store.logger.Debug().Msgf("Skipping duplicate template ID '%s' from path '%s'", tmpl.ID, tmpl.Path)
return
}
_ = loadedTemplateIDs.Set(tmpl.ID, struct{}{})
loadedTemplates.Append(tmpl)
// increment signed/unsigned counters
if tmpl.Verified {
@ -502,10 +524,22 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
}
}
var wgLoadTemplates sync.WaitGroup
wgLoadTemplates, errWg := syncutil.New(syncutil.WithSize(50))
if errWg != nil {
panic("could not create wait group")
}
if store.config.ExecutorOptions.Options.ExecutionId == "" {
store.config.ExecutorOptions.Options.ExecutionId = xid.New().String()
}
dialers := protocolstate.GetDialersWithId(store.config.ExecutorOptions.Options.ExecutionId)
if dialers == nil {
panic("dialers with executionId " + store.config.ExecutorOptions.Options.ExecutionId + " not found")
}
for templatePath := range templatePathMap {
wgLoadTemplates.Add(1)
wgLoadTemplates.Add()
go func(templatePath string) {
defer wgLoadTemplates.Done()
@ -517,7 +551,7 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
if !errors.Is(err, templates.ErrIncompatibleWithOfflineMatching) {
stats.Increment(templates.RuntimeWarningsStats)
}
gologger.Warning().Msgf("Could not parse template %s: %s\n", templatePath, err)
store.logger.Warning().Msgf("Could not parse template %s: %s\n", templatePath, err)
} else if parsed != nil {
if !parsed.Verified && store.config.ExecutorOptions.Options.DisableUnsignedTemplates {
// skip unverified templates when prompted to
@ -546,19 +580,26 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
// check if the template is a DAST template
// also allow global matchers template to be loaded
if parsed.IsFuzzing() || parsed.Options.GlobalMatchers != nil && parsed.Options.GlobalMatchers.HasMatchers() {
loadTemplate(parsed)
if len(parsed.RequestsHeadless) > 0 && !store.config.ExecutorOptions.Options.Headless {
stats.Increment(templates.ExcludedHeadlessTmplStats)
if config.DefaultConfig.LogAllEvents {
store.logger.Print().Msgf("[%v] Headless flag is required for headless template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else {
loadTemplate(parsed)
}
}
} else if len(parsed.RequestsHeadless) > 0 && !store.config.ExecutorOptions.Options.Headless {
// do not include headless template in final list if headless flag is not set
stats.Increment(templates.ExcludedHeadlessTmplStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] Headless flag is required for headless template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
store.logger.Print().Msgf("[%v] Headless flag is required for headless template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else if len(parsed.RequestsCode) > 0 && !store.config.ExecutorOptions.Options.EnableCodeTemplates {
// do not include 'Code' protocol custom template in final list if code flag is not set
stats.Increment(templates.ExcludedCodeTmplStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] Code flag is required for code protocol template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
store.logger.Print().Msgf("[%v] Code flag is required for code protocol template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else if len(parsed.RequestsCode) > 0 && !parsed.Verified && len(parsed.Workflows) == 0 {
// do not include unverified 'Code' protocol custom template in final list
@ -566,12 +607,12 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
// these will be skipped so increment skip counter
stats.Increment(templates.SkippedUnsignedStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] Tampered/Unsigned template at %v.\n", aurora.Yellow("WRN").String(), templatePath)
store.logger.Print().Msgf("[%v] Tampered/Unsigned template at %v.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else if parsed.IsFuzzing() && !store.config.ExecutorOptions.Options.DAST {
stats.Increment(templates.ExludedDastTmplStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] -dast flag is required for DAST template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
store.logger.Print().Msgf("[%v] -dast flag is required for DAST template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else {
loadTemplate(parsed)
@ -582,11 +623,11 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
if strings.Contains(err.Error(), templates.ErrExcluded.Error()) {
stats.Increment(templates.TemplatesExcludedStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
store.logger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
}
return
}
gologger.Warning().Msg(err.Error())
store.logger.Warning().Msg(err.Error())
}
}(templatePath)
}
@ -642,7 +683,7 @@ func workflowContainsProtocol(workflow []*workflows.WorkflowTemplate) bool {
// logErroredTemplates logs templates that could not be found. When set,
// NotFoundCallback may suppress the error log by returning true for a
// given template.
func (s *Store) logErroredTemplates(erred map[string]error) {
	for template, err := range erred {
		if s.NotFoundCallback == nil || !s.NotFoundCallback(template) {
			s.logger.Error().Msgf("Could not find template '%s': %s", template, err)
		}
	}
}

View File

@ -5,13 +5,16 @@ import (
"fmt"
"net/url"
"strings"
"sync"
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/extensions"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/projectdiscovery/retryablehttp-go"
sliceutil "github.com/projectdiscovery/utils/slice"
stringsutil "github.com/projectdiscovery/utils/strings"
syncutil "github.com/projectdiscovery/utils/sync"
)
type ContentType string
@ -28,67 +31,73 @@ type RemoteContent struct {
}
func getRemoteTemplatesAndWorkflows(templateURLs, workflowURLs, remoteTemplateDomainList []string) ([]string, []string, error) {
remoteContentChannel := make(chan RemoteContent)
var (
err error
muErr sync.Mutex
)
remoteTemplateList := sliceutil.NewSyncSlice[string]()
remoteWorkFlowList := sliceutil.NewSyncSlice[string]()
for _, templateURL := range templateURLs {
go getRemoteContent(templateURL, remoteTemplateDomainList, remoteContentChannel, Template)
}
for _, workflowURL := range workflowURLs {
go getRemoteContent(workflowURL, remoteTemplateDomainList, remoteContentChannel, Workflow)
awg, errAwg := syncutil.New(syncutil.WithSize(50))
if errAwg != nil {
return nil, nil, errAwg
}
var remoteTemplateList []string
var remoteWorkFlowList []string
var err error
for i := 0; i < (len(templateURLs) + len(workflowURLs)); i++ {
remoteContent := <-remoteContentChannel
loadItem := func(URL string, contentType ContentType) {
defer awg.Done()
remoteContent := getRemoteContent(URL, remoteTemplateDomainList, contentType)
if remoteContent.Error != nil {
muErr.Lock()
if err != nil {
err = errors.New(remoteContent.Error.Error() + ": " + err.Error())
} else {
err = remoteContent.Error
}
muErr.Unlock()
} else {
switch remoteContent.Type {
case Template:
remoteTemplateList = append(remoteTemplateList, remoteContent.Content...)
remoteTemplateList.Append(remoteContent.Content...)
case Workflow:
remoteWorkFlowList = append(remoteWorkFlowList, remoteContent.Content...)
remoteWorkFlowList.Append(remoteContent.Content...)
}
}
}
return remoteTemplateList, remoteWorkFlowList, err
for _, templateURL := range templateURLs {
awg.Add()
go loadItem(templateURL, Template)
}
for _, workflowURL := range workflowURLs {
awg.Add()
go loadItem(workflowURL, Workflow)
}
awg.Wait()
return remoteTemplateList.Slice, remoteWorkFlowList.Slice, err
}
func getRemoteContent(URL string, remoteTemplateDomainList []string, remoteContentChannel chan<- RemoteContent, contentType ContentType) {
func getRemoteContent(URL string, remoteTemplateDomainList []string, contentType ContentType) RemoteContent {
if err := validateRemoteTemplateURL(URL, remoteTemplateDomainList); err != nil {
remoteContentChannel <- RemoteContent{
Error: err,
}
return
return RemoteContent{Error: err}
}
if strings.HasPrefix(URL, "http") && stringsutil.HasSuffixAny(URL, extensions.YAML) {
remoteContentChannel <- RemoteContent{
return RemoteContent{
Content: []string{URL},
Type: contentType,
}
return
}
response, err := retryablehttp.DefaultClient().Get(URL)
if err != nil {
remoteContentChannel <- RemoteContent{
Error: err,
}
return
return RemoteContent{Error: err}
}
defer func() {
_ = response.Body.Close()
}()
_ = response.Body.Close()
}()
if response.StatusCode < 200 || response.StatusCode > 299 {
remoteContentChannel <- RemoteContent{
Error: fmt.Errorf("get \"%s\": unexpect status %d", URL, response.StatusCode),
}
return
return RemoteContent{Error: fmt.Errorf("get \"%s\": unexpect status %d", URL, response.StatusCode)}
}
scanner := bufio.NewScanner(response.Body)
@ -100,23 +109,17 @@ func getRemoteContent(URL string, remoteTemplateDomainList []string, remoteConte
}
if utils.IsURL(text) {
if err := validateRemoteTemplateURL(text, remoteTemplateDomainList); err != nil {
remoteContentChannel <- RemoteContent{
Error: err,
}
return
return RemoteContent{Error: err}
}
}
templateList = append(templateList, text)
}
if err := scanner.Err(); err != nil {
remoteContentChannel <- RemoteContent{
Error: errors.Wrap(err, "get \"%s\""),
}
return
return RemoteContent{Error: errors.Wrap(err, "get \"%s\"")}
}
remoteContentChannel <- RemoteContent{
return RemoteContent{
Content: templateList,
Type: contentType,
}

View File

@ -1,6 +1,7 @@
package core
import (
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
@ -17,14 +18,16 @@ import (
type Engine struct {
workPool *WorkPool
options *types.Options
executerOpts protocols.ExecutorOptions
executerOpts *protocols.ExecutorOptions
Callback func(*output.ResultEvent) // Executed on results
Logger *gologger.Logger
}
// New returns a new Engine instance
func New(options *types.Options) *Engine {
engine := &Engine{
options: options,
Logger: options.Logger,
}
engine.workPool = engine.GetWorkPool()
return engine
@ -47,12 +50,12 @@ func (e *Engine) GetWorkPool() *WorkPool {
// SetExecuterOptions sets the executer options for the engine. This is required
// before using the engine to perform any execution.
func (e *Engine) SetExecuterOptions(options protocols.ExecutorOptions) {
func (e *Engine) SetExecuterOptions(options *protocols.ExecutorOptions) {
e.executerOpts = options
}
// ExecuterOptions returns protocols.ExecutorOptions for nuclei engine.
func (e *Engine) ExecuterOptions() protocols.ExecutorOptions {
func (e *Engine) ExecuterOptions() *protocols.ExecutorOptions {
return e.executerOpts
}

View File

@ -5,7 +5,6 @@ import (
"sync"
"sync/atomic"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
@ -50,7 +49,7 @@ func (e *Engine) ExecuteScanWithOpts(ctx context.Context, templatesList []*templ
totalReqAfterClustering := getRequestCount(finalTemplates) * int(target.Count())
if !noCluster && totalReqAfterClustering < totalReqBeforeCluster {
gologger.Info().Msgf("Templates clustered: %d (Reduced %d Requests)", clusterCount, totalReqBeforeCluster-totalReqAfterClustering)
e.Logger.Info().Msgf("Templates clustered: %d (Reduced %d Requests)", clusterCount, totalReqBeforeCluster-totalReqAfterClustering)
}
// 0 matches means no templates were found in the directory

View File

@ -4,9 +4,10 @@ import (
"context"
"sync"
"sync/atomic"
"time"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/scan"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
@ -38,7 +39,7 @@ func (e *Engine) executeAllSelfContained(ctx context.Context, alltemplates []*te
match, err = template.Executer.Execute(ctx)
}
if err != nil {
gologger.Warning().Msgf("[%s] Could not execute step (self-contained): %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
e.options.Logger.Warning().Msgf("[%s] Could not execute step (self-contained): %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
}
results.CompareAndSwap(false, match)
}(v)
@ -47,8 +48,15 @@ func (e *Engine) executeAllSelfContained(ctx context.Context, alltemplates []*te
// executeTemplateWithTargets executes a given template on x targets (with a internal targetpool(i.e concurrency))
func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templates.Template, target provider.InputProvider, results *atomic.Bool) {
// this is target pool i.e max target to execute
wg := e.workPool.InputPool(template.Type())
if e.workPool == nil {
e.workPool = e.GetWorkPool()
}
// Bounded worker pool using input concurrency
pool := e.workPool.InputPool(template.Type())
workerCount := 1
if pool != nil && pool.Size > 0 {
workerCount = pool.Size
}
var (
index uint32
@ -77,6 +85,41 @@ func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templ
currentInfo.Unlock()
}
// task represents a single target execution unit
type task struct {
index uint32
skip bool
value *contextargs.MetaInput
}
tasks := make(chan task)
var workersWg sync.WaitGroup
workersWg.Add(workerCount)
for i := 0; i < workerCount; i++ {
go func() {
defer workersWg.Done()
for t := range tasks {
func() {
defer cleanupInFlight(t.index)
select {
case <-ctx.Done():
return
default:
}
if t.skip {
return
}
match, err := e.executeTemplateOnInput(ctx, template, t.value)
if err != nil {
e.options.Logger.Warning().Msgf("[%s] Could not execute step on %s: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), t.value.Input, err)
}
results.CompareAndSwap(false, match)
}()
}
}()
}
target.Iterate(func(scannedValue *contextargs.MetaInput) bool {
select {
case <-ctx.Done():
@ -88,13 +131,13 @@ func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templ
// skips indexes lower than the minimum in-flight at interruption time
var skip bool
if resumeFromInfo.Completed { // the template was completed
gologger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Template already completed\n", template.ID, scannedValue.Input)
e.options.Logger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Template already completed", template.ID, scannedValue.Input)
skip = true
} else if index < resumeFromInfo.SkipUnder { // index lower than the sliding window (bulk-size)
gologger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Target already processed\n", template.ID, scannedValue.Input)
e.options.Logger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Target already processed", template.ID, scannedValue.Input)
skip = true
} else if _, isInFlight := resumeFromInfo.InFlight[index]; isInFlight { // the target wasn't completed successfully
gologger.Debug().Msgf("[%s] Repeating \"%s\": Resume - Target wasn't completed\n", template.ID, scannedValue.Input)
e.options.Logger.Debug().Msgf("[%s] Repeating \"%s\": Resume - Target wasn't completed", template.ID, scannedValue.Input)
// skip is already false, but leaving it here for clarity
skip = false
} else if index > resumeFromInfo.DoAbove { // index above the sliding window (bulk-size)
@ -108,46 +151,32 @@ func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templ
// Skip if the host has had errors
if e.executerOpts.HostErrorsCache != nil && e.executerOpts.HostErrorsCache.Check(e.executerOpts.ProtocolType.String(), contextargs.NewWithMetaInput(ctx, scannedValue)) {
skipEvent := &output.ResultEvent{
TemplateID: template.ID,
TemplatePath: template.Path,
Info: template.Info,
Type: e.executerOpts.ProtocolType.String(),
Host: scannedValue.Input,
MatcherStatus: false,
Error: "host was skipped as it was found unresponsive",
Timestamp: time.Now(),
}
if e.Callback != nil {
e.Callback(skipEvent)
} else if e.executerOpts.Output != nil {
_ = e.executerOpts.Output.Write(skipEvent)
}
return true
}
wg.Add()
go func(index uint32, skip bool, value *contextargs.MetaInput) {
defer wg.Done()
defer cleanupInFlight(index)
if skip {
return
}
var match bool
var err error
ctxArgs := contextargs.New(ctx)
ctxArgs.MetaInput = value
ctx := scan.NewScanContext(ctx, ctxArgs)
switch template.Type() {
case types.WorkflowProtocol:
match = e.executeWorkflow(ctx, template.CompiledWorkflow)
default:
if e.Callback != nil {
if results, err := template.Executer.ExecuteWithResults(ctx); err == nil {
for _, result := range results {
e.Callback(result)
}
}
match = true
} else {
match, err = template.Executer.Execute(ctx)
}
}
if err != nil {
gologger.Warning().Msgf("[%s] Could not execute step on %s: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), value.Input, err)
}
results.CompareAndSwap(false, match)
}(index, skip, scannedValue)
tasks <- task{index: index, skip: skip, value: scannedValue}
index++
return true
})
wg.Wait()
close(tasks)
workersWg.Wait()
// on completion marks the template as completed
currentInfo.Lock()
@ -185,30 +214,35 @@ func (e *Engine) executeTemplatesOnTarget(ctx context.Context, alltemplates []*t
go func(template *templates.Template, value *contextargs.MetaInput, wg *syncutil.AdaptiveWaitGroup) {
defer wg.Done()
var match bool
var err error
ctxArgs := contextargs.New(ctx)
ctxArgs.MetaInput = value
ctx := scan.NewScanContext(ctx, ctxArgs)
switch template.Type() {
case types.WorkflowProtocol:
match = e.executeWorkflow(ctx, template.CompiledWorkflow)
default:
if e.Callback != nil {
if results, err := template.Executer.ExecuteWithResults(ctx); err == nil {
for _, result := range results {
e.Callback(result)
}
}
match = true
} else {
match, err = template.Executer.Execute(ctx)
}
}
match, err := e.executeTemplateOnInput(ctx, template, value)
if err != nil {
gologger.Warning().Msgf("[%s] Could not execute step on %s: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), value.Input, err)
e.options.Logger.Warning().Msgf("[%s] Could not execute step on %s: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), value.Input, err)
}
results.CompareAndSwap(false, match)
}(tpl, target, sg)
}
}
// executeTemplateOnInput performs template execution for a single input and returns match status and error
func (e *Engine) executeTemplateOnInput(ctx context.Context, template *templates.Template, value *contextargs.MetaInput) (bool, error) {
ctxArgs := contextargs.New(ctx)
ctxArgs.MetaInput = value
scanCtx := scan.NewScanContext(ctx, ctxArgs)
switch template.Type() {
case types.WorkflowProtocol:
return e.executeWorkflow(scanCtx, template.CompiledWorkflow), nil
default:
if e.Callback != nil {
results, err := template.Executer.ExecuteWithResults(scanCtx)
if err != nil {
return false, err
}
for _, result := range results {
e.Callback(result)
}
return len(results) > 0, nil
}
return template.Executer.Execute(scanCtx)
}
}

148
pkg/core/executors_test.go Normal file
View File

@ -0,0 +1,148 @@
package core
import (
"context"
"fmt"
"sync/atomic"
"testing"
"time"
inputtypes "github.com/projectdiscovery/nuclei/v3/pkg/input/types"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/scan"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
tmpltypes "github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
)
// fakeExecuter is a simple stub for protocols.Executer used to test executeTemplateOnInput
type fakeExecuter struct {
withResults bool
}
func (f *fakeExecuter) Compile() error { return nil }
func (f *fakeExecuter) Requests() int { return 1 }
func (f *fakeExecuter) Execute(ctx *scan.ScanContext) (bool, error) { return !f.withResults, nil }
func (f *fakeExecuter) ExecuteWithResults(ctx *scan.ScanContext) ([]*output.ResultEvent, error) {
if !f.withResults {
return nil, nil
}
return []*output.ResultEvent{{Host: "h"}}, nil
}
// newTestEngine creates a minimal Engine for tests
func newTestEngine() *Engine {
return New(&types.Options{})
}
func Test_executeTemplateOnInput_CallbackPath(t *testing.T) {
e := newTestEngine()
called := 0
e.Callback = func(*output.ResultEvent) { called++ }
tpl := &templates.Template{}
tpl.Executer = &fakeExecuter{withResults: true}
ok, err := e.executeTemplateOnInput(context.Background(), tpl, &contextargs.MetaInput{Input: "x"})
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if !ok {
t.Fatalf("expected match true")
}
if called == 0 {
t.Fatalf("expected callback to be called")
}
}
func Test_executeTemplateOnInput_ExecutePath(t *testing.T) {
e := newTestEngine()
tpl := &templates.Template{}
tpl.Executer = &fakeExecuter{withResults: false}
ok, err := e.executeTemplateOnInput(context.Background(), tpl, &contextargs.MetaInput{Input: "x"})
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if !ok {
t.Fatalf("expected match true from Execute path")
}
}
type fakeExecuterErr struct{}
func (f *fakeExecuterErr) Compile() error { return nil }
func (f *fakeExecuterErr) Requests() int { return 1 }
func (f *fakeExecuterErr) Execute(ctx *scan.ScanContext) (bool, error) { return false, nil }
func (f *fakeExecuterErr) ExecuteWithResults(ctx *scan.ScanContext) ([]*output.ResultEvent, error) {
return nil, fmt.Errorf("boom")
}
func Test_executeTemplateOnInput_CallbackErrorPropagates(t *testing.T) {
e := newTestEngine()
e.Callback = func(*output.ResultEvent) {}
tpl := &templates.Template{}
tpl.Executer = &fakeExecuterErr{}
ok, err := e.executeTemplateOnInput(context.Background(), tpl, &contextargs.MetaInput{Input: "x"})
if err == nil {
t.Fatalf("expected error to propagate")
}
if ok {
t.Fatalf("expected match to be false on error")
}
}
type fakeTargetProvider struct {
values []*contextargs.MetaInput
}
func (f *fakeTargetProvider) Count() int64 { return int64(len(f.values)) }
func (f *fakeTargetProvider) Iterate(cb func(value *contextargs.MetaInput) bool) {
for _, v := range f.values {
if !cb(v) {
return
}
}
}
func (f *fakeTargetProvider) Set(string, string) {}
func (f *fakeTargetProvider) SetWithProbe(string, string, inputtypes.InputLivenessProbe) error {
return nil
}
func (f *fakeTargetProvider) SetWithExclusions(string, string) error { return nil }
func (f *fakeTargetProvider) InputType() string { return "test" }
func (f *fakeTargetProvider) Close() {}
type slowExecuter struct{}
func (s *slowExecuter) Compile() error { return nil }
func (s *slowExecuter) Requests() int { return 1 }
func (s *slowExecuter) Execute(ctx *scan.ScanContext) (bool, error) {
select {
case <-ctx.Context().Done():
return false, ctx.Context().Err()
case <-time.After(200 * time.Millisecond):
return true, nil
}
}
func (s *slowExecuter) ExecuteWithResults(ctx *scan.ScanContext) ([]*output.ResultEvent, error) {
return nil, nil
}
func Test_executeTemplateWithTargets_RespectsCancellation(t *testing.T) {
e := newTestEngine()
e.SetExecuterOptions(&protocols.ExecutorOptions{Logger: e.Logger, ResumeCfg: types.NewResumeCfg(), ProtocolType: tmpltypes.HTTPProtocol})
tpl := &templates.Template{}
tpl.Executer = &slowExecuter{}
targets := &fakeTargetProvider{values: []*contextargs.MetaInput{{Input: "a"}, {Input: "b"}, {Input: "c"}}}
ctx, cancel := context.WithCancel(context.Background())
cancel()
var matched atomic.Bool
e.executeTemplateWithTargets(ctx, tpl, targets, &matched)
}

View File

@ -12,7 +12,7 @@ import (
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
)
var _ Provider = &customTemplateAzureBlob{}
@ -29,7 +29,9 @@ func NewAzureProviders(options *types.Options) ([]*customTemplateAzureBlob, erro
// Establish a connection to Azure and build a client object with which to download templates from Azure Blob Storage
azClient, err := getAzureBlobClient(options.AzureTenantID, options.AzureClientID, options.AzureClientSecret, options.AzureServiceURL)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("Error establishing Azure Blob client for %s", options.AzureContainerName)
errx := errkit.FromError(err)
errx.Msgf("Error establishing Azure Blob client for %s", options.AzureContainerName)
return nil, errx
}
// Create a new Azure Blob Storage container object

View File

@ -13,6 +13,7 @@ import (
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file"
folderutil "github.com/projectdiscovery/utils/folder"
"golang.org/x/oauth2"
@ -46,19 +47,45 @@ func (customTemplate *customTemplateGitHubRepo) Update(ctx context.Context) {
downloadPath := config.DefaultConfig.CustomGitHubTemplatesDirectory
clonePath := customTemplate.getLocalRepoClonePath(downloadPath)
// If folder does not exits then clone/download the repo
// If folder does not exist then clone/download the repo
if !fileutil.FolderExists(clonePath) {
customTemplate.Download(ctx)
return
}
// Attempt to pull changes and handle the result
customTemplate.handlePullChanges(clonePath)
}
// handlePullChanges attempts to pull changes and logs the appropriate message
func (customTemplate *customTemplateGitHubRepo) handlePullChanges(clonePath string) {
err := customTemplate.pullChanges(clonePath, customTemplate.githubToken)
if err != nil {
gologger.Error().Msgf("%s", err)
} else {
gologger.Info().Msgf("Repo %s/%s successfully pulled the changes.\n", customTemplate.owner, customTemplate.reponame)
switch {
case err == nil:
customTemplate.logPullSuccess()
case errors.Is(err, git.NoErrAlreadyUpToDate):
customTemplate.logAlreadyUpToDate(err)
default:
customTemplate.logPullError(err)
}
}
// logPullSuccess logs a success message when changes are pulled
func (customTemplate *customTemplateGitHubRepo) logPullSuccess() {
gologger.Info().Msgf("Repo %s/%s successfully pulled the changes.\n", customTemplate.owner, customTemplate.reponame)
}
// logAlreadyUpToDate logs an info message when repo is already up to date
func (customTemplate *customTemplateGitHubRepo) logAlreadyUpToDate(err error) {
gologger.Info().Msgf("%s", err)
}
// logPullError logs an error message when pull fails
func (customTemplate *customTemplateGitHubRepo) logPullError(err error) {
gologger.Error().Msgf("%s", err)
}
// NewGitHubProviders returns new instance of GitHub providers for downloading custom templates
func NewGitHubProviders(options *types.Options) ([]*customTemplateGitHubRepo, error) {
providers := []*customTemplateGitHubRepo{}
@ -187,7 +214,7 @@ func (ctr *customTemplateGitHubRepo) pullChanges(repoPath, githubToken string) e
err = w.Pull(pullOpts)
if err != nil {
return errors.Errorf("%s/%s: %s", ctr.owner, ctr.reponame, err.Error())
return errkit.Wrapf(err, "%s/%s", ctr.owner, ctr.reponame)
}
return nil

View File

@ -1,23 +1,25 @@
package customtemplates
import (
"bytes"
"context"
"path/filepath"
"strings"
"testing"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/gologger/levels"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
osutils "github.com/projectdiscovery/utils/os"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/stretchr/testify/require"
)
func TestDownloadCustomTemplatesFromGitHub(t *testing.T) {
if osutils.IsOSX() {
t.Skip("skipping on macos due to unknown failure (works locally)")
}
gologger.DefaultLogger.SetWriter(&testutils.NoopWriter{})
// Capture output to check for rate limit errors
outputBuffer := &bytes.Buffer{}
gologger.DefaultLogger.SetWriter(&utils.CaptureWriter{Buffer: outputBuffer})
gologger.DefaultLogger.SetMaxLevel(levels.LevelDebug)
templatesDirectory := t.TempDir()
config.DefaultConfig.SetTemplatesDir(templatesDirectory)
@ -29,5 +31,12 @@ func TestDownloadCustomTemplatesFromGitHub(t *testing.T) {
require.Nil(t, err, "could not create custom templates manager")
ctm.Download(context.Background())
// Check if output contains rate limit error and skip test if so
output := outputBuffer.String()
if strings.Contains(output, "API rate limit exceeded") {
t.Skip("GitHub API rate limit exceeded, skipping test")
}
require.DirExists(t, filepath.Join(templatesDirectory, "github", "projectdiscovery", "nuclei-templates-test"), "cloned directory does not exists")
}

View File

@ -9,7 +9,7 @@ import (
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
gitlab "gitlab.com/gitlab-org/api/client-go"
)
@ -28,7 +28,9 @@ func NewGitLabProviders(options *types.Options) ([]*customTemplateGitLabRepo, er
// Establish a connection to GitLab and build a client object with which to download templates from GitLab
gitLabClient, err := getGitLabClient(options.GitLabServerURL, options.GitLabToken)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("Error establishing GitLab client for %s %s", options.GitLabServerURL, err)
errx := errkit.FromError(err)
errx.Msgf("Error establishing GitLab client for %s %s", options.GitLabServerURL, err)
return nil, errx
}
// Create a new GitLab service client

View File

@ -14,7 +14,7 @@ import (
"github.com/projectdiscovery/gologger"
nucleiConfig "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
stringsutil "github.com/projectdiscovery/utils/strings"
)
@ -64,7 +64,9 @@ func NewS3Providers(options *types.Options) ([]*customTemplateS3Bucket, error) {
if options.AwsBucketName != "" && !options.AwsTemplateDisableDownload {
s3c, err := getS3Client(context.TODO(), options.AwsAccessKey, options.AwsSecretKey, options.AwsRegion, options.AwsProfile)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("error downloading s3 bucket %s", options.AwsBucketName)
errx := errkit.FromError(err)
errx.Msgf("error downloading s3 bucket %s", options.AwsBucketName)
return nil, errx
}
ctBucket := &customTemplateS3Bucket{
bucketName: options.AwsBucketName,

View File

@ -4,7 +4,7 @@ import (
"context"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
)
type Provider interface {
@ -38,7 +38,9 @@ func NewCustomTemplatesManager(options *types.Options) (*CustomTemplatesManager,
// Add GitHub providers
githubProviders, err := NewGitHubProviders(options)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not create github providers for custom templates")
errx := errkit.FromError(err)
errx.Msgf("could not create github providers for custom templates")
return nil, errx
}
for _, v := range githubProviders {
ctm.providers = append(ctm.providers, v)
@ -47,7 +49,9 @@ func NewCustomTemplatesManager(options *types.Options) (*CustomTemplatesManager,
// Add AWS S3 providers
s3Providers, err := NewS3Providers(options)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not create s3 providers for custom templates")
errx := errkit.FromError(err)
errx.Msgf("could not create s3 providers for custom templates")
return nil, errx
}
for _, v := range s3Providers {
ctm.providers = append(ctm.providers, v)
@ -56,7 +60,9 @@ func NewCustomTemplatesManager(options *types.Options) (*CustomTemplatesManager,
// Add Azure providers
azureProviders, err := NewAzureProviders(options)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not create azure providers for custom templates")
errx := errkit.FromError(err)
errx.Msgf("could not create azure providers for custom templates")
return nil, errx
}
for _, v := range azureProviders {
ctm.providers = append(ctm.providers, v)
@ -65,7 +71,9 @@ func NewCustomTemplatesManager(options *types.Options) (*CustomTemplatesManager,
// Add GitLab providers
gitlabProviders, err := NewGitLabProviders(options)
if err != nil {
return nil, errorutil.NewWithErr(err).Msgf("could not create gitlab providers for custom templates")
errx := errkit.FromError(err)
errx.Msgf("could not create gitlab providers for custom templates")
return nil, errx
}
for _, v := range gitlabProviders {
ctm.providers = append(ctm.providers, v)

View File

@ -61,7 +61,6 @@ func checkTimingDependency(
var requestsSent []requestsSentMetadata
for requestsLeft > 0 {
isCorrelationPossible, delayRecieved, err := sendRequestAndTestConfidence(regression, highSleepTimeSeconds, requestSender, baselineDelay)
if err != nil {
return false, "", err

View File

@ -7,7 +7,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/dataformat"
"github.com/projectdiscovery/retryablehttp-go"
mapsutil "github.com/projectdiscovery/utils/maps"
urlutil "github.com/projectdiscovery/utils/url"
)
@ -38,12 +37,18 @@ func (q *Path) Parse(req *retryablehttp.Request) (bool, error) {
splitted := strings.Split(req.Path, "/")
values := make(map[string]interface{})
for i := range splitted {
pathTillNow := strings.Join(splitted[:i+1], "/")
if pathTillNow == "" {
for i, segment := range splitted {
if segment == "" && i == 0 {
// Skip the first empty segment from leading "/"
continue
}
values[strconv.Itoa(i)] = pathTillNow
if segment == "" {
// Skip any other empty segments
continue
}
// Use 1-based indexing and store individual segments
key := strconv.Itoa(len(values) + 1)
values[key] = segment
}
q.value.SetParsed(dataformat.KVMap(values), "")
return true, nil
@ -64,7 +69,7 @@ func (q *Path) Iterate(callback func(key string, value interface{}) error) (err
// SetValue sets a value in the component
// for a key
func (q *Path) SetValue(key string, value string) error {
escaped := urlutil.ParamEncode(value)
escaped := urlutil.PathEncode(value)
if !q.value.SetParsedValue(key, escaped) {
return ErrSetValue
}
@ -82,40 +87,48 @@ func (q *Path) Delete(key string) error {
// Rebuild returns a new request with the
// component rebuilt
func (q *Path) Rebuild() (*retryablehttp.Request, error) {
originalValues := mapsutil.Map[string, any]{}
splitted := strings.Split(q.req.Path, "/")
for i := range splitted {
pathTillNow := strings.Join(splitted[:i+1], "/")
if pathTillNow == "" {
continue
}
originalValues[strconv.Itoa(i)] = pathTillNow
// Get the original path segments
originalSplitted := strings.Split(q.req.Path, "/")
// Create a new slice to hold the rebuilt segments
rebuiltSegments := make([]string, 0, len(originalSplitted))
// Add the first empty segment (from leading "/")
if len(originalSplitted) > 0 && originalSplitted[0] == "" {
rebuiltSegments = append(rebuiltSegments, "")
}
originalPath := q.req.Path
lengthSplitted := len(q.value.parsed.Map)
for i := lengthSplitted; i > 0; i-- {
key := strconv.Itoa(i)
original, ok := originalValues.GetOrDefault(key, "").(string)
if !ok {
// Process each segment
segmentIndex := 1 // 1-based indexing for our stored values
for i := 1; i < len(originalSplitted); i++ {
originalSegment := originalSplitted[i]
if originalSegment == "" {
// Skip empty segments
continue
}
new, ok := q.value.parsed.Map.GetOrDefault(key, "").(string)
if !ok {
continue
// Check if we have a replacement for this segment
key := strconv.Itoa(segmentIndex)
if newValue, exists := q.value.parsed.Map.GetOrDefault(key, "").(string); exists && newValue != "" {
rebuiltSegments = append(rebuiltSegments, newValue)
} else {
rebuiltSegments = append(rebuiltSegments, originalSegment)
}
if new == original {
// no need to replace
continue
}
originalPath = strings.Replace(originalPath, original, new, 1)
segmentIndex++
}
// Join the segments back into a path
rebuiltPath := strings.Join(rebuiltSegments, "/")
if unescaped, err := urlutil.PathDecode(rebuiltPath); err == nil {
// this is handle the case where anyportion of path has url encoded data
// by default the http/request official library will escape/encode special characters in path
// to avoid double encoding we unescape/decode already encoded value
//
// if there is a invalid url encoded value like %99 then it will still be encoded as %2599 and not %99
// the only way to make sure it stays as %99 is to implement raw request and unsafe for fuzzing as well
rebuiltPath = unescaped
}
rebuiltPath := originalPath
// Clone the request and update the path
cloned := q.req.Clone(context.Background())

View File

@ -29,9 +29,9 @@ func TestURLComponent(t *testing.T) {
})
require.Equal(t, []string{"1"}, keys, "unexpected keys")
require.Equal(t, []string{"/testpath"}, values, "unexpected values")
require.Equal(t, []string{"testpath"}, values, "unexpected values")
err = urlComponent.SetValue("1", "/newpath")
err = urlComponent.SetValue("1", "newpath")
if err != nil {
t.Fatal(err)
}
@ -61,9 +61,10 @@ func TestURLComponent_NestedPaths(t *testing.T) {
isSet := false
_ = path.Iterate(func(key string, value interface{}) error {
if !isSet && value.(string) == "/user/753" {
t.Logf("Key: %s, Value: %s", key, value.(string))
if !isSet && value.(string) == "753" {
isSet = true
if setErr := path.SetValue(key, "/user/753'"); setErr != nil {
if setErr := path.SetValue(key, "753'"); setErr != nil {
t.Fatal(setErr)
}
}
@ -75,6 +76,54 @@ func TestURLComponent_NestedPaths(t *testing.T) {
t.Fatal(err)
}
if newReq.Path != "/user/753'/profile" {
t.Fatal("expected path to be modified")
t.Fatalf("expected path to be '/user/753'/profile', got '%s'", newReq.Path)
}
}
// TestPathComponent_SQLInjection replaces a numeric path segment with a SQL
// injection payload containing spaces and verifies the rebuilt path keeps
// those spaces instead of percent-encoding them.
func TestPathComponent_SQLInjection(t *testing.T) {
	pathComponent := NewPath()
	req, err := retryablehttp.NewRequest(http.MethodGet, "https://example.com/user/55/profile", nil)
	if err != nil {
		t.Fatal(err)
	}
	found, err := pathComponent.Parse(req)
	if err != nil {
		t.Fatal(err)
	}
	if !found {
		t.Fatal("expected path to be found")
	}
	t.Logf("Original path: %s", req.Path)
	// Let's see what path segments are available for fuzzing
	err = pathComponent.Iterate(func(key string, value interface{}) error {
		segment := value.(string)
		t.Logf("Key: %s, Value: %s", key, segment)
		// Try fuzzing the "55" segment specifically (which should be key "2")
		if segment != "55" {
			return nil
		}
		if setErr := pathComponent.SetValue(key, "55 OR True"); setErr != nil {
			t.Fatal(setErr)
		}
		return nil
	})
	if err != nil {
		t.Fatal(err)
	}
	rebuilt, err := pathComponent.Rebuild()
	if err != nil {
		t.Fatal(err)
	}
	t.Logf("Modified path: %s", rebuilt.Path)
	// Now with PathEncode, spaces are preserved correctly for SQL injection
	if rebuilt.Path != "/user/55 OR True/profile" {
		t.Fatalf("expected path to be '/user/55 OR True/profile', got '%s'", rebuilt.Path)
	}
	// Let's also test what the actual URL looks like
	t.Logf("Full URL: %s", rebuilt.String())
}

View File

@ -27,7 +27,29 @@ var (
// NewMultiPartForm returns a new MultiPartForm encoder
func NewMultiPartForm() *MultiPartForm {
return &MultiPartForm{}
return &MultiPartForm{
filesMetadata: make(map[string]FileMetadata),
}
}
// SetFileMetadata sets the file metadata for a given field name.
func (m *MultiPartForm) SetFileMetadata(fieldName string, metadata FileMetadata) {
	// Lazily allocate so a zero-value MultiPartForm remains usable.
	if m.filesMetadata == nil {
		m.filesMetadata = map[string]FileMetadata{fieldName: metadata}
		return
	}
	m.filesMetadata[fieldName] = metadata
}
// GetFileMetadata gets the file metadata for a given field name.
// The boolean result reports whether metadata was stored for fieldName.
func (m *MultiPartForm) GetFileMetadata(fieldName string) (FileMetadata, bool) {
	// Indexing a nil map is safe in Go and yields the zero value with
	// ok == false, so no explicit nil check is required.
	metadata, exists := m.filesMetadata[fieldName]
	return metadata, exists
}
// IsType returns true if the data is MultiPartForm encoded
@ -49,42 +71,61 @@ func (m *MultiPartForm) Encode(data KV) (string, error) {
var fw io.Writer
var err error
if filesArray, ok := value.([]interface{}); ok {
fileMetadata, ok := m.filesMetadata[key]
if !ok {
Itererr = fmt.Errorf("file metadata not found for key %s", key)
return false
}
if fileMetadata, ok := m.filesMetadata[key]; ok {
if filesArray, isArray := value.([]any); isArray {
for _, file := range filesArray {
h := make(textproto.MIMEHeader)
h.Set("Content-Disposition",
fmt.Sprintf(`form-data; name=%q; filename=%q`,
key, fileMetadata.Filename))
h.Set("Content-Type", fileMetadata.ContentType)
for _, file := range filesArray {
h := make(textproto.MIMEHeader)
h.Set("Content-Disposition",
fmt.Sprintf(`form-data; name=%q; filename=%q`,
key, fileMetadata.Filename))
h.Set("Content-Type", fileMetadata.ContentType)
if fw, err = w.CreatePart(h); err != nil {
Itererr = err
return false
}
if fw, err = w.CreatePart(h); err != nil {
Itererr = err
return false
if _, err = fw.Write([]byte(file.(string))); err != nil {
Itererr = err
return false
}
}
if _, err = fw.Write([]byte(file.(string))); err != nil {
Itererr = err
return false
}
return true
}
return true
}
// Add field
if fw, err = w.CreateFormField(key); err != nil {
Itererr = err
return false
var values []string
switch v := value.(type) {
case nil:
values = []string{""}
case string:
values = []string{v}
case []string:
values = v
case []any:
values = make([]string, len(v))
for i, item := range v {
if item == nil {
values[i] = ""
} else {
values[i] = fmt.Sprint(item)
}
}
default:
values = []string{fmt.Sprintf("%v", v)}
}
if _, err = fw.Write([]byte(value.(string))); err != nil {
Itererr = err
return false
for _, val := range values {
if fw, err = w.CreateFormField(key); err != nil {
Itererr = err
return false
}
if _, err = fw.Write([]byte(val)); err != nil {
Itererr = err
return false
}
}
return true
})
@ -106,16 +147,24 @@ func (m *MultiPartForm) ParseBoundary(contentType string) error {
if m.boundary == "" {
return fmt.Errorf("no boundary found in the content type")
}
// NOTE(dwisiswant0): boundary cannot exceed 70 characters according to
// RFC-2046.
if len(m.boundary) > 70 {
return fmt.Errorf("boundary exceeds maximum length of 70 characters")
}
return nil
}
// Decode decodes the data from MultiPartForm format
func (m *MultiPartForm) Decode(data string) (KV, error) {
if m.boundary == "" {
return KV{}, fmt.Errorf("boundary not set, call ParseBoundary first")
}
// Create a buffer from the string data
b := bytes.NewBufferString(data)
// The boundary parameter should be extracted from the Content-Type header of the HTTP request
// which is not available in this context, so this is a placeholder for demonstration.
// You will need to pass the actual boundary value to this function.
r := multipart.NewReader(b, m.boundary)
form, err := r.ReadForm(32 << 20) // 32MB is the max memory used to parse the form
@ -134,30 +183,44 @@ func (m *MultiPartForm) Decode(data string) (KV, error) {
result.Set(key, values[0])
}
}
m.filesMetadata = make(map[string]FileMetadata)
if m.filesMetadata == nil {
m.filesMetadata = make(map[string]FileMetadata)
}
for key, files := range form.File {
fileContents := []interface{}{}
var fileMetadataList []FileMetadata
for _, fileHeader := range files {
file, err := fileHeader.Open()
if err != nil {
return KV{}, err
}
defer func() {
_ = file.Close()
}()
buffer := new(bytes.Buffer)
if _, err := buffer.ReadFrom(file); err != nil {
_ = file.Close()
return KV{}, err
}
_ = file.Close()
fileContents = append(fileContents, buffer.String())
m.filesMetadata[key] = FileMetadata{
fileMetadataList = append(fileMetadataList, FileMetadata{
ContentType: fileHeader.Header.Get("Content-Type"),
Filename: fileHeader.Filename,
}
})
}
result.Set(key, fileContents)
// NOTE(dwisiswant0): store the first file's metadata instead of the
// last one
if len(fileMetadataList) > 0 {
m.filesMetadata[key] = fileMetadataList[0]
}
}
return KVOrderedMap(&result), nil
}

View File

@ -0,0 +1,370 @@
package dataformat
import (
	"strings"
	"testing"

	mapsutil "github.com/projectdiscovery/utils/maps"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)
// TestMultiPartFormEncode verifies the Encode/Decode round-trip of
// MultiPartForm across scalar fields, repeated ([]string) fields,
// mixed-type values (numbers, booleans, nil) and empty arrays.
func TestMultiPartFormEncode(t *testing.T) {
	tests := []struct {
		name     string
		fields   map[string]any // input KV pairs fed to Encode
		wantErr  bool
		expected map[string]any // values expected after Decode
	}{
		{
			name: "duplicate fields ([]string) - checkbox scenario",
			fields: map[string]any{
				"interests": []string{"sports", "music", "reading"},
				"colors":    []string{"red", "blue"},
			},
			expected: map[string]any{
				"interests": []string{"sports", "music", "reading"},
				"colors":    []string{"red", "blue"},
			},
		},
		{
			name: "single string fields - backward compatibility",
			fields: map[string]any{
				"username": "john",
				"email":    "john@example.com",
			},
			expected: map[string]any{
				"username": "john",
				"email":    "john@example.com",
			},
		},
		{
			name: "mixed types",
			fields: map[string]any{
				"string":     "text",
				"array":      []string{"item1", "item2"},
				"number":     42,   // tests fmt.Sprint fallback
				"float":      3.14, // tests float conversion
				"boolean":    true, // tests boolean conversion
				"zero":       0,    // tests zero value
				"emptyStr":   "",   // tests empty string
				"negative":   -123, // tests negative number
				"nil":        nil,  // tests nil value
				"mixedArray": []any{"str", 123, false, nil}, // tests mixed type array
			},
			expected: map[string]any{
				"string":     "text",
				"array":      []string{"item1", "item2"},
				"number":     "42",   // numbers are converted to strings in multipart
				"float":      "3.14", // floats are converted to strings
				"boolean":    "true", // booleans are converted to strings
				"zero":       "0",    // zero value converted to string
				"emptyStr":   "",     // empty string remains empty
				"negative":   "-123", // negative numbers converted to strings
				"nil":        "",     // nil values converted to "" string
				"mixedArray": []string{"str", "123", "false", ""}, // mixed array converted to string array
			},
		},
		{
			name: "empty array - should not appear in output",
			fields: map[string]any{
				"emptyArray":  []string{},
				"normalField": "value",
			},
			expected: map[string]any{
				"normalField": "value",
				// emptyArray should not appear in decoded output
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Report panics in Encode/Decode as failures instead of
			// crashing the whole test binary.
			defer func() {
				if r := recover(); r != nil {
					t.Errorf("Test panicked: %v", r)
				}
			}()
			form := NewMultiPartForm()
			form.boundary = "----WebKitFormBoundary7MA4YWxkTrZu0gW"
			kv := mapsutil.NewOrderedMap[string, any]()
			for k, v := range tt.fields {
				kv.Set(k, v)
			}
			encoded, err := form.Encode(KVOrderedMap(&kv))
			if tt.wantErr {
				require.Error(t, err)
				return
			}
			require.NoError(t, err)
			// Decode the encoded multipart data
			decoded, err := form.Decode(encoded)
			require.NoError(t, err)
			// Compare decoded values with expected values
			for expectedKey, expectedValue := range tt.expected {
				actualValue := decoded.Get(expectedKey)
				switch expected := expectedValue.(type) {
				case []string:
					actual, ok := actualValue.([]string)
					require.True(t, ok, "Expected []string for key %s, got %T", expectedKey, actualValue)
					assert.ElementsMatch(t, expected, actual, "Values mismatch for key %s", expectedKey)
				case []any:
					actual, ok := actualValue.([]any)
					require.True(t, ok, "Expected []any for key %s, got %T", expectedKey, actualValue)
					assert.ElementsMatch(t, expected, actual, "Values mismatch for key %s", expectedKey)
				case string:
					actual, ok := actualValue.(string)
					require.True(t, ok, "Expected string for key %s, got %T", expectedKey, actualValue)
					assert.Equal(t, expected, actual, "Values mismatch for key %s", expectedKey)
				default:
					assert.Equal(t, expected, actualValue, "Values mismatch for key %s", expectedKey)
				}
			}
			// Ensure no unexpected keys are present in decoded output
			decoded.Iterate(func(key string, value any) bool {
				_, exists := tt.expected[key]
				assert.True(t, exists, "Unexpected key %s found in decoded output", key)
				return true
			})
			t.Logf("Encoded output:\n%s", encoded)
		})
	}
}
// TestMultiPartFormRoundTrip checks that an ordered KV map encodes to
// multipart form data and decodes back to the same scalar and repeated
// values.
func TestMultiPartFormRoundTrip(t *testing.T) {
	defer func() {
		if r := recover(); r != nil {
			t.Errorf("Test panicked: %v", r)
		}
	}()
	mpForm := NewMultiPartForm()
	mpForm.boundary = "----WebKitFormBoundary7MA4YWxkTrZu0gW"
	input := mapsutil.NewOrderedMap[string, any]()
	input.Set("username", "john")
	input.Set("interests", []string{"sports", "music", "reading"})
	encoded, encodeErr := mpForm.Encode(KVOrderedMap(&input))
	require.NoError(t, encodeErr)
	decoded, decodeErr := mpForm.Decode(encoded)
	require.NoError(t, decodeErr)
	assert.Equal(t, "john", decoded.Get("username"))
	assert.ElementsMatch(t, []string{"sports", "music", "reading"}, decoded.Get("interests"))
	t.Logf("Encoded output:\n%s", encoded)
}
// TestMultiPartFormFileUpload decodes a hand-crafted multipart body mixing
// regular fields, single and repeated file parts, and repeated scalar fields.
//
// Fix: the crafted body was missing the mandatory empty line separating each
// part's headers from its content; without it the multipart reader cannot
// parse the parts and none of the assertions below can hold.
func TestMultiPartFormFileUpload(t *testing.T) {
	defer func() {
		if r := recover(); r != nil {
			t.Errorf("Test panicked: %v", r)
		}
	}()
	// Test decoding of a manually crafted multipart form with files
	form := NewMultiPartForm()
	form.boundary = "----WebKitFormBoundaryFileUploadTest"
	// Manually craft a multipart form with file uploads. The blank line after
	// each header block is required by the multipart format.
	multipartData := `------WebKitFormBoundaryFileUploadTest
Content-Disposition: form-data; name="name"

John Doe
------WebKitFormBoundaryFileUploadTest
Content-Disposition: form-data; name="email"

john@example.com
------WebKitFormBoundaryFileUploadTest
Content-Disposition: form-data; name="profile_picture"; filename="profile.jpg"
Content-Type: image/jpeg

fake_jpeg_binary_data_here
------WebKitFormBoundaryFileUploadTest
Content-Disposition: form-data; name="documents"; filename="resume.pdf"
Content-Type: application/pdf

fake_pdf_content_1
------WebKitFormBoundaryFileUploadTest
Content-Disposition: form-data; name="documents"; filename="cover_letter.pdf"
Content-Type: application/pdf

fake_pdf_content_2
------WebKitFormBoundaryFileUploadTest
Content-Disposition: form-data; name="skills"

Go
------WebKitFormBoundaryFileUploadTest
Content-Disposition: form-data; name="skills"

JavaScript
------WebKitFormBoundaryFileUploadTest
Content-Disposition: form-data; name="skills"

Python
------WebKitFormBoundaryFileUploadTest--
`
	// Test decoding
	decoded, err := form.Decode(multipartData)
	require.NoError(t, err)
	// Verify regular fields
	assert.Equal(t, "John Doe", decoded.Get("name"))
	assert.Equal(t, "john@example.com", decoded.Get("email"))
	assert.Equal(t, []string{"Go", "JavaScript", "Python"}, decoded.Get("skills"))
	// Verify file fields
	profilePicture := decoded.Get("profile_picture")
	require.NotNil(t, profilePicture)
	profileArray, ok := profilePicture.([]interface{})
	require.True(t, ok, "Expected []interface{} for profile_picture")
	require.Len(t, profileArray, 1)
	assert.Equal(t, "fake_jpeg_binary_data_here", profileArray[0])
	documents := decoded.Get("documents")
	require.NotNil(t, documents)
	documentsArray, ok := documents.([]interface{})
	require.True(t, ok, "Expected []interface{} for documents")
	require.Len(t, documentsArray, 2)
	assert.Contains(t, documentsArray, "fake_pdf_content_1")
	assert.Contains(t, documentsArray, "fake_pdf_content_2")
}
// TestMultiPartForm_SetGetFileMetadata covers the set/get round-trip for
// per-field file metadata, including the lookup-miss case.
func TestMultiPartForm_SetGetFileMetadata(t *testing.T) {
	mpForm := NewMultiPartForm()
	want := FileMetadata{
		ContentType: "image/jpeg",
		Filename:    "test.jpg",
	}
	mpForm.SetFileMetadata("avatar", want)
	// Test GetFileMetadata for existing field
	got, exists := mpForm.GetFileMetadata("avatar")
	assert.True(t, exists)
	assert.Equal(t, want.ContentType, got.ContentType)
	assert.Equal(t, want.Filename, got.Filename)
	// Test GetFileMetadata for non-existing field
	_, exists = mpForm.GetFileMetadata("nonexistent")
	assert.False(t, exists)
}
// TestMultiPartForm_FilesMetadataInitialization checks that the constructor
// initializes the metadata map and that stored entries round-trip intact.
func TestMultiPartForm_FilesMetadataInitialization(t *testing.T) {
	mpForm := NewMultiPartForm()
	assert.NotNil(t, mpForm.filesMetadata)
	want := FileMetadata{
		ContentType: "text/plain",
		Filename:    "test.txt",
	}
	mpForm.SetFileMetadata("file", want)
	got, exists := mpForm.GetFileMetadata("file")
	assert.True(t, exists)
	assert.Equal(t, want, got)
}
// TestMultiPartForm_BoundaryValidation exercises ParseBoundary with a valid
// boundary, a missing boundary parameter, and a boundary longer than the
// 70-character limit from RFC 2046.
//
// Fix: the over-long boundary was previously built from 71 NUL bytes via an
// O(n^2) byte-by-byte string-rebuilding loop; strings.Repeat produces the
// same 71-'a' boundary directly.
func TestMultiPartForm_BoundaryValidation(t *testing.T) {
	form := NewMultiPartForm()
	// Test valid boundary
	err := form.ParseBoundary("multipart/form-data; boundary=testboundary")
	assert.NoError(t, err)
	assert.Equal(t, "testboundary", form.boundary)
	// Test missing boundary
	err = form.ParseBoundary("multipart/form-data")
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "no boundary found")
	// Test boundary too long (over 70 characters)
	longBoundary := "multipart/form-data; boundary=" + strings.Repeat("a", 71)
	err = form.ParseBoundary(longBoundary)
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "boundary exceeds maximum length")
}
// TestMultiPartForm_DecodeRequiresBoundary ensures Decode refuses to run
// before a boundary has been set via ParseBoundary.
func TestMultiPartForm_DecodeRequiresBoundary(t *testing.T) {
	mpForm := NewMultiPartForm()
	// Decode should fail if boundary is not set
	_, decodeErr := mpForm.Decode("some data")
	assert.Error(t, decodeErr)
	assert.Contains(t, decodeErr.Error(), "boundary not set")
}
// TestMultiPartForm_MultipleFilesMetadata decodes two files sharing one field
// name and checks that the stored metadata comes from the first file.
//
// Fix: the crafted body was missing the mandatory empty line separating each
// part's headers from its content; without it the multipart reader cannot
// parse the parts.
func TestMultiPartForm_MultipleFilesMetadata(t *testing.T) {
	form := NewMultiPartForm()
	form.boundary = "----WebKitFormBoundaryMultiFileTest"
	// Test with multiple files having the same field name. The blank line
	// after each header block is required by the multipart format.
	multipartData := `------WebKitFormBoundaryMultiFileTest
Content-Disposition: form-data; name="documents"; filename="file1.txt"
Content-Type: text/plain

content1
------WebKitFormBoundaryMultiFileTest
Content-Disposition: form-data; name="documents"; filename="file2.txt"
Content-Type: text/plain

content2
------WebKitFormBoundaryMultiFileTest--
`
	decoded, err := form.Decode(multipartData)
	require.NoError(t, err)
	// Verify files are decoded correctly
	documents := decoded.Get("documents")
	require.NotNil(t, documents)
	documentsArray, ok := documents.([]interface{})
	require.True(t, ok)
	require.Len(t, documentsArray, 2)
	assert.Contains(t, documentsArray, "content1")
	assert.Contains(t, documentsArray, "content2")
	// Verify metadata for the field exists (should be from the first file)
	metadata, exists := form.GetFileMetadata("documents")
	assert.True(t, exists)
	assert.Equal(t, "text/plain", metadata.ContentType)
	assert.Equal(t, "file1.txt", metadata.Filename) // Should be from first file, not last
}
// TestMultiPartForm_SetFileMetadataWithNilMap verifies that SetFileMetadata
// lazily allocates the metadata map on a zero-value MultiPartForm.
func TestMultiPartForm_SetFileMetadataWithNilMap(t *testing.T) {
	mpForm := &MultiPartForm{}
	// SetFileMetadata should handle nil filesMetadata
	want := FileMetadata{
		ContentType: "application/pdf",
		Filename:    "document.pdf",
	}
	mpForm.SetFileMetadata("doc", want)
	// Should be able to retrieve the metadata
	got, exists := mpForm.GetFileMetadata("doc")
	assert.True(t, exists)
	assert.Equal(t, want, got)
}
// TestMultiPartForm_GetFileMetadataWithNilMap verifies that lookups on a
// zero-value MultiPartForm report a miss instead of panicking.
func TestMultiPartForm_GetFileMetadataWithNilMap(t *testing.T) {
	mpForm := &MultiPartForm{}
	// GetFileMetadata should handle nil filesMetadata gracefully
	_, exists := mpForm.GetFileMetadata("anything")
	assert.False(t, exists)
}

View File

@ -14,16 +14,17 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/expressions"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/generators"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/marker"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
mapsutil "github.com/projectdiscovery/utils/maps"
sliceutil "github.com/projectdiscovery/utils/slice"
urlutil "github.com/projectdiscovery/utils/url"
)
var (
ErrRuleNotApplicable = errorutil.NewWithFmt("rule not applicable : %v")
ErrRuleNotApplicable = errkit.New("rule not applicable")
)
// IsErrRuleNotApplicable checks if an error is due to rule not applicable
@ -88,10 +89,10 @@ type GeneratedRequest struct {
// goroutines.
func (rule *Rule) Execute(input *ExecuteRuleInput) (err error) {
if !rule.isInputURLValid(input.Input) {
return ErrRuleNotApplicable.Msgf("invalid input url: %v", input.Input.MetaInput.Input)
return errkit.Newf("rule not applicable: invalid input url: %v", input.Input.MetaInput.Input)
}
if input.BaseRequest == nil && input.Input.MetaInput.ReqResp == nil {
return ErrRuleNotApplicable.Msgf("both base request and reqresp are nil for %v", input.Input.MetaInput.Input)
return errkit.Newf("rule not applicable: both base request and reqresp are nil for %v", input.Input.MetaInput.Input)
}
var finalComponentList []component.Component
@ -143,7 +144,7 @@ func (rule *Rule) Execute(input *ExecuteRuleInput) (err error) {
}
if len(finalComponentList) == 0 {
return ErrRuleNotApplicable.Msgf("no component matched on this rule")
return errkit.Newf("rule not applicable: no component matched on this rule")
}
baseValues := input.Values
@ -189,6 +190,33 @@ mainLoop:
return nil
}
// evaluateVars evaluates variables in a string using available executor options.
// It returns the input unchanged when no options are configured or when an
// unresolved variable or evaluation error is encountered (alongside the error).
func (rule *Rule) evaluateVars(input string) (string, error) {
	// Without executor options there is nothing to substitute from.
	if rule.options == nil {
		return input, nil
	}
	// Merge template variables, constants and runtime (CLI) vars into a
	// single lookup map.
	vars := generators.MergeMaps(
		rule.options.Variables.GetAll(),
		rule.options.Constants,
		rule.options.Options.Vars.AsMap(),
	)
	markedExprs := expressions.FindExpressions(input, marker.ParenthesisOpen, marker.ParenthesisClose, vars)
	if unresolvedErr := expressions.ContainsUnresolvedVariables(markedExprs...); unresolvedErr != nil {
		return input, unresolvedErr
	}
	evaluated, evalErr := expressions.Evaluate(input, vars)
	if evalErr != nil {
		return input, evalErr
	}
	return evaluated, nil
}
// evaluateVarsWithInteractsh evaluates the variables with Interactsh URLs and updates them accordingly.
func (rule *Rule) evaluateVarsWithInteractsh(data map[string]interface{}, interactshUrls []string) (map[string]interface{}, []string) {
// Check if Interactsh options are configured
@ -341,23 +369,47 @@ func (rule *Rule) Compile(generator *generators.PayloadGenerator, options *proto
if len(rule.Keys) > 0 {
rule.keysMap = make(map[string]struct{})
}
// eval vars in "keys"
for _, key := range rule.Keys {
rule.keysMap[strings.ToLower(key)] = struct{}{}
evaluatedKey, err := rule.evaluateVars(key)
if err != nil {
return errors.Wrap(err, "could not evaluate key")
}
rule.keysMap[strings.ToLower(evaluatedKey)] = struct{}{}
}
// eval vars in "values"
for _, value := range rule.ValuesRegex {
compiled, err := regexp.Compile(value)
evaluatedValue, err := rule.evaluateVars(value)
if err != nil {
return errors.Wrap(err, "could not evaluate value regex")
}
compiled, err := regexp.Compile(evaluatedValue)
if err != nil {
return errors.Wrap(err, "could not compile value regex")
}
rule.valuesRegex = append(rule.valuesRegex, compiled)
}
// eval vars in "keys-regex"
for _, value := range rule.KeysRegex {
compiled, err := regexp.Compile(value)
evaluatedValue, err := rule.evaluateVars(value)
if err != nil {
return errors.Wrap(err, "could not evaluate key regex")
}
compiled, err := regexp.Compile(evaluatedValue)
if err != nil {
return errors.Wrap(err, "could not compile key regex")
}
rule.keysRegex = append(rule.keysRegex, compiled)
}
if rule.ruleType != replaceRegexRuleType {
if rule.ReplaceRegex != "" {
return errors.Errorf("replace-regex is only applicable for replace and replace-regex rule types")
@ -366,11 +418,19 @@ func (rule *Rule) Compile(generator *generators.PayloadGenerator, options *proto
if rule.ReplaceRegex == "" {
return errors.Errorf("replace-regex is required for replace-regex rule type")
}
compiled, err := regexp.Compile(rule.ReplaceRegex)
evalReplaceRegex, err := rule.evaluateVars(rule.ReplaceRegex)
if err != nil {
return errors.Wrap(err, "could not evaluate replace regex")
}
compiled, err := regexp.Compile(evalReplaceRegex)
if err != nil {
return errors.Wrap(err, "could not compile replace regex")
}
rule.replaceRegex = compiled
}
return nil
}

View File

@ -3,6 +3,11 @@ package fuzz
import (
"testing"
"github.com/projectdiscovery/goflags"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/variables"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/stretchr/testify/require"
)
@ -37,3 +42,219 @@ func TestRuleMatchKeyOrValue(t *testing.T) {
require.False(t, result, "could not get correct result")
})
}
// TestEvaluateVariables verifies that template variables, constants and
// runtime (CLI) vars are substituted into rule keys, key regexes and value
// regexes at compile time, and exercises evaluateVars directly.
func TestEvaluateVariables(t *testing.T) {
	t.Run("keys", func(t *testing.T) {
		rule := &Rule{
			Keys: []string{"{{foo_var}}"},
			Part: "query",
		}
		// mock
		templateVars := variables.Variable{
			InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1),
		}
		templateVars.Set("foo_var", "foo_var_value")
		constants := map[string]interface{}{
			"const_key": "const_value",
		}
		options := &types.Options{}
		// runtime vars (to simulate CLI)
		runtimeVars := goflags.RuntimeMap{}
		_ = runtimeVars.Set("runtime_key=runtime_value")
		options.Vars = runtimeVars
		executorOpts := &protocols.ExecutorOptions{
			Variables: templateVars,
			Constants: constants,
			Options:   options,
		}
		err := rule.Compile(nil, executorOpts)
		require.NoError(t, err, "could not compile rule")
		// the evaluated key should match; the raw template form should not
		result := rule.matchKeyOrValue("foo_var_value", "test_value")
		require.True(t, result, "should match evaluated variable key")
		result = rule.matchKeyOrValue("{{foo_var}}", "test_value")
		require.False(t, result, "should not match unevaluated variable key")
	})
	t.Run("keys-regex", func(t *testing.T) {
		rule := &Rule{
			KeysRegex: []string{"^{{foo_var}}"},
			Part:      "query",
		}
		templateVars := variables.Variable{
			InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1),
		}
		templateVars.Set("foo_var", "foo_var_value")
		executorOpts := &protocols.ExecutorOptions{
			Variables: templateVars,
			Constants: map[string]interface{}{},
			Options:   &types.Options{},
		}
		err := rule.Compile(nil, executorOpts)
		require.NoError(t, err, "could not compile rule")
		result := rule.matchKeyOrValue("foo_var_value", "test_value")
		require.True(t, result, "should match evaluated variable in regex")
		result = rule.matchKeyOrValue("other_key", "test_value")
		require.False(t, result, "should not match non-matching key")
	})
	t.Run("values-regex", func(t *testing.T) {
		rule := &Rule{
			ValuesRegex: []string{"{{foo_var}}"},
			Part:        "query",
		}
		templateVars := variables.Variable{
			InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1),
		}
		templateVars.Set("foo_var", "test_pattern")
		executorOpts := &protocols.ExecutorOptions{
			Variables: templateVars,
			Constants: map[string]interface{}{},
			Options:   &types.Options{},
		}
		err := rule.Compile(nil, executorOpts)
		require.NoError(t, err, "could not compile rule")
		result := rule.matchKeyOrValue("test_key", "test_pattern")
		require.True(t, result, "should match evaluated variable in values regex")
		result = rule.matchKeyOrValue("test_key", "other_value")
		require.False(t, result, "should not match non-matching value")
	})
	// complex vars w/ consts and runtime vars
	t.Run("complex-variables", func(t *testing.T) {
		rule := &Rule{
			Keys: []string{"{{template_var}}", "{{const_key}}", "{{runtime_key}}"},
			Part: "query",
		}
		templateVars := variables.Variable{
			InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1),
		}
		templateVars.Set("template_var", "template_value")
		constants := map[string]interface{}{
			"const_key": "const_value",
		}
		options := &types.Options{}
		runtimeVars := goflags.RuntimeMap{}
		_ = runtimeVars.Set("runtime_key=runtime_value")
		options.Vars = runtimeVars
		executorOpts := &protocols.ExecutorOptions{
			Variables: templateVars,
			Constants: constants,
			Options:   options,
		}
		err := rule.Compile(nil, executorOpts)
		require.NoError(t, err, "could not compile rule")
		// each variable source should have been substituted into the keys
		result := rule.matchKeyOrValue("template_value", "test")
		require.True(t, result, "should match template variable")
		result = rule.matchKeyOrValue("const_value", "test")
		require.True(t, result, "should match constant")
		result = rule.matchKeyOrValue("runtime_value", "test")
		require.True(t, result, "should match runtime variable")
		result = rule.matchKeyOrValue("{{template_var}}", "test")
		require.False(t, result, "should not match unevaluated template variable")
	})
	t.Run("invalid-variables", func(t *testing.T) {
		rule := &Rule{
			Keys: []string{"{{nonexistent_var}}"},
			Part: "query",
		}
		executorOpts := &protocols.ExecutorOptions{
			Variables: variables.Variable{
				InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(0),
			},
			Constants: map[string]interface{}{},
			Options:   &types.Options{},
		}
		err := rule.Compile(nil, executorOpts)
		// Compile may either report the unresolved variable or keep the raw
		// key; both are acceptable as long as nothing matches.
		if err != nil {
			require.Contains(t, err.Error(), "unresolved", "error should mention unresolved variables")
		} else {
			result := rule.matchKeyOrValue("some_key", "some_value")
			require.False(t, result, "should not match when variables are unresolved")
		}
	})
	t.Run("evaluateVars-function", func(t *testing.T) {
		rule := &Rule{}
		templateVars := variables.Variable{
			InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1),
		}
		templateVars.Set("test_var", "test_value")
		constants := map[string]interface{}{
			"const_var": "const_value",
		}
		options := &types.Options{}
		runtimeVars := goflags.RuntimeMap{}
		_ = runtimeVars.Set("runtime_var=runtime_value")
		options.Vars = runtimeVars
		executorOpts := &protocols.ExecutorOptions{
			Variables: templateVars,
			Constants: constants,
			Options:   options,
		}
		rule.options = executorOpts
		// Test simple var substitution
		result, err := rule.evaluateVars("{{test_var}}")
		require.NoError(t, err, "should evaluate template variable")
		require.Equal(t, "test_value", result, "should return evaluated value")
		// Test constant substitution
		result, err = rule.evaluateVars("{{const_var}}")
		require.NoError(t, err, "should evaluate constant")
		require.Equal(t, "const_value", result, "should return constant value")
		// Test runtime var substitution
		result, err = rule.evaluateVars("{{runtime_var}}")
		require.NoError(t, err, "should evaluate runtime variable")
		require.Equal(t, "runtime_value", result, "should return runtime value")
		// Test mixed content
		result, err = rule.evaluateVars("prefix-{{test_var}}-suffix")
		require.NoError(t, err, "should evaluate mixed content")
		require.Equal(t, "prefix-test_value-suffix", result, "should return mixed evaluated content")
		// Test unresolved var - should either fail during evaluation or return original string
		result2, err := rule.evaluateVars("{{nonexistent}}")
		if err != nil {
			require.Contains(t, err.Error(), "unresolved", "should fail for unresolved variable")
		} else {
			// If no error, it should return the original unresolved variable
			require.Equal(t, "{{nonexistent}}", result2, "should return original string for unresolved variable")
		}
	})
}

View File

@ -28,6 +28,12 @@ type InputFormatOptions struct {
// RequiredOnly only uses required fields when generating requests
// instead of all fields
RequiredOnly bool
// VarsTextTemplating uses Variables and inject it into the input
// this is used for text templating of variables based on carvel ytt
// Only available for Yaml formats
VarsTextTemplating bool
// VarsFilePaths is the path to the file containing variables
VarsFilePaths []string
}
// Format is an interface implemented by all input formats

View File

@ -2,6 +2,7 @@ package openapi
import (
"fmt"
"maps"
"slices"
"github.com/getkin/kin-openapi/openapi3"
@ -162,9 +163,7 @@ func openAPIExample(schema *openapi3.Schema, cache map[*openapi3.Schema]*cachedS
return nil, ErrNoExample
}
for k, v := range value {
example[k] = v
}
maps.Copy(example, value)
}
return example, nil
}

View File

@ -20,7 +20,7 @@ import (
httpTypes "github.com/projectdiscovery/nuclei/v3/pkg/input/types"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
"github.com/projectdiscovery/utils/generic"
mapsutil "github.com/projectdiscovery/utils/maps"
"github.com/valyala/fasttemplate"
@ -395,7 +395,7 @@ func generateRequestsFromOp(opts *generateReqOptions) error {
func GetGlobalParamsForSecurityRequirement(schema *openapi3.T, requirement *openapi3.SecurityRequirements) ([]*openapi3.ParameterRef, error) {
globalParams := openapi3.NewParameters()
if len(schema.Components.SecuritySchemes) == 0 {
return nil, errorutil.NewWithTag("openapi", "security requirements (%+v) without any security schemes found in openapi file", schema.Security)
return nil, errkit.Newf("security requirements (%+v) without any security schemes found in openapi file", schema.Security)
}
found := false
// this api is protected for each security scheme pull its corresponding scheme
@ -415,11 +415,11 @@ schemaLabel:
}
if !found && len(security) > 1 {
// if this is case then both security schemes are required
return nil, errorutil.NewWithTag("openapi", "security requirement (%+v) not found in openapi file", security)
return nil, errkit.Newf("security requirement (%+v) not found in openapi file", security)
}
}
if !found {
return nil, errorutil.NewWithTag("openapi", "security requirement (%+v) not found in openapi file", requirement)
return nil, errkit.Newf("security requirement (%+v) not found in openapi file", requirement)
}
return globalParams, nil
@ -428,12 +428,12 @@ schemaLabel:
// GenerateParameterFromSecurityScheme generates an example from a schema object
func GenerateParameterFromSecurityScheme(scheme *openapi3.SecuritySchemeRef) (*openapi3.Parameter, error) {
if !generic.EqualsAny(scheme.Value.Type, "http", "apiKey") {
return nil, errorutil.NewWithTag("openapi", "unsupported security scheme type (%s) found in openapi file", scheme.Value.Type)
return nil, errkit.Newf("unsupported security scheme type (%s) found in openapi file", scheme.Value.Type)
}
if scheme.Value.Type == "http" {
// check scheme
if !generic.EqualsAny(scheme.Value.Scheme, "basic", "bearer") {
return nil, errorutil.NewWithTag("openapi", "unsupported security scheme (%s) found in openapi file", scheme.Value.Scheme)
return nil, errkit.Newf("unsupported security scheme (%s) found in openapi file", scheme.Value.Scheme)
}
// HTTP authentication schemes basic or bearer use the Authorization header
headerName := scheme.Value.Name
@ -458,10 +458,10 @@ func GenerateParameterFromSecurityScheme(scheme *openapi3.SecuritySchemeRef) (*o
if scheme.Value.Type == "apiKey" {
// validate name and in
if scheme.Value.Name == "" {
return nil, errorutil.NewWithTag("openapi", "security scheme (%s) name is empty", scheme.Value.Type)
return nil, errkit.Newf("security scheme (%s) name is empty", scheme.Value.Type)
}
if !generic.EqualsAny(scheme.Value.In, "query", "header", "cookie") {
return nil, errorutil.NewWithTag("openapi", "unsupported security scheme (%s) in (%s) found in openapi file", scheme.Value.Type, scheme.Value.In)
return nil, errkit.Newf("unsupported security scheme (%s) in (%s) found in openapi file", scheme.Value.Type, scheme.Value.In)
}
// create parameters using the scheme
switch scheme.Value.In {
@ -482,5 +482,5 @@ func GenerateParameterFromSecurityScheme(scheme *openapi3.SecuritySchemeRef) (*o
return c, nil
}
}
return nil, errorutil.NewWithTag("openapi", "unsupported security scheme type (%s) found in openapi file", scheme.Value.Type)
return nil, errkit.Newf("unsupported security scheme type (%s) found in openapi file", scheme.Value.Type)
}

View File

@ -0,0 +1,25 @@
#@ load("@ytt:data", "data")
#@ load("@ytt:json", "json")
#@ def get_value(key, default=""):
#@ if hasattr(data.values, key):
#@ return str(getattr(data.values, key))
#@ else:
#@ return default
#@ end
#@ end
timestamp: 2024-02-20T19:24:13+05:32
url: https://ginandjuice.shop/users/3
request:
#@yaml/text-templated-strings
raw: |+
POST /users/3 HTTP/1.1
Host: ginandjuice.shop
Authorization: Bearer (@= get_value("token", "3x4mpl3t0k3n") @)
Accept-Encoding: gzip
Content-Type: application/x-www-form-urlencoded
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36
foo=(@= json.encode(data.values.foo) @)&bar=(@= get_value("bar") @)&debug=(@= get_value("debug", "false") @)

View File

@ -0,0 +1,11 @@
list: pkg/input/formats/testdata/ytt/ginandjuice.ytt.yaml
input-mode: yaml
templates:
- integration_tests/fuzz/fuzz-body.yaml
var:
- debug=true
- bar=bar
vars-text-templating: true
var-file-paths:
- pkg/input/formats/testdata/ytt/ytt-vars.yaml
dast: true

View File

@ -0,0 +1,3 @@
token: foobar
foo:
bar: baz

View File

@ -1,8 +1,8 @@
package yaml
import (
"bytes"
"io"
"strings"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
@ -46,23 +46,41 @@ func (j *YamlMultiDocFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
func (j *YamlMultiDocFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
decoder := YamlUtil.NewDecoder(input)
finalInput := input
// Apply text templating if enabled
if j.opts.VarsTextTemplating {
data, err := io.ReadAll(input)
if err != nil {
return errors.Wrap(err, "could not read input")
}
tpl := []string{string(data)}
dvs := mapToKeyValueSlice(j.opts.Variables)
finalData, err := ytt(tpl, dvs, j.opts.VarsFilePaths)
if err != nil {
return errors.Wrap(err, "could not apply ytt templating")
}
finalInput = bytes.NewReader(finalData)
}
decoder := YamlUtil.NewDecoder(finalInput)
for {
var request proxifyRequest
err := decoder.Decode(&request)
if err == io.EOF {
break
if err := decoder.Decode(&request); err != nil {
if err == io.EOF {
break
}
return errors.Wrap(err, "could not decode yaml file")
}
if err != nil {
return errors.Wrap(err, "could not decode json file")
}
if strings.TrimSpace(request.Request.Raw) == "" {
raw := request.Request.Raw
if raw == "" {
continue
}
rawRequest, err := types.ParseRawRequestWithURL(request.Request.Raw, request.URL)
rawRequest, err := types.ParseRawRequestWithURL(raw, request.URL)
if err != nil {
gologger.Warning().Msgf("multidoc-yaml: Could not parse raw request %s: %s\n", request.URL, err)
gologger.Warning().Msgf("multidoc-yaml: Could not parse raw request %s: %s", request.URL, err)
continue
}
resultsCb(rawRequest)

View File

@ -2,8 +2,10 @@ package yaml
import (
"os"
"strings"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
"github.com/stretchr/testify/require"
)
@ -33,3 +35,48 @@ func TestYamlFormatterParse(t *testing.T) {
require.Len(t, urls, len(expectedUrls), "invalid number of urls")
require.ElementsMatch(t, urls, expectedUrls, "invalid urls")
}
func TestYamlFormatterParseWithVariables(t *testing.T) {
format := New()
proxifyYttFile := "../testdata/ytt/ginandjuice.ytt.yaml"
expectedUrls := []string{
"https://ginandjuice.shop/users/3",
}
format.SetOptions(formats.InputFormatOptions{
VarsTextTemplating: true,
Variables: map[string]interface{}{
"foo": "catalog",
"bar": "product",
},
})
file, err := os.Open(proxifyYttFile)
require.Nilf(t, err, "error opening proxify ytt input file: %v", err)
defer func() {
_ = file.Close()
}()
var urls []string
err = format.Parse(file, func(request *types.RequestResponse) bool {
urls = append(urls, request.URL.String())
expectedRaw := `POST /users/3 HTTP/1.1
Host: ginandjuice.shop
Authorization: Bearer 3x4mpl3t0k3n
Accept-Encoding: gzip
Content-Type: application/x-www-form-urlencoded
Connection: close
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36
foo="catalog"&bar=product&debug=false`
normalised := strings.ReplaceAll(request.Request.Raw, "\r\n", "\n")
require.Equal(t, expectedRaw, strings.TrimSuffix(normalised, "\n"), "request raw does not match expected value")
return false
}, proxifyYttFile)
require.Nilf(t, err, "error parsing yaml file: %v", err)
require.Len(t, urls, len(expectedUrls), "invalid number of urls")
require.ElementsMatch(t, urls, expectedUrls, "invalid urls")
}

View File

@ -0,0 +1,70 @@
package yaml
import (
"fmt"
"strings"
yttcmd "carvel.dev/ytt/pkg/cmd/template"
yttui "carvel.dev/ytt/pkg/cmd/ui"
yttfiles "carvel.dev/ytt/pkg/files"
"gopkg.in/yaml.v2"
)
func ytt(tpl, dvs []string, varFiles []string) ([]byte, error) {
// create and invoke ytt "template" command
templatingOptions := yttcmd.NewOptions()
input, err := templatesAsInput(tpl...)
if err != nil {
return nil, err
}
if len(varFiles) > 0 {
// Load vaarFiles into the templating options.
templatingOptions.DataValuesFlags.FromFiles = varFiles
}
// equivalent to `--data-value-yaml`
templatingOptions.DataValuesFlags.KVsFromYAML = dvs
// for in-memory use, pipe output to "/dev/null"
noopUI := yttui.NewCustomWriterTTY(false, noopWriter{}, noopWriter{})
// Evaluate the template given the configured data values...
output := templatingOptions.RunWithFiles(input, noopUI)
if output.Err != nil {
return nil, output.Err
}
return output.DocSet.AsBytes()
}
// templatesAsInput conveniently wraps one or more strings, each in a files.File, into a template.Input.
func templatesAsInput(tpl ...string) (yttcmd.Input, error) {
var files []*yttfiles.File
for i, t := range tpl {
// to make this less brittle, you'll probably want to use well-defined names for `path`, here, for each input.
// this matters when you're processing errors which report based on these paths.
file, err := yttfiles.NewFileFromSource(yttfiles.NewBytesSource(fmt.Sprintf("tpl%d.yml", i), []byte(t)))
if err != nil {
return yttcmd.Input{}, err
}
files = append(files, file)
}
return yttcmd.Input{Files: files}, nil
}
func mapToKeyValueSlice(m map[string]interface{}) []string {
var result []string
for k, v := range m {
y, _ := yaml.Marshal(v)
result = append(result, fmt.Sprintf("%s=%s", k, strings.TrimSpace(string(y))))
}
return result
}
type noopWriter struct{}
func (w noopWriter) Write(data []byte) (int, error) { return len(data), nil }

View File

@ -115,17 +115,17 @@ func (i *HttpInputProvider) Iterate(callback func(value *contextargs.MetaInput)
// Set adds item to input provider
// No-op for this provider
func (i *HttpInputProvider) Set(value string) {}
func (i *HttpInputProvider) Set(_ string, value string) {}
// SetWithProbe adds item to input provider with http probing
// No-op for this provider
func (i *HttpInputProvider) SetWithProbe(value string, probe types.InputLivenessProbe) error {
func (i *HttpInputProvider) SetWithProbe(_ string, value string, probe types.InputLivenessProbe) error {
return nil
}
// SetWithExclusions adds item to input provider if it doesn't match any of the exclusions
// No-op for this provider
func (i *HttpInputProvider) SetWithExclusions(value string) error {
func (i *HttpInputProvider) SetWithExclusions(_ string, value string) error {
return nil
}

View File

@ -13,12 +13,12 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/generators"
configTypes "github.com/projectdiscovery/nuclei/v3/pkg/types"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
stringsutil "github.com/projectdiscovery/utils/strings"
)
var (
ErrNotImplemented = errorutil.NewWithFmt("provider %s does not implement %s")
ErrNotImplemented = errkit.New("provider does not implement method")
ErrInactiveInput = fmt.Errorf("input is inactive")
)
@ -59,11 +59,11 @@ type InputProvider interface {
// Iterate over all inputs in order
Iterate(callback func(value *contextargs.MetaInput) bool)
// Set adds item to input provider
Set(value string)
Set(executionId string, value string)
// SetWithProbe adds item to input provider with http probing
SetWithProbe(value string, probe types.InputLivenessProbe) error
SetWithProbe(executionId string, value string, probe types.InputLivenessProbe) error
// SetWithExclusions adds item to input provider if it doesn't match any of the exclusions
SetWithExclusions(value string) error
SetWithExclusions(executionId string, value string) error
// InputType returns the type of input provider
InputType() string
// Close the input provider and cleanup any resources
@ -116,6 +116,8 @@ func NewInputProvider(opts InputOptions) (InputProvider, error) {
Variables: generators.MergeMaps(extraVars, opts.Options.Vars.AsMap()),
SkipFormatValidation: opts.Options.SkipFormatValidation,
RequiredOnly: opts.Options.FormatUseRequiredOnly,
VarsTextTemplating: opts.Options.VarsTextTemplating,
VarsFilePaths: opts.Options.VarsFilePaths,
},
})
}

View File

@ -139,7 +139,7 @@ func (i *ListInputProvider) Iterate(callback func(value *contextargs.MetaInput)
}
// Set normalizes and stores passed input values
func (i *ListInputProvider) Set(value string) {
func (i *ListInputProvider) Set(executionId string, value string) {
URL := strings.TrimSpace(value)
if URL == "" {
return
@ -169,7 +169,12 @@ func (i *ListInputProvider) Set(value string) {
if i.ipOptions.ScanAllIPs {
// scan all ips
dnsData, err := protocolstate.Dialer.GetDNSData(urlx.Hostname())
dialers := protocolstate.GetDialersWithId(executionId)
if dialers == nil {
panic("dialers with executionId " + executionId + " not found")
}
dnsData, err := dialers.Fastdialer.GetDNSData(urlx.Hostname())
if err == nil {
if (len(dnsData.A) + len(dnsData.AAAA)) > 0 {
var ips []string
@ -201,7 +206,12 @@ func (i *ListInputProvider) Set(value string) {
ips := []string{}
// only scan the target but ipv6 if it has one
if i.ipOptions.IPV6 {
dnsData, err := protocolstate.Dialer.GetDNSData(urlx.Hostname())
dialers := protocolstate.GetDialersWithId(executionId)
if dialers == nil {
panic("dialers with executionId " + executionId + " not found")
}
dnsData, err := dialers.Fastdialer.GetDNSData(urlx.Hostname())
if err == nil && len(dnsData.AAAA) > 0 {
// pick/ prefer 1st
ips = append(ips, dnsData.AAAA[0])
@ -228,17 +238,17 @@ func (i *ListInputProvider) Set(value string) {
}
// SetWithProbe only sets the input if it is live
func (i *ListInputProvider) SetWithProbe(value string, probe providerTypes.InputLivenessProbe) error {
func (i *ListInputProvider) SetWithProbe(executionId string, value string, probe providerTypes.InputLivenessProbe) error {
probedValue, err := probe.ProbeURL(value)
if err != nil {
return err
}
i.Set(probedValue)
i.Set(executionId, probedValue)
return nil
}
// SetWithExclusions normalizes and stores passed input values if not excluded
func (i *ListInputProvider) SetWithExclusions(value string) error {
func (i *ListInputProvider) SetWithExclusions(executionId string, value string) error {
URL := strings.TrimSpace(value)
if URL == "" {
return nil
@ -247,7 +257,7 @@ func (i *ListInputProvider) SetWithExclusions(value string) error {
i.skippedCount++
return nil
}
i.Set(URL)
i.Set(executionId, URL)
return nil
}
@ -273,18 +283,20 @@ func (i *ListInputProvider) initializeInputSources(opts *Options) error {
switch {
case iputil.IsCIDR(target):
ips := expand.CIDR(target)
i.addTargets(ips)
i.addTargets(options.ExecutionId, ips)
case asn.IsASN(target):
ips := expand.ASN(target)
i.addTargets(ips)
i.addTargets(options.ExecutionId, ips)
default:
i.Set(target)
i.Set(options.ExecutionId, target)
}
}
// Handle stdin
if options.Stdin {
i.scanInputFromReader(readerutil.TimeoutReader{Reader: os.Stdin, Timeout: time.Duration(options.InputReadTimeout)})
i.scanInputFromReader(
options.ExecutionId,
readerutil.TimeoutReader{Reader: os.Stdin, Timeout: time.Duration(options.InputReadTimeout)})
}
// Handle target file
@ -297,7 +309,7 @@ func (i *ListInputProvider) initializeInputSources(opts *Options) error {
}
}
if input != nil {
i.scanInputFromReader(input)
i.scanInputFromReader(options.ExecutionId, input)
_ = input.Close()
}
}
@ -317,7 +329,7 @@ func (i *ListInputProvider) initializeInputSources(opts *Options) error {
return err
}
for c := range ch {
i.Set(c)
i.Set(options.ExecutionId, c)
}
}
@ -331,7 +343,7 @@ func (i *ListInputProvider) initializeInputSources(opts *Options) error {
ips := expand.ASN(target)
i.removeTargets(ips)
default:
i.Del(target)
i.Del(options.ExecutionId, target)
}
}
}
@ -340,19 +352,19 @@ func (i *ListInputProvider) initializeInputSources(opts *Options) error {
}
// scanInputFromReader scans a line of input from reader and passes it for storage
func (i *ListInputProvider) scanInputFromReader(reader io.Reader) {
func (i *ListInputProvider) scanInputFromReader(executionId string, reader io.Reader) {
scanner := bufio.NewScanner(reader)
for scanner.Scan() {
item := scanner.Text()
switch {
case iputil.IsCIDR(item):
ips := expand.CIDR(item)
i.addTargets(ips)
i.addTargets(executionId, ips)
case asn.IsASN(item):
ips := expand.ASN(item)
i.addTargets(ips)
i.addTargets(executionId, ips)
default:
i.Set(item)
i.Set(executionId, item)
}
}
}
@ -371,7 +383,7 @@ func (i *ListInputProvider) isExcluded(URL string) bool {
return exists
}
func (i *ListInputProvider) Del(value string) {
func (i *ListInputProvider) Del(executionId string, value string) {
URL := strings.TrimSpace(value)
if URL == "" {
return
@ -401,7 +413,12 @@ func (i *ListInputProvider) Del(value string) {
if i.ipOptions.ScanAllIPs {
// scan all ips
dnsData, err := protocolstate.Dialer.GetDNSData(urlx.Hostname())
dialers := protocolstate.GetDialersWithId(executionId)
if dialers == nil {
panic("dialers with executionId " + executionId + " not found")
}
dnsData, err := dialers.Fastdialer.GetDNSData(urlx.Hostname())
if err == nil {
if (len(dnsData.A) + len(dnsData.AAAA)) > 0 {
var ips []string
@ -433,7 +450,12 @@ func (i *ListInputProvider) Del(value string) {
ips := []string{}
// only scan the target but ipv6 if it has one
if i.ipOptions.IPV6 {
dnsData, err := protocolstate.Dialer.GetDNSData(urlx.Hostname())
dialers := protocolstate.GetDialersWithId(executionId)
if dialers == nil {
panic("dialers with executionId " + executionId + " not found")
}
dnsData, err := dialers.Fastdialer.GetDNSData(urlx.Hostname())
if err == nil && len(dnsData.AAAA) > 0 {
// pick/ prefer 1st
ips = append(ips, dnsData.AAAA[0])
@ -519,9 +541,9 @@ func (i *ListInputProvider) setHostMapStream(data string) {
}
}
func (i *ListInputProvider) addTargets(targets []string) {
func (i *ListInputProvider) addTargets(executionId string, targets []string) {
for _, target := range targets {
i.Set(target)
i.Set(executionId, target)
}
}

View File

@ -36,7 +36,7 @@ func Test_expandCIDR(t *testing.T) {
input := &ListInputProvider{hostMap: hm}
ips := expand.CIDR(tt.cidr)
input.addTargets(ips)
input.addTargets("", ips)
// scan
got := []string{}
input.hostMap.Scan(func(k, _ []byte) error {
@ -137,7 +137,7 @@ func Test_scanallips_normalizeStoreInputValue(t *testing.T) {
},
}
input.Set(tt.hostname)
input.Set("", tt.hostname)
// scan
got := []string{}
input.hostMap.Scan(func(k, v []byte) error {
@ -180,7 +180,7 @@ func Test_expandASNInputValue(t *testing.T) {
input := &ListInputProvider{hostMap: hm}
// get the IP addresses for ASN number
ips := expand.ASN(tt.asn)
input.addTargets(ips)
input.addTargets("", ips)
// scan the hmap
got := []string{}
input.hostMap.Scan(func(k, v []byte) error {

View File

@ -19,10 +19,10 @@ func NewSimpleInputProvider() *SimpleInputProvider {
}
// NewSimpleInputProviderWithUrls creates a new simple input provider with the given urls
func NewSimpleInputProviderWithUrls(urls ...string) *SimpleInputProvider {
func NewSimpleInputProviderWithUrls(executionId string, urls ...string) *SimpleInputProvider {
provider := NewSimpleInputProvider()
for _, url := range urls {
provider.Set(url)
provider.Set(executionId, url)
}
return provider
}
@ -42,14 +42,14 @@ func (s *SimpleInputProvider) Iterate(callback func(value *contextargs.MetaInput
}
// Set adds an item to the input provider
func (s *SimpleInputProvider) Set(value string) {
func (s *SimpleInputProvider) Set(_ string, value string) {
metaInput := contextargs.NewMetaInput()
metaInput.Input = value
s.Inputs = append(s.Inputs, metaInput)
}
// SetWithProbe adds an item to the input provider with HTTP probing
func (s *SimpleInputProvider) SetWithProbe(value string, probe types.InputLivenessProbe) error {
func (s *SimpleInputProvider) SetWithProbe(_ string, value string, probe types.InputLivenessProbe) error {
probedValue, err := probe.ProbeURL(value)
if err != nil {
return err
@ -61,7 +61,7 @@ func (s *SimpleInputProvider) SetWithProbe(value string, probe types.InputLivene
}
// SetWithExclusions adds an item to the input provider if it doesn't match any of the exclusions
func (s *SimpleInputProvider) SetWithExclusions(value string) error {
func (s *SimpleInputProvider) SetWithExclusions(_ string, value string) error {
metaInput := contextargs.NewMetaInput()
metaInput.Input = value
s.Inputs = append(s.Inputs, metaInput)

View File

@ -17,7 +17,7 @@ import (
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/external/customtemplates"
errorutil "github.com/projectdiscovery/utils/errors"
"github.com/projectdiscovery/utils/errkit"
fileutil "github.com/projectdiscovery/utils/file"
stringsutil "github.com/projectdiscovery/utils/strings"
updateutils "github.com/projectdiscovery/utils/update"
@ -53,11 +53,14 @@ func (t *templateUpdateResults) String() string {
},
}
table := tablewriter.NewWriter(&buff)
table.SetHeader([]string{"Total", "Added", "Modified", "Removed"})
table.Header([]string{"Total", "Added", "Modified", "Removed"})
for _, v := range data {
table.Append(v)
_ = table.Append(v)
}
table.Render()
_ = table.Render()
defer func() {
_ = table.Close()
}()
return buff.String()
}
@ -77,7 +80,7 @@ func (t *TemplateManager) FreshInstallIfNotExists() error {
}
gologger.Info().Msgf("nuclei-templates are not installed, installing...")
if err := t.installTemplatesAt(config.DefaultConfig.TemplatesDirectory); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to install templates at %s", config.DefaultConfig.TemplatesDirectory)
return errkit.Wrapf(err, "failed to install templates at %s", config.DefaultConfig.TemplatesDirectory)
}
if t.CustomTemplates != nil {
t.CustomTemplates.Download(context.TODO())
@ -91,7 +94,24 @@ func (t *TemplateManager) UpdateIfOutdated() error {
if !fileutil.FolderExists(config.DefaultConfig.TemplatesDirectory) {
return t.FreshInstallIfNotExists()
}
if config.DefaultConfig.NeedsTemplateUpdate() {
needsUpdate := config.DefaultConfig.NeedsTemplateUpdate()
// NOTE(dwisiswant0): if PDTM API data is not available
// (LatestNucleiTemplatesVersion is empty) but we have a current template
// version, so we MUST verify against GitHub directly.
if !needsUpdate && config.DefaultConfig.LatestNucleiTemplatesVersion == "" && config.DefaultConfig.TemplateVersion != "" {
ghrd, err := updateutils.NewghReleaseDownloader(config.OfficialNucleiTemplatesRepoName)
if err == nil {
latestVersion := ghrd.Latest.GetTagName()
if config.IsOutdatedVersion(config.DefaultConfig.TemplateVersion, latestVersion) {
needsUpdate = true
gologger.Debug().Msgf("PDTM API unavailable, verified update needed via GitHub API: %s -> %s", config.DefaultConfig.TemplateVersion, latestVersion)
}
}
}
if needsUpdate {
return t.updateTemplatesAt(config.DefaultConfig.TemplatesDirectory)
}
return nil
@ -101,7 +121,7 @@ func (t *TemplateManager) UpdateIfOutdated() error {
func (t *TemplateManager) installTemplatesAt(dir string) error {
if !fileutil.FolderExists(dir) {
if err := fileutil.CreateFolder(dir); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to create directory at %s", dir)
return errkit.Wrapf(err, "failed to create directory at %s", dir)
}
}
if t.DisablePublicTemplates {
@ -110,12 +130,12 @@ func (t *TemplateManager) installTemplatesAt(dir string) error {
}
ghrd, err := updateutils.NewghReleaseDownloader(config.OfficialNucleiTemplatesRepoName)
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to install templates at %s", dir)
return errkit.Wrapf(err, "failed to install templates at %s", dir)
}
// write templates to disk
if err := t.writeTemplatesToDisk(ghrd, dir); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to write templates to disk at %s", dir)
return errkit.Wrapf(err, "failed to write templates to disk at %s", dir)
}
gologger.Info().Msgf("Successfully installed nuclei-templates at %s", dir)
return nil
@ -136,10 +156,17 @@ func (t *TemplateManager) updateTemplatesAt(dir string) error {
ghrd, err := updateutils.NewghReleaseDownloader(config.OfficialNucleiTemplatesRepoName)
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to install templates at %s", dir)
return errkit.Wrapf(err, "failed to install templates at %s", dir)
}
gologger.Info().Msgf("Your current nuclei-templates %s are outdated. Latest is %s\n", config.DefaultConfig.TemplateVersion, ghrd.Latest.GetTagName())
latestVersion := ghrd.Latest.GetTagName()
currentVersion := config.DefaultConfig.TemplateVersion
if config.IsOutdatedVersion(currentVersion, latestVersion) {
gologger.Info().Msgf("Your current nuclei-templates %s are outdated. Latest is %s\n", currentVersion, latestVersion)
} else {
gologger.Debug().Msgf("Updating nuclei-templates from %s to %s (forced update)\n", currentVersion, latestVersion)
}
// write templates to disk
if err := t.writeTemplatesToDisk(ghrd, dir); err != nil {
@ -150,7 +177,7 @@ func (t *TemplateManager) updateTemplatesAt(dir string) error {
newchecksums, err := t.getChecksumFromDir(dir)
if err != nil {
// unlikely this case will happen
return errorutil.NewWithErr(err).Msgf("failed to get checksums from %s after update", dir)
return errkit.Wrapf(err, "failed to get checksums from %s after update", dir)
}
// summarize all changes
@ -272,7 +299,7 @@ func (t *TemplateManager) writeTemplatesToDisk(ghrd *updateutils.GHReleaseDownlo
bin, err := io.ReadAll(r)
if err != nil {
// if error occurs, iteration also stops
return errorutil.NewWithErr(err).Msgf("failed to read file %s", uri)
return errkit.Wrapf(err, "failed to read file %s", uri)
}
// TODO: It might be better to just download index file from nuclei templates repo
// instead of creating it from scratch
@ -283,7 +310,7 @@ func (t *TemplateManager) writeTemplatesToDisk(ghrd *updateutils.GHReleaseDownlo
if oldPath != writePath {
// write new template at a new path and delete old template
if err := os.WriteFile(writePath, bin, f.Mode()); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to write file %s", uri)
return errkit.Wrapf(err, "failed to write file %s", uri)
}
// after successful write, remove old template
if err := os.Remove(oldPath); err != nil {
@ -298,20 +325,20 @@ func (t *TemplateManager) writeTemplatesToDisk(ghrd *updateutils.GHReleaseDownlo
}
err = ghrd.DownloadSourceWithCallback(!HideProgressBar, callbackFunc)
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to download templates")
return errkit.Wrap(err, "failed to download templates")
}
if err := config.DefaultConfig.WriteTemplatesConfig(); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to write templates config")
return errkit.Wrap(err, "failed to write templates config")
}
// update ignore hash after writing new templates
if err := config.DefaultConfig.UpdateNucleiIgnoreHash(); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to update nuclei ignore hash")
return errkit.Wrap(err, "failed to update nuclei ignore hash")
}
// update templates version in config file
if err := config.DefaultConfig.SetTemplatesVersion(ghrd.Latest.GetTagName()); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to update templates version")
return errkit.Wrap(err, "failed to update templates version")
}
PurgeEmptyDirectories(dir)
@ -321,11 +348,11 @@ func (t *TemplateManager) writeTemplatesToDisk(ghrd *updateutils.GHReleaseDownlo
index, err := config.GetNucleiTemplatesIndex()
if err != nil {
return errorutil.NewWithErr(err).Msgf("failed to get nuclei templates index")
return errkit.Wrap(err, "failed to get nuclei templates index")
}
if err = config.DefaultConfig.WriteTemplatesIndex(index); err != nil {
return errorutil.NewWithErr(err).Msgf("failed to write nuclei templates index")
return errkit.Wrap(err, "failed to write nuclei templates index")
}
if !HideReleaseNotes {
@ -421,5 +448,5 @@ func (t *TemplateManager) calculateChecksumMap(dir string) (map[string]string, e
}
return nil
})
return checksumMap, errorutil.WrapfWithNil(err, "failed to calculate checksums of templates")
return checksumMap, errkit.Wrap(err, "failed to calculate checksums of templates")
}

View File

@ -59,3 +59,42 @@ func TestTemplateInstallation(t *testing.T) {
require.FileExists(t, config.DefaultConfig.GetIgnoreFilePath())
t.Logf("Installed %d templates", counter)
}
func TestIsOutdatedVersion(t *testing.T) {
testCases := []struct {
current string
latest string
expected bool
desc string
}{
// Test the empty latest version case (main bug fix)
{"v10.2.7", "", false, "Empty latest version should not trigger update"},
// Test same versions
{"v10.2.7", "v10.2.7", false, "Same versions should not trigger update"},
// Test outdated version
{"v10.2.6", "v10.2.7", true, "Older version should trigger update"},
// Test newer current version (edge case)
{"v10.2.8", "v10.2.7", false, "Newer current version should not trigger update"},
// Test dev versions
{"v10.2.7-dev", "v10.2.7", false, "Dev version matching release should not trigger update"},
{"v10.2.6-dev", "v10.2.7", true, "Outdated dev version should trigger update"},
// Test invalid semver fallback
{"invalid-version", "v10.2.7", true, "Invalid current version should trigger update (fallback)"},
{"v10.2.7", "invalid-version", true, "Invalid latest version should trigger update (fallback)"},
{"same-invalid", "same-invalid", false, "Same invalid versions should not trigger update (fallback)"},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
result := config.IsOutdatedVersion(tc.current, tc.latest)
require.Equal(t, tc.expected, result,
"IsOutdatedVersion(%q, %q) = %t, expected %t",
tc.current, tc.latest, result, tc.expected)
})
}
}

Some files were not shown because too many files have changed in this diff Show More