package core

import (
	"context"
	"sync"
	"sync/atomic"
	"time"

	"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
	"github.com/projectdiscovery/nuclei/v3/pkg/output"
	"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
	"github.com/projectdiscovery/nuclei/v3/pkg/scan"
	"github.com/projectdiscovery/nuclei/v3/pkg/templates"
	"github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
	generalTypes "github.com/projectdiscovery/nuclei/v3/pkg/types"
	syncutil "github.com/projectdiscovery/utils/sync"
)

// Executors are low-level executors that deal with template execution on a target

// executeAllSelfContained executes all self-contained templates that do not use `target`
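//
// A minimal usage sketch (illustrative only; ctx, tpls, and the local variable
// names below are assumptions, not part of this API):
//
//	var results atomic.Bool
//	var wg sync.WaitGroup
//	e.executeAllSelfContained(ctx, tpls, &results, &wg)
//	wg.Wait() // the method only schedules goroutines; callers wait on the group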
func (e *Engine) executeAllSelfContained(ctx context.Context, alltemplates []*templates.Template, results *atomic.Bool, sg *sync.WaitGroup) {
	for _, v := range alltemplates {
		sg.Add(1)
		go func(template *templates.Template) {
			defer sg.Done()
			var err error
			var match bool
			ctx := scan.NewScanContext(ctx, contextargs.New(ctx))
			if e.Callback != nil {
				if results, err := template.Executer.ExecuteWithResults(ctx); err == nil {
					for _, result := range results {
						e.Callback(result)
					}
				}
				match = true
			} else {
				match, err = template.Executer.Execute(ctx)
			}
			if err != nil {
				e.options.Logger.Warning().Msgf("[%s] Could not execute step (self-contained): %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
			}
			results.CompareAndSwap(false, match)
		}(v)
	}
}

// executeTemplateWithTargets executes a given template on multiple targets, using an
// internal per-template worker pool to bound concurrency
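//
// Hedged usage sketch (ctx, tpl, and inputs are assumed to be provided by the
// caller; building a provider.InputProvider is out of scope here):
//
//	var results atomic.Bool
//	e.executeTemplateWithTargets(ctx, tpl, inputs, &results)
//	matched := results.Load() // true if at least one target matched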
func (e *Engine) executeTemplateWithTargets(ctx context.Context, template *templates.Template, target provider.InputProvider, results *atomic.Bool) {
	if e.workPool == nil {
		e.workPool = e.GetWorkPool()
	}
	// Bounded worker pool using input concurrency
	pool := e.workPool.InputPool(template.Type())
	workerCount := 1
	if pool != nil && pool.Size > 0 {
		workerCount = pool.Size
	}

	var index uint32

	e.executerOpts.ResumeCfg.Lock()
	currentInfo, ok := e.executerOpts.ResumeCfg.Current[template.ID]
	if !ok {
		currentInfo = &generalTypes.ResumeInfo{}
		e.executerOpts.ResumeCfg.Current[template.ID] = currentInfo
	}
	if currentInfo.InFlight == nil {
		currentInfo.InFlight = make(map[uint32]struct{})
	}
	resumeFromInfo, ok := e.executerOpts.ResumeCfg.ResumeFrom[template.ID]
	if !ok {
		resumeFromInfo = &generalTypes.ResumeInfo{}
		e.executerOpts.ResumeCfg.ResumeFrom[template.ID] = resumeFromInfo
	}
	e.executerOpts.ResumeCfg.Unlock()

	// track progression
	cleanupInFlight := func(index uint32) {
		currentInfo.Lock()
		delete(currentInfo.InFlight, index)
		currentInfo.Unlock()
	}
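
	// The workers below and the target.Iterate call further down form a bounded
	// producer/consumer pair: Iterate pushes one task per target onto an unbuffered
	// channel, and workerCount goroutines drain it.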

	// task represents a single target execution unit
	type task struct {
		index uint32
		skip  bool
		value *contextargs.MetaInput
	}

	tasks := make(chan task)
	var workersWg sync.WaitGroup
	workersWg.Add(workerCount)
	for i := 0; i < workerCount; i++ {
		go func() {
			defer workersWg.Done()
			for t := range tasks {
				func() {
					defer cleanupInFlight(t.index)
					select {
					case <-ctx.Done():
						return
					default:
					}
					if t.skip {
						return
					}

					match, err := e.executeTemplateOnInput(ctx, template, t.value)
					if err != nil {
						e.options.Logger.Warning().Msgf("[%s] Could not execute step on %s: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), t.value.Input, err)
					}
					results.CompareAndSwap(false, match)
				}()
			}
		}()
	}

	target.Iterate(func(scannedValue *contextargs.MetaInput) bool {
		select {
		case <-ctx.Done():
			return false // exit
		default:
		}

		// Best effort to track the host progression
		// skips indexes lower than the minimum in-flight at interruption time
		var skip bool
		if resumeFromInfo.Completed { // the template was completed
			e.options.Logger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Template already completed", template.ID, scannedValue.Input)
			skip = true
		} else if index < resumeFromInfo.SkipUnder { // index lower than the sliding window (bulk-size)
			e.options.Logger.Debug().Msgf("[%s] Skipping \"%s\": Resume - Target already processed", template.ID, scannedValue.Input)
			skip = true
		} else if _, isInFlight := resumeFromInfo.InFlight[index]; isInFlight { // the target wasn't completed successfully
			e.options.Logger.Debug().Msgf("[%s] Repeating \"%s\": Resume - Target wasn't completed", template.ID, scannedValue.Input)
			// skip is already false, but leaving it here for clarity
			skip = false
		} else if index > resumeFromInfo.DoAbove { // index above the sliding window (bulk-size)
			// skip is already false - but leaving it here for clarity
			skip = false
		}

		currentInfo.Lock()
		currentInfo.InFlight[index] = struct{}{}
		currentInfo.Unlock()
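
		// The index is marked in-flight before dispatch and removed by cleanupInFlight
		// when a worker finishes, so an interrupted run can tell which targets never completed.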

		// Skip if the host has had errors
		if e.executerOpts.HostErrorsCache != nil && e.executerOpts.HostErrorsCache.Check(e.executerOpts.ProtocolType.String(), contextargs.NewWithMetaInput(ctx, scannedValue)) {
			skipEvent := &output.ResultEvent{
				TemplateID:    template.ID,
				TemplatePath:  template.Path,
				Info:          template.Info,
				Type:          e.executerOpts.ProtocolType.String(),
				Host:          scannedValue.Input,
				MatcherStatus: false,
				Error:         "host was skipped as it was found unresponsive",
				Timestamp:     time.Now(),
			}

			if e.Callback != nil {
				e.Callback(skipEvent)
			} else if e.executerOpts.Output != nil {
				_ = e.executerOpts.Output.Write(skipEvent)
			}
			return true
		}
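
		// Hand the target to the worker pool; the unbuffered send blocks while all
		// workers are busy, which is what bounds per-template concurrency to workerCount.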
		tasks <- task{index: index, skip: skip, value: scannedValue}
		index++
		return true
	})

	close(tasks)
	workersWg.Wait()

	// on completion, mark the template as completed
	currentInfo.Lock()
	currentInfo.Completed = true
	currentInfo.Unlock()
}

// executeTemplatesOnTarget executes the given templates on a given single target
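//
// Hedged usage sketch (ctx, tpls, and metaInput are assumed to be provided by the
// caller; metaInput is the *contextargs.MetaInput describing the target):
//
//	var results atomic.Bool
//	e.executeTemplatesOnTarget(ctx, tpls, metaInput, &results)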
func (e *Engine) executeTemplatesOnTarget(ctx context.Context, alltemplates []*templates.Template, target *contextargs.MetaInput, results *atomic.Bool) {
	// all templates are executed on a single target

	// wp is a workpool that contains different waitgroups for
	// headless and non-headless templates;
	// the global waitgroup should not be used here
	wp := e.GetWorkPool()
	defer wp.Wait()

	for _, tpl := range alltemplates {
		select {
		case <-ctx.Done():
			return
		default:
		}

		// resize check point - nop if there are no changes
		wp.RefreshWithConfig(e.GetWorkPoolConfig())

		var sg *syncutil.AdaptiveWaitGroup
		if tpl.Type() == types.HeadlessProtocol {
			sg = wp.Headless
		} else {
			sg = wp.Default
		}
		sg.Add()
		go func(template *templates.Template, value *contextargs.MetaInput, wg *syncutil.AdaptiveWaitGroup) {
			defer wg.Done()

			match, err := e.executeTemplateOnInput(ctx, template, value)
			if err != nil {
				e.options.Logger.Warning().Msgf("[%s] Could not execute step on %s: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), value.Input, err)
			}
			results.CompareAndSwap(false, match)
		}(tpl, target, sg)
	}
}

// executeTemplateOnInput performs template execution for a single input and returns the match status and any error
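//
// Workflow templates are dispatched to executeWorkflow; all other template types go
// through the template's Executer. When e.Callback is set, results are streamed to the
// callback and the match status reflects whether any result was produced.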
func (e *Engine) executeTemplateOnInput(ctx context.Context, template *templates.Template, value *contextargs.MetaInput) (bool, error) {
	ctxArgs := contextargs.New(ctx)
	ctxArgs.MetaInput = value
	scanCtx := scan.NewScanContext(ctx, ctxArgs)

	switch template.Type() {
	case types.WorkflowProtocol:
		return e.executeWorkflow(scanCtx, template.CompiledWorkflow), nil
	default:
		if e.Callback != nil {
			results, err := template.Executer.ExecuteWithResults(scanCtx)
			if err != nil {
				return false, err
			}
			for _, result := range results {
				e.Callback(result)
			}
			return len(results) > 0, nil
		}
		return template.Executer.Execute(scanCtx)
	}
}